entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "= new Batman.Request data:\n user:\n name: 'Jim'\n equal req.hasFileUploads(), false\n\ntest 'hasFi",
"end": 439,
"score": 0.9995809197425842,
"start": 436,
"tag": "NAME",
"value": "Jim"
}
] | tests/batman/utilities/request_test.coffee | amco/batman | 0 | oldSend = Batman.Request::send
oldFile = Batman.container.File
QUnit.module 'Batman.Request',
setup: ->
@sendSpy = createSpy()
Batman.Request::send = @sendSpy
Batman.container.File = class File
teardown: ->
Batman.container.File = oldFile
Batman.Request::send = oldSend
test 'hasFileUploads() returns false when the request data has no file uploads', ->
req = new Batman.Request data:
user:
name: 'Jim'
equal req.hasFileUploads(), false
test 'hasFileUploads() returns true when the request data has a file upload in a nested object', ->
req = new Batman.Request data:
user:
avatar: new File()
equal req.hasFileUploads(), true
test 'hasFileUploads() returns true when the request data has a file upload in a nested array', ->
req = new Batman.Request data:
user:
avatars: [undefined, new File()]
equal req.hasFileUploads(), true
test 'should not fire if not given a url', ->
new Batman.Request
ok !@sendSpy.called
test 'should not send if autosend is false', ->
new Batman.Request(url: 'some/test/url', autosend: false)
ok !@sendSpy.called
test 'should not send if autosend is false and the url changes', ->
request = new Batman.Request(url: 'some/test/url', autosend: false)
request.set 'url', 'another/test/url'
ok !@sendSpy.called
test 'should request a url with default get', 2, ->
@request = new Batman.Request
url: 'some/test/url.html'
req = @sendSpy.lastCallContext
equal req.url, 'some/test/url.html'
equal req.method, 'GET'
test 'should request a url with a different method, converting the method to uppercase', 1, ->
@request = new Batman.Request
url: 'B/test/url.html'
method: 'post'
req = @sendSpy.lastCallContext
equal req.method, 'POST'
test 'should request a url with data', 1, ->
new Batman.Request
url: 'some/test/url.html'
data:
a: "b"
c: 1
req = @sendSpy.lastCallContext
deepEqual req.data, {a: "b", c: 1}
asyncTest 'should call the success callback if the request was successful', 2, ->
postInstantiationObserver = createSpy()
optionsHashObserver = createSpy()
req = new Batman.Request
url: 'some/test/url.html'
success: optionsHashObserver
req.on 'success', postInstantiationObserver
delay =>
req = @sendSpy.lastCallContext
req.fire 'success', 'some test data'
delay =>
deepEqual optionsHashObserver.lastCallArguments, ['some test data']
deepEqual postInstantiationObserver.lastCallArguments, ['some test data']
asyncTest 'should set headers', 2, ->
new Batman.Request
url: 'some/test/url.html'
headers: {'test_header': 'test-value'}
delay =>
req = @sendSpy.lastCallContext
notEqual req.headers.test_header, undefined
equal req.headers.test_header, 'test-value'
old = {}
for key in ['FormData', 'File']
old[key] = Batman.container[key] || {}
class MockFormData extends MockClass
constructor: ->
super
@appended = []
@appends = 0
append: (k, v) ->
@appends++
@appended.push [k, v]
class MockFile
QUnit.module 'Batman.Request: serializing to FormData',
setup: ->
Batman.container.FormData = MockFormData
Batman.container.File = MockFile
MockFormData.reset()
teardown: ->
Batman.extend Batman.container, old
test 'should serialize array data to FormData objects', ->
object =
foo: ["bar", "baz"]
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo[]", "bar"], ["foo[]", "baz"]]
test 'should serialize simple data to FormData objects', ->
object =
foo: "bar"
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo", "bar"]]
test 'should serialize object data to FormData objects', ->
object =
foo:
bar: "baz"
qux: "corge"
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo[bar]", "baz"], ["foo[qux]", "corge"]]
test 'should serialize nested object and array data to FormData objects', ->
object =
foo:
bar: ["baz", null, "qux", undefined]
corge: [{ding: "dong"}, {walla: "walla"}, {null: null}, {undefined: undefined}]
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [
["foo[bar][]", "baz"]
["foo[bar][]", ""]
["foo[bar][]", "qux"]
["foo[bar][]", ""]
["corge[][ding]", "dong"]
["corge[][walla]", "walla"]
["corge[][null]", ""]
["corge[][undefined]", ""]
]
test "should serialize files without touching them into FormData objects", ->
object =
image: new MockFile
formData = Batman.Request.objectToFormData(object)
equal formData.appended[0][0], 'image'
ok formData.appended[0][1] instanceof MockFile
| 3530 | oldSend = Batman.Request::send
oldFile = Batman.container.File
QUnit.module 'Batman.Request',
setup: ->
@sendSpy = createSpy()
Batman.Request::send = @sendSpy
Batman.container.File = class File
teardown: ->
Batman.container.File = oldFile
Batman.Request::send = oldSend
test 'hasFileUploads() returns false when the request data has no file uploads', ->
req = new Batman.Request data:
user:
name: '<NAME>'
equal req.hasFileUploads(), false
test 'hasFileUploads() returns true when the request data has a file upload in a nested object', ->
req = new Batman.Request data:
user:
avatar: new File()
equal req.hasFileUploads(), true
test 'hasFileUploads() returns true when the request data has a file upload in a nested array', ->
req = new Batman.Request data:
user:
avatars: [undefined, new File()]
equal req.hasFileUploads(), true
test 'should not fire if not given a url', ->
new Batman.Request
ok !@sendSpy.called
test 'should not send if autosend is false', ->
new Batman.Request(url: 'some/test/url', autosend: false)
ok !@sendSpy.called
test 'should not send if autosend is false and the url changes', ->
request = new Batman.Request(url: 'some/test/url', autosend: false)
request.set 'url', 'another/test/url'
ok !@sendSpy.called
test 'should request a url with default get', 2, ->
@request = new Batman.Request
url: 'some/test/url.html'
req = @sendSpy.lastCallContext
equal req.url, 'some/test/url.html'
equal req.method, 'GET'
test 'should request a url with a different method, converting the method to uppercase', 1, ->
@request = new Batman.Request
url: 'B/test/url.html'
method: 'post'
req = @sendSpy.lastCallContext
equal req.method, 'POST'
test 'should request a url with data', 1, ->
new Batman.Request
url: 'some/test/url.html'
data:
a: "b"
c: 1
req = @sendSpy.lastCallContext
deepEqual req.data, {a: "b", c: 1}
asyncTest 'should call the success callback if the request was successful', 2, ->
postInstantiationObserver = createSpy()
optionsHashObserver = createSpy()
req = new Batman.Request
url: 'some/test/url.html'
success: optionsHashObserver
req.on 'success', postInstantiationObserver
delay =>
req = @sendSpy.lastCallContext
req.fire 'success', 'some test data'
delay =>
deepEqual optionsHashObserver.lastCallArguments, ['some test data']
deepEqual postInstantiationObserver.lastCallArguments, ['some test data']
asyncTest 'should set headers', 2, ->
new Batman.Request
url: 'some/test/url.html'
headers: {'test_header': 'test-value'}
delay =>
req = @sendSpy.lastCallContext
notEqual req.headers.test_header, undefined
equal req.headers.test_header, 'test-value'
old = {}
for key in ['FormData', 'File']
old[key] = Batman.container[key] || {}
class MockFormData extends MockClass
constructor: ->
super
@appended = []
@appends = 0
append: (k, v) ->
@appends++
@appended.push [k, v]
class MockFile
QUnit.module 'Batman.Request: serializing to FormData',
setup: ->
Batman.container.FormData = MockFormData
Batman.container.File = MockFile
MockFormData.reset()
teardown: ->
Batman.extend Batman.container, old
test 'should serialize array data to FormData objects', ->
object =
foo: ["bar", "baz"]
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo[]", "bar"], ["foo[]", "baz"]]
test 'should serialize simple data to FormData objects', ->
object =
foo: "bar"
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo", "bar"]]
test 'should serialize object data to FormData objects', ->
object =
foo:
bar: "baz"
qux: "corge"
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo[bar]", "baz"], ["foo[qux]", "corge"]]
test 'should serialize nested object and array data to FormData objects', ->
object =
foo:
bar: ["baz", null, "qux", undefined]
corge: [{ding: "dong"}, {walla: "walla"}, {null: null}, {undefined: undefined}]
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [
["foo[bar][]", "baz"]
["foo[bar][]", ""]
["foo[bar][]", "qux"]
["foo[bar][]", ""]
["corge[][ding]", "dong"]
["corge[][walla]", "walla"]
["corge[][null]", ""]
["corge[][undefined]", ""]
]
test "should serialize files without touching them into FormData objects", ->
object =
image: new MockFile
formData = Batman.Request.objectToFormData(object)
equal formData.appended[0][0], 'image'
ok formData.appended[0][1] instanceof MockFile
| true | oldSend = Batman.Request::send
oldFile = Batman.container.File
QUnit.module 'Batman.Request',
setup: ->
@sendSpy = createSpy()
Batman.Request::send = @sendSpy
Batman.container.File = class File
teardown: ->
Batman.container.File = oldFile
Batman.Request::send = oldSend
test 'hasFileUploads() returns false when the request data has no file uploads', ->
req = new Batman.Request data:
user:
name: 'PI:NAME:<NAME>END_PI'
equal req.hasFileUploads(), false
test 'hasFileUploads() returns true when the request data has a file upload in a nested object', ->
req = new Batman.Request data:
user:
avatar: new File()
equal req.hasFileUploads(), true
test 'hasFileUploads() returns true when the request data has a file upload in a nested array', ->
req = new Batman.Request data:
user:
avatars: [undefined, new File()]
equal req.hasFileUploads(), true
test 'should not fire if not given a url', ->
new Batman.Request
ok !@sendSpy.called
test 'should not send if autosend is false', ->
new Batman.Request(url: 'some/test/url', autosend: false)
ok !@sendSpy.called
test 'should not send if autosend is false and the url changes', ->
request = new Batman.Request(url: 'some/test/url', autosend: false)
request.set 'url', 'another/test/url'
ok !@sendSpy.called
test 'should request a url with default get', 2, ->
@request = new Batman.Request
url: 'some/test/url.html'
req = @sendSpy.lastCallContext
equal req.url, 'some/test/url.html'
equal req.method, 'GET'
test 'should request a url with a different method, converting the method to uppercase', 1, ->
@request = new Batman.Request
url: 'B/test/url.html'
method: 'post'
req = @sendSpy.lastCallContext
equal req.method, 'POST'
test 'should request a url with data', 1, ->
new Batman.Request
url: 'some/test/url.html'
data:
a: "b"
c: 1
req = @sendSpy.lastCallContext
deepEqual req.data, {a: "b", c: 1}
asyncTest 'should call the success callback if the request was successful', 2, ->
postInstantiationObserver = createSpy()
optionsHashObserver = createSpy()
req = new Batman.Request
url: 'some/test/url.html'
success: optionsHashObserver
req.on 'success', postInstantiationObserver
delay =>
req = @sendSpy.lastCallContext
req.fire 'success', 'some test data'
delay =>
deepEqual optionsHashObserver.lastCallArguments, ['some test data']
deepEqual postInstantiationObserver.lastCallArguments, ['some test data']
asyncTest 'should set headers', 2, ->
new Batman.Request
url: 'some/test/url.html'
headers: {'test_header': 'test-value'}
delay =>
req = @sendSpy.lastCallContext
notEqual req.headers.test_header, undefined
equal req.headers.test_header, 'test-value'
old = {}
for key in ['FormData', 'File']
old[key] = Batman.container[key] || {}
class MockFormData extends MockClass
constructor: ->
super
@appended = []
@appends = 0
append: (k, v) ->
@appends++
@appended.push [k, v]
class MockFile
QUnit.module 'Batman.Request: serializing to FormData',
setup: ->
Batman.container.FormData = MockFormData
Batman.container.File = MockFile
MockFormData.reset()
teardown: ->
Batman.extend Batman.container, old
test 'should serialize array data to FormData objects', ->
object =
foo: ["bar", "baz"]
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo[]", "bar"], ["foo[]", "baz"]]
test 'should serialize simple data to FormData objects', ->
object =
foo: "bar"
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo", "bar"]]
test 'should serialize object data to FormData objects', ->
object =
foo:
bar: "baz"
qux: "corge"
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [["foo[bar]", "baz"], ["foo[qux]", "corge"]]
test 'should serialize nested object and array data to FormData objects', ->
object =
foo:
bar: ["baz", null, "qux", undefined]
corge: [{ding: "dong"}, {walla: "walla"}, {null: null}, {undefined: undefined}]
formData = Batman.Request.objectToFormData(object)
deepEqual formData.appended, [
["foo[bar][]", "baz"]
["foo[bar][]", ""]
["foo[bar][]", "qux"]
["foo[bar][]", ""]
["corge[][ding]", "dong"]
["corge[][walla]", "walla"]
["corge[][null]", ""]
["corge[][undefined]", ""]
]
test "should serialize files without touching them into FormData objects", ->
object =
image: new MockFile
formData = Batman.Request.objectToFormData(object)
equal formData.appended[0][0], 'image'
ok formData.appended[0][1] instanceof MockFile
|
[
{
"context": ".de/?url=https:%2F%2Flobid.org%2Fgnd%2F5004690-1\n# Albrecht Dürer - https://jsontojsonp.gbv.de/?url=https:%2F%2Flob",
"end": 123,
"score": 0.9997757077217102,
"start": 109,
"tag": "NAME",
"value": "Albrecht Dürer"
},
{
"context": "auberflöte . http://lobid.org/gnd/7599114-7.json\n# Edvard Grieg - https://jsontojsonp.gbv.de/?url=https:%2F%2Flob",
"end": 279,
"score": 0.9997431039810181,
"start": 267,
"tag": "NAME",
"value": "Edvard Grieg"
}
] | src/webfrontend/UBHDGNDUtil.coffee | ssciwr/easydb-custom-data-type-ubhdgnd | 0 | # examples:
# VolksWagenStiftung - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F5004690-1
# Albrecht Dürer - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F11852786X
# Entdeckung der Zauberflöte . http://lobid.org/gnd/7599114-7.json
# Edvard Grieg - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F118697641
# some fields are missing, thats on purpose. This is a curated selection, because not all fields make sense
# "# ++" --> doublechecked
# "# + checked" --> should theoretically work, but needs more explicit testing
class ez5.UBHDGNDUtil
@getFullTextFromEntityFactsJSON: (efJSON) ->
_fulltext = ''
# ++
_fulltext += efJSON['id'] + ' '
# ++
_fulltext = efJSON['gndIdentifier'] + ' '
# ++
if efJSON?.oldAuthorityNumber
for entry in efJSON.oldAuthorityNumber
_fulltext += entry + ' '
# ++
if efJSON?.gndSubjectCategory
for entry in efJSON.gndSubjectCategory
if entry.label
_fulltext += entry.label + ' '
# ++
_fulltext += efJSON['preferredName'] + ' '
# ++
if efJSON?.variantName
for entry in efJSON.variantName
_fulltext += entry + ' '
# ++
if efJSON?.biographicalOrHistoricalInformation
for entry in efJSON.biographicalOrHistoricalInformation
_fulltext += entry + ' '
# ++
if efJSON?.dateOfEstablishment
for entry in efJSON.dateOfEstablishment
_fulltext += entry + ' '
# ++
if efJSON?.dateOfPublication
for entry in efJSON.dateOfPublication
_fulltext += entry + ' '
# ++
if efJSON?.dateOfBirth
_fulltext += efJSON.dateOfBirth + ' '
if efJSON?.dateOfProduction
_fulltext += efJSON.dateOfProduction + ' '
# ++
if efJSON?.dateOfDeath
_fulltext += efJSON.dateOfDeath + ' '
# + checked
if efJSON?.dateOfTermination
_fulltext += efJSON.dateOfTermination + ' '
# ++
if efJSON?.author
for entry in efJSON.author
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.firstAuthor
for entry in efJSON.firstAuthor
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.gender
for entry in efJSON.gender
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfBirth
for entry in efJSON.placeOfBirth
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfDeath
for entry in efJSON.placeOfDeath
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfBusiness
for entry in efJSON.placeOfBusiness
if entry.label
_fulltext += entry.label + ' '
if efJSON?.associatedPlace
for entry in efJSON.associatedPlace
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.topic
for entry in efJSON.topic
if entry.label
_fulltext += entry.label + ' '
if efJSON?.predecessor
for entry in efJSON.predecessor
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.precedingCorporateBody
for entry in efJSON.precedingCorporateBody
if entry.label
_fulltext += entry.label + ' '
if efJSON?.isA
for entry in efJSON.isA
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.composer
for entry in efJSON.composer
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.relatedWork
for entry in efJSON.relatedWork
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.relatedPerson
for entry in efJSON.relatedPerson
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.precedingPlaceOrGeographicName
for entry in efJSON.precedingPlaceOrGeographicName
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.hierarchicalSuperiorOfTheCorporateBody
for entry in efJSON.hierarchicalSuperiorOfTheCorporateBody
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.broaderTermInstantial
for entry in efJSON.broaderTermInstantial
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.broaderTermGeneral
for entry in efJSON.broaderTermGeneral
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.professionOrOccupation
for entry in efJSON.professionOrOccupation
if entry.label
_fulltext += entry.label + ' '
if efJSON?.architect
for entry in efJSON.architect
if entry.label
_fulltext += entry.label + ' '
if efJSON?.opusNumericDesignationOfMusicalWork
for entry in efJSON.opusNumericDesignationOfMusicalWork
_fulltext += entry + ' '
# ++
if efJSON?.definition
for entry in efJSON.definition
_fulltext += entry + ' '
return _fulltext
| 12341 | # examples:
# VolksWagenStiftung - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F5004690-1
# <NAME> - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F11852786X
# Entdeckung der Zauberflöte . http://lobid.org/gnd/7599114-7.json
# <NAME> - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F118697641
# some fields are missing, thats on purpose. This is a curated selection, because not all fields make sense
# "# ++" --> doublechecked
# "# + checked" --> should theoretically work, but needs more explicit testing
class ez5.UBHDGNDUtil
@getFullTextFromEntityFactsJSON: (efJSON) ->
_fulltext = ''
# ++
_fulltext += efJSON['id'] + ' '
# ++
_fulltext = efJSON['gndIdentifier'] + ' '
# ++
if efJSON?.oldAuthorityNumber
for entry in efJSON.oldAuthorityNumber
_fulltext += entry + ' '
# ++
if efJSON?.gndSubjectCategory
for entry in efJSON.gndSubjectCategory
if entry.label
_fulltext += entry.label + ' '
# ++
_fulltext += efJSON['preferredName'] + ' '
# ++
if efJSON?.variantName
for entry in efJSON.variantName
_fulltext += entry + ' '
# ++
if efJSON?.biographicalOrHistoricalInformation
for entry in efJSON.biographicalOrHistoricalInformation
_fulltext += entry + ' '
# ++
if efJSON?.dateOfEstablishment
for entry in efJSON.dateOfEstablishment
_fulltext += entry + ' '
# ++
if efJSON?.dateOfPublication
for entry in efJSON.dateOfPublication
_fulltext += entry + ' '
# ++
if efJSON?.dateOfBirth
_fulltext += efJSON.dateOfBirth + ' '
if efJSON?.dateOfProduction
_fulltext += efJSON.dateOfProduction + ' '
# ++
if efJSON?.dateOfDeath
_fulltext += efJSON.dateOfDeath + ' '
# + checked
if efJSON?.dateOfTermination
_fulltext += efJSON.dateOfTermination + ' '
# ++
if efJSON?.author
for entry in efJSON.author
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.firstAuthor
for entry in efJSON.firstAuthor
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.gender
for entry in efJSON.gender
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfBirth
for entry in efJSON.placeOfBirth
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfDeath
for entry in efJSON.placeOfDeath
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfBusiness
for entry in efJSON.placeOfBusiness
if entry.label
_fulltext += entry.label + ' '
if efJSON?.associatedPlace
for entry in efJSON.associatedPlace
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.topic
for entry in efJSON.topic
if entry.label
_fulltext += entry.label + ' '
if efJSON?.predecessor
for entry in efJSON.predecessor
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.precedingCorporateBody
for entry in efJSON.precedingCorporateBody
if entry.label
_fulltext += entry.label + ' '
if efJSON?.isA
for entry in efJSON.isA
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.composer
for entry in efJSON.composer
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.relatedWork
for entry in efJSON.relatedWork
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.relatedPerson
for entry in efJSON.relatedPerson
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.precedingPlaceOrGeographicName
for entry in efJSON.precedingPlaceOrGeographicName
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.hierarchicalSuperiorOfTheCorporateBody
for entry in efJSON.hierarchicalSuperiorOfTheCorporateBody
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.broaderTermInstantial
for entry in efJSON.broaderTermInstantial
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.broaderTermGeneral
for entry in efJSON.broaderTermGeneral
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.professionOrOccupation
for entry in efJSON.professionOrOccupation
if entry.label
_fulltext += entry.label + ' '
if efJSON?.architect
for entry in efJSON.architect
if entry.label
_fulltext += entry.label + ' '
if efJSON?.opusNumericDesignationOfMusicalWork
for entry in efJSON.opusNumericDesignationOfMusicalWork
_fulltext += entry + ' '
# ++
if efJSON?.definition
for entry in efJSON.definition
_fulltext += entry + ' '
return _fulltext
| true | # examples:
# VolksWagenStiftung - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F5004690-1
# PI:NAME:<NAME>END_PI - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F11852786X
# Entdeckung der Zauberflöte . http://lobid.org/gnd/7599114-7.json
# PI:NAME:<NAME>END_PI - https://jsontojsonp.gbv.de/?url=https:%2F%2Flobid.org%2Fgnd%2F118697641
# some fields are missing, thats on purpose. This is a curated selection, because not all fields make sense
# "# ++" --> doublechecked
# "# + checked" --> should theoretically work, but needs more explicit testing
class ez5.UBHDGNDUtil
@getFullTextFromEntityFactsJSON: (efJSON) ->
_fulltext = ''
# ++
_fulltext += efJSON['id'] + ' '
# ++
_fulltext = efJSON['gndIdentifier'] + ' '
# ++
if efJSON?.oldAuthorityNumber
for entry in efJSON.oldAuthorityNumber
_fulltext += entry + ' '
# ++
if efJSON?.gndSubjectCategory
for entry in efJSON.gndSubjectCategory
if entry.label
_fulltext += entry.label + ' '
# ++
_fulltext += efJSON['preferredName'] + ' '
# ++
if efJSON?.variantName
for entry in efJSON.variantName
_fulltext += entry + ' '
# ++
if efJSON?.biographicalOrHistoricalInformation
for entry in efJSON.biographicalOrHistoricalInformation
_fulltext += entry + ' '
# ++
if efJSON?.dateOfEstablishment
for entry in efJSON.dateOfEstablishment
_fulltext += entry + ' '
# ++
if efJSON?.dateOfPublication
for entry in efJSON.dateOfPublication
_fulltext += entry + ' '
# ++
if efJSON?.dateOfBirth
_fulltext += efJSON.dateOfBirth + ' '
if efJSON?.dateOfProduction
_fulltext += efJSON.dateOfProduction + ' '
# ++
if efJSON?.dateOfDeath
_fulltext += efJSON.dateOfDeath + ' '
# + checked
if efJSON?.dateOfTermination
_fulltext += efJSON.dateOfTermination + ' '
# ++
if efJSON?.author
for entry in efJSON.author
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.firstAuthor
for entry in efJSON.firstAuthor
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.gender
for entry in efJSON.gender
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfBirth
for entry in efJSON.placeOfBirth
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfDeath
for entry in efJSON.placeOfDeath
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.placeOfBusiness
for entry in efJSON.placeOfBusiness
if entry.label
_fulltext += entry.label + ' '
if efJSON?.associatedPlace
for entry in efJSON.associatedPlace
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.topic
for entry in efJSON.topic
if entry.label
_fulltext += entry.label + ' '
if efJSON?.predecessor
for entry in efJSON.predecessor
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.precedingCorporateBody
for entry in efJSON.precedingCorporateBody
if entry.label
_fulltext += entry.label + ' '
if efJSON?.isA
for entry in efJSON.isA
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.composer
for entry in efJSON.composer
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.relatedWork
for entry in efJSON.relatedWork
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.relatedPerson
for entry in efJSON.relatedPerson
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.precedingPlaceOrGeographicName
for entry in efJSON.precedingPlaceOrGeographicName
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.hierarchicalSuperiorOfTheCorporateBody
for entry in efJSON.hierarchicalSuperiorOfTheCorporateBody
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.broaderTermInstantial
for entry in efJSON.broaderTermInstantial
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.broaderTermGeneral
for entry in efJSON.broaderTermGeneral
if entry.label
_fulltext += entry.label + ' '
# ++
if efJSON?.professionOrOccupation
for entry in efJSON.professionOrOccupation
if entry.label
_fulltext += entry.label + ' '
if efJSON?.architect
for entry in efJSON.architect
if entry.label
_fulltext += entry.label + ' '
if efJSON?.opusNumericDesignationOfMusicalWork
for entry in efJSON.opusNumericDesignationOfMusicalWork
_fulltext += entry + ' '
# ++
if efJSON?.definition
for entry in efJSON.definition
_fulltext += entry + ' '
return _fulltext
|
[
{
"context": "@mrfogg = {}\nmrfogg = @mrfogg\n\nmodules = [\n # Angular ",
"end": 7,
"score": 0.9552292227745056,
"start": 0,
"tag": "USERNAME",
"value": "@mrfogg"
},
{
"context": "@mrfogg = {}\nmrfogg = @mrfogg\n\nmodules = [\n # Angular addons\n \"ngRoute\",\n",
"end": 29,
"score": 0.9893447756767273,
"start": 22,
"tag": "USERNAME",
"value": "@mrfogg"
},
{
"context": "'mrfogg.config', []).value('config', {\n host: \"144.76.249.158:8080\"\n # host: \"mrfogg.apiary.io\"\n # host: ",
"end": 2284,
"score": 0.9997721910476685,
"start": 2270,
"tag": "IP_ADDRESS",
"value": "144.76.249.158"
}
] | mrfogg-front/app/coffee/app.coffee | PIWEEK/mrfogg | 0 | @mrfogg = {}
mrfogg = @mrfogg
modules = [
# Angular addons
"ngRoute",
"ngAnimate",
"ngSanitize",
# Controller
"mrfogg.controllers.main",
# Services
"mrfogg.services.resource",
"mrfogg.services.common",
"mrfogg.services.model",
# Modules
"mrKeypress",
# Widgets
"mrfogg.widgets",
# Greenmine Plugins
"gmUrls",
"gmFlash",
"gmModal",
"gmStorage",
"gmConfirm",
"gmOverlay",
"i18next"
]
configCallback = ($routeProvider, $locationProvider, $httpProvider, $provide, $compileProvider, $gmUrlsProvider, $sceDelegateProvider)->
# Activate HTML 5 routing without hash symbol
# $locationProvider.html5Mode(true)
$routeProvider.when('/',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/trips/:tripId',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/trips/:tripId/tasks/:taskId',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/login',
{templateUrl: '/views/login.html', controller:"MrLoginController"})
$routeProvider.when('/users',
{templateUrl: '/views/userlist.html', controller:"UserListController"})
apiUrls = {
"root": "/"
"login": "/auth/login"
"logout": "/auth/logout"
"users": "/users"
"trips": "/trips"
"cards": "/trips/%s/tasks/%s/cards"
}
$gmUrlsProvider.setUrls("api", apiUrls)
$sceDelegateProvider.resourceUrlWhitelist(['self', 'http://localhost:8080/**'])
return
init = ($rootScope, $gmStorage, $gmAuth, $gmUrls, $location, config)->
$rootScope.auth = $gmAuth.getUser()
$gmUrls.setHost("api", config.host,config.scheme)
$rootScope.logout = () ->
$gmStorage.clear()
$location.url("/login")
return
module = angular.module('mrfogg', modules)
module.config(['$routeProvider', '$locationProvider', '$httpProvider', '$provide', '$compileProvider', '$gmUrlsProvider', '$sceDelegateProvider', configCallback])
module.run(["$rootScope","$gmStorage", "$gmAuth", "$gmUrls", "$location", 'config', init])
angular.module('mrfogg.config', []).value('config', {
host: "144.76.249.158:8080"
# host: "mrfogg.apiary.io"
# host: "localhost:8080"
scheme: "http"
defaultLanguage: "en"
debug: false
})
angular.module("mrfogg.widgets", [])
| 200828 | @mrfogg = {}
mrfogg = @mrfogg
modules = [
# Angular addons
"ngRoute",
"ngAnimate",
"ngSanitize",
# Controller
"mrfogg.controllers.main",
# Services
"mrfogg.services.resource",
"mrfogg.services.common",
"mrfogg.services.model",
# Modules
"mrKeypress",
# Widgets
"mrfogg.widgets",
# Greenmine Plugins
"gmUrls",
"gmFlash",
"gmModal",
"gmStorage",
"gmConfirm",
"gmOverlay",
"i18next"
]
configCallback = ($routeProvider, $locationProvider, $httpProvider, $provide, $compileProvider, $gmUrlsProvider, $sceDelegateProvider)->
# Activate HTML 5 routing without hash symbol
# $locationProvider.html5Mode(true)
$routeProvider.when('/',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/trips/:tripId',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/trips/:tripId/tasks/:taskId',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/login',
{templateUrl: '/views/login.html', controller:"MrLoginController"})
$routeProvider.when('/users',
{templateUrl: '/views/userlist.html', controller:"UserListController"})
apiUrls = {
"root": "/"
"login": "/auth/login"
"logout": "/auth/logout"
"users": "/users"
"trips": "/trips"
"cards": "/trips/%s/tasks/%s/cards"
}
$gmUrlsProvider.setUrls("api", apiUrls)
$sceDelegateProvider.resourceUrlWhitelist(['self', 'http://localhost:8080/**'])
return
init = ($rootScope, $gmStorage, $gmAuth, $gmUrls, $location, config)->
$rootScope.auth = $gmAuth.getUser()
$gmUrls.setHost("api", config.host,config.scheme)
$rootScope.logout = () ->
$gmStorage.clear()
$location.url("/login")
return
module = angular.module('mrfogg', modules)
module.config(['$routeProvider', '$locationProvider', '$httpProvider', '$provide', '$compileProvider', '$gmUrlsProvider', '$sceDelegateProvider', configCallback])
module.run(["$rootScope","$gmStorage", "$gmAuth", "$gmUrls", "$location", 'config', init])
angular.module('mrfogg.config', []).value('config', {
host: "172.16.58.3:8080"
# host: "mrfogg.apiary.io"
# host: "localhost:8080"
scheme: "http"
defaultLanguage: "en"
debug: false
})
angular.module("mrfogg.widgets", [])
| true | @mrfogg = {}
mrfogg = @mrfogg
modules = [
# Angular addons
"ngRoute",
"ngAnimate",
"ngSanitize",
# Controller
"mrfogg.controllers.main",
# Services
"mrfogg.services.resource",
"mrfogg.services.common",
"mrfogg.services.model",
# Modules
"mrKeypress",
# Widgets
"mrfogg.widgets",
# Greenmine Plugins
"gmUrls",
"gmFlash",
"gmModal",
"gmStorage",
"gmConfirm",
"gmOverlay",
"i18next"
]
configCallback = ($routeProvider, $locationProvider, $httpProvider, $provide, $compileProvider, $gmUrlsProvider, $sceDelegateProvider)->
# Activate HTML 5 routing without hash symbol
# $locationProvider.html5Mode(true)
$routeProvider.when('/',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/trips/:tripId',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/trips/:tripId/tasks/:taskId',
{templateUrl: '/views/container.html', controller: "MainController"})
$routeProvider.when('/login',
{templateUrl: '/views/login.html', controller:"MrLoginController"})
$routeProvider.when('/users',
{templateUrl: '/views/userlist.html', controller:"UserListController"})
apiUrls = {
"root": "/"
"login": "/auth/login"
"logout": "/auth/logout"
"users": "/users"
"trips": "/trips"
"cards": "/trips/%s/tasks/%s/cards"
}
$gmUrlsProvider.setUrls("api", apiUrls)
$sceDelegateProvider.resourceUrlWhitelist(['self', 'http://localhost:8080/**'])
return
init = ($rootScope, $gmStorage, $gmAuth, $gmUrls, $location, config)->
$rootScope.auth = $gmAuth.getUser()
$gmUrls.setHost("api", config.host,config.scheme)
$rootScope.logout = () ->
$gmStorage.clear()
$location.url("/login")
return
module = angular.module('mrfogg', modules)
module.config(['$routeProvider', '$locationProvider', '$httpProvider', '$provide', '$compileProvider', '$gmUrlsProvider', '$sceDelegateProvider', configCallback])
module.run(["$rootScope","$gmStorage", "$gmAuth", "$gmUrls", "$location", 'config', init])
angular.module('mrfogg.config', []).value('config', {
host: "PI:IP_ADDRESS:172.16.58.3END_PI:8080"
# host: "mrfogg.apiary.io"
# host: "localhost:8080"
scheme: "http"
defaultLanguage: "en"
debug: false
})
angular.module("mrfogg.widgets", [])
|
[
{
"context": " using sync methods for comparison:\n password = 'topsecret'\n notPassword = 'somethingelse'\n encrypted = bc",
"end": 1083,
"score": 0.9993492364883423,
"start": 1074,
"tag": "PASSWORD",
"value": "topsecret"
},
{
"context": "arison:\n password = 'topsecret'\n notPassword = 'somethingelse'\n encrypted = bcrypt.hashSync password, 10\n\n it",
"end": 1115,
"score": 0.99930739402771,
"start": 1102,
"tag": "PASSWORD",
"value": "somethingelse"
}
] | test/domain.coffee | sehrope/node-bcrypt-domain-aware | 0 | bcrypt = require './app'
domain = require 'domain'
{assert, expect} = require 'chai'
describe 'generating a salt within a domain', () ->
it 'should maintain the active domain when invoking the callback', (done) ->
d = domain.create()
d.run () ->
bcrypt.genSalt 10, (err, salt) ->
expect(err).to.be.not.ok
expect(salt).to.be.ok
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
describe 'generating a hash within a domain', () ->
it 'should maintain the active domain when invoking the callback', (done) ->
d = domain.create()
d.run () ->
bcrypt.hash 'foobar', 10, (err, encrypted) ->
expect(err).to.be.not.ok
expect(encrypted).to.be.ok
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
describe 'comparing a password within a domain', () ->
# Generate using sync methods for comparison:
password = 'topsecret'
notPassword = 'somethingelse'
encrypted = bcrypt.hashSync password, 10
it 'should maintain the active domain when invoking the callback when it matches', (done) ->
d = domain.create()
d.run () ->
bcrypt.compare password, encrypted, (err, same) ->
expect(err).to.be.not.ok
expect(same).to.be.true
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
it 'should maintain the active domain when invoking the callback when it does not match', (done) ->
d = domain.create()
d.run () ->
bcrypt.compare notPassword, encrypted, (err, same) ->
expect(err).to.be.not.ok
expect(same).to.be.false
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
| 20890 | bcrypt = require './app'
domain = require 'domain'
{assert, expect} = require 'chai'
describe 'generating a salt within a domain', () ->
it 'should maintain the active domain when invoking the callback', (done) ->
d = domain.create()
d.run () ->
bcrypt.genSalt 10, (err, salt) ->
expect(err).to.be.not.ok
expect(salt).to.be.ok
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
describe 'generating a hash within a domain', () ->
it 'should maintain the active domain when invoking the callback', (done) ->
d = domain.create()
d.run () ->
bcrypt.hash 'foobar', 10, (err, encrypted) ->
expect(err).to.be.not.ok
expect(encrypted).to.be.ok
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
describe 'comparing a password within a domain', () ->
# Generate using sync methods for comparison:
password = '<PASSWORD>'
notPassword = '<PASSWORD>'
encrypted = bcrypt.hashSync password, 10
it 'should maintain the active domain when invoking the callback when it matches', (done) ->
d = domain.create()
d.run () ->
bcrypt.compare password, encrypted, (err, same) ->
expect(err).to.be.not.ok
expect(same).to.be.true
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
it 'should maintain the active domain when invoking the callback when it does not match', (done) ->
d = domain.create()
d.run () ->
bcrypt.compare notPassword, encrypted, (err, same) ->
expect(err).to.be.not.ok
expect(same).to.be.false
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
| true | bcrypt = require './app'
domain = require 'domain'
{assert, expect} = require 'chai'
describe 'generating a salt within a domain', () ->
it 'should maintain the active domain when invoking the callback', (done) ->
d = domain.create()
d.run () ->
bcrypt.genSalt 10, (err, salt) ->
expect(err).to.be.not.ok
expect(salt).to.be.ok
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
describe 'generating a hash within a domain', () ->
it 'should maintain the active domain when invoking the callback', (done) ->
d = domain.create()
d.run () ->
bcrypt.hash 'foobar', 10, (err, encrypted) ->
expect(err).to.be.not.ok
expect(encrypted).to.be.ok
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
describe 'comparing a password within a domain', () ->
# Generate using sync methods for comparison:
password = 'PI:PASSWORD:<PASSWORD>END_PI'
notPassword = 'PI:PASSWORD:<PASSWORD>END_PI'
encrypted = bcrypt.hashSync password, 10
it 'should maintain the active domain when invoking the callback when it matches', (done) ->
d = domain.create()
d.run () ->
bcrypt.compare password, encrypted, (err, same) ->
expect(err).to.be.not.ok
expect(same).to.be.true
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
it 'should maintain the active domain when invoking the callback when it does not match', (done) ->
d = domain.create()
d.run () ->
bcrypt.compare notPassword, encrypted, (err, same) ->
expect(err).to.be.not.ok
expect(same).to.be.false
activeDomain = process.domain
expect(activeDomain).to.be.ok
expect(activeDomain).to.be.equal(d)
d.exit()
done()
|
[
{
"context": "# Alexander Danylchenko\n# Test grammar. Check highlight and colors.\n# Use",
"end": 23,
"score": 0.9997908473014832,
"start": 2,
"tag": "NAME",
"value": "Alexander Danylchenko"
}
] | grammars/test_grammar.cson | triaglesis/language-tplpre | 1 | # Alexander Danylchenko
# Test grammar. Check highlight and colors.
# Used on "Monokai" best
'scopeName': 'source.grammar_check'
'name': 'GrammarCheck'
'fileTypes': [
'grammar_check'
]
'firstLineMatch': '.*'
'patterns': [
# Includes
# Initial
{ 'include': '#test_colors' }
{ 'include': '#check' }
]
# Repository
'repository':
# Small blocks:
'test_colors':
'patterns': [
{ 'include': "#line_set" }
{ 'include': "#constant_set" }
{ 'include': "#entity_set" }
{ 'include': "#invalid_set" }
{ 'include': "#keyword_set" }
{ 'include': "#markup_set" }
{ 'include': "#meta_set" }
{ 'include': "#storage_set" }
{ 'include': "#string_set" }
{ 'include': "#support_set" }
{ 'include': "#variable_set" }
]
# Elements:
'check':
'name': 'keyword.control'
'match': '\\b(check\\.this)\\b'
# All standard:
# comment.line
'line':
'name': 'comment.line'
'match': '\\(comment\\.line\\)'
'double_slash':
'name': 'comment.line.double-slash'
'match': '\\(comment\\.line\\.double-slash\\)'
'double_dash':
'name': 'comment.line.double-dash'
'match': '\\(comment\\.line\\.double-dash\\)'
'number_sign':
'name': 'comment.line.number-sign'
'match': '\\(comment\\.line\\.number-sign\\)'
'percentage':
'name': 'comment.line.percentage'
'match': '\\(comment\\.line\\.percentage\\)'
'character':
'name': 'comment.line.character'
'match': '\\(comment\\.line\\.character\\)'
'block_multi-line':
'name': 'comment.block.multiline'
'match': '\\(comment\\.block\\.multiline\\)'
'block_multi-line_doc':
'name': 'comment.block.multiline.documentation'
'match': '\\(comment\\.block\\.multiline\\.documentation\\)'
'line_set':
'patterns': [
{ 'include': "#line" }
{ 'include': "#double_slash" }
{ 'include': "#double_dash" }
{ 'include': "#number_sign" }
{ 'include': "#percentage" }
{ 'include': "#character" }
{ 'include': "#block_multi-line" }
{ 'include': "#block_multi-line_doc" }
]
# constant
'constant':
'name': 'constant'
'match': '\\(constant\\)'
'constant_numeric':
'name': 'constant.numeric'
'match': '\\(constant\\.numeric\\)'
'constant_character':
'name': 'constant.character'
'match': '\\(constant\\.character\\)'
'constant_character_escape':
'name': 'constant.character.escape'
'match': '\\(constant\\.character\\.escape\\)'
'constant_language':
'name': 'constant.language'
'match': '\\(constant\\.language\\)'
'constant_other':
'name': 'constant.other'
'match': '\\(constant\\.other\\)'
'constant_set':
'patterns': [
{ 'include': "#constant" }
{ 'include': "#constant_numeric" }
{ 'include': "#constant_character" }
{ 'include': "#constant_character_escape" }
{ 'include': "#constant_language" }
{ 'include': "#constant_other" }
]
# entity
'entity':
'name': 'entity'
'match': '\\(entity\\)'
'entity_name':
'name': 'entity.name'
'match': '\\(entity\\.name\\)'
'entity_function':
'name': 'entity.name.function'
'match': '\\(entity\\.name\\.function\\)'
'entity_type':
'name': 'entity.name.type'
'match': '\\(entity\\.name\\.type\\)'
'entity_tag':
'name': 'entity.name.tag'
'match': '\\(entity\\.name\\.tag\\)'
'entity_section':
'name': 'entity.name.section'
'match': '\\(entity\\.name\\.section\\)'
'entity_other':
'name': 'entity.other'
'match': '\\(entity\\.other\\)'
'entity_inherited-class':
'name': 'entity.other.inherited-class'
'match': '\\(entity\\.other\\.inherited-class\\)'
'entity_attribute-name':
'name': 'entity.other.attribute-name'
'match': '\\(entity\\.other\\.attribute-name\\)'
'entity_set':
'patterns': [
{ 'include': "#entity" }
{ 'include': "#entity_name" }
{ 'include': "#entity_function" }
{ 'include': "#entity_type" }
{ 'include': "#entity_tag" }
{ 'include': "#entity_section" }
{ 'include': "#entity_other" }
{ 'include': "#entity_inherited-class" }
{ 'include': "#entity_attribute-name" }
]
# invalid
'invalid':
'name': 'invalid'
'match': '\\(invalid\\)'
'invalid_illegal':
'name': 'invalid.illegal'
'match': '\\(invalid\\.illegal\\)'
'invalid_deprecated':
'name': 'invalid.deprecated'
'match': '\\(invalid\\.deprecated\\)'
'invalid_set':
'patterns': [
{ 'include': "#invalid" }
{ 'include': "#invalid_illegal" }
{ 'include': "#invalid_deprecated" }
]
# keyword
'keyword':
'name': 'keyword'
'match': '\\(keyword\\)'
'keyword_control':
'name': 'keyword.control'
'match': '\\(keyword\\.control\\)'
'keyword_operator':
'name': 'keyword.operator'
'match': '\\(keyword\\.operator\\)'
'keyword_other':
'name': 'keyword.other'
'match': '\\(keyword\\.other\\)'
'keyword_set':
'patterns': [
{ 'include': "#keyword" }
{ 'include': "#keyword_control" }
{ 'include': "#keyword_operator" }
{ 'include': "#keyword_other" }
]
# markup
'markup':
'name': 'markup'
'match': '\\(markup\\)'
'markup_underline':
'name': 'markup.underline'
'match': '\\(markup\\.underline\\)'
'markup_underline_link':
'name': 'markup.underline.link'
'match': '\\(markup\\.underline\\.link\\)'
'markup_bold':
'name': 'markup.bold'
'match': '\\(markup\\.bold\\)'
'markup_heading':
'name': 'markup.heading'
'match': '\\(markup\\.heading\\)'
'markup_italic':
'name': 'markup.italic'
'match': '\\(markup\\.italic\\)'
'markup_list':
'name': 'markup.list'
'match': '\\(markup\\.list\\)'
'markup_list_numbered':
'name': 'markup.list.numbered'
'match': '\\(markup\\.list\\.numbered\\)'
'markup_list_unnumbered':
'name': 'markup.list.unnumbered'
'match': '\\(markup\\.list\\.unnumbered\\)'
'markup_quote':
'name': 'markup.quote'
'match': '\\(markup\\.quote\\)'
'markup_raw':
'name': 'markup.raw'
'match': '\\(markup\\.raw\\)'
'markup_other':
'name': 'markup.other'
'match': '\\(markup\\.other\\)'
'markup_set':
'patterns': [
{ 'include': "#markup" }
{ 'include': "#markup_underline" }
{ 'include': "#markup_underline_link" }
{ 'include': "#markup_bold" }
{ 'include': "#markup_heading" }
{ 'include': "#markup_italic" }
{ 'include': "#markup_list" }
{ 'include': "#markup_list_numbered" }
{ 'include': "#markup_list_unnumbered" }
{ 'include': "#markup_quote" }
{ 'include': "#markup_raw" }
{ 'include': "#markup_other" }
]
# meta
'meta':
'name': 'meta'
'match': '\\(meta\\)'
'meta_function':
'name': 'meta.function'
'match': '\\(meta.function\\)'
'meta_empty-string_single':
'name': 'meta.empty-string.single'
'match': '\\(meta.empty-string\\.single\\)'
'meta_set':
'patterns': [
{ 'include': "#meta" }
{ 'include': "#meta_function" }
{ 'include': "#string_single" }
]
# storage
'storage':
'name': 'storage'
'match': '\\(storage\\)'
'storage_type':
'name': 'storage.type'
'match': '\\(storage\\.type\\)'
'storage_modifier':
'name': 'storage.modifier'
'match': '\\(storage\\.modifier\\)'
'storage_set':
'patterns': [
{ 'include': "#storage" }
{ 'include': "#storage_type" }
{ 'include': "#storage_modifier" }
]
# string
'string':
'name': 'string'
'match': '\\(string\\)'
'string_quoted':
'name': 'string.quoted'
'match': '\\(string\\.quoted\\)'
'string_quoted_single':
'name': 'string.quoted.single'
'match': '\\(string.\\quoted\\.single\\)'
'string_quoted_double':
'name': 'string.quoted.double'
'match': '\\(string.\\quoted\\.double\\)'
'string_quoted_triple':
'name': 'string.quoted.triple'
'match': '\\(string.\\quoted\\.triple\\)'
'string_quoted_other':
'name': 'string.quoted.other'
'match': '\\(string\\.quoted\\.other\\)'
'string_unquoted':
'name': 'string.unquoted'
'match': '\\(string\\.unquoted\\)'
'string_interpolated':
'name': 'string.interpolated'
'match': '\\(string\\.interpolated\\)'
'string_regexp':
'name': 'string.regexp'
'match': '\\(string\\.regexp\\)'
'string_other':
'name': 'string.other'
'match': '\\(string\\.other\\)'
'string_set':
'patterns': [
{ 'include': "#string" }
{ 'include': "#string_quoted" }
{ 'include': "#string_quoted_single" }
{ 'include': "#string_quoted_double" }
{ 'include': "#string_quoted_triple" }
{ 'include': "#string_quoted_other" }
{ 'include': "#string_unquoted" }
{ 'include': "#string_interpolated" }
{ 'include': "#string_regexp" }
{ 'include': "#string_other" }
]
# support
'support':
'name': 'support'
'match': '\\(support\\)'
'support_function':
'name': 'support.function'
'match': '\\(support\\.function\\)'
'support_class':
'name': 'support.class'
'match': '\\(support\\.class\\)'
'support_type':
'name': 'support.type'
'match': '\\(support\\.type\\)'
'support_constant':
'name': 'support.constant'
'match': '\\(support\\.constant\\)'
'support_variable':
'name': 'support.variable'
'match': '\\(support\\.variable\\)'
'support_other':
'name': 'support.other'
'match': '\\(support\\.other\\)'
'support_set':
'patterns': [
{ 'include': "#support" }
{ 'include': "#support_function" }
{ 'include': "#support_class" }
{ 'include': "#support_type" }
{ 'include': "#support_constant" }
{ 'include': "#support_variable" }
{ 'include': "#support_other" }
]
# variable
'variable':
'name': 'variable'
'match': '\\(variable\\)'
'variable_parameter':
'name': 'variable.parameter'
'match': '\\(variable\\.parameter\\)'
'variable_language':
'name': 'variable.language'
'match': '\\(variable\\.language\\)'
'variable_other':
'name': 'variable.other'
'match': '\\(variable\\.other\\)'
'variable_set':
'patterns': [
{ 'include': "#variable" }
{ 'include': "#variable_parameter" }
{ 'include': "#variable_language" }
{ 'include': "#variable_other" }
]
| 151843 | # <NAME>
# Test grammar. Check highlight and colors.
# Used on "Monokai" best
'scopeName': 'source.grammar_check'
'name': 'GrammarCheck'
'fileTypes': [
'grammar_check'
]
'firstLineMatch': '.*'
'patterns': [
# Includes
# Initial
{ 'include': '#test_colors' }
{ 'include': '#check' }
]
# Repository
'repository':
# Small blocks:
'test_colors':
'patterns': [
{ 'include': "#line_set" }
{ 'include': "#constant_set" }
{ 'include': "#entity_set" }
{ 'include': "#invalid_set" }
{ 'include': "#keyword_set" }
{ 'include': "#markup_set" }
{ 'include': "#meta_set" }
{ 'include': "#storage_set" }
{ 'include': "#string_set" }
{ 'include': "#support_set" }
{ 'include': "#variable_set" }
]
# Elements:
'check':
'name': 'keyword.control'
'match': '\\b(check\\.this)\\b'
# All standard:
# comment.line
'line':
'name': 'comment.line'
'match': '\\(comment\\.line\\)'
'double_slash':
'name': 'comment.line.double-slash'
'match': '\\(comment\\.line\\.double-slash\\)'
'double_dash':
'name': 'comment.line.double-dash'
'match': '\\(comment\\.line\\.double-dash\\)'
'number_sign':
'name': 'comment.line.number-sign'
'match': '\\(comment\\.line\\.number-sign\\)'
'percentage':
'name': 'comment.line.percentage'
'match': '\\(comment\\.line\\.percentage\\)'
'character':
'name': 'comment.line.character'
'match': '\\(comment\\.line\\.character\\)'
'block_multi-line':
'name': 'comment.block.multiline'
'match': '\\(comment\\.block\\.multiline\\)'
'block_multi-line_doc':
'name': 'comment.block.multiline.documentation'
'match': '\\(comment\\.block\\.multiline\\.documentation\\)'
'line_set':
'patterns': [
{ 'include': "#line" }
{ 'include': "#double_slash" }
{ 'include': "#double_dash" }
{ 'include': "#number_sign" }
{ 'include': "#percentage" }
{ 'include': "#character" }
{ 'include': "#block_multi-line" }
{ 'include': "#block_multi-line_doc" }
]
# constant
'constant':
'name': 'constant'
'match': '\\(constant\\)'
'constant_numeric':
'name': 'constant.numeric'
'match': '\\(constant\\.numeric\\)'
'constant_character':
'name': 'constant.character'
'match': '\\(constant\\.character\\)'
'constant_character_escape':
'name': 'constant.character.escape'
'match': '\\(constant\\.character\\.escape\\)'
'constant_language':
'name': 'constant.language'
'match': '\\(constant\\.language\\)'
'constant_other':
'name': 'constant.other'
'match': '\\(constant\\.other\\)'
'constant_set':
'patterns': [
{ 'include': "#constant" }
{ 'include': "#constant_numeric" }
{ 'include': "#constant_character" }
{ 'include': "#constant_character_escape" }
{ 'include': "#constant_language" }
{ 'include': "#constant_other" }
]
# entity
'entity':
'name': 'entity'
'match': '\\(entity\\)'
'entity_name':
'name': 'entity.name'
'match': '\\(entity\\.name\\)'
'entity_function':
'name': 'entity.name.function'
'match': '\\(entity\\.name\\.function\\)'
'entity_type':
'name': 'entity.name.type'
'match': '\\(entity\\.name\\.type\\)'
'entity_tag':
'name': 'entity.name.tag'
'match': '\\(entity\\.name\\.tag\\)'
'entity_section':
'name': 'entity.name.section'
'match': '\\(entity\\.name\\.section\\)'
'entity_other':
'name': 'entity.other'
'match': '\\(entity\\.other\\)'
'entity_inherited-class':
'name': 'entity.other.inherited-class'
'match': '\\(entity\\.other\\.inherited-class\\)'
'entity_attribute-name':
'name': 'entity.other.attribute-name'
'match': '\\(entity\\.other\\.attribute-name\\)'
'entity_set':
'patterns': [
{ 'include': "#entity" }
{ 'include': "#entity_name" }
{ 'include': "#entity_function" }
{ 'include': "#entity_type" }
{ 'include': "#entity_tag" }
{ 'include': "#entity_section" }
{ 'include': "#entity_other" }
{ 'include': "#entity_inherited-class" }
{ 'include': "#entity_attribute-name" }
]
# invalid
'invalid':
'name': 'invalid'
'match': '\\(invalid\\)'
'invalid_illegal':
'name': 'invalid.illegal'
'match': '\\(invalid\\.illegal\\)'
'invalid_deprecated':
'name': 'invalid.deprecated'
'match': '\\(invalid\\.deprecated\\)'
'invalid_set':
'patterns': [
{ 'include': "#invalid" }
{ 'include': "#invalid_illegal" }
{ 'include': "#invalid_deprecated" }
]
# keyword
'keyword':
'name': 'keyword'
'match': '\\(keyword\\)'
'keyword_control':
'name': 'keyword.control'
'match': '\\(keyword\\.control\\)'
'keyword_operator':
'name': 'keyword.operator'
'match': '\\(keyword\\.operator\\)'
'keyword_other':
'name': 'keyword.other'
'match': '\\(keyword\\.other\\)'
'keyword_set':
'patterns': [
{ 'include': "#keyword" }
{ 'include': "#keyword_control" }
{ 'include': "#keyword_operator" }
{ 'include': "#keyword_other" }
]
# markup
'markup':
'name': 'markup'
'match': '\\(markup\\)'
'markup_underline':
'name': 'markup.underline'
'match': '\\(markup\\.underline\\)'
'markup_underline_link':
'name': 'markup.underline.link'
'match': '\\(markup\\.underline\\.link\\)'
'markup_bold':
'name': 'markup.bold'
'match': '\\(markup\\.bold\\)'
'markup_heading':
'name': 'markup.heading'
'match': '\\(markup\\.heading\\)'
'markup_italic':
'name': 'markup.italic'
'match': '\\(markup\\.italic\\)'
'markup_list':
'name': 'markup.list'
'match': '\\(markup\\.list\\)'
'markup_list_numbered':
'name': 'markup.list.numbered'
'match': '\\(markup\\.list\\.numbered\\)'
'markup_list_unnumbered':
'name': 'markup.list.unnumbered'
'match': '\\(markup\\.list\\.unnumbered\\)'
'markup_quote':
'name': 'markup.quote'
'match': '\\(markup\\.quote\\)'
'markup_raw':
'name': 'markup.raw'
'match': '\\(markup\\.raw\\)'
'markup_other':
'name': 'markup.other'
'match': '\\(markup\\.other\\)'
'markup_set':
'patterns': [
{ 'include': "#markup" }
{ 'include': "#markup_underline" }
{ 'include': "#markup_underline_link" }
{ 'include': "#markup_bold" }
{ 'include': "#markup_heading" }
{ 'include': "#markup_italic" }
{ 'include': "#markup_list" }
{ 'include': "#markup_list_numbered" }
{ 'include': "#markup_list_unnumbered" }
{ 'include': "#markup_quote" }
{ 'include': "#markup_raw" }
{ 'include': "#markup_other" }
]
# meta
'meta':
'name': 'meta'
'match': '\\(meta\\)'
'meta_function':
'name': 'meta.function'
'match': '\\(meta.function\\)'
'meta_empty-string_single':
'name': 'meta.empty-string.single'
'match': '\\(meta.empty-string\\.single\\)'
'meta_set':
'patterns': [
{ 'include': "#meta" }
{ 'include': "#meta_function" }
{ 'include': "#string_single" }
]
# storage
'storage':
'name': 'storage'
'match': '\\(storage\\)'
'storage_type':
'name': 'storage.type'
'match': '\\(storage\\.type\\)'
'storage_modifier':
'name': 'storage.modifier'
'match': '\\(storage\\.modifier\\)'
'storage_set':
'patterns': [
{ 'include': "#storage" }
{ 'include': "#storage_type" }
{ 'include': "#storage_modifier" }
]
# string
'string':
'name': 'string'
'match': '\\(string\\)'
'string_quoted':
'name': 'string.quoted'
'match': '\\(string\\.quoted\\)'
'string_quoted_single':
'name': 'string.quoted.single'
'match': '\\(string.\\quoted\\.single\\)'
'string_quoted_double':
'name': 'string.quoted.double'
'match': '\\(string.\\quoted\\.double\\)'
'string_quoted_triple':
'name': 'string.quoted.triple'
'match': '\\(string.\\quoted\\.triple\\)'
'string_quoted_other':
'name': 'string.quoted.other'
'match': '\\(string\\.quoted\\.other\\)'
'string_unquoted':
'name': 'string.unquoted'
'match': '\\(string\\.unquoted\\)'
'string_interpolated':
'name': 'string.interpolated'
'match': '\\(string\\.interpolated\\)'
'string_regexp':
'name': 'string.regexp'
'match': '\\(string\\.regexp\\)'
'string_other':
'name': 'string.other'
'match': '\\(string\\.other\\)'
'string_set':
'patterns': [
{ 'include': "#string" }
{ 'include': "#string_quoted" }
{ 'include': "#string_quoted_single" }
{ 'include': "#string_quoted_double" }
{ 'include': "#string_quoted_triple" }
{ 'include': "#string_quoted_other" }
{ 'include': "#string_unquoted" }
{ 'include': "#string_interpolated" }
{ 'include': "#string_regexp" }
{ 'include': "#string_other" }
]
# support
'support':
'name': 'support'
'match': '\\(support\\)'
'support_function':
'name': 'support.function'
'match': '\\(support\\.function\\)'
'support_class':
'name': 'support.class'
'match': '\\(support\\.class\\)'
'support_type':
'name': 'support.type'
'match': '\\(support\\.type\\)'
'support_constant':
'name': 'support.constant'
'match': '\\(support\\.constant\\)'
'support_variable':
'name': 'support.variable'
'match': '\\(support\\.variable\\)'
'support_other':
'name': 'support.other'
'match': '\\(support\\.other\\)'
'support_set':
'patterns': [
{ 'include': "#support" }
{ 'include': "#support_function" }
{ 'include': "#support_class" }
{ 'include': "#support_type" }
{ 'include': "#support_constant" }
{ 'include': "#support_variable" }
{ 'include': "#support_other" }
]
# variable
'variable':
'name': 'variable'
'match': '\\(variable\\)'
'variable_parameter':
'name': 'variable.parameter'
'match': '\\(variable\\.parameter\\)'
'variable_language':
'name': 'variable.language'
'match': '\\(variable\\.language\\)'
'variable_other':
'name': 'variable.other'
'match': '\\(variable\\.other\\)'
'variable_set':
'patterns': [
{ 'include': "#variable" }
{ 'include': "#variable_parameter" }
{ 'include': "#variable_language" }
{ 'include': "#variable_other" }
]
| true | # PI:NAME:<NAME>END_PI
# Test grammar. Check highlight and colors.
# Used on "Monokai" best
'scopeName': 'source.grammar_check'
'name': 'GrammarCheck'
'fileTypes': [
'grammar_check'
]
'firstLineMatch': '.*'
'patterns': [
# Includes
# Initial
{ 'include': '#test_colors' }
{ 'include': '#check' }
]
# Repository
'repository':
# Small blocks:
'test_colors':
'patterns': [
{ 'include': "#line_set" }
{ 'include': "#constant_set" }
{ 'include': "#entity_set" }
{ 'include': "#invalid_set" }
{ 'include': "#keyword_set" }
{ 'include': "#markup_set" }
{ 'include': "#meta_set" }
{ 'include': "#storage_set" }
{ 'include': "#string_set" }
{ 'include': "#support_set" }
{ 'include': "#variable_set" }
]
# Elements:
'check':
'name': 'keyword.control'
'match': '\\b(check\\.this)\\b'
# All standard:
# comment.line
'line':
'name': 'comment.line'
'match': '\\(comment\\.line\\)'
'double_slash':
'name': 'comment.line.double-slash'
'match': '\\(comment\\.line\\.double-slash\\)'
'double_dash':
'name': 'comment.line.double-dash'
'match': '\\(comment\\.line\\.double-dash\\)'
'number_sign':
'name': 'comment.line.number-sign'
'match': '\\(comment\\.line\\.number-sign\\)'
'percentage':
'name': 'comment.line.percentage'
'match': '\\(comment\\.line\\.percentage\\)'
'character':
'name': 'comment.line.character'
'match': '\\(comment\\.line\\.character\\)'
'block_multi-line':
'name': 'comment.block.multiline'
'match': '\\(comment\\.block\\.multiline\\)'
'block_multi-line_doc':
'name': 'comment.block.multiline.documentation'
'match': '\\(comment\\.block\\.multiline\\.documentation\\)'
'line_set':
'patterns': [
{ 'include': "#line" }
{ 'include': "#double_slash" }
{ 'include': "#double_dash" }
{ 'include': "#number_sign" }
{ 'include': "#percentage" }
{ 'include': "#character" }
{ 'include': "#block_multi-line" }
{ 'include': "#block_multi-line_doc" }
]
# constant
'constant':
'name': 'constant'
'match': '\\(constant\\)'
'constant_numeric':
'name': 'constant.numeric'
'match': '\\(constant\\.numeric\\)'
'constant_character':
'name': 'constant.character'
'match': '\\(constant\\.character\\)'
'constant_character_escape':
'name': 'constant.character.escape'
'match': '\\(constant\\.character\\.escape\\)'
'constant_language':
'name': 'constant.language'
'match': '\\(constant\\.language\\)'
'constant_other':
'name': 'constant.other'
'match': '\\(constant\\.other\\)'
'constant_set':
'patterns': [
{ 'include': "#constant" }
{ 'include': "#constant_numeric" }
{ 'include': "#constant_character" }
{ 'include': "#constant_character_escape" }
{ 'include': "#constant_language" }
{ 'include': "#constant_other" }
]
# entity
'entity':
'name': 'entity'
'match': '\\(entity\\)'
'entity_name':
'name': 'entity.name'
'match': '\\(entity\\.name\\)'
'entity_function':
'name': 'entity.name.function'
'match': '\\(entity\\.name\\.function\\)'
'entity_type':
'name': 'entity.name.type'
'match': '\\(entity\\.name\\.type\\)'
'entity_tag':
'name': 'entity.name.tag'
'match': '\\(entity\\.name\\.tag\\)'
'entity_section':
'name': 'entity.name.section'
'match': '\\(entity\\.name\\.section\\)'
'entity_other':
'name': 'entity.other'
'match': '\\(entity\\.other\\)'
'entity_inherited-class':
'name': 'entity.other.inherited-class'
'match': '\\(entity\\.other\\.inherited-class\\)'
'entity_attribute-name':
'name': 'entity.other.attribute-name'
'match': '\\(entity\\.other\\.attribute-name\\)'
'entity_set':
'patterns': [
{ 'include': "#entity" }
{ 'include': "#entity_name" }
{ 'include': "#entity_function" }
{ 'include': "#entity_type" }
{ 'include': "#entity_tag" }
{ 'include': "#entity_section" }
{ 'include': "#entity_other" }
{ 'include': "#entity_inherited-class" }
{ 'include': "#entity_attribute-name" }
]
# invalid
'invalid':
'name': 'invalid'
'match': '\\(invalid\\)'
'invalid_illegal':
'name': 'invalid.illegal'
'match': '\\(invalid\\.illegal\\)'
'invalid_deprecated':
'name': 'invalid.deprecated'
'match': '\\(invalid\\.deprecated\\)'
'invalid_set':
'patterns': [
{ 'include': "#invalid" }
{ 'include': "#invalid_illegal" }
{ 'include': "#invalid_deprecated" }
]
# keyword
'keyword':
'name': 'keyword'
'match': '\\(keyword\\)'
'keyword_control':
'name': 'keyword.control'
'match': '\\(keyword\\.control\\)'
'keyword_operator':
'name': 'keyword.operator'
'match': '\\(keyword\\.operator\\)'
'keyword_other':
'name': 'keyword.other'
'match': '\\(keyword\\.other\\)'
'keyword_set':
'patterns': [
{ 'include': "#keyword" }
{ 'include': "#keyword_control" }
{ 'include': "#keyword_operator" }
{ 'include': "#keyword_other" }
]
# markup
'markup':
'name': 'markup'
'match': '\\(markup\\)'
'markup_underline':
'name': 'markup.underline'
'match': '\\(markup\\.underline\\)'
'markup_underline_link':
'name': 'markup.underline.link'
'match': '\\(markup\\.underline\\.link\\)'
'markup_bold':
'name': 'markup.bold'
'match': '\\(markup\\.bold\\)'
'markup_heading':
'name': 'markup.heading'
'match': '\\(markup\\.heading\\)'
'markup_italic':
'name': 'markup.italic'
'match': '\\(markup\\.italic\\)'
'markup_list':
'name': 'markup.list'
'match': '\\(markup\\.list\\)'
'markup_list_numbered':
'name': 'markup.list.numbered'
'match': '\\(markup\\.list\\.numbered\\)'
'markup_list_unnumbered':
'name': 'markup.list.unnumbered'
'match': '\\(markup\\.list\\.unnumbered\\)'
'markup_quote':
'name': 'markup.quote'
'match': '\\(markup\\.quote\\)'
'markup_raw':
'name': 'markup.raw'
'match': '\\(markup\\.raw\\)'
'markup_other':
'name': 'markup.other'
'match': '\\(markup\\.other\\)'
'markup_set':
'patterns': [
{ 'include': "#markup" }
{ 'include': "#markup_underline" }
{ 'include': "#markup_underline_link" }
{ 'include': "#markup_bold" }
{ 'include': "#markup_heading" }
{ 'include': "#markup_italic" }
{ 'include': "#markup_list" }
{ 'include': "#markup_list_numbered" }
{ 'include': "#markup_list_unnumbered" }
{ 'include': "#markup_quote" }
{ 'include': "#markup_raw" }
{ 'include': "#markup_other" }
]
# meta
'meta':
'name': 'meta'
'match': '\\(meta\\)'
'meta_function':
                'name': 'meta.function'
                # fix: dot was unescaped (`meta.function`) and would match any
                # character; every sibling rule escapes it as `\.`
                'match': '\\(meta\\.function\\)'
'meta_empty-string_single':
                'name': 'meta.empty-string.single'
                # fix: first dot was unescaped (`meta.empty-string`); escape it
                # as `\.` to match the literal text, consistent with siblings
                'match': '\\(meta\\.empty-string\\.single\\)'
'meta_set':
'patterns': [
{ 'include': "#meta" }
{ 'include': "#meta_function" }
{ 'include': "#string_single" }
]
# storage
'storage':
'name': 'storage'
'match': '\\(storage\\)'
'storage_type':
'name': 'storage.type'
'match': '\\(storage\\.type\\)'
'storage_modifier':
'name': 'storage.modifier'
'match': '\\(storage\\.modifier\\)'
'storage_set':
'patterns': [
{ 'include': "#storage" }
{ 'include': "#storage_type" }
{ 'include': "#storage_modifier" }
]
# string
'string':
'name': 'string'
'match': '\\(string\\)'
'string_quoted':
'name': 'string.quoted'
'match': '\\(string\\.quoted\\)'
'string_quoted_single':
                'name': 'string.quoted.single'
                # fix: was `string.\\quoted` — the backslash sat before `q`
                # (a meaningless `\q` escape) leaving the dot unescaped;
                # siblings such as string_quoted_other use `string\\.quoted`
                'match': '\\(string\\.quoted\\.single\\)'
'string_quoted_double':
                'name': 'string.quoted.double'
                # fix: misplaced escape `string.\\quoted` -> `string\\.quoted`
                # (backslash belongs on the dot, not before `q`)
                'match': '\\(string\\.quoted\\.double\\)'
'string_quoted_triple':
                'name': 'string.quoted.triple'
                # fix: misplaced escape `string.\\quoted` -> `string\\.quoted`
                # (backslash belongs on the dot, not before `q`)
                'match': '\\(string\\.quoted\\.triple\\)'
'string_quoted_other':
'name': 'string.quoted.other'
'match': '\\(string\\.quoted\\.other\\)'
'string_unquoted':
'name': 'string.unquoted'
'match': '\\(string\\.unquoted\\)'
'string_interpolated':
'name': 'string.interpolated'
'match': '\\(string\\.interpolated\\)'
'string_regexp':
'name': 'string.regexp'
'match': '\\(string\\.regexp\\)'
'string_other':
'name': 'string.other'
'match': '\\(string\\.other\\)'
'string_set':
'patterns': [
{ 'include': "#string" }
{ 'include': "#string_quoted" }
{ 'include': "#string_quoted_single" }
{ 'include': "#string_quoted_double" }
{ 'include': "#string_quoted_triple" }
{ 'include': "#string_quoted_other" }
{ 'include': "#string_unquoted" }
{ 'include': "#string_interpolated" }
{ 'include': "#string_regexp" }
{ 'include': "#string_other" }
]
# support
'support':
'name': 'support'
'match': '\\(support\\)'
'support_function':
'name': 'support.function'
'match': '\\(support\\.function\\)'
'support_class':
'name': 'support.class'
'match': '\\(support\\.class\\)'
'support_type':
'name': 'support.type'
'match': '\\(support\\.type\\)'
'support_constant':
'name': 'support.constant'
'match': '\\(support\\.constant\\)'
'support_variable':
'name': 'support.variable'
'match': '\\(support\\.variable\\)'
'support_other':
'name': 'support.other'
'match': '\\(support\\.other\\)'
'support_set':
'patterns': [
{ 'include': "#support" }
{ 'include': "#support_function" }
{ 'include': "#support_class" }
{ 'include': "#support_type" }
{ 'include': "#support_constant" }
{ 'include': "#support_variable" }
{ 'include': "#support_other" }
]
# variable
'variable':
'name': 'variable'
'match': '\\(variable\\)'
'variable_parameter':
'name': 'variable.parameter'
'match': '\\(variable\\.parameter\\)'
'variable_language':
'name': 'variable.language'
'match': '\\(variable\\.language\\)'
'variable_other':
'name': 'variable.other'
'match': '\\(variable\\.other\\)'
'variable_set':
'patterns': [
{ 'include': "#variable" }
{ 'include': "#variable_parameter" }
{ 'include': "#variable_language" }
{ 'include': "#variable_other" }
]
|
[
{
"context": "age-json\"\n version: \"0.1.1\"\n author:\n name: \"Isaac Z. Schlueter\"\n email: \"i@izs.me\"\n url: \"http://blog.izs.",
"end": 225,
"score": 0.9998819231987,
"start": 207,
"tag": "NAME",
"value": "Isaac Z. Schlueter"
},
{
"context": "uthor:\n name: \"Isaac Z. Schlueter\"\n email: \"i@izs.me\"\n url: \"http://blog.izs.me/\"\n\n description: \"",
"end": 247,
"score": 0.9999292492866516,
"start": 239,
"tag": "EMAIL",
"value": "i@izs.me"
},
{
"context": "itory:\n type: \"git\"\n url: \"git://github.com/isaacs/read-package-json.git\"\n\n bugs:\n url: \"https:/",
"end": 450,
"score": 0.9914843440055847,
"start": 444,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "e-json.git\"\n\n bugs:\n url: \"https://github.com/isaacs/read-package-json/issues\"\n\n main: \"read-json.js\"",
"end": 518,
"score": 0.9868986010551453,
"start": 512,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": " tap: \"~0.2.5\"\n\n homepage: \"https://github.com/isaacs/read-package-json\"\n optionalDependencies:\n np",
"end": 831,
"score": 0.938685417175293,
"start": 825,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "ead-package-json\",\n\"version\": \"0.1.1\",\n\"author\": \"Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)\",\n\"description\":",
"end": 1368,
"score": 0.999884843826294,
"start": 1350,
"tag": "NAME",
"value": "Isaac Z. Schlueter"
},
{
"context": "version\": \"0.1.1\",\n\"author\": \"Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)\",\n\"description\": \"The thin",
"end": 1378,
"score": 0.9999299049377441,
"start": 1370,
"tag": "EMAIL",
"value": "i@izs.me"
},
{
"context": "itory\": {\n\"type\": \"git\",\n\"url\": \"git://github.com/isaacs/read-package-json.git\"\n},\n\"main\": \"read-json.js\",",
"end": 1573,
"score": 0.9993335008621216,
"start": 1567,
"tag": "USERNAME",
"value": "isaacs"
}
] | deps/npm/node_modules/read-package-json/test/non-json.coffee | lxe/io.coffee | 0 | # vim: set softtabstop=16 shiftwidth=16:
tap = require("tap")
readJson = require("../")
path = require("path")
fs = require("fs")
expect =
name: "read-package-json"
version: "0.1.1"
author:
name: "Isaac Z. Schlueter"
email: "i@izs.me"
url: "http://blog.izs.me/"
description: "The thing npm uses to read package.json files with semantics and defaults and validation"
repository:
type: "git"
url: "git://github.com/isaacs/read-package-json.git"
bugs:
url: "https://github.com/isaacs/read-package-json/issues"
main: "read-json.js"
scripts:
test: "tap test/*.js"
dependencies:
glob: "~3.1.9"
"lru-cache": "~1.1.0"
semver: "~1.0.14"
slide: "~1.1.3"
npmlog: "0"
"graceful-fs": "~1.1.8"
devDependencies:
tap: "~0.2.5"
homepage: "https://github.com/isaacs/read-package-json"
optionalDependencies:
npmlog: "0"
"graceful-fs": "~1.1.8"
_id: "read-package-json@0.1.1"
readme: "ERROR: No README data found!"
tap.test "from css", (t) ->
c = path.join(__dirname, "fixtures", "not-json.css")
readJson c, (er, d) ->
t.same d, expect
t.end()
return
return
tap.test "from js", (t) ->
readJson __filename, (er, d) ->
t.same d, expect
t.end()
return
return
###*
package
{
"name": "read-package-json",
"version": "0.1.1",
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"description": "The thing npm uses to read package.json files with semantics and defaults and validation",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/read-package-json.git"
},
"main": "read-json.js",
"scripts": {
"test": "tap test/*.js"
},
"dependencies": {
"glob": "~3.1.9",
"lru-cache": "~1.1.0",
"semver": "~1.0.14",
"slide": "~1.1.3"
},
"devDependencies": {
"tap": "~0.2.5"
},
"optionalDependencies": {
"npmlog": "0",
"graceful-fs": "~1.1.8"
}
}
###
| 8338 | # vim: set softtabstop=16 shiftwidth=16:
tap = require("tap")
readJson = require("../")
path = require("path")
fs = require("fs")
expect =
name: "read-package-json"
version: "0.1.1"
author:
name: "<NAME>"
email: "<EMAIL>"
url: "http://blog.izs.me/"
description: "The thing npm uses to read package.json files with semantics and defaults and validation"
repository:
type: "git"
url: "git://github.com/isaacs/read-package-json.git"
bugs:
url: "https://github.com/isaacs/read-package-json/issues"
main: "read-json.js"
scripts:
test: "tap test/*.js"
dependencies:
glob: "~3.1.9"
"lru-cache": "~1.1.0"
semver: "~1.0.14"
slide: "~1.1.3"
npmlog: "0"
"graceful-fs": "~1.1.8"
devDependencies:
tap: "~0.2.5"
homepage: "https://github.com/isaacs/read-package-json"
optionalDependencies:
npmlog: "0"
"graceful-fs": "~1.1.8"
_id: "read-package-json@0.1.1"
readme: "ERROR: No README data found!"
# readJson should recover the full manifest (deep-equal to `expect`) even
# when the package data is embedded in a non-JS file — here a CSS fixture.
# NOTE(review): presumably fixtures/not-json.css carries the manifest in a
# comment block like the one at the bottom of this file — confirm in fixtures.
tap.test "from css", (t) ->
                c = path.join(__dirname, "fixtures", "not-json.css")
                readJson c, (er, d) ->
                                t.same d, expect
                                t.end()
                                return
                return
# Points readJson at THIS source file: it should find the manifest embedded
# in the `###* package ... ###` comment block at the bottom of this file and
# produce an object deep-equal to `expect`.
tap.test "from js", (t) ->
                readJson __filename, (er, d) ->
                                t.same d, expect
                                t.end()
                                return
                return
###*
package
{
"name": "read-package-json",
"version": "0.1.1",
"author": "<NAME> <<EMAIL>> (http://blog.izs.me/)",
"description": "The thing npm uses to read package.json files with semantics and defaults and validation",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/read-package-json.git"
},
"main": "read-json.js",
"scripts": {
"test": "tap test/*.js"
},
"dependencies": {
"glob": "~3.1.9",
"lru-cache": "~1.1.0",
"semver": "~1.0.14",
"slide": "~1.1.3"
},
"devDependencies": {
"tap": "~0.2.5"
},
"optionalDependencies": {
"npmlog": "0",
"graceful-fs": "~1.1.8"
}
}
###
| true | # vim: set softtabstop=16 shiftwidth=16:
tap = require("tap")
readJson = require("../")
path = require("path")
fs = require("fs")
expect =
name: "read-package-json"
version: "0.1.1"
author:
name: "PI:NAME:<NAME>END_PI"
email: "PI:EMAIL:<EMAIL>END_PI"
url: "http://blog.izs.me/"
description: "The thing npm uses to read package.json files with semantics and defaults and validation"
repository:
type: "git"
url: "git://github.com/isaacs/read-package-json.git"
bugs:
url: "https://github.com/isaacs/read-package-json/issues"
main: "read-json.js"
scripts:
test: "tap test/*.js"
dependencies:
glob: "~3.1.9"
"lru-cache": "~1.1.0"
semver: "~1.0.14"
slide: "~1.1.3"
npmlog: "0"
"graceful-fs": "~1.1.8"
devDependencies:
tap: "~0.2.5"
homepage: "https://github.com/isaacs/read-package-json"
optionalDependencies:
npmlog: "0"
"graceful-fs": "~1.1.8"
_id: "read-package-json@0.1.1"
readme: "ERROR: No README data found!"
tap.test "from css", (t) ->
c = path.join(__dirname, "fixtures", "not-json.css")
readJson c, (er, d) ->
t.same d, expect
t.end()
return
return
tap.test "from js", (t) ->
readJson __filename, (er, d) ->
t.same d, expect
t.end()
return
return
###*
package
{
"name": "read-package-json",
"version": "0.1.1",
"author": "PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (http://blog.izs.me/)",
"description": "The thing npm uses to read package.json files with semantics and defaults and validation",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/read-package-json.git"
},
"main": "read-json.js",
"scripts": {
"test": "tap test/*.js"
},
"dependencies": {
"glob": "~3.1.9",
"lru-cache": "~1.1.0",
"semver": "~1.0.14",
"slide": "~1.1.3"
},
"devDependencies": {
"tap": "~0.2.5"
},
"optionalDependencies": {
"npmlog": "0",
"graceful-fs": "~1.1.8"
}
}
###
|
[
{
"context": " x.rangeBand());\n }\n\n // Inspired by Lee Byron's test data generator.\n function bumpLayer",
"end": 8890,
"score": 0.9993712902069092,
"start": 8881,
"tag": "NAME",
"value": "Lee Byron"
}
] | frontend/src/coffee/main.coffee | hotpxl/myap.ml | 1 | $ ->
server = 'http://api.myap.ml'
# id = location.search.match(/[^=]+$/)[0]
width = Math.min window.innerWidth, 1280
match = location.search.match /[^=]+$/
if match
id = match[0]
else
id = 233
d3.json "#{server}/summary/#{id}", (err, data) ->
origin = data
data = $.map data.data, (d, k) ->
s = {}
s.category = k
s.periods = d.map (p) ->
p.start = new Date p.start
p.end = new Date p.end
p
s.open = new Date Math.min.apply null, d.map (p) -> p.start
s.close = new Date Math.max.apply null, d.map (p) -> p.end
s.total = d.reduce (prev, curr) ->
curr.count.reduce((prev, curr) ->
prev + curr
, 0) + prev
,0
s.time = d.reduce (prev, curr) ->
(curr.end - curr.start) + prev
, 0
s
data.sort (a, b) ->
b.total - a.total
l = data.length
colors = randomColor count: l, hue: 'random', luminosity: 'light'
#total
(->
svg = d3.select '#total'
.append 'svg'
.attr 'width', width
.append 'g'
height = 30 * l
x = d3.scale.linear().range [0, width]
.domain [0, d3.max data.map (d) -> d.total]
y = d3.scale.ordinal().rangeRoundBands [0, height], 0.01
.domain data.map (d) -> d.category
svg.selectAll '.bar'
.data data
.enter()
.append 'g'
.attr 'class', 'bar'
svg.selectAll '.bar'
.append 'rect'
.attr 'class', 'rect'
.attr 'y', (d) -> y d.category
.attr 'height', y.rangeBand()
.attr 'x', 0
.attr 'width', (d) ->
x(d.total) / 2
.attr 'fill', (d) ->
colors[data.indexOf d]
svg.selectAll '.bar'
.append 'text'
.text (d) -> "#{d.total}(#{(d.total / d.time).toFixed 2}/s) #{d.category}"
.attr 'class', 'text'
.attr 'y', (d) -> y.rangeBand() / 2 + y d.category
.attr 'x', (d) ->
x(d.total) / 2 + 10
)()
#frequency
(->
svg = d3.select '#frequency'
.append 'svg'
.append 'g'
earliest = Math.min.apply null, data.map (d) -> d.open
last = Math.max.apply null, data.map (d) -> d.close
count = {}
for d in data
for p in d.periods
for ci in [0 .. p.count.length - 1]
count[p.start.getTime() + ci * 5] ?= 0
count[p.start.getTime() + ci * 5] += p.count[ci]
count = $.map count, (v, i) ->
date: parseInt(i), count: v
count.sort (a, b) ->
a.date - b.date
height = 100
x = d3.scale.linear().range [0, width]
.domain [earliest, last]
y = d3.scale.linear().range [height, 0]
.domain [0, d3.max count.map (d) -> d.count]
line = d3.svg.line()
.x (d) -> x d.date
.y (d) -> y d.count
svg.append 'path'
.datum count
.attr 'class', 'line'
.attr 'd', line
.attr 'fill', 'none'
.attr 'stroke', 'blue'
)()
#pie
(->
height = 300
pie = d3.layout.pie()
.sort (a, b) ->
b.time - a.time
.value (d) -> d.time
sdata = pie data
console.log sdata
svg = d3.select '#pie'
.append 'svg'
.attr 'width', width
.attr 'height', height
.append 'g'
.attr 'transform', "translate(#{width / 2}, #{height / 2})"
arc = d3.svg.arc()
.outerRadius height / 2
.innerRadius height / 6
g = svg.selectAll '.arc'
.data sdata
.enter()
.append 'g'
.attr 'class', 'arc'
g.append 'path'
.attr 'd', arc
.style 'fill', (d) ->
colors[sdata.indexOf d]
g.append 'text'
.attr 'transform', (d) -> "translate(#{arc.centroid d})"
.attr 'dy', '.35em'
.style 'text-anchor', 'middle'
.text (d) -> d.data.category
)()
#stacked
(->
stack = `
function(data) {
var startPoints = [];
var endPoints = [];
var apps = [];
for (var key in data.data) {
apps.push(key);
for (var i = 0; i < data.data[key].length; ++i) {
startPoints.push(data.data[key][i].start);
endPoints.push(data.data[key][i].end);
}
}
var appKeyIndex = {};
for (var i = 0; i < apps.length; ++i) {
appKeyIndex[apps[i]] = i;
}
var globalStartTime = Math.min.apply(null, startPoints);
var globalEndTime = Math.max.apply(null, endPoints);
var duration = globalEndTime - globalStartTime;
var bucketSize = Math.ceil(duration / 10);
var interval = 5;
var layers = [];
for (var app in data.data) {
layers[appKeyIndex[app]] = [];
for (var i = 0; i < 10; ++i) {
layers[appKeyIndex[app]][i] = {
x: i,
y: 0.1,
y0: 0
};
}
for (var i = 0; i < data.data[app].length; ++i) {
var eventBatch = data.data[app][i];
for (var j = 0; j < eventBatch.count.length; ++j) {
var bucket = Math.floor((j * interval + eventBatch.start.getTime() - globalStartTime) / bucketSize);
console.log(bucket);
layers[appKeyIndex[app]][bucket].y += eventBatch.count[j];
}
}
}
console.dir(layers);
var n = 6, // number of layers
m = 10, // number of samples per layer
stack = d3.layout.stack(),
// layers = stack(d3.range(n).map(function() { return bumpLayer(m, .1); })),
yGroupMax = d3.max(layers, function(layer) { return d3.max(layer, function(d) { return d.y; }); }),
yStackMax = d3.max(layers, function(layer) { return d3.max(layer, function(d) { return d.y0 + d.y; }); });
console.dir(layers);
var height = 200;
var x = d3.scale.ordinal()
.domain(d3.range(m))
.rangeRoundBands([0, width], .08);
var y = d3.scale.linear()
.domain([0, yStackMax])
.range([height, 0]);
var color = d3.scale.linear()
.domain([0, n - 1])
.range(["#aad", "#556"]);
var xAxis = d3.svg.axis()
.scale(x)
.tickSize(0)
.tickPadding(6)
.orient("bottom");
var svg = d3.select("#stacked").append("svg")
.attr("width", width)
.attr("height", height)
.append("g");
var layer = svg.selectAll(".layer")
.data(layers)
.enter().append("g")
.attr("class", "layer")
.style("fill", function(d, i) { return color(i); });
var rect = layer.selectAll("rect")
.data(function(d) { return d; })
.enter().append("rect")
.attr("x", function(d) { return x(d.x); })
.attr("y", height)
.attr("width", x.rangeBand())
.attr("height", 0);
rect.transition()
.delay(function(d, i) { return i * 10; })
.attr("y", function(d) { return y(d.y0 + d.y); })
.attr("height", function(d) { return y(d.y0) - y(d.y0 + d.y); });
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(xAxis);
d3.selectAll("input").on("change", change);
var timeout = setTimeout(function() {
d3.select("input[value=\"grouped\"]").property("checked", true).each(change);
}, 2000);
function change() {
clearTimeout(timeout);
if (this.value === "grouped") transitionGrouped();
else transitionStacked();
}
function transitionGrouped() {
y.domain([0, yGroupMax]);
rect.transition()
.duration(500)
.delay(function(d, i) { return i * 10; })
.attr("x", function(d, i, j) { return x(d.x) + x.rangeBand() / n * j; })
.attr("width", x.rangeBand() / n)
.transition()
.attr("y", function(d) { return y(d.y); })
.attr("height", function(d) { return height - y(d.y); });
}
function transitionStacked() {
y.domain([0, yStackMax]);
rect.transition()
.duration(500)
.delay(function(d, i) { return i * 10; })
.attr("y", function(d) { return y(d.y0 + d.y); })
.attr("height", function(d) { return y(d.y0) - y(d.y0 + d.y); })
.transition()
.attr("x", function(d) { return x(d.x); })
.attr("width", x.rangeBand());
}
// Inspired by Lee Byron's test data generator.
function bumpLayer(n, o) {
function bump(a) {
var x = 1 / (.1 + Math.random()),
y = 2 * Math.random() - .5,
z = 10 / (.1 + Math.random());
for (var i = 0; i < n; i++) {
var w = (i / n - y) * z;
a[i] += x * Math.exp(-w * w);
}
}
var a = [], i;
for (i = 0; i < n; ++i) a[i] = o + o * Math.random();
for (i = 0; i < 5; ++i) bump(a);
return a.map(function(d, i) { return {x: i, y: Math.max(0, d)}; });
}
};
`
console.log origin
stack origin
)()
| 125982 | $ ->
server = 'http://api.myap.ml'
# id = location.search.match(/[^=]+$/)[0]
width = Math.min window.innerWidth, 1280
match = location.search.match /[^=]+$/
if match
id = match[0]
else
id = 233
d3.json "#{server}/summary/#{id}", (err, data) ->
origin = data
data = $.map data.data, (d, k) ->
s = {}
s.category = k
s.periods = d.map (p) ->
p.start = new Date p.start
p.end = new Date p.end
p
s.open = new Date Math.min.apply null, d.map (p) -> p.start
s.close = new Date Math.max.apply null, d.map (p) -> p.end
s.total = d.reduce (prev, curr) ->
curr.count.reduce((prev, curr) ->
prev + curr
, 0) + prev
,0
s.time = d.reduce (prev, curr) ->
(curr.end - curr.start) + prev
, 0
s
data.sort (a, b) ->
b.total - a.total
l = data.length
colors = randomColor count: l, hue: 'random', luminosity: 'light'
#total
(->
svg = d3.select '#total'
.append 'svg'
.attr 'width', width
.append 'g'
height = 30 * l
x = d3.scale.linear().range [0, width]
.domain [0, d3.max data.map (d) -> d.total]
y = d3.scale.ordinal().rangeRoundBands [0, height], 0.01
.domain data.map (d) -> d.category
svg.selectAll '.bar'
.data data
.enter()
.append 'g'
.attr 'class', 'bar'
svg.selectAll '.bar'
.append 'rect'
.attr 'class', 'rect'
.attr 'y', (d) -> y d.category
.attr 'height', y.rangeBand()
.attr 'x', 0
.attr 'width', (d) ->
x(d.total) / 2
.attr 'fill', (d) ->
colors[data.indexOf d]
svg.selectAll '.bar'
.append 'text'
.text (d) -> "#{d.total}(#{(d.total / d.time).toFixed 2}/s) #{d.category}"
.attr 'class', 'text'
.attr 'y', (d) -> y.rangeBand() / 2 + y d.category
.attr 'x', (d) ->
x(d.total) / 2 + 10
)()
#frequency
(->
svg = d3.select '#frequency'
.append 'svg'
.append 'g'
earliest = Math.min.apply null, data.map (d) -> d.open
last = Math.max.apply null, data.map (d) -> d.close
count = {}
for d in data
for p in d.periods
for ci in [0 .. p.count.length - 1]
count[p.start.getTime() + ci * 5] ?= 0
count[p.start.getTime() + ci * 5] += p.count[ci]
count = $.map count, (v, i) ->
date: parseInt(i), count: v
count.sort (a, b) ->
a.date - b.date
height = 100
x = d3.scale.linear().range [0, width]
.domain [earliest, last]
y = d3.scale.linear().range [height, 0]
.domain [0, d3.max count.map (d) -> d.count]
line = d3.svg.line()
.x (d) -> x d.date
.y (d) -> y d.count
svg.append 'path'
.datum count
.attr 'class', 'line'
.attr 'd', line
.attr 'fill', 'none'
.attr 'stroke', 'blue'
)()
#pie
(->
height = 300
pie = d3.layout.pie()
.sort (a, b) ->
b.time - a.time
.value (d) -> d.time
sdata = pie data
console.log sdata
svg = d3.select '#pie'
.append 'svg'
.attr 'width', width
.attr 'height', height
.append 'g'
.attr 'transform', "translate(#{width / 2}, #{height / 2})"
arc = d3.svg.arc()
.outerRadius height / 2
.innerRadius height / 6
g = svg.selectAll '.arc'
.data sdata
.enter()
.append 'g'
.attr 'class', 'arc'
g.append 'path'
.attr 'd', arc
.style 'fill', (d) ->
colors[sdata.indexOf d]
g.append 'text'
.attr 'transform', (d) -> "translate(#{arc.centroid d})"
.attr 'dy', '.35em'
.style 'text-anchor', 'middle'
.text (d) -> d.data.category
)()
#stacked
(->
stack = `
function(data) {
var startPoints = [];
var endPoints = [];
var apps = [];
for (var key in data.data) {
apps.push(key);
for (var i = 0; i < data.data[key].length; ++i) {
startPoints.push(data.data[key][i].start);
endPoints.push(data.data[key][i].end);
}
}
var appKeyIndex = {};
for (var i = 0; i < apps.length; ++i) {
appKeyIndex[apps[i]] = i;
}
var globalStartTime = Math.min.apply(null, startPoints);
var globalEndTime = Math.max.apply(null, endPoints);
var duration = globalEndTime - globalStartTime;
var bucketSize = Math.ceil(duration / 10);
var interval = 5;
var layers = [];
for (var app in data.data) {
layers[appKeyIndex[app]] = [];
for (var i = 0; i < 10; ++i) {
layers[appKeyIndex[app]][i] = {
x: i,
y: 0.1,
y0: 0
};
}
for (var i = 0; i < data.data[app].length; ++i) {
var eventBatch = data.data[app][i];
for (var j = 0; j < eventBatch.count.length; ++j) {
var bucket = Math.floor((j * interval + eventBatch.start.getTime() - globalStartTime) / bucketSize);
console.log(bucket);
layers[appKeyIndex[app]][bucket].y += eventBatch.count[j];
}
}
}
console.dir(layers);
var n = 6, // number of layers
m = 10, // number of samples per layer
stack = d3.layout.stack(),
// layers = stack(d3.range(n).map(function() { return bumpLayer(m, .1); })),
yGroupMax = d3.max(layers, function(layer) { return d3.max(layer, function(d) { return d.y; }); }),
yStackMax = d3.max(layers, function(layer) { return d3.max(layer, function(d) { return d.y0 + d.y; }); });
console.dir(layers);
var height = 200;
var x = d3.scale.ordinal()
.domain(d3.range(m))
.rangeRoundBands([0, width], .08);
var y = d3.scale.linear()
.domain([0, yStackMax])
.range([height, 0]);
var color = d3.scale.linear()
.domain([0, n - 1])
.range(["#aad", "#556"]);
var xAxis = d3.svg.axis()
.scale(x)
.tickSize(0)
.tickPadding(6)
.orient("bottom");
var svg = d3.select("#stacked").append("svg")
.attr("width", width)
.attr("height", height)
.append("g");
var layer = svg.selectAll(".layer")
.data(layers)
.enter().append("g")
.attr("class", "layer")
.style("fill", function(d, i) { return color(i); });
var rect = layer.selectAll("rect")
.data(function(d) { return d; })
.enter().append("rect")
.attr("x", function(d) { return x(d.x); })
.attr("y", height)
.attr("width", x.rangeBand())
.attr("height", 0);
rect.transition()
.delay(function(d, i) { return i * 10; })
.attr("y", function(d) { return y(d.y0 + d.y); })
.attr("height", function(d) { return y(d.y0) - y(d.y0 + d.y); });
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(xAxis);
d3.selectAll("input").on("change", change);
var timeout = setTimeout(function() {
d3.select("input[value=\"grouped\"]").property("checked", true).each(change);
}, 2000);
function change() {
clearTimeout(timeout);
if (this.value === "grouped") transitionGrouped();
else transitionStacked();
}
function transitionGrouped() {
y.domain([0, yGroupMax]);
rect.transition()
.duration(500)
.delay(function(d, i) { return i * 10; })
.attr("x", function(d, i, j) { return x(d.x) + x.rangeBand() / n * j; })
.attr("width", x.rangeBand() / n)
.transition()
.attr("y", function(d) { return y(d.y); })
.attr("height", function(d) { return height - y(d.y); });
}
function transitionStacked() {
y.domain([0, yStackMax]);
rect.transition()
.duration(500)
.delay(function(d, i) { return i * 10; })
.attr("y", function(d) { return y(d.y0 + d.y); })
.attr("height", function(d) { return y(d.y0) - y(d.y0 + d.y); })
.transition()
.attr("x", function(d) { return x(d.x); })
.attr("width", x.rangeBand());
}
// Inspired by <NAME>'s test data generator.
function bumpLayer(n, o) {
function bump(a) {
var x = 1 / (.1 + Math.random()),
y = 2 * Math.random() - .5,
z = 10 / (.1 + Math.random());
for (var i = 0; i < n; i++) {
var w = (i / n - y) * z;
a[i] += x * Math.exp(-w * w);
}
}
var a = [], i;
for (i = 0; i < n; ++i) a[i] = o + o * Math.random();
for (i = 0; i < 5; ++i) bump(a);
return a.map(function(d, i) { return {x: i, y: Math.max(0, d)}; });
}
};
`
console.log origin
stack origin
)()
| true | $ ->
server = 'http://api.myap.ml'
# id = location.search.match(/[^=]+$/)[0]
width = Math.min window.innerWidth, 1280
match = location.search.match /[^=]+$/
if match
id = match[0]
else
id = 233
d3.json "#{server}/summary/#{id}", (err, data) ->
origin = data
data = $.map data.data, (d, k) ->
s = {}
s.category = k
s.periods = d.map (p) ->
p.start = new Date p.start
p.end = new Date p.end
p
s.open = new Date Math.min.apply null, d.map (p) -> p.start
s.close = new Date Math.max.apply null, d.map (p) -> p.end
s.total = d.reduce (prev, curr) ->
curr.count.reduce((prev, curr) ->
prev + curr
, 0) + prev
,0
s.time = d.reduce (prev, curr) ->
(curr.end - curr.start) + prev
, 0
s
data.sort (a, b) ->
b.total - a.total
l = data.length
colors = randomColor count: l, hue: 'random', luminosity: 'light'
#total
(->
svg = d3.select '#total'
.append 'svg'
.attr 'width', width
.append 'g'
height = 30 * l
x = d3.scale.linear().range [0, width]
.domain [0, d3.max data.map (d) -> d.total]
y = d3.scale.ordinal().rangeRoundBands [0, height], 0.01
.domain data.map (d) -> d.category
svg.selectAll '.bar'
.data data
.enter()
.append 'g'
.attr 'class', 'bar'
svg.selectAll '.bar'
.append 'rect'
.attr 'class', 'rect'
.attr 'y', (d) -> y d.category
.attr 'height', y.rangeBand()
.attr 'x', 0
.attr 'width', (d) ->
x(d.total) / 2
.attr 'fill', (d) ->
colors[data.indexOf d]
svg.selectAll '.bar'
.append 'text'
.text (d) -> "#{d.total}(#{(d.total / d.time).toFixed 2}/s) #{d.category}"
.attr 'class', 'text'
.attr 'y', (d) -> y.rangeBand() / 2 + y d.category
.attr 'x', (d) ->
x(d.total) / 2 + 10
)()
#frequency
(->
svg = d3.select '#frequency'
.append 'svg'
.append 'g'
earliest = Math.min.apply null, data.map (d) -> d.open
last = Math.max.apply null, data.map (d) -> d.close
count = {}
for d in data
for p in d.periods
for ci in [0 .. p.count.length - 1]
count[p.start.getTime() + ci * 5] ?= 0
count[p.start.getTime() + ci * 5] += p.count[ci]
count = $.map count, (v, i) ->
date: parseInt(i), count: v
count.sort (a, b) ->
a.date - b.date
height = 100
x = d3.scale.linear().range [0, width]
.domain [earliest, last]
y = d3.scale.linear().range [height, 0]
.domain [0, d3.max count.map (d) -> d.count]
line = d3.svg.line()
.x (d) -> x d.date
.y (d) -> y d.count
svg.append 'path'
.datum count
.attr 'class', 'line'
.attr 'd', line
.attr 'fill', 'none'
.attr 'stroke', 'blue'
)()
#pie
(->
height = 300
pie = d3.layout.pie()
.sort (a, b) ->
b.time - a.time
.value (d) -> d.time
sdata = pie data
console.log sdata
svg = d3.select '#pie'
.append 'svg'
.attr 'width', width
.attr 'height', height
.append 'g'
.attr 'transform', "translate(#{width / 2}, #{height / 2})"
arc = d3.svg.arc()
.outerRadius height / 2
.innerRadius height / 6
g = svg.selectAll '.arc'
.data sdata
.enter()
.append 'g'
.attr 'class', 'arc'
g.append 'path'
.attr 'd', arc
.style 'fill', (d) ->
colors[sdata.indexOf d]
g.append 'text'
.attr 'transform', (d) -> "translate(#{arc.centroid d})"
.attr 'dy', '.35em'
.style 'text-anchor', 'middle'
.text (d) -> d.data.category
)()
#stacked
(->
stack = `
function(data) {
var startPoints = [];
var endPoints = [];
var apps = [];
for (var key in data.data) {
apps.push(key);
for (var i = 0; i < data.data[key].length; ++i) {
startPoints.push(data.data[key][i].start);
endPoints.push(data.data[key][i].end);
}
}
var appKeyIndex = {};
for (var i = 0; i < apps.length; ++i) {
appKeyIndex[apps[i]] = i;
}
var globalStartTime = Math.min.apply(null, startPoints);
var globalEndTime = Math.max.apply(null, endPoints);
var duration = globalEndTime - globalStartTime;
var bucketSize = Math.ceil(duration / 10);
var interval = 5;
var layers = [];
for (var app in data.data) {
layers[appKeyIndex[app]] = [];
for (var i = 0; i < 10; ++i) {
layers[appKeyIndex[app]][i] = {
x: i,
y: 0.1,
y0: 0
};
}
for (var i = 0; i < data.data[app].length; ++i) {
var eventBatch = data.data[app][i];
for (var j = 0; j < eventBatch.count.length; ++j) {
var bucket = Math.floor((j * interval + eventBatch.start.getTime() - globalStartTime) / bucketSize);
console.log(bucket);
layers[appKeyIndex[app]][bucket].y += eventBatch.count[j];
}
}
}
console.dir(layers);
var n = 6, // number of layers
m = 10, // number of samples per layer
stack = d3.layout.stack(),
// layers = stack(d3.range(n).map(function() { return bumpLayer(m, .1); })),
yGroupMax = d3.max(layers, function(layer) { return d3.max(layer, function(d) { return d.y; }); }),
yStackMax = d3.max(layers, function(layer) { return d3.max(layer, function(d) { return d.y0 + d.y; }); });
console.dir(layers);
var height = 200;
var x = d3.scale.ordinal()
.domain(d3.range(m))
.rangeRoundBands([0, width], .08);
var y = d3.scale.linear()
.domain([0, yStackMax])
.range([height, 0]);
var color = d3.scale.linear()
.domain([0, n - 1])
.range(["#aad", "#556"]);
var xAxis = d3.svg.axis()
.scale(x)
.tickSize(0)
.tickPadding(6)
.orient("bottom");
var svg = d3.select("#stacked").append("svg")
.attr("width", width)
.attr("height", height)
.append("g");
var layer = svg.selectAll(".layer")
.data(layers)
.enter().append("g")
.attr("class", "layer")
.style("fill", function(d, i) { return color(i); });
var rect = layer.selectAll("rect")
.data(function(d) { return d; })
.enter().append("rect")
.attr("x", function(d) { return x(d.x); })
.attr("y", height)
.attr("width", x.rangeBand())
.attr("height", 0);
rect.transition()
.delay(function(d, i) { return i * 10; })
.attr("y", function(d) { return y(d.y0 + d.y); })
.attr("height", function(d) { return y(d.y0) - y(d.y0 + d.y); });
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(xAxis);
d3.selectAll("input").on("change", change);
var timeout = setTimeout(function() {
d3.select("input[value=\"grouped\"]").property("checked", true).each(change);
}, 2000);
function change() {
clearTimeout(timeout);
if (this.value === "grouped") transitionGrouped();
else transitionStacked();
}
function transitionGrouped() {
y.domain([0, yGroupMax]);
rect.transition()
.duration(500)
.delay(function(d, i) { return i * 10; })
.attr("x", function(d, i, j) { return x(d.x) + x.rangeBand() / n * j; })
.attr("width", x.rangeBand() / n)
.transition()
.attr("y", function(d) { return y(d.y); })
.attr("height", function(d) { return height - y(d.y); });
}
function transitionStacked() {
y.domain([0, yStackMax]);
rect.transition()
.duration(500)
.delay(function(d, i) { return i * 10; })
.attr("y", function(d) { return y(d.y0 + d.y); })
.attr("height", function(d) { return y(d.y0) - y(d.y0 + d.y); })
.transition()
.attr("x", function(d) { return x(d.x); })
.attr("width", x.rangeBand());
}
// Inspired by PI:NAME:<NAME>END_PI's test data generator.
function bumpLayer(n, o) {
function bump(a) {
var x = 1 / (.1 + Math.random()),
y = 2 * Math.random() - .5,
z = 10 / (.1 + Math.random());
for (var i = 0; i < n; i++) {
var w = (i / n - y) * z;
a[i] += x * Math.exp(-w * w);
}
}
var a = [], i;
for (i = 0; i < n; ++i) a[i] = o + o * Math.random();
for (i = 0; i < 5; ++i) bump(a);
return a.map(function(d, i) { return {x: i, y: Math.max(0, d)}; });
}
};
`
console.log origin
stack origin
)()
|
[
{
"context": "'#show-follow-artists').html().should.containEql 'Pablo Picasso'\n\ndescribe 'index', ->\n describe 'show with no e",
"end": 6141,
"score": 0.999750554561615,
"start": 6128,
"tag": "NAME",
"value": "Pablo Picasso"
},
{
"context": "w\n fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'\n location: new FairLocation fabricate 'lo",
"end": 6427,
"score": 0.7037374973297119,
"start": 6415,
"tag": "NAME",
"value": "Foo Bar Fair"
}
] | src/mobile/apps/show/test/templates/index.coffee | kanaabe/force | 1 | jade = require 'jade'
path = require 'path'
fs = require 'fs'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
Show = require '../../../../models/show.coffee'
Location = require '../../../../models/location.coffee'
FairLocation = require '../../../../models/fair_location.coffee'
Artworks = require '../../../../collections/artworks.coffee'
Fair = require '../../../../models/fair.coffee'
InstallShots = require '../../../../collections/install_shots.coffee'
cheerio = require 'cheerio'
render = (templateName) ->
filename = path.resolve __dirname, "../../templates/#{templateName}.jade"
jade.compile(
fs.readFileSync(filename),
{ filename: filename }
)
describe 'header', ->
describe 'fair booth with install shots', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair', published: true, has_full_feature: true
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays information for fair booth', ->
$ = cheerio.load @template
$('.show-page-title').html().should.containEql 'Gagosian Gallery'
$('.show-page-title').html().should.containEql 'Foo Bar Fair'
$('.show-page-location-address').html().should.containEql 'Foo Bar Fair'
it 'does not display installshots on fair booth', ->
$ = cheerio.load @template
$.html().should.not.containEql 'show-installation-shot-carousel'
it 'links to the fair if possible', ->
$ = cheerio.load @template
$('.show-page-location-map').html().should.containEql 'a href="/the-armory-show"'
describe 'gallery show with install shots', ->
before ->
@show = new Show fabricate 'show', name: "Test Gallery Show"
@template = render('index')(
show: @show
fair: []
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays information for gallery show', ->
$ = cheerio.load @template
$('.show-page-title').html().should.containEql "Test Gallery Show"
$('.show-page-location-address').text().should.containEql "529 W 20th St."
it 'formats the running dates correctly', ->
$ = cheerio.load @template
$('.show-page-running-dates').text().should.containEql "Jul 12th – Aug 23rd, 2013"
it 'displays the install shots carousel', ->
$ = cheerio.load @template
$.html().should.containEql 'show-installation-shot-carousel'
it 'displays the correct number of install shots', ->
$ = cheerio.load @template
$('#carousel-track').children().length.should.equal 1
it 'displays the correct google maps link', ->
$ = cheerio.load @template
$('.show-page-location-map').html().should.containEql "q=529%20W%2020th%20St.%2C%20New%20York"
describe 'artworks', ->
describe 'show with less than 8 artworks', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'should not have an artwork slider', ->
$ = cheerio.load @template
$.html().should.not.containEql 'show-page-artworks-slider'
describe 'show with more than 8 artworks', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [
fabricate('artwork', id: 1),
fabricate('artwork', id: 2),
fabricate('artwork', id: 3),
fabricate('artwork', id: 4),
fabricate('artwork', id: 5),
fabricate('artwork', id: 6),
fabricate('artwork', id: 7),
fabricate('artwork', id: 8),
fabricate('artwork', id: 9),
]
sd: {}
)
xit 'should have an artwork slider', ->
$ = cheerio.load @template
$.html().should.containEql 'show-page-artworks-slider'
it 'displays the correct number of artworks', ->
$ = cheerio.load @template
$('.show-page-artworks-title').html().should.containEql "9 Works"
describe 'index', ->
describe 'show with events, press release and associated artist', ->
before ->
@show = new Show fabricate 'show', press_release: "The gallery is proud to present Inez and Vinoodh."
@show.related().artists.set [fabricate 'artist']
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays events if they are present', ->
$ = cheerio.load @template
$('.show-page-event').first().text().should.containEql "Opening Reception: Inez and Vinoodh Opening, Jan 7th, 8 – 9pm"
it 'renders the correct information from the press release', ->
$ = cheerio.load @template
$('.show-page-press-release-content').text().should.containEql "The gallery is proud to present Inez and Vinoodh."
it 'renders the artists with follow buttons', ->
$ = cheerio.load @template
$('#show-follow-artists').html().should.containEql 'Pablo Picasso'
describe 'index', ->
describe 'show with no events, no press release and no associated artists', ->
before ->
@show = new Show fabricate 'show', events: []
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'should not render the events container', ->
$ = cheerio.load @template
$.html().should.not.containEql '.show-page-event'
it 'should not load the press release container', ->
$ = cheerio.load @template
$.html().should.not.containEql '.show-page-press-release-content'
it 'should not load the artist follow buttons', ->
$ = cheerio.load @template
$.html().should.not.containEql '#show-follow-artists'
| 163185 | jade = require 'jade'
path = require 'path'
fs = require 'fs'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
Show = require '../../../../models/show.coffee'
Location = require '../../../../models/location.coffee'
FairLocation = require '../../../../models/fair_location.coffee'
Artworks = require '../../../../collections/artworks.coffee'
Fair = require '../../../../models/fair.coffee'
InstallShots = require '../../../../collections/install_shots.coffee'
cheerio = require 'cheerio'
render = (templateName) ->
filename = path.resolve __dirname, "../../templates/#{templateName}.jade"
jade.compile(
fs.readFileSync(filename),
{ filename: filename }
)
describe 'header', ->
describe 'fair booth with install shots', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair', published: true, has_full_feature: true
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays information for fair booth', ->
$ = cheerio.load @template
$('.show-page-title').html().should.containEql 'Gagosian Gallery'
$('.show-page-title').html().should.containEql 'Foo Bar Fair'
$('.show-page-location-address').html().should.containEql 'Foo Bar Fair'
it 'does not display installshots on fair booth', ->
$ = cheerio.load @template
$.html().should.not.containEql 'show-installation-shot-carousel'
it 'links to the fair if possible', ->
$ = cheerio.load @template
$('.show-page-location-map').html().should.containEql 'a href="/the-armory-show"'
describe 'gallery show with install shots', ->
before ->
@show = new Show fabricate 'show', name: "Test Gallery Show"
@template = render('index')(
show: @show
fair: []
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays information for gallery show', ->
$ = cheerio.load @template
$('.show-page-title').html().should.containEql "Test Gallery Show"
$('.show-page-location-address').text().should.containEql "529 W 20th St."
it 'formats the running dates correctly', ->
$ = cheerio.load @template
$('.show-page-running-dates').text().should.containEql "Jul 12th – Aug 23rd, 2013"
it 'displays the install shots carousel', ->
$ = cheerio.load @template
$.html().should.containEql 'show-installation-shot-carousel'
it 'displays the correct number of install shots', ->
$ = cheerio.load @template
$('#carousel-track').children().length.should.equal 1
it 'displays the correct google maps link', ->
$ = cheerio.load @template
$('.show-page-location-map').html().should.containEql "q=529%20W%2020th%20St.%2C%20New%20York"
describe 'artworks', ->
describe 'show with less than 8 artworks', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'should not have an artwork slider', ->
$ = cheerio.load @template
$.html().should.not.containEql 'show-page-artworks-slider'
describe 'show with more than 8 artworks', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [
fabricate('artwork', id: 1),
fabricate('artwork', id: 2),
fabricate('artwork', id: 3),
fabricate('artwork', id: 4),
fabricate('artwork', id: 5),
fabricate('artwork', id: 6),
fabricate('artwork', id: 7),
fabricate('artwork', id: 8),
fabricate('artwork', id: 9),
]
sd: {}
)
xit 'should have an artwork slider', ->
$ = cheerio.load @template
$.html().should.containEql 'show-page-artworks-slider'
it 'displays the correct number of artworks', ->
$ = cheerio.load @template
$('.show-page-artworks-title').html().should.containEql "9 Works"
describe 'index', ->
describe 'show with events, press release and associated artist', ->
before ->
@show = new Show fabricate 'show', press_release: "The gallery is proud to present Inez and Vinoodh."
@show.related().artists.set [fabricate 'artist']
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays events if they are present', ->
$ = cheerio.load @template
$('.show-page-event').first().text().should.containEql "Opening Reception: Inez and Vinoodh Opening, Jan 7th, 8 – 9pm"
it 'renders the correct information from the press release', ->
$ = cheerio.load @template
$('.show-page-press-release-content').text().should.containEql "The gallery is proud to present Inez and Vinoodh."
it 'renders the artists with follow buttons', ->
$ = cheerio.load @template
$('#show-follow-artists').html().should.containEql '<NAME>'
describe 'index', ->
describe 'show with no events, no press release and no associated artists', ->
before ->
@show = new Show fabricate 'show', events: []
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: '<NAME>'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'should not render the events container', ->
$ = cheerio.load @template
$.html().should.not.containEql '.show-page-event'
it 'should not load the press release container', ->
$ = cheerio.load @template
$.html().should.not.containEql '.show-page-press-release-content'
it 'should not load the artist follow buttons', ->
$ = cheerio.load @template
$.html().should.not.containEql '#show-follow-artists'
| true | jade = require 'jade'
path = require 'path'
fs = require 'fs'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
Show = require '../../../../models/show.coffee'
Location = require '../../../../models/location.coffee'
FairLocation = require '../../../../models/fair_location.coffee'
Artworks = require '../../../../collections/artworks.coffee'
Fair = require '../../../../models/fair.coffee'
InstallShots = require '../../../../collections/install_shots.coffee'
cheerio = require 'cheerio'
render = (templateName) ->
filename = path.resolve __dirname, "../../templates/#{templateName}.jade"
jade.compile(
fs.readFileSync(filename),
{ filename: filename }
)
describe 'header', ->
describe 'fair booth with install shots', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair', published: true, has_full_feature: true
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays information for fair booth', ->
$ = cheerio.load @template
$('.show-page-title').html().should.containEql 'Gagosian Gallery'
$('.show-page-title').html().should.containEql 'Foo Bar Fair'
$('.show-page-location-address').html().should.containEql 'Foo Bar Fair'
it 'does not display installshots on fair booth', ->
$ = cheerio.load @template
$.html().should.not.containEql 'show-installation-shot-carousel'
it 'links to the fair if possible', ->
$ = cheerio.load @template
$('.show-page-location-map').html().should.containEql 'a href="/the-armory-show"'
describe 'gallery show with install shots', ->
before ->
@show = new Show fabricate 'show', name: "Test Gallery Show"
@template = render('index')(
show: @show
fair: []
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays information for gallery show', ->
$ = cheerio.load @template
$('.show-page-title').html().should.containEql "Test Gallery Show"
$('.show-page-location-address').text().should.containEql "529 W 20th St."
it 'formats the running dates correctly', ->
$ = cheerio.load @template
$('.show-page-running-dates').text().should.containEql "Jul 12th – Aug 23rd, 2013"
it 'displays the install shots carousel', ->
$ = cheerio.load @template
$.html().should.containEql 'show-installation-shot-carousel'
it 'displays the correct number of install shots', ->
$ = cheerio.load @template
$('#carousel-track').children().length.should.equal 1
it 'displays the correct google maps link', ->
$ = cheerio.load @template
$('.show-page-location-map').html().should.containEql "q=529%20W%2020th%20St.%2C%20New%20York"
describe 'artworks', ->
describe 'show with less than 8 artworks', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'should not have an artwork slider', ->
$ = cheerio.load @template
$.html().should.not.containEql 'show-page-artworks-slider'
describe 'show with more than 8 artworks', ->
before ->
@show = new Show fabricate 'show', fair: fabricate 'fair'
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [
fabricate('artwork', id: 1),
fabricate('artwork', id: 2),
fabricate('artwork', id: 3),
fabricate('artwork', id: 4),
fabricate('artwork', id: 5),
fabricate('artwork', id: 6),
fabricate('artwork', id: 7),
fabricate('artwork', id: 8),
fabricate('artwork', id: 9),
]
sd: {}
)
xit 'should have an artwork slider', ->
$ = cheerio.load @template
$.html().should.containEql 'show-page-artworks-slider'
it 'displays the correct number of artworks', ->
$ = cheerio.load @template
$('.show-page-artworks-title').html().should.containEql "9 Works"
describe 'index', ->
describe 'show with events, press release and associated artist', ->
before ->
@show = new Show fabricate 'show', press_release: "The gallery is proud to present Inez and Vinoodh."
@show.related().artists.set [fabricate 'artist']
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'Foo Bar Fair'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'displays events if they are present', ->
$ = cheerio.load @template
$('.show-page-event').first().text().should.containEql "Opening Reception: Inez and Vinoodh Opening, Jan 7th, 8 – 9pm"
it 'renders the correct information from the press release', ->
$ = cheerio.load @template
$('.show-page-press-release-content').text().should.containEql "The gallery is proud to present Inez and Vinoodh."
it 'renders the artists with follow buttons', ->
$ = cheerio.load @template
$('#show-follow-artists').html().should.containEql 'PI:NAME:<NAME>END_PI'
describe 'index', ->
describe 'show with no events, no press release and no associated artists', ->
before ->
@show = new Show fabricate 'show', events: []
@template = render('index')(
show: @show
fair: new Fair fabricate 'fair', name: 'PI:NAME:<NAME>END_PI'
location: new FairLocation fabricate 'location'
installShots: new InstallShots [fabricate 'show_install_shot']
artworks: new Artworks [ fabricate 'artwork', partner: fabricate 'partner' ]
sd: {}
)
it 'should not render the events container', ->
$ = cheerio.load @template
$.html().should.not.containEql '.show-page-event'
it 'should not load the press release container', ->
$ = cheerio.load @template
$.html().should.not.containEql '.show-page-press-release-content'
it 'should not load the artist follow buttons', ->
$ = cheerio.load @template
$.html().should.not.containEql '#show-follow-artists'
|
[
{
"context": "E OR OTHER DEALINGS IN\n# THE SOFTWARE.\n#\n# Author: Mark Lee\n\nclass FlashAnalytics extends Analytics\n\n @defau",
"end": 1141,
"score": 0.9997639656066895,
"start": 1133,
"tag": "NAME",
"value": "Mark Lee"
}
] | flash_stats.coffee | dreamboxlearning/google-analytics-formatter | 0 | ###!
Copyright (c) 2013 DreamBox Learning, Inc.
MIT License
###
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Mark Lee
class FlashAnalytics extends Analytics
@default_dimensions = [
'ga:year',
'ga:month',
'ga:flashVersion',
'ga:operatingSystem',
'ga:operatingSystemVersion',
]
generate_flash_counts_from_data: (data, flash_counts) ->
flash_counts.by_version = {} unless flash_counts.by_version
flash_counts.monthly_totals = {} unless flash_counts.monthly_totals
flash_counts.time_slices = [] unless flash_counts.time_slices
total = 0
for row in data
[year, month, flash_full_version, os_name, os_version, count] = row
flash_data = /^(\d+\.\d+) (.+)$/.exec(flash_full_version)
if flash_version isnt '(not set)' and flash_data is null
continue
else if flash_version is '(not set)'
flash_version = flash_full_version
else
flash_version = flash_data[1]
year_month = "#{year}/#{month}"
if year_month not in flash_counts.time_slices
flash_counts.time_slices.push(year_month)
flash_counts.monthly_totals[year_month] = 0
flash_counts.by_version[flash_version] = {} if flash_version not of flash_counts.by_version
flash_counts.by_version[flash_version][os_name] = {} if os_name not of flash_counts.by_version[flash_version]
flash_counts.by_version[flash_version].total = {} if 'total' not of flash_counts.by_version[flash_version]
flash_counts.by_version[flash_version][os_name][year_month] = 0 if year_month not of flash_counts.by_version[flash_version][os_name]
flash_counts.by_version[flash_version].total[year_month] = 0 if year_month not of flash_counts.by_version[flash_version].total
n_ct = Number(count)
flash_counts.by_version[flash_version][os_name][year_month] += n_ct
flash_counts.by_version[flash_version].total[year_month] += n_ct
flash_counts.monthly_totals[year_month] += n_ct
return flash_counts
counts_to_wikitext: (lefthand_header, counts, time_slices, monthly_totals) ->
wikitext = '{|border=1 style="text-align: center"\n'
wikitext += "! Time Period / #{lefthand_header}\n"
for time_slice in time_slices
wikitext += "! #{time_slice}\n"
totals = {}
for flash_version, fv_data of counts
totals[flash_version] = {}
valid_percentages = 0
totals[flash_version].total = {}
for flash_os, fo_data of fv_data
totals[flash_version][flash_os] = {}
for time_slice in time_slices
count = fo_data[time_slice] || 0
percentage = Math.round(count / monthly_totals[time_slice] * 10000) / 100
totals[flash_version][flash_os][time_slice] = percentage
totals[flash_version].total[time_slice] = 0 unless totals[flash_version].total[time_slice]
valid_percentages++ if percentage >= 1
delete totals[flash_version] unless valid_percentages
for flash_version, fv_data of totals
wikitext += '|-\n'
wikitext += "! #{flash_version}\n"
for time_slice in time_slices
wikitext += '| valign="top" |\n'
if fv_data.total[time_slice] isnt 0
wikitext += ' {|\n'
wikitext += ' ! TOT\n'
wikitext += " ! #{Math.round(fv_data.total[time_slice] * 100) / 100}\n"
for flash_os, fo_data of fv_data
switch flash_os
when 'total' then continue
when 'Macintosh' then flash_os = 'Mac'
when 'Windows' then flash_os = 'Win'
when 'Linux' then flash_os = 'Lin'
when 'Chrome OS' then flash_os = 'C OS'
when 'BlackBerry' then flash_os = 'BB'
when 'Google TV' then flash_os = 'GTV'
when 'Android' then flash_os = 'Adrd'
when 'Playstation 3' then flash_os = 'PS3'
when 'Firefox OS' then flash_os = 'FF OS'
when 'FreeBSD' then flash_os = 'FBSD'
when '(not set)' then flash_os = '??'
percentage = fo_data[time_slice]
continue if percentage is 0
wikitext += ' |-\n'
wikitext += " ! #{flash_os}\n"
wikitext += " | #{percentage}\n"
wikitext += ' |}\n'
wikitext += '|}\n'
return wikitext
| 102658 | ###!
Copyright (c) 2013 DreamBox Learning, Inc.
MIT License
###
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: <NAME>
class FlashAnalytics extends Analytics
@default_dimensions = [
'ga:year',
'ga:month',
'ga:flashVersion',
'ga:operatingSystem',
'ga:operatingSystemVersion',
]
generate_flash_counts_from_data: (data, flash_counts) ->
flash_counts.by_version = {} unless flash_counts.by_version
flash_counts.monthly_totals = {} unless flash_counts.monthly_totals
flash_counts.time_slices = [] unless flash_counts.time_slices
total = 0
for row in data
[year, month, flash_full_version, os_name, os_version, count] = row
flash_data = /^(\d+\.\d+) (.+)$/.exec(flash_full_version)
if flash_version isnt '(not set)' and flash_data is null
continue
else if flash_version is '(not set)'
flash_version = flash_full_version
else
flash_version = flash_data[1]
year_month = "#{year}/#{month}"
if year_month not in flash_counts.time_slices
flash_counts.time_slices.push(year_month)
flash_counts.monthly_totals[year_month] = 0
flash_counts.by_version[flash_version] = {} if flash_version not of flash_counts.by_version
flash_counts.by_version[flash_version][os_name] = {} if os_name not of flash_counts.by_version[flash_version]
flash_counts.by_version[flash_version].total = {} if 'total' not of flash_counts.by_version[flash_version]
flash_counts.by_version[flash_version][os_name][year_month] = 0 if year_month not of flash_counts.by_version[flash_version][os_name]
flash_counts.by_version[flash_version].total[year_month] = 0 if year_month not of flash_counts.by_version[flash_version].total
n_ct = Number(count)
flash_counts.by_version[flash_version][os_name][year_month] += n_ct
flash_counts.by_version[flash_version].total[year_month] += n_ct
flash_counts.monthly_totals[year_month] += n_ct
return flash_counts
counts_to_wikitext: (lefthand_header, counts, time_slices, monthly_totals) ->
wikitext = '{|border=1 style="text-align: center"\n'
wikitext += "! Time Period / #{lefthand_header}\n"
for time_slice in time_slices
wikitext += "! #{time_slice}\n"
totals = {}
for flash_version, fv_data of counts
totals[flash_version] = {}
valid_percentages = 0
totals[flash_version].total = {}
for flash_os, fo_data of fv_data
totals[flash_version][flash_os] = {}
for time_slice in time_slices
count = fo_data[time_slice] || 0
percentage = Math.round(count / monthly_totals[time_slice] * 10000) / 100
totals[flash_version][flash_os][time_slice] = percentage
totals[flash_version].total[time_slice] = 0 unless totals[flash_version].total[time_slice]
valid_percentages++ if percentage >= 1
delete totals[flash_version] unless valid_percentages
for flash_version, fv_data of totals
wikitext += '|-\n'
wikitext += "! #{flash_version}\n"
for time_slice in time_slices
wikitext += '| valign="top" |\n'
if fv_data.total[time_slice] isnt 0
wikitext += ' {|\n'
wikitext += ' ! TOT\n'
wikitext += " ! #{Math.round(fv_data.total[time_slice] * 100) / 100}\n"
for flash_os, fo_data of fv_data
switch flash_os
when 'total' then continue
when 'Macintosh' then flash_os = 'Mac'
when 'Windows' then flash_os = 'Win'
when 'Linux' then flash_os = 'Lin'
when 'Chrome OS' then flash_os = 'C OS'
when 'BlackBerry' then flash_os = 'BB'
when 'Google TV' then flash_os = 'GTV'
when 'Android' then flash_os = 'Adrd'
when 'Playstation 3' then flash_os = 'PS3'
when 'Firefox OS' then flash_os = 'FF OS'
when 'FreeBSD' then flash_os = 'FBSD'
when '(not set)' then flash_os = '??'
percentage = fo_data[time_slice]
continue if percentage is 0
wikitext += ' |-\n'
wikitext += " ! #{flash_os}\n"
wikitext += " | #{percentage}\n"
wikitext += ' |}\n'
wikitext += '|}\n'
return wikitext
| true | ###!
Copyright (c) 2013 DreamBox Learning, Inc.
MIT License
###
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: PI:NAME:<NAME>END_PI
class FlashAnalytics extends Analytics
@default_dimensions = [
'ga:year',
'ga:month',
'ga:flashVersion',
'ga:operatingSystem',
'ga:operatingSystemVersion',
]
generate_flash_counts_from_data: (data, flash_counts) ->
flash_counts.by_version = {} unless flash_counts.by_version
flash_counts.monthly_totals = {} unless flash_counts.monthly_totals
flash_counts.time_slices = [] unless flash_counts.time_slices
total = 0
for row in data
[year, month, flash_full_version, os_name, os_version, count] = row
flash_data = /^(\d+\.\d+) (.+)$/.exec(flash_full_version)
if flash_version isnt '(not set)' and flash_data is null
continue
else if flash_version is '(not set)'
flash_version = flash_full_version
else
flash_version = flash_data[1]
year_month = "#{year}/#{month}"
if year_month not in flash_counts.time_slices
flash_counts.time_slices.push(year_month)
flash_counts.monthly_totals[year_month] = 0
flash_counts.by_version[flash_version] = {} if flash_version not of flash_counts.by_version
flash_counts.by_version[flash_version][os_name] = {} if os_name not of flash_counts.by_version[flash_version]
flash_counts.by_version[flash_version].total = {} if 'total' not of flash_counts.by_version[flash_version]
flash_counts.by_version[flash_version][os_name][year_month] = 0 if year_month not of flash_counts.by_version[flash_version][os_name]
flash_counts.by_version[flash_version].total[year_month] = 0 if year_month not of flash_counts.by_version[flash_version].total
n_ct = Number(count)
flash_counts.by_version[flash_version][os_name][year_month] += n_ct
flash_counts.by_version[flash_version].total[year_month] += n_ct
flash_counts.monthly_totals[year_month] += n_ct
return flash_counts
counts_to_wikitext: (lefthand_header, counts, time_slices, monthly_totals) ->
wikitext = '{|border=1 style="text-align: center"\n'
wikitext += "! Time Period / #{lefthand_header}\n"
for time_slice in time_slices
wikitext += "! #{time_slice}\n"
totals = {}
for flash_version, fv_data of counts
totals[flash_version] = {}
valid_percentages = 0
totals[flash_version].total = {}
for flash_os, fo_data of fv_data
totals[flash_version][flash_os] = {}
for time_slice in time_slices
count = fo_data[time_slice] || 0
percentage = Math.round(count / monthly_totals[time_slice] * 10000) / 100
totals[flash_version][flash_os][time_slice] = percentage
totals[flash_version].total[time_slice] = 0 unless totals[flash_version].total[time_slice]
valid_percentages++ if percentage >= 1
delete totals[flash_version] unless valid_percentages
for flash_version, fv_data of totals
wikitext += '|-\n'
wikitext += "! #{flash_version}\n"
for time_slice in time_slices
wikitext += '| valign="top" |\n'
if fv_data.total[time_slice] isnt 0
wikitext += ' {|\n'
wikitext += ' ! TOT\n'
wikitext += " ! #{Math.round(fv_data.total[time_slice] * 100) / 100}\n"
for flash_os, fo_data of fv_data
switch flash_os
when 'total' then continue
when 'Macintosh' then flash_os = 'Mac'
when 'Windows' then flash_os = 'Win'
when 'Linux' then flash_os = 'Lin'
when 'Chrome OS' then flash_os = 'C OS'
when 'BlackBerry' then flash_os = 'BB'
when 'Google TV' then flash_os = 'GTV'
when 'Android' then flash_os = 'Adrd'
when 'Playstation 3' then flash_os = 'PS3'
when 'Firefox OS' then flash_os = 'FF OS'
when 'FreeBSD' then flash_os = 'FBSD'
when '(not set)' then flash_os = '??'
percentage = fo_data[time_slice]
continue if percentage is 0
wikitext += ' |-\n'
wikitext += " ! #{flash_os}\n"
wikitext += " | #{percentage}\n"
wikitext += ' |}\n'
wikitext += '|}\n'
return wikitext
|
[
{
"context": "Cell - tableCell base data manage class\n# Coded by Hajime Oh-yake 2013.09.10\n#*************************************",
"end": 121,
"score": 0.9998949766159058,
"start": 107,
"tag": "NAME",
"value": "Hajime Oh-yake"
}
] | JSKit/03_JSTableViewCell.coffee | digitarhythm/codeJS | 0 | #*****************************************
# JSTableViewCell - tableCell base data manage class
# Coded by Hajime Oh-yake 2013.09.10
#*****************************************
class JSTableViewCell extends JSView
constructor:->
super()
@_image = null
@_imageview = null
@_alpha = 1.0
@_text = ""
@_textColor = JSColor("black")
@_textSize = 12
@_textAlignment = "JSTextAlignmentLeft"
@_bgColor = JSColor("clearColor")
@_borderColor = JSColor("#d0d8e0")
@_borderWidth = 1
sidesize = @_frame.size.height
@tag = "<div id='"+@_objectID+"_cell' style='position:absolute;left:0px;top:0px;width:"+@_frame.size.width+"px;height:"+@_frame.size.height+"px;z-index:1; opacity:"+@_alpha+";'><div id='"+@_objectID+"_text' style='position:relative;left:"+@_frame.size.height+"px;top:0px;width:"+(@_frame.size.width-sidesize-4)+"px;height:"+@_frame.size.height+"px;display:table-cell;vertical-align:middle;'></div></div>"
setText:(@_text)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").html(@_text)
setTextSize:(@_textSize)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").css("font-size", @_textSize)
setTextColor:(@_textColor)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").css("color", @_textColor)
setTextAlignment:(@_textAlignment)->
switch @_textAlignment
when "JSTextAlignmentLeft"
$(@_viewSelector+"_text").css("text-align", "left")
when "JSTextAlignmentCenter"
$(@_viewSelector+"_text").css("text-align", "center")
when "JSTextAlignmentRight"
$(@_viewSelector+"_text").css("text-align", "right")
setImage:(@_image)->
if (!$(@_viewSelector+"_cell").length)
return
if (!@_image?)
return
if ($(@_viewSelector+"_image").length)
$(@_viewSelector+"_image").remove()
sidesize = @_frame.size.height
$(@_viewSelector+"_cell").append("<img id='"+@_objectID+"_image' border='0' src='"+@_image._imagepath+"' style='position:absolute;left:0px;top:0px;width:"+sidesize+"px;height:"+sidesize+"px;'>")
setFrame:(frame)->
super(frame)
if ($(@_viewSelector+"_cell").length)
$(@_viewSelector+"_cell").css("width", frame.size.width)
$(@_viewSelector+"_cell").css("height", frame.size.height)
$(@_viewSelector+"_image").css("width", frame.size.height)
$(@_viewSelector+"_image").css("height", frame.size.height)
$(@_viewSelector+"_text").css("left", frame.size.height)
$(@_viewSelector+"_text").css("width", frame.size.width-frame.size.height)
$(@_viewSelector+"_text").css("height", frame.size.height)
viewDidAppear:->
super()
@_frame.origin.x = 1
$(@_viewSelector).append(@tag)
@setFrame(@_frame)
@setAlpha(@_alpha)
@setText(@_text)
@setTextColor(@_textColor)
@setTextSize(@_textSize)
@setTextAlignment(@_textAlignment)
@setImage(@_image)
$(@_viewSelector).on 'tap', (event)=>
if (typeof @delegate.didSelectRowAtIndexPath == 'function')
@_tableview.deselectRowAtIndexPath()
@setBackgroundColor(JSColor("#87cefa"))
@delegate.didSelectRowAtIndexPath(@_cellnum, event)
| 18504 | #*****************************************
# JSTableViewCell - tableCell base data manage class
# Coded by <NAME> 2013.09.10
#*****************************************
class JSTableViewCell extends JSView
constructor:->
super()
@_image = null
@_imageview = null
@_alpha = 1.0
@_text = ""
@_textColor = JSColor("black")
@_textSize = 12
@_textAlignment = "JSTextAlignmentLeft"
@_bgColor = JSColor("clearColor")
@_borderColor = JSColor("#d0d8e0")
@_borderWidth = 1
sidesize = @_frame.size.height
@tag = "<div id='"+@_objectID+"_cell' style='position:absolute;left:0px;top:0px;width:"+@_frame.size.width+"px;height:"+@_frame.size.height+"px;z-index:1; opacity:"+@_alpha+";'><div id='"+@_objectID+"_text' style='position:relative;left:"+@_frame.size.height+"px;top:0px;width:"+(@_frame.size.width-sidesize-4)+"px;height:"+@_frame.size.height+"px;display:table-cell;vertical-align:middle;'></div></div>"
setText:(@_text)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").html(@_text)
setTextSize:(@_textSize)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").css("font-size", @_textSize)
setTextColor:(@_textColor)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").css("color", @_textColor)
setTextAlignment:(@_textAlignment)->
switch @_textAlignment
when "JSTextAlignmentLeft"
$(@_viewSelector+"_text").css("text-align", "left")
when "JSTextAlignmentCenter"
$(@_viewSelector+"_text").css("text-align", "center")
when "JSTextAlignmentRight"
$(@_viewSelector+"_text").css("text-align", "right")
setImage:(@_image)->
if (!$(@_viewSelector+"_cell").length)
return
if (!@_image?)
return
if ($(@_viewSelector+"_image").length)
$(@_viewSelector+"_image").remove()
sidesize = @_frame.size.height
$(@_viewSelector+"_cell").append("<img id='"+@_objectID+"_image' border='0' src='"+@_image._imagepath+"' style='position:absolute;left:0px;top:0px;width:"+sidesize+"px;height:"+sidesize+"px;'>")
setFrame:(frame)->
super(frame)
if ($(@_viewSelector+"_cell").length)
$(@_viewSelector+"_cell").css("width", frame.size.width)
$(@_viewSelector+"_cell").css("height", frame.size.height)
$(@_viewSelector+"_image").css("width", frame.size.height)
$(@_viewSelector+"_image").css("height", frame.size.height)
$(@_viewSelector+"_text").css("left", frame.size.height)
$(@_viewSelector+"_text").css("width", frame.size.width-frame.size.height)
$(@_viewSelector+"_text").css("height", frame.size.height)
viewDidAppear:->
super()
@_frame.origin.x = 1
$(@_viewSelector).append(@tag)
@setFrame(@_frame)
@setAlpha(@_alpha)
@setText(@_text)
@setTextColor(@_textColor)
@setTextSize(@_textSize)
@setTextAlignment(@_textAlignment)
@setImage(@_image)
$(@_viewSelector).on 'tap', (event)=>
if (typeof @delegate.didSelectRowAtIndexPath == 'function')
@_tableview.deselectRowAtIndexPath()
@setBackgroundColor(JSColor("#87cefa"))
@delegate.didSelectRowAtIndexPath(@_cellnum, event)
| true | #*****************************************
# JSTableViewCell - tableCell base data manage class
# Coded by PI:NAME:<NAME>END_PI 2013.09.10
#*****************************************
class JSTableViewCell extends JSView
constructor:->
super()
@_image = null
@_imageview = null
@_alpha = 1.0
@_text = ""
@_textColor = JSColor("black")
@_textSize = 12
@_textAlignment = "JSTextAlignmentLeft"
@_bgColor = JSColor("clearColor")
@_borderColor = JSColor("#d0d8e0")
@_borderWidth = 1
sidesize = @_frame.size.height
@tag = "<div id='"+@_objectID+"_cell' style='position:absolute;left:0px;top:0px;width:"+@_frame.size.width+"px;height:"+@_frame.size.height+"px;z-index:1; opacity:"+@_alpha+";'><div id='"+@_objectID+"_text' style='position:relative;left:"+@_frame.size.height+"px;top:0px;width:"+(@_frame.size.width-sidesize-4)+"px;height:"+@_frame.size.height+"px;display:table-cell;vertical-align:middle;'></div></div>"
setText:(@_text)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").html(@_text)
setTextSize:(@_textSize)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").css("font-size", @_textSize)
setTextColor:(@_textColor)->
if ($(@_viewSelector+"_text").length)
$(@_viewSelector+"_text").css("color", @_textColor)
setTextAlignment:(@_textAlignment)->
switch @_textAlignment
when "JSTextAlignmentLeft"
$(@_viewSelector+"_text").css("text-align", "left")
when "JSTextAlignmentCenter"
$(@_viewSelector+"_text").css("text-align", "center")
when "JSTextAlignmentRight"
$(@_viewSelector+"_text").css("text-align", "right")
setImage:(@_image)->
if (!$(@_viewSelector+"_cell").length)
return
if (!@_image?)
return
if ($(@_viewSelector+"_image").length)
$(@_viewSelector+"_image").remove()
sidesize = @_frame.size.height
$(@_viewSelector+"_cell").append("<img id='"+@_objectID+"_image' border='0' src='"+@_image._imagepath+"' style='position:absolute;left:0px;top:0px;width:"+sidesize+"px;height:"+sidesize+"px;'>")
setFrame:(frame)->
super(frame)
if ($(@_viewSelector+"_cell").length)
$(@_viewSelector+"_cell").css("width", frame.size.width)
$(@_viewSelector+"_cell").css("height", frame.size.height)
$(@_viewSelector+"_image").css("width", frame.size.height)
$(@_viewSelector+"_image").css("height", frame.size.height)
$(@_viewSelector+"_text").css("left", frame.size.height)
$(@_viewSelector+"_text").css("width", frame.size.width-frame.size.height)
$(@_viewSelector+"_text").css("height", frame.size.height)
viewDidAppear:->
super()
@_frame.origin.x = 1
$(@_viewSelector).append(@tag)
@setFrame(@_frame)
@setAlpha(@_alpha)
@setText(@_text)
@setTextColor(@_textColor)
@setTextSize(@_textSize)
@setTextAlignment(@_textAlignment)
@setImage(@_image)
$(@_viewSelector).on 'tap', (event)=>
if (typeof @delegate.didSelectRowAtIndexPath == 'function')
@_tableview.deselectRowAtIndexPath()
@setBackgroundColor(JSColor("#87cefa"))
@delegate.didSelectRowAtIndexPath(@_cellnum, event)
|
[
{
"context": "->\n console.log \"Initialized Name\"\n first: \"Bob\"\n last: \"Defaultson\"\n\n Prepare: (value) ->\n ",
"end": 1056,
"score": 0.9998420476913452,
"start": 1053,
"tag": "NAME",
"value": "Bob"
},
{
"context": "og \"Initialized Name\"\n first: \"Bob\"\n last: \"Defaultson\"\n\n Prepare: (value) ->\n wrapper = Object.crea",
"end": 1079,
"score": 0.9994571208953857,
"start": 1069,
"tag": "NAME",
"value": "Defaultson"
}
] | tests/data/db-classes/litexa/game.coffee | Symbitic/litexa | 34 | ###
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
###
# When used to type a database variable in Litexa,
# this class will be constructed only when the database
# value is still undefined. Otherwise, on subsequent
# skill requests, the database object will have its
# prototype patched back to this class each time.
class Game
constructor: ->
@type = "Game"
console.log "Constructed game"
@score = 0
@constructed = @constructed ? 0
@constructed += 1
greeting: ->
"hello!"
setFlag: (value) -> @flag = value
getFlag: -> @flag ? false
saveScore: (s) -> @score = s
getScore: -> @score ? 0
WrapperPrototype =
name: -> "#{@data.first} #{@data.last}"
set: (f, l) ->
@data.first = f
@data.last = l
NameWrapper =
Initialize: ->
console.log "Initialized Name"
first: "Bob"
last: "Defaultson"
Prepare: (value) ->
wrapper = Object.create WrapperPrototype
wrapper.data = value
return wrapper
class FrenchGame
constructor: ->
@type = "FrenchGame"
greeting: ->
setFlag: ->
getFlag: ->
saveScore: ->
getScore: ->
| 79529 | ###
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
###
# When used to type a database variable in Litexa,
# this class will be constructed only when the database
# value is still undefined. Otherwise, on subsequent
# skill requests, the database object will have its
# prototype patched back to this class each time.
class Game
constructor: ->
@type = "Game"
console.log "Constructed game"
@score = 0
@constructed = @constructed ? 0
@constructed += 1
greeting: ->
"hello!"
setFlag: (value) -> @flag = value
getFlag: -> @flag ? false
saveScore: (s) -> @score = s
getScore: -> @score ? 0
WrapperPrototype =
name: -> "#{@data.first} #{@data.last}"
set: (f, l) ->
@data.first = f
@data.last = l
NameWrapper =
Initialize: ->
console.log "Initialized Name"
first: "<NAME>"
last: "<NAME>"
Prepare: (value) ->
wrapper = Object.create WrapperPrototype
wrapper.data = value
return wrapper
class FrenchGame
constructor: ->
@type = "FrenchGame"
greeting: ->
setFlag: ->
getFlag: ->
saveScore: ->
getScore: ->
| true | ###
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
###
# When used to type a database variable in Litexa,
# this class will be constructed only when the database
# value is still undefined. Otherwise, on subsequent
# skill requests, the database object will have its
# prototype patched back to this class each time.
class Game
constructor: ->
@type = "Game"
console.log "Constructed game"
@score = 0
@constructed = @constructed ? 0
@constructed += 1
greeting: ->
"hello!"
setFlag: (value) -> @flag = value
getFlag: -> @flag ? false
saveScore: (s) -> @score = s
getScore: -> @score ? 0
WrapperPrototype =
name: -> "#{@data.first} #{@data.last}"
set: (f, l) ->
@data.first = f
@data.last = l
NameWrapper =
Initialize: ->
console.log "Initialized Name"
first: "PI:NAME:<NAME>END_PI"
last: "PI:NAME:<NAME>END_PI"
Prepare: (value) ->
wrapper = Object.create WrapperPrototype
wrapper.data = value
return wrapper
class FrenchGame
constructor: ->
@type = "FrenchGame"
greeting: ->
setFlag: ->
getFlag: ->
saveScore: ->
getScore: ->
|
[
{
"context": "helper\n\nprocess.env.HUBOT_MSTRANSLATE_APIKEY ||= \"0xDEADBEEF\"\n\n# start up a danger room for hubt speak\ndanger ",
"end": 175,
"score": 0.9969437122344971,
"start": 165,
"tag": "KEY",
"value": "0xDEADBEEF"
}
] | test/scripts/speak_test.coffee | neilprosser/hubot-scripts | 2 | Tests = require('../tests')
assert = require 'assert'
helper = Tests.helper()
require('../../src/scripts/speak') helper
process.env.HUBOT_MSTRANSLATE_APIKEY ||= "0xDEADBEEF"
# start up a danger room for hubt speak
danger = Tests.danger helper, (req, res, url) ->
res.writeHead 200
res.end JSON.stringify(
{responseData: {results: [
{unescapedUrl: url.query }
]}}
)
# callbacks for when hubot sends messages
tests = [
(msg) -> assert.equal "", msg
]
# run the async tests
danger.start tests, ->
helper.receive 'hubot speak me Ich bin ein Berliner'
| 138418 | Tests = require('../tests')
assert = require 'assert'
helper = Tests.helper()
require('../../src/scripts/speak') helper
process.env.HUBOT_MSTRANSLATE_APIKEY ||= "<KEY>"
# start up a danger room for hubt speak
danger = Tests.danger helper, (req, res, url) ->
res.writeHead 200
res.end JSON.stringify(
{responseData: {results: [
{unescapedUrl: url.query }
]}}
)
# callbacks for when hubot sends messages
tests = [
(msg) -> assert.equal "", msg
]
# run the async tests
danger.start tests, ->
helper.receive 'hubot speak me Ich bin ein Berliner'
| true | Tests = require('../tests')
assert = require 'assert'
helper = Tests.helper()
require('../../src/scripts/speak') helper
process.env.HUBOT_MSTRANSLATE_APIKEY ||= "PI:KEY:<KEY>END_PI"
# start up a danger room for hubt speak
danger = Tests.danger helper, (req, res, url) ->
res.writeHead 200
res.end JSON.stringify(
{responseData: {results: [
{unescapedUrl: url.query }
]}}
)
# callbacks for when hubot sends messages
tests = [
(msg) -> assert.equal "", msg
]
# run the async tests
danger.start tests, ->
helper.receive 'hubot speak me Ich bin ein Berliner'
|
[
{
"context": "# parsing JavaScript code\n#\n# Copyright (c) 2013 JeongHoon Byun aka \"Outsider\", <http://blog.outsider.ne.kr/>\n# L",
"end": 63,
"score": 0.9998614192008972,
"start": 49,
"tag": "NAME",
"value": "JeongHoon Byun"
},
{
"context": "t code\n#\n# Copyright (c) 2013 JeongHoon Byun aka \"Outsider\", <http://blog.outsider.ne.kr/>\n# Licensed under ",
"end": 77,
"score": 0.9322021007537842,
"start": 69,
"tag": "USERNAME",
"value": "Outsider"
}
] | src/parser/js-parser.coffee | uppalapatisujitha/CodingConventionofCommitHistory | 421 | # parsing JavaScript code
#
# Copyright (c) 2013 JeongHoon Byun aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
helpers = require '../helpers'
_ = require 'underscore'
jsParser = module.exports =
lang: 'js'
parse: (line, convention, commitUrl) ->
convention = this.comma line, convention, commitUrl
convention = this.indent line, convention, commitUrl
convention = this.functiondef line, convention, commitUrl
convention = this.argumentdef line, convention, commitUrl
convention = this.literaldef line, convention, commitUrl
convention = this.conditionstatement line, convention, commitUrl
convention = this.quotes line, convention, commitUrl
comma: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.comma =
title: "Last comma vs. First comma"
column: [
{
key: "first", display: "First comma",
code: """
var foo = 1
, bar = 2
, baz = 3;
var obj = {
foo: 1
, bar: 2
, baz: 3
};
"""
}
{
key: "last", display: "Last comma",
code: """
var foo = 1,
bar = 2,
baz = 3;
var obj = {
foo: 1,
bar: 2,
baz: 3
};
"""
}
]
first: 0
last: 0
commits: []
) unless convention.comma
first = /^\s*,.*/
last = /.*,\s*$/
convention.comma.first = convention.comma.first + 1 if first.test line
convention.comma.last = convention.comma.last + 1 if last.test line
convention.comma.commits.push commitUrl if first.test(line) or last.test(line)
convention.comma.commits = _.uniq convention.comma.commits
convention
indent: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.indent =
title: "Space vs. Tab"
column: [
{
key: "tab", display: "Tab",
code: """
function foo() {
// use tab for indentation
return "bar";
}
"""
}
{
key: "space", display: "Space",
code: """
function foo() {
return "bar";
}
"""
}
]
tab: 0
space: 0
commits: []
) unless convention.indent
tab = /^\t+.*/
space = /^\s+.*/
convention.indent.tab = convention.indent.tab + 1 if tab.test line
convention.indent.space = convention.indent.space + 1 if space.test line
convention.indent.commits.push commitUrl if tab.test(line) or space.test(line)
convention.indent.commits = _.uniq convention.indent.commits
convention
functiondef: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.functiondef =
title: "Function followed by one space vs. Function followed by no space"
column: [
{
key: "onespace", display: "One space",
code: """
function foo () {
return "bar";
}
"""
}
{
key: "nospace", display: "No space",
code: """
function foo() {
return "bar";
}
"""
}
]
onespace: 0
nospace: 0
commits: []
) unless convention.functiondef
onespace = /function(\s+.)*\s+\(/
nospace = /function(\s+.)*\(/
convention.functiondef.onespace = convention.functiondef.onespace + 1 if onespace.test line
convention.functiondef.nospace = convention.functiondef.nospace + 1 if nospace.test line
convention.functiondef.commits.push commitUrl if onespace.test(line) or nospace.test(line)
convention.functiondef.commits = _.uniq convention.functiondef.commits
convention
argumentdef: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.argumentdef =
title: "Arguments definition with one space vs. no space"
column: [
{
key: "onespace", display: "One space",
code: """
function fn( arg1, arg2 ) {
// ...
}
if ( true ) {
// ...
}
"""
}
{
key: "nospace", display: "No space",
code: "function fn(arg1, arg2) {\n//or\nif (true) {"
}
]
onespace: 0
nospace: 0
commits: []
) unless convention.argumentdef
onespace = /(function|if|while|switch)(\s+\w*)?\s*\(\s+/
nospace = /(function|if|while|switch)(\s+\w*)?\s*\(\S+/
convention.argumentdef.onespace = convention.argumentdef.onespace + 1 if onespace.test line
convention.argumentdef.nospace = convention.argumentdef.nospace + 1 if nospace.test line
convention.argumentdef.commits.push commitUrl if onespace.test(line) or nospace.test(line)
convention.argumentdef.commits = _.uniq convention.argumentdef.commits
convention
literaldef: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.literaldef =
title: "Object Literal Definition types"
column: [
{
key: "tracespace", display: "Followed by space",
code: """
{
foo: 1,
bar: 2,
baz: 3
}
"""
}
{
key: "bothspace", display: "Using space in before/after",
code: """
{
foo : 1,
bar : 2,
baz : 3
}
"""
}
{
key: "nospace", display: "No space",
code: """
{
foo:1,
bar:2,
baz:3
}
"""
}
]
tracespace: 0
bothspace: 0
nospace: 0
commits: []
) unless convention.literaldef
tracespace = /\w:\s+[\w"'\/]/
bothspace = /\w\s+:\s+[\w"'\/]/
nospace = /\w:[\w"'\/]/
convention.literaldef.tracespace = convention.literaldef.tracespace + 1 if tracespace.test line
convention.literaldef.bothspace = convention.literaldef.bothspace + 1 if bothspace.test line
convention.literaldef.nospace = convention.literaldef.nospace + 1 if nospace.test line
convention.literaldef.commits.push commitUrl if tracespace.test(line) or bothspace.test(line) or nospace.test(line)
convention.literaldef.commits = _.uniq convention.literaldef.commits
convention
conditionstatement: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.conditionstatement =
title: "How to write conditional statement"
column: [
{
key: "onespace", display: "Condition with one space",
code: """
if (true) {
//...
}
while (true) {
//...
}
switch (v) {
//...
}
"""
}
{
key: "nospace", display: "Condition with no space",
code: """
if(true) {
//...
}
while(true) {
//...
}
switch(v) {
//...
}
"""
}
]
onespace: 0
nospace: 0
commits: []
) unless convention.conditionstatement
onespace = /(if|while|switch)\s+\(/
nospace = /(if|while|switch)\(/
convention.conditionstatement.onespace = convention.conditionstatement.onespace + 1 if onespace.test line
convention.conditionstatement.nospace = convention.conditionstatement.nospace + 1 if nospace.test line
convention.conditionstatement.commits.push commitUrl if onespace.test(line) or nospace.test(line)
convention.conditionstatement.commits = _.uniq convention.conditionstatement.commits
convention
blockstatement: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.blockstatement =
title: "How to write block statement"
column: [
{
key: "onespace", display: "Curlybrace with one space",
code: """
if (true) {
// ...
}
while (true) {
// ...
}
switch (v) {
// ...
}
"""
}
{
key: "nospace", display: "Curlybrace with no space",
code: """
if (true){
// ...
}
while (true){
// ...
}
switch (v){
// ...
}
"""
}
{
key: "newline", display: "Curlybrace at new line",
code: """
if (true)
{
// ...
}
while (true)
{
// ...
}
switch (v)
{
// ...
}
"""
}
]
onespace: 0
nospace: 0
newline: 0
commits: []
) unless convention.blockstatement
onespace = /((if|while|switch).*\)\s+{)|(}\s+else)/
nospace = /((if|while|switch).*\){)|(}else)/
newline = /((if|while|switch).*\)\s*$)|((if|while|switch).*\)\s*\/[\/\*])|(^\s*else)/
convention.blockstatement.onespace = convention.blockstatement.onespace + 1 if onespace.test line
convention.blockstatement.nospace = convention.blockstatement.nospace + 1 if nospace.test line
convention.blockstatement.newline = convention.blockstatement.newline + 1 if newline.test line
convention.blockstatement.commits.push commitUrl if onespace.test(line) or nospace.test(line) or newline.test(line)
convention.blockstatement.commits = _.uniq convention.blockstatement.commits
convention
linelength: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.linelength =
title: "Line length is over 80 characters?"
column: [
{
key: "char80", display: "Line length is within 80 characters.",
code: "/* width is within 80 characters */"
}
{
key: "char120", display: "Line length is within 120 characters",
code: "/* width is within 120 characters */"
}
{
key: "char150", display: "Line length is within 150 characters",
code: "/* width is within 150 characters */"
}
]
char80: 0
char120: 0
char150: 0
commits: []
) unless convention.linelength
width = line.length
tabcount = line.split('\t').length - 1
# assume tab size is 4 space
width += tabcount * 3
if width < 80
convention.linelength.char80 = convention.linelength.char80 + 1
else if width < 120
convention.linelength.char120 = convention.linelength.char120 + 1
else
convention.linelength.char150 = convention.linelength.char150 + 1
convention.linelength.commits.push commitUrl
convention.linelength.commits = _.uniq convention.linelength.commits
convention
quotes: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.quotes =
title: "Single quote vs double quotes"
column: [
{
key: "single", display: "Single quote",
code: """
var foo = 'bar';
var obj = { 'foo': 'bar'};
"""
}
{
key: "double", display: "Double quotes",
code: """
var foo = "bar";
var obj = { "foo": "bar"};
"""
}
]
single: 0
double: 0
commits: []
) unless convention.quotes
placeholder = "CONVENTION-PLACEHOLDER"
single = (line) ->
temp = line.replace /'.*?'/g, placeholder
(///#{placeholder}///.test temp) and (!///"[\w\s<>/=]*#{placeholder}[\w\s<>/=]*"///.test temp) and (!///"///.test temp)
double = (line) ->
temp = line.replace /".*?"/g, placeholder
(///#{placeholder}///.test temp) and (!///'[\w\s<>/=]*#{placeholder}[\w\s<>/=]*'///.test temp) and (!///'///.test temp)
convention.quotes.single = convention.quotes.single + 1 if single line
convention.quotes.double = convention.quotes.double + 1 if double line
convention.quotes.commits.push commitUrl if single(line) or double(line)
convention.quotes.commits = _.uniq convention.quotes.commits
convention
| 192778 | # parsing JavaScript code
#
# Copyright (c) 2013 <NAME> aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
helpers = require '../helpers'
_ = require 'underscore'
jsParser = module.exports =
lang: 'js'
parse: (line, convention, commitUrl) ->
convention = this.comma line, convention, commitUrl
convention = this.indent line, convention, commitUrl
convention = this.functiondef line, convention, commitUrl
convention = this.argumentdef line, convention, commitUrl
convention = this.literaldef line, convention, commitUrl
convention = this.conditionstatement line, convention, commitUrl
convention = this.quotes line, convention, commitUrl
comma: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.comma =
title: "Last comma vs. First comma"
column: [
{
key: "first", display: "First comma",
code: """
var foo = 1
, bar = 2
, baz = 3;
var obj = {
foo: 1
, bar: 2
, baz: 3
};
"""
}
{
key: "last", display: "Last comma",
code: """
var foo = 1,
bar = 2,
baz = 3;
var obj = {
foo: 1,
bar: 2,
baz: 3
};
"""
}
]
first: 0
last: 0
commits: []
) unless convention.comma
first = /^\s*,.*/
last = /.*,\s*$/
convention.comma.first = convention.comma.first + 1 if first.test line
convention.comma.last = convention.comma.last + 1 if last.test line
convention.comma.commits.push commitUrl if first.test(line) or last.test(line)
convention.comma.commits = _.uniq convention.comma.commits
convention
indent: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.indent =
title: "Space vs. Tab"
column: [
{
key: "tab", display: "Tab",
code: """
function foo() {
// use tab for indentation
return "bar";
}
"""
}
{
key: "space", display: "Space",
code: """
function foo() {
return "bar";
}
"""
}
]
tab: 0
space: 0
commits: []
) unless convention.indent
tab = /^\t+.*/
space = /^\s+.*/
convention.indent.tab = convention.indent.tab + 1 if tab.test line
convention.indent.space = convention.indent.space + 1 if space.test line
convention.indent.commits.push commitUrl if tab.test(line) or space.test(line)
convention.indent.commits = _.uniq convention.indent.commits
convention
functiondef: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.functiondef =
title: "Function followed by one space vs. Function followed by no space"
column: [
{
key: "onespace", display: "One space",
code: """
function foo () {
return "bar";
}
"""
}
{
key: "nospace", display: "No space",
code: """
function foo() {
return "bar";
}
"""
}
]
onespace: 0
nospace: 0
commits: []
) unless convention.functiondef
onespace = /function(\s+.)*\s+\(/
nospace = /function(\s+.)*\(/
convention.functiondef.onespace = convention.functiondef.onespace + 1 if onespace.test line
convention.functiondef.nospace = convention.functiondef.nospace + 1 if nospace.test line
convention.functiondef.commits.push commitUrl if onespace.test(line) or nospace.test(line)
convention.functiondef.commits = _.uniq convention.functiondef.commits
convention
  # argumentdef: tallies whether parenthesized argument lists are padded with
  # an inner space ("fn( a )") or not ("fn(a)"). Mutates and returns
  # `convention`; `commitUrl` is recorded (de-duped) when the line matches.
  argumentdef: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.argumentdef =
      title: "Arguments definition with one space vs. no space"
      column: [
        {
          key: "onespace", display: "One space",
          code: """
                function fn( arg1, arg2 ) {
                  // ...
                }
                if ( true ) {
                  // ...
                }
                """
        }
        {
          key: "nospace", display: "No space",
          code: "function fn(arg1, arg2) {\n//or\nif (true) {"
        }
      ]
      onespace: 0
      nospace: 0
      commits: []
    ) unless convention.argumentdef
    # onespace: keyword, optional identifier, "(", then at least one space.
    # nospace: same prefix, but a non-space immediately after "(".
    onespace = /(function|if|while|switch)(\s+\w*)?\s*\(\s+/
    nospace = /(function|if|while|switch)(\s+\w*)?\s*\(\S+/
    convention.argumentdef.onespace = convention.argumentdef.onespace + 1 if onespace.test line
    convention.argumentdef.nospace = convention.argumentdef.nospace + 1 if nospace.test line
    convention.argumentdef.commits.push commitUrl if onespace.test(line) or nospace.test(line)
    convention.argumentdef.commits = _.uniq convention.argumentdef.commits
    convention
  # literaldef: tallies object-literal key/value spacing style — "foo: 1"
  # (tracespace), "foo : 1" (bothspace), or "foo:1" (nospace). Mutates and
  # returns `convention`.
  literaldef: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.literaldef =
      title: "Object Literal Definition types"
      column: [
        {
          key: "tracespace", display: "Followed by space",
          code: """
                {
                    foo: 1,
                    bar: 2,
                    baz: 3
                }
                """
        }
        {
          key: "bothspace", display: "Using space in before/after",
          code: """
                {
                    foo : 1,
                    bar : 2,
                    baz : 3
                }
                """
        }
        {
          key: "nospace", display: "No space",
          code: """
                {
                    foo:1,
                    bar:2,
                    baz:3
                }
                """
        }
      ]
      tracespace: 0
      bothspace: 0
      nospace: 0
      commits: []
    ) unless convention.literaldef
    # The value side matches word chars, quotes, or "/" (e.g. URLs, regexes).
    # Note a single line may increment more than one counter if it contains
    # several key/value pairs written in different styles.
    tracespace = /\w:\s+[\w"'\/]/
    bothspace = /\w\s+:\s+[\w"'\/]/
    nospace = /\w:[\w"'\/]/
    convention.literaldef.tracespace = convention.literaldef.tracespace + 1 if tracespace.test line
    convention.literaldef.bothspace = convention.literaldef.bothspace + 1 if bothspace.test line
    convention.literaldef.nospace = convention.literaldef.nospace + 1 if nospace.test line
    convention.literaldef.commits.push commitUrl if tracespace.test(line) or bothspace.test(line) or nospace.test(line)
    convention.literaldef.commits = _.uniq convention.literaldef.commits
    convention
  # conditionstatement: tallies whether if/while/switch keywords are followed
  # by a space before the condition's "(". Mutates and returns `convention`.
  conditionstatement: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.conditionstatement =
      title: "How to write conditional statement"
      column: [
        {
          key: "onespace", display: "Condition with one space",
          code: """
                if (true) {
                  //...
                }
                while (true) {
                  //...
                }
                switch (v) {
                  //...
                }
                """
        }
        {
          key: "nospace", display: "Condition with no space",
          code: """
                if(true) {
                  //...
                }
                while(true) {
                  //...
                }
                switch(v) {
                  //...
                }
                """
        }
      ]
      onespace: 0
      nospace: 0
      commits: []
    ) unless convention.conditionstatement
    # onespace: whitespace between keyword and "("; nospace: "(" immediately
    # after the keyword.
    onespace = /(if|while|switch)\s+\(/
    nospace = /(if|while|switch)\(/
    convention.conditionstatement.onespace = convention.conditionstatement.onespace + 1 if onespace.test line
    convention.conditionstatement.nospace = convention.conditionstatement.nospace + 1 if nospace.test line
    convention.conditionstatement.commits.push commitUrl if onespace.test(line) or nospace.test(line)
    convention.conditionstatement.commits = _.uniq convention.conditionstatement.commits
    convention
  # blockstatement: tallies where the opening curly brace of a block is placed
  # relative to the if/while/switch header — same line with a space, same line
  # with no space, or on its own line. Mutates and returns `convention`.
  blockstatement: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.blockstatement =
      title: "How to write block statement"
      column: [
        {
          key: "onespace", display: "Curlybrace with one space",
          code: """
                if (true) {
                  // ...
                }
                while (true) {
                  // ...
                }
                switch (v) {
                  // ...
                }
                """
        }
        {
          key: "nospace", display: "Curlybrace with no space",
          code: """
                if (true){
                  // ...
                }
                while (true){
                  // ...
                }
                switch (v){
                  // ...
                }
                """
        }
        {
          key: "newline", display: "Curlybrace at new line",
          code: """
                if (true)
                {
                  // ...
                }
                while (true)
                {
                  // ...
                }
                switch (v)
                {
                  // ...
                }
                """
        }
      ]
      onespace: 0
      nospace: 0
      newline: 0
      commits: []
    ) unless convention.blockstatement
    # Each pattern also covers the "} else" / "else" continuation forms.
    # newline: header ends the line (optionally followed by a // or /* comment),
    # or a bare "else" starts the line.
    onespace = /((if|while|switch).*\)\s+{)|(}\s+else)/
    nospace = /((if|while|switch).*\){)|(}else)/
    newline = /((if|while|switch).*\)\s*$)|((if|while|switch).*\)\s*\/[\/\*])|(^\s*else)/
    convention.blockstatement.onespace = convention.blockstatement.onespace + 1 if onespace.test line
    convention.blockstatement.nospace = convention.blockstatement.nospace + 1 if nospace.test line
    convention.blockstatement.newline = convention.blockstatement.newline + 1 if newline.test line
    convention.blockstatement.commits.push commitUrl if onespace.test(line) or nospace.test(line) or newline.test(line)
    convention.blockstatement.commits = _.uniq convention.blockstatement.commits
    convention
  # linelength: buckets every line by rendered width — under 80, under 120, or
  # everything else. Note the "char150" bucket also absorbs lines longer than
  # 150 columns. Every line contributes, so commitUrl is pushed unconditionally.
  linelength: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.linelength =
      title: "Line length is over 80 characters?"
      column: [
        {
          key: "char80", display: "Line length is within 80 characters.",
          code: "/* width is within 80 characters */"
        }
        {
          key: "char120", display: "Line length is within 120 characters",
          code: "/* width is within 120 characters */"
        }
        {
          key: "char150", display: "Line length is within 150 characters",
          code: "/* width is within 150 characters */"
        }
      ]
      char80: 0
      char120: 0
      char150: 0
      commits: []
    ) unless convention.linelength
    # Rendered width: each tab counts as 4 columns (raw length + 3 per tab).
    width = line.length
    tabcount = line.split('\t').length - 1
    # assume tab size is 4 space
    width += tabcount * 3
    if width < 80
      convention.linelength.char80 = convention.linelength.char80 + 1
    else if width < 120
      convention.linelength.char120 = convention.linelength.char120 + 1
    else
      convention.linelength.char150 = convention.linelength.char150 + 1
    convention.linelength.commits.push commitUrl
    convention.linelength.commits = _.uniq convention.linelength.commits
    convention
  # quotes: decides whether a line favors single or double quotes. Quoted
  # spans are first collapsed to a placeholder token so that the opposite
  # quote character appearing INSIDE a string (e.g. "it's") is not miscounted.
  # Mutates and returns `convention`.
  quotes: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.quotes =
      title: "Single quote vs double quotes"
      column: [
        {
          key: "single", display: "Single quote",
          code: """
                var foo = 'bar';
                var obj = { 'foo': 'bar'};
                """
        }
        {
          key: "double", display: "Double quotes",
          code: """
                var foo = "bar";
                var obj = { "foo": "bar"};
                """
        }
      ]
      single: 0
      double: 0
      commits: []
    ) unless convention.quotes
    placeholder = "CONVENTION-PLACEHOLDER"
    # single: line contains a '...' span, that span is not itself wrapped in
    # double quotes, and no stray double quote remains after the collapse.
    single = (line) ->
      temp = line.replace /'.*?'/g, placeholder
      (///#{placeholder}///.test temp) and (!///"[\w\s<>/=]*#{placeholder}[\w\s<>/=]*"///.test temp) and (!///"///.test temp)
    # double: mirror image of `single` for "..." spans.
    double = (line) ->
      temp = line.replace /".*?"/g, placeholder
      (///#{placeholder}///.test temp) and (!///'[\w\s<>/=]*#{placeholder}[\w\s<>/=]*'///.test temp) and (!///'///.test temp)
    convention.quotes.single = convention.quotes.single + 1 if single line
    convention.quotes.double = convention.quotes.double + 1 if double line
    convention.quotes.commits.push commitUrl if single(line) or double(line)
    convention.quotes.commits = _.uniq convention.quotes.commits
    convention
| true | # parsing JavaScript code
#
# Copyright (c) 2013 PI:NAME:<NAME>END_PI aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
helpers = require '../helpers'
_ = require 'underscore'
jsParser = module.exports =
lang: 'js'
  # parse: applies each per-line convention checker in turn, threading the
  # accumulating `convention` object through every call and returning it
  # (CoffeeScript implicitly returns the last assignment).
  # NOTE(review): `blockstatement` and `linelength` are defined below but are
  # never invoked here — confirm whether their omission is intentional.
  parse: (line, convention, commitUrl) ->
    convention = this.comma line, convention, commitUrl
    convention = this.indent line, convention, commitUrl
    convention = this.functiondef line, convention, commitUrl
    convention = this.argumentdef line, convention, commitUrl
    convention = this.literaldef line, convention, commitUrl
    convention = this.conditionstatement line, convention, commitUrl
    convention = this.quotes line, convention, commitUrl
  # comma: tallies comma-first vs comma-last multiline declaration style.
  # Mutates and returns `convention`; `commitUrl` is recorded (de-duped)
  # whenever the line matches either style.
  comma: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.comma =
      title: "Last comma vs. First comma"
      column: [
        {
          key: "first", display: "First comma",
          code: """
                var foo = 1
                  , bar = 2
                  , baz = 3;
                var obj = {
                    foo: 1
                  , bar: 2
                  , baz: 3
                };
                """
        }
        {
          key: "last", display: "Last comma",
          code: """
                var foo = 1,
                    bar = 2,
                    baz = 3;
                var obj = {
                  foo: 1,
                  bar: 2,
                  baz: 3
                };
                """
        }
      ]
      first: 0
      last: 0
      commits: []
    ) unless convention.comma
    # first: the line begins with a comma; last: the line ends with a comma.
    first = /^\s*,.*/
    last = /.*,\s*$/
    convention.comma.first = convention.comma.first + 1 if first.test line
    convention.comma.last = convention.comma.last + 1 if last.test line
    convention.comma.commits.push commitUrl if first.test(line) or last.test(line)
    convention.comma.commits = _.uniq convention.comma.commits
    convention
indent: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.indent =
title: "Space vs. Tab"
column: [
{
key: "tab", display: "Tab",
code: """
function foo() {
// use tab for indentation
return "bar";
}
"""
}
{
key: "space", display: "Space",
code: """
function foo() {
return "bar";
}
"""
}
]
tab: 0
space: 0
commits: []
) unless convention.indent
tab = /^\t+.*/
space = /^\s+.*/
convention.indent.tab = convention.indent.tab + 1 if tab.test line
convention.indent.space = convention.indent.space + 1 if space.test line
convention.indent.commits.push commitUrl if tab.test(line) or space.test(line)
convention.indent.commits = _.uniq convention.indent.commits
convention
functiondef: (line, convention, commitUrl) ->
convention = {lang: this.lang} unless convention
(convention.functiondef =
title: "Function followed by one space vs. Function followed by no space"
column: [
{
key: "onespace", display: "One space",
code: """
function foo () {
return "bar";
}
"""
}
{
key: "nospace", display: "No space",
code: """
function foo() {
return "bar";
}
"""
}
]
onespace: 0
nospace: 0
commits: []
) unless convention.functiondef
onespace = /function(\s+.)*\s+\(/
nospace = /function(\s+.)*\(/
convention.functiondef.onespace = convention.functiondef.onespace + 1 if onespace.test line
convention.functiondef.nospace = convention.functiondef.nospace + 1 if nospace.test line
convention.functiondef.commits.push commitUrl if onespace.test(line) or nospace.test(line)
convention.functiondef.commits = _.uniq convention.functiondef.commits
convention
  # argumentdef: tallies whether parenthesized argument lists are padded with
  # an inner space ("fn( a )") or not ("fn(a)"). Mutates and returns
  # `convention`; `commitUrl` is recorded (de-duped) when the line matches.
  argumentdef: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.argumentdef =
      title: "Arguments definition with one space vs. no space"
      column: [
        {
          key: "onespace", display: "One space",
          code: """
                function fn( arg1, arg2 ) {
                  // ...
                }
                if ( true ) {
                  // ...
                }
                """
        }
        {
          key: "nospace", display: "No space",
          code: "function fn(arg1, arg2) {\n//or\nif (true) {"
        }
      ]
      onespace: 0
      nospace: 0
      commits: []
    ) unless convention.argumentdef
    # onespace: keyword, optional identifier, "(", then at least one space.
    # nospace: same prefix, but a non-space immediately after "(".
    onespace = /(function|if|while|switch)(\s+\w*)?\s*\(\s+/
    nospace = /(function|if|while|switch)(\s+\w*)?\s*\(\S+/
    convention.argumentdef.onespace = convention.argumentdef.onespace + 1 if onespace.test line
    convention.argumentdef.nospace = convention.argumentdef.nospace + 1 if nospace.test line
    convention.argumentdef.commits.push commitUrl if onespace.test(line) or nospace.test(line)
    convention.argumentdef.commits = _.uniq convention.argumentdef.commits
    convention
  # literaldef: tallies object-literal key/value spacing style — "foo: 1"
  # (tracespace), "foo : 1" (bothspace), or "foo:1" (nospace). Mutates and
  # returns `convention`.
  literaldef: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.literaldef =
      title: "Object Literal Definition types"
      column: [
        {
          key: "tracespace", display: "Followed by space",
          code: """
                {
                    foo: 1,
                    bar: 2,
                    baz: 3
                }
                """
        }
        {
          key: "bothspace", display: "Using space in before/after",
          code: """
                {
                    foo : 1,
                    bar : 2,
                    baz : 3
                }
                """
        }
        {
          key: "nospace", display: "No space",
          code: """
                {
                    foo:1,
                    bar:2,
                    baz:3
                }
                """
        }
      ]
      tracespace: 0
      bothspace: 0
      nospace: 0
      commits: []
    ) unless convention.literaldef
    # The value side matches word chars, quotes, or "/" (e.g. URLs, regexes).
    # Note a single line may increment more than one counter if it contains
    # several key/value pairs written in different styles.
    tracespace = /\w:\s+[\w"'\/]/
    bothspace = /\w\s+:\s+[\w"'\/]/
    nospace = /\w:[\w"'\/]/
    convention.literaldef.tracespace = convention.literaldef.tracespace + 1 if tracespace.test line
    convention.literaldef.bothspace = convention.literaldef.bothspace + 1 if bothspace.test line
    convention.literaldef.nospace = convention.literaldef.nospace + 1 if nospace.test line
    convention.literaldef.commits.push commitUrl if tracespace.test(line) or bothspace.test(line) or nospace.test(line)
    convention.literaldef.commits = _.uniq convention.literaldef.commits
    convention
  # conditionstatement: tallies whether if/while/switch keywords are followed
  # by a space before the condition's "(". Mutates and returns `convention`.
  conditionstatement: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.conditionstatement =
      title: "How to write conditional statement"
      column: [
        {
          key: "onespace", display: "Condition with one space",
          code: """
                if (true) {
                  //...
                }
                while (true) {
                  //...
                }
                switch (v) {
                  //...
                }
                """
        }
        {
          key: "nospace", display: "Condition with no space",
          code: """
                if(true) {
                  //...
                }
                while(true) {
                  //...
                }
                switch(v) {
                  //...
                }
                """
        }
      ]
      onespace: 0
      nospace: 0
      commits: []
    ) unless convention.conditionstatement
    # onespace: whitespace between keyword and "("; nospace: "(" immediately
    # after the keyword.
    onespace = /(if|while|switch)\s+\(/
    nospace = /(if|while|switch)\(/
    convention.conditionstatement.onespace = convention.conditionstatement.onespace + 1 if onespace.test line
    convention.conditionstatement.nospace = convention.conditionstatement.nospace + 1 if nospace.test line
    convention.conditionstatement.commits.push commitUrl if onespace.test(line) or nospace.test(line)
    convention.conditionstatement.commits = _.uniq convention.conditionstatement.commits
    convention
  # blockstatement: tallies where the opening curly brace of a block is placed
  # relative to the if/while/switch header — same line with a space, same line
  # with no space, or on its own line. Mutates and returns `convention`.
  blockstatement: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.blockstatement =
      title: "How to write block statement"
      column: [
        {
          key: "onespace", display: "Curlybrace with one space",
          code: """
                if (true) {
                  // ...
                }
                while (true) {
                  // ...
                }
                switch (v) {
                  // ...
                }
                """
        }
        {
          key: "nospace", display: "Curlybrace with no space",
          code: """
                if (true){
                  // ...
                }
                while (true){
                  // ...
                }
                switch (v){
                  // ...
                }
                """
        }
        {
          key: "newline", display: "Curlybrace at new line",
          code: """
                if (true)
                {
                  // ...
                }
                while (true)
                {
                  // ...
                }
                switch (v)
                {
                  // ...
                }
                """
        }
      ]
      onespace: 0
      nospace: 0
      newline: 0
      commits: []
    ) unless convention.blockstatement
    # Each pattern also covers the "} else" / "else" continuation forms.
    # newline: header ends the line (optionally followed by a // or /* comment),
    # or a bare "else" starts the line.
    onespace = /((if|while|switch).*\)\s+{)|(}\s+else)/
    nospace = /((if|while|switch).*\){)|(}else)/
    newline = /((if|while|switch).*\)\s*$)|((if|while|switch).*\)\s*\/[\/\*])|(^\s*else)/
    convention.blockstatement.onespace = convention.blockstatement.onespace + 1 if onespace.test line
    convention.blockstatement.nospace = convention.blockstatement.nospace + 1 if nospace.test line
    convention.blockstatement.newline = convention.blockstatement.newline + 1 if newline.test line
    convention.blockstatement.commits.push commitUrl if onespace.test(line) or nospace.test(line) or newline.test(line)
    convention.blockstatement.commits = _.uniq convention.blockstatement.commits
    convention
  # linelength: buckets every line by rendered width — under 80, under 120, or
  # everything else. Note the "char150" bucket also absorbs lines longer than
  # 150 columns. Every line contributes, so commitUrl is pushed unconditionally.
  linelength: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.linelength =
      title: "Line length is over 80 characters?"
      column: [
        {
          key: "char80", display: "Line length is within 80 characters.",
          code: "/* width is within 80 characters */"
        }
        {
          key: "char120", display: "Line length is within 120 characters",
          code: "/* width is within 120 characters */"
        }
        {
          key: "char150", display: "Line length is within 150 characters",
          code: "/* width is within 150 characters */"
        }
      ]
      char80: 0
      char120: 0
      char150: 0
      commits: []
    ) unless convention.linelength
    # Rendered width: each tab counts as 4 columns (raw length + 3 per tab).
    width = line.length
    tabcount = line.split('\t').length - 1
    # assume tab size is 4 space
    width += tabcount * 3
    if width < 80
      convention.linelength.char80 = convention.linelength.char80 + 1
    else if width < 120
      convention.linelength.char120 = convention.linelength.char120 + 1
    else
      convention.linelength.char150 = convention.linelength.char150 + 1
    convention.linelength.commits.push commitUrl
    convention.linelength.commits = _.uniq convention.linelength.commits
    convention
  # quotes: decides whether a line favors single or double quotes. Quoted
  # spans are first collapsed to a placeholder token so that the opposite
  # quote character appearing INSIDE a string (e.g. "it's") is not miscounted.
  # Mutates and returns `convention`.
  quotes: (line, convention, commitUrl) ->
    convention = {lang: this.lang} unless convention
    # Lazily create the result bucket (display metadata + counters) on first use.
    (convention.quotes =
      title: "Single quote vs double quotes"
      column: [
        {
          key: "single", display: "Single quote",
          code: """
                var foo = 'bar';
                var obj = { 'foo': 'bar'};
                """
        }
        {
          key: "double", display: "Double quotes",
          code: """
                var foo = "bar";
                var obj = { "foo": "bar"};
                """
        }
      ]
      single: 0
      double: 0
      commits: []
    ) unless convention.quotes
    placeholder = "CONVENTION-PLACEHOLDER"
    # single: line contains a '...' span, that span is not itself wrapped in
    # double quotes, and no stray double quote remains after the collapse.
    single = (line) ->
      temp = line.replace /'.*?'/g, placeholder
      (///#{placeholder}///.test temp) and (!///"[\w\s<>/=]*#{placeholder}[\w\s<>/=]*"///.test temp) and (!///"///.test temp)
    # double: mirror image of `single` for "..." spans.
    double = (line) ->
      temp = line.replace /".*?"/g, placeholder
      (///#{placeholder}///.test temp) and (!///'[\w\s<>/=]*#{placeholder}[\w\s<>/=]*'///.test temp) and (!///'///.test temp)
    convention.quotes.single = convention.quotes.single + 1 if single line
    convention.quotes.double = convention.quotes.double + 1 if double line
    convention.quotes.commits.push commitUrl if single(line) or double(line)
    convention.quotes.commits = _.uniq convention.quotes.commits
    convention
|
[
{
"context": "###\n * fs-filesysteminfo\n * https://github.com/cookch10/node-fs-filesysteminfo\n *\n * Copyright (c) 2016 ",
"end": 57,
"score": 0.9996516704559326,
"start": 49,
"tag": "USERNAME",
"value": "cookch10"
},
{
"context": "0/node-fs-filesysteminfo\n *\n * Copyright (c) 2016 Christopher M. Cook\n * Licensed under the MIT license.\n###\n\n_root = ",
"end": 126,
"score": 0.9998747110366821,
"start": 107,
"tag": "NAME",
"value": "Christopher M. Cook"
},
{
"context": "ue\n \n _otherUsers = {name: 'otherUsers', value: fnGetPermissionObj()}\n Object",
"end": 4703,
"score": 0.886655330657959,
"start": 4693,
"tag": "USERNAME",
"value": "otherUsers"
},
{
"context": "ows OS permissions issues, see https://github.com/joyent/node/issues/4812 and https://github.com/joyent/no",
"end": 12369,
"score": 0.9993802905082703,
"start": 12363,
"tag": "USERNAME",
"value": "joyent"
},
{
"context": "om/joyent/node/issues/4812 and https://github.com/joyent/node/issues/6381\n \n ",
"end": 12416,
"score": 0.9994075298309326,
"start": 12410,
"tag": "USERNAME",
"value": "joyent"
},
{
"context": "tory contents.\n _fs.rmdirSync(@fullName)\n else\n _fs.rmdirSync(@",
"end": 22200,
"score": 0.8992923498153687,
"start": 22192,
"tag": "USERNAME",
"value": "fullName"
}
] | src/fs-filesysteminfo.coffee | cookch10/node-fs-filesysteminfo | 4 | ###
* fs-filesysteminfo
* https://github.com/cookch10/node-fs-filesysteminfo
*
* Copyright (c) 2016 Christopher M. Cook
* Licensed under the MIT license.
###
# Module-level state: node core handles plus umask bookkeeping.
_root = exports ? this
_path = require('path')
_fs = require('fs')
_exec = require('child_process').exec
# Most recently spawned child process (set by FileSystemInfo.refresh).
_child = null
oldmask = undefined
newmask = 0
# NOTE(review): this clears the process-wide umask so that modes passed to
# mkdir/writeFile apply verbatim; the previous mask is saved in `oldmask` but
# is not restored anywhere in this chunk — confirm intent.
oldmask = process.umask(newmask)
# Container populated by namespace() with the Util.System.IO classes below.
thisNamespaceObjectContainer = {}
#region ************* internal lib: extension methods ********************
# String::equals(str, ignoreCase): case-sensitive (default) or
# case-insensitive string comparison; returns false for non-string `str`.
# Only installed when no other equals() extension already exists.
unless String::equals
  String::equals = (str, ignoreCase) ->
    ignoreCase = if typeof ignoreCase is 'boolean' then ignoreCase else false
    `typeof str === 'string' ? (ignoreCase ? this.toLowerCase() === str.toLowerCase() : this === str) : false`
#endregion
#region ************* internal lib: utility methods ********************
# Type-check helper: true when typeof is 'object' (includes null and arrays).
isObject = (obj) ->
  typeof obj is 'object'
isFunction = (obj) ->
  typeof obj is 'function'
isBoolean = (obj) ->
  typeof obj is 'boolean'
isString = (obj) ->
  typeof obj is 'string'
isNullOrUndefined = (obj) ->
  typeof obj is 'undefined' or obj is null
# Parses an octal string (e.g. '755') into an integer; null when unparsable.
toIntegerFromOctalRepresenation = (obj) ->
  obj ?= ''
  integerFromOctal = parseInt(obj, 8)
  if isNaN(integerFromOctal) then null else integerFromOctal
# Renders a numeric mode as an octal string masked to the permission bits
# (07777); returns '' for a zero/absent mode.
toOctalStringFromIntegerRepresenation = (obj) ->
  obj ?= ''
  octalStringFromInteger = '0' + (obj & parseInt('07777', 8)).toString(8)
  if octalStringFromInteger is '00' then '' else octalStringFromInteger
# namespace(target, name, block): creates/loads the dotted namespace `name`
# under `target` (defaulting to exports/_root when called with fewer than 3
# args), runs `block` against the innermost object, then stamps every class
# placed there with a ___typeName___ used by Base::getType().
namespace = (target, name, block) ->
  objectNamespaceContainer = (if arguments.length is 4 then Array::slice.call(arguments).shift() else null)
  [target, name, block] = [(if typeof exports isnt 'undefined' then exports else _root), arguments...] if arguments.length < 3
  top = target
  # Walk/create each namespace segment; `target` ends at the leaf object.
  target = target[item] or= {} for item in name.split '.'
  block target, top
  for own key, value of target
    value::___typeName___ = key
namespace thisNamespaceObjectContainer, 'Util.System.IO', (exports) ->
  # Default octal mode used when creating files/directories without an
  # explicit mode.
  DEFAULT_FILESYSTEM_CREATION_MODE = '777'
  # Base: common root for namespace classes; getType() prefers the
  # ___typeName___ stamped by namespace() over the constructor name.
  class exports.Base
    getType: -> @___typeName___ or @constructor.name
    toString: ->
      @getType().toString()
class FileSystemPermissions extends exports.Base
constructor: (@octalFileSystemModeString = '') ->
CONST_LETTERFORMAT_ARR = ['---', '--x', '-w-', '-wx', 'r--', 'r-x', 'rw-', 'rwx']
CONST_BINARYFORMAT_ARR = ['000', '001', '010', '011', '100', '101', '110', '111']
fnGetPermissionObj = (intPermissionString) ->
intPermission = parseInt(intPermissionString) or 0
defaultObj = { canRead: false, canWrite: false, canExecute: false, letterFormat: CONST_LETTERFORMAT_ARR[intPermission], binaryFormat: CONST_BINARYFORMAT_ARR[intPermission] }
if intPermission
switch intPermission
when 7
defaultObj.canRead = true
defaultObj.canWrite = true
defaultObj.canExecute = true
when 6
defaultObj.canRead = true
defaultObj.canWrite = true
when 5
defaultObj.canRead = true
defaultObj.canExecute = true
when 4
defaultObj.canRead = true
when 3
defaultObj.canWrite = true
defaultObj.canExecute = true
when 2
defaultObj.canWrite = true
when 1
defaultObj.canExecute = true
else
Object.freeze(defaultObj)
_owner = {name: 'owner', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _owner.name + '',
get: ->
_owner.value
configurable: true
enumerable: true
@owner = _owner.value
_group = {name: 'group', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _group.name + '',
get: ->
_group.value
configurable: true
enumerable: true
@group = _group.value
_otherUsers = {name: 'otherUsers', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _otherUsers.name + '',
get: ->
_otherUsers.value
configurable: true
enumerable: true
@otherUsers = _otherUsers.value
updateRolePermissions = (octalString) ->
octalString = if octalString then octalString.toString() else '000'
octalString = if octalString.length is 4 then octalString.substring(1) else octalString
octalArr = octalString.split('')
for octal, i in octalArr
role = if i is 0 then _owner else if i is 1 then _group else _otherUsers
permission = fnGetPermissionObj(octal)
role.value = permission
octalString
_octalFileSystemModeString = {name: 'octalFileSystemModeString', value: @octalFileSystemModeString}
Object.defineProperty @, '' + _octalFileSystemModeString.name + '',
get: ->
_octalFileSystemModeString.value
set: (value) ->
_value = if value and isString(value) and /^(?:0|0?[0-7]{3})$/g.test(value) then value else ''
updateRolePermissions(_value)
_octalFileSystemModeString.value = value
configurable: true
enumerable: true
@octalFileSystemModeString = _octalFileSystemModeString.value
toRoleLetterRepresentationString: ->
@owner.letterFormat + @group.letterFormat + @otherUsers.letterFormat
toRoleBinaryRepresentationString: ->
@owner.binaryFormat + @group.binaryFormat + @otherUsers.binaryFormat
  # FileSystemInfo: snapshot of a filesystem path — resolved name parts,
  # existence, fs.Stats-derived flags, and permissions. refresh() re-reads
  # the on-disk state; DirectoryInfo/FileInfo subclass this.
  class exports.FileSystemInfo extends exports.Base
    constructor: (@originalPath) ->
      _initialized = false
      _emptyStatusObj = (->
        #If the path used to construct the FileSystemInfo object does not exist, all time-related status attributes will be set to 12:00 midnight, January 1, 1601 A.D. (C.E.) Coordinated Universal Time (UTC), adjusted to local time.
        DEFAULT_DATETIME_ATTRIBUTE_VALUE = new Date(-11644412400000)
        if not _initialized
          emptyStatsObj = {}
          try
            emptyStatsObj = _fs.statSync(process.cwd())
          catch ex
            emptyStatsObj = {}
          finally
            # Blank out a real stats object: dates for *time keys, 0 elsewhere.
            for k of emptyStatsObj
              kValue = if (/time$/i).test(k) then DEFAULT_DATETIME_ATTRIBUTE_VALUE else 0
              emptyStatsObj[k] = kValue
        emptyStatsObj
      )()
      # Bare drive specifiers like 'C:' are normalized to '.' (current dir).
      _originalPath = {name: 'originalPath', value: @originalPath}
      Object.defineProperty @, '' + _originalPath.name + '',
        writable: false #When the writable property attribute is set to false, the property is said to be 'non-writable'. It cannot be reassigned.
        value: `_originalPath.value.length === 2 && _originalPath.value.charAt(1) === ':' ? '.' : _originalPath.value`
        configurable: false #true if and only if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. Defaults to false.
        enumerable: true #true if and only if this property shows up during enumeration of the properties on the corresponding object. Defaults to false.
      @originalPath = _originalPath.value
      # Absolute, resolved path.
      _fullName = {name: 'fullName', value: _path.resolve(@originalPath)}
      Object.defineProperty @, '' + _fullName.name + '',
        writable: false
        value: _fullName.value
        configurable: false
        enumerable: true
      @fullName = _fullName.value
      _name = {name: 'name', value: _path.basename(@originalPath)}
      Object.defineProperty @, '' + _name.name + '',
        writable: false
        value: _name.value
        configurable: false
        enumerable: true
      @name = _name.value
      _extension = {name: 'extension', value: _path.extname(@originalPath)}
      Object.defineProperty @, '' + _extension.name + '',
        writable: false
        value: _extension.value
        configurable: false
        enumerable: true
      @extension = _extension.value
      # Mirrors the boolean is*() accessors of fs.Stats; filled by refresh().
      _flags = {name: 'flags', value: {
        isDirectory: false,
        isFile: false,
        isBlockDevice: false,
        isCharacterDevice: false,
        isFIFO: false,
        isSocket: false,
        isSymbolicLink: false
      }}
      Object.defineProperty @, '' + _flags.name + '',
        writable: false
        value: _flags.value
        configurable: false
        enumerable: true
      @flags = _flags.value
      _fileSystemPermissions = {name: 'fileSystemPermissions', value: new FileSystemPermissions()}
      Object.defineProperty @, '' + _fileSystemPermissions.name + '',
        get: ->
          _fileSystemPermissions.value
        configurable: true
        enumerable: true
      @fileSystemPermissions = _fileSystemPermissions.value
      # Reading `exists` triggers a refresh so the answer is current.
      _exists = {name: 'exists', value: false}
      Object.defineProperty @, '' + _exists.name + '',
        get: ->
          @refresh(_exists)
          _exists.value
        configurable: true
        enumerable: true
      @exists = _exists.value
      # Raw fs.Stats; assigning null/undefined falls back to the zeroed stub.
      _status = {name: 'status', value: null}
      Object.defineProperty @, '' + _status.name + '',
        get: ->
          _status.value
        set: (value) ->
          value ?= _emptyStatusObj
          _status.value = value
        configurable: true
        enumerable: true
      @status = _status.value
      init = (->
        @refresh(_exists) if not _initialized
        _initialized = true if not _initialized
      ).apply(@)
    # refresh(i): re-stats the path, updating existence (via the passed-in
    # `i` holder), flags, and permissions. Fat arrow keeps `this` bound.
    refresh: (i) =>
      if i?.value?
        exists = _fs.existsSync(@fullName)
        if exists
          i.value = true
        else
          i.value = false
          # Fall back to scanning the parent listing: existsSync can return
          # false for entries the process cannot stat directly.
          try
            parentPath = _path.resolve("#{@fullName}", '..')
            if not parentPath.equals(@fullName, true)
              parentContentsArr = _fs.readdirSync(parentPath)
              i.value = true if parentContentsArr.indexOf(@name) >= 0
          catch ex0
      try
        status = @status = _fs.statSync(@fullName)
        isWin = process.platform is 'win32'
        isFile = status.isFile()
        mode = status.mode
        if isWin then mode = mode | ((mode & 146) >> 1) # workaround for Windows OS permissions issues, see https://github.com/joyent/node/issues/4812 and https://github.com/joyent/node/issues/6381
        # fnContinue is invoked either as an exec() callback (3 args: the
        # shell `stat -c "%a"` output) or directly with the numeric mode.
        fnContinue = ((error, stdout, stderr) ->
          fsPermissions = @fileSystemPermissions
          flags = @flags
          status = @status
          parent = @parent
          parentFSPermissions = parent?.fileSystemPermissions
          parentFSPermissionsOctalFileSystemModeString = if parentFSPermissions then parentFSPermissions.octalFileSystemModeString else ''
          if arguments.length is 3
            fsPermissions.octalFileSystemModeString = '0' + stdout.trim() unless error
          else
            fsPermissions.octalFileSystemModeString = toOctalStringFromIntegerRepresenation(error)
          # Copy the stats' is*() results into the plain flags object.
          for k of flags
            kAttrib = status[k]
            flags[k] = kAttrib.apply(status) if isFunction(kAttrib)
          if parent and not parentFSPermissionsOctalFileSystemModeString
            parent.refresh()
          @
        ).bind(@)
        if !mode
          # No mode from stat: shell out to `stat` against the parent.
          # NOTE(review): assumes a POSIX `stat -c` is on PATH — confirm.
          _child = _exec('stat -c "%a" ' + @parent.fullName, fnContinue)
        else
          fnContinue(mode)
      catch ex1
        @status = null
      return
  # DirectoryInfo: FileSystemInfo specialized for directories — creation
  # (recursive), deletion (optionally recursive), and enumeration of contents
  # in both async (callback) and sync flavors.
  class exports.DirectoryInfo extends exports.FileSystemInfo
    constructor: (originalPath) ->
      throw 'Path is null or undefined' if isNullOrUndefined(originalPath) or originalPath.equals('')
      super(originalPath)
      # Shadow the base `exists` accessor so it refreshes on every read.
      _exists = {name: 'exists', value: false}
      Object.defineProperty @, '' + _exists.name + '',
        get: ->
          @refresh(_exists)
          _exists.value
        configurable: true
        enumerable: true
      @exists = _exists.value
      # Parent directory, created lazily on first access.
      _parent = {name: 'parent', value: null}
      Object.defineProperty @, '' + _parent.name + '',
        get: ->
          if isNullOrUndefined(_parent.value)
            parentPath = _path.resolve("#{@fullName}", '..')
            _parent.value = new DirectoryInfo(parentPath)
          _parent.value
        configurable: true
        enumerable: true
      @parent = _parent.value
    # create(mode..., cb): async mkdir, recursively creating missing parents
    # first; cb(error, this) once this directory exists.
    # NOTE(review): the recursive call passes `mode` (already a splat array)
    # back through another splat parameter — confirm nesting is intended.
    create: (mode..., cb) =>
      if not @parent.exists then @parent.create(mode, cb)
      if not @exists
        _fs.mkdir @fullName, toIntegerFromOctalRepresenation(mode), ((error) ->
          if isNullOrUndefined(error) then @refresh()
          return cb.call(@, error, @) if isFunction(cb)
        ).bind(@)
      else
        return cb.call(@, null, @) if isFunction(cb)
      return
    # createSync(mode): synchronous counterpart of create(); rethrows the
    # mkdir error on failure, returns `this` on success.
    createSync: (mode) =>
      if not @parent.exists then @parent.createSync(mode)
      if not @exists
        success = true
        try
          _fs.mkdirSync(@fullName, toIntegerFromOctalRepresenation(mode))
        catch ex
          success = false
        finally
          if success
            @refresh()
          else
            throw ex if ex
      return @
    # createSubdirectory(path, mode..., cb): creates `path` relative to this
    # directory and hands the resulting DirectoryInfo to cb.
    createSubdirectory: (path, mode..., cb) =>
      throw 'Path is null or undefined' if isNullOrUndefined(path) or path.equals('')
      path = _path.join(@fullName, path)
      # NOTE(review): `if Array.isArray` tests the function object itself
      # (always truthy) rather than calling Array.isArray(mode); harmless
      # because a splat parameter is always an array, but likely unintended.
      mode = if Array.isArray then mode[0] else mode
      subdirectory = new DirectoryInfo(path)
      if not subdirectory.exists
        subdirectory.create mode, ((context, error, result) ->
          if context is result
            return cb.call(this, error, result) if isFunction(cb)
          else
            return result
        ).bind(@, subdirectory)
      else
        cb.call(@, null, subdirectory) if isFunction(cb)
      return
    # Synchronous counterpart of createSubdirectory(); returns the
    # DirectoryInfo for the (possibly pre-existing) subdirectory.
    createSubdirectorySync: (path, mode) =>
      throw 'Path is null or undefined' if isNullOrUndefined(path) or path.equals('')
      path = _path.join(@fullName, path)
      subdirectory = new DirectoryInfo(path)
      subdirectory.createSync(mode) if not subdirectory.exists
      subdirectory
    # delete(recursive, cb): async removal. When recursive, enumerates all
    # descendants, chmods each to 777, and unlinks files / rmdirs directories
    # children-first; cb(error) when done. Quoted name because `delete` is a
    # JS reserved word.
    'delete': (recursive = false, cb) =>
      # Support delete(cb) with recursive defaulted to false.
      if arguments.length is 1
        rArg = Array::slice.call(arguments).slice(-1).shift()
        if isFunction(rArg)
          recursive = false
          cb = rArg
      recursive = if isBoolean(recursive) then recursive else false
      fnFilter = if recursive then null else -> false
      self = @
      fnIterator = ((dir, done) ->
        context = @
        results = []
        context.enumerateFileSystemInfos({ fnFilter: fnFilter, recursive: recursive }, (error1, list) ->
          return done.call(self, error1) if error1
          ###
          * NOTE:
          * A big assumption is being made here in that we assume directories will always appear ahead of files in the array from enumerateFileSystemInfos().
          * To prevent exceptions from being thrown by attempting to delete a non-empty directory, we are going to reverse() the array before continuing.
          * This means all files will be deleted ahead of their respective parent directories.
          * This also means that all subdirectories will be deleted ahead of their parent directories.
          *
          * **If this assumption proves false (possibly for other operating systems), this method logic can be revisited.
          *
          * **Possible alternative logic (just putting here for note purposes)
          * -> One alternative (which I am not sure I like) would be to perform an array.sort(), which might be more expensive resource-wise.
          * -> Another alternative would be to queue directories during iteration to be deleted after all files have been deleted.
          * -> Something better than the previous two.
          ###
          list.reverse().push(self)
          i = 0
          # Sequentially process each entry; recursion via next() avoids
          # parallel deletes racing each other.
          (next = ->
            fsinfo = list[i++]
            return done.call(self, null) unless fsinfo
            _fs.chmod fsinfo.fullName, toIntegerFromOctalRepresenation('777'), ((error2) ->
              return done.call(self, error2) if error2
              ftype = @getType()
              if ftype is 'FileSystemInfo' then @refresh()
              if ftype is 'FileInfo' or ftype is 'DirectoryInfo'
                fsMethod = _fs[if ftype is 'FileInfo' then 'unlink' else 'rmdir']
                fsMethod.call @, @fullName, ((error3) ->
                  return done.call(self, error3) if error3
                  return next.call(@)
                )
              else
                return done('Unhandled exception for delete of ambiguous ' + ftype) # This could happen in some edge cases where a specific file or directory has non-read permissions, but was found to exist by looking at the parent directory contents.
            ).bind(fsinfo)
          ).call(context)
          return
        )
        return
      )
      fnIterator.call(@, @fullName, cb)
    # deleteSync(recursive): synchronous removal; non-recursive calls throw
    # (via fs) when the directory is not empty.
    deleteSync: (recursive) =>
      recursive = if isBoolean(recursive) then recursive else false
      _fs.chmodSync(@fullName, toIntegerFromOctalRepresenation('777'))
      if recursive
        children = @enumerateFileSystemInfosSync({ fnFilter: null, recursive: false })
        if (children.length is 0)
          _fs.rmdirSync(@fullName)
        else
          children.forEach (fsinfo) ->
            ftype = fsinfo.getType()
            if ftype is 'FileSystemInfo' then fsinfo.refresh()
            if ftype is 'FileInfo' or ftype is 'DirectoryInfo'
              fsinfo.deleteSync(recursive)
            else
              throw 'Unhandled exception for deleteSync of ambiguous ' + ftype # This could happen in some edge cases where a specific file or directory has non-read permissions, but was found to exist by looking at the parent directory contents.
          _fs.rmdirSync(@fullName)
      else
        _fs.rmdirSync(@fullName) # this will (and should) throw an exception if the directory is not empty. To delete a non-empty directory, set recursive equal to true.
      return
    # enumerateFileSystemInfos(opts, cb): async listing of this directory as
    # FileSystemInfo/DirectoryInfo/FileInfo instances. opts.fnFilter filters
    # by entry name; opts.recursive descends into subdirectories.
    # cb(error, array).
    enumerateFileSystemInfos: (opts = {}, cb) =>
      if arguments.length is 1 and isFunction(opts)
        cb = opts
        opts = {}
      return cb('Invalid opts argument') if not isObject(opts)
      defaultfnFilter = -> true
      opts.fnFilter ?= defaultfnFilter
      opts.recursive ?= false
      recursive = if isBoolean(opts.recursive) then opts.recursive else false
      fnFilter = if isFunction(opts.fnFilter) then opts.fnFilter else defaultfnFilter
      fileSystemInfosArr = []
      self = @
      fnIterator = ((dir, done) ->
        context = @
        _fs.readdir dir, ((error, list) ->
          return done.call(self, error) if error
          i = 0
          # Sequential walk: each entry is typed, accumulated, and (when
          # recursive) descended into before moving on.
          (next = ->
            fsname = list[i++]
            return done.call(self, null, fileSystemInfosArr) unless fsname
            if fnFilter(fsname)
              path = _path.join(context.fullName, fsname)
              fileSystemInfoObj = new exports.FileSystemInfo(path)
              isDirectory = fileSystemInfoObj.flags.isDirectory
              isFile = fileSystemInfoObj.flags.isFile
              if (isDirectory)
                fileSystemInfoObj = new DirectoryInfo(path)
              else if (isFile)
                fileSystemInfoObj = new exports.FileInfo(path)
              fileSystemInfosArr.push(fileSystemInfoObj)
              if (recursive and isDirectory)
                fnIterator.call fileSystemInfoObj, fileSystemInfoObj.fullName, (error, results) ->
                  next.call(fileSystemInfoObj)
                  return
              else
                next.call(context)
            else
              next.call(context)
          ).call(context)
          return
        ).bind(context)
        return
      )
      fnIterator.call(@, @fullName, cb)
    # Synchronous counterpart of enumerateFileSystemInfos(); returns the
    # accumulated array. A trailing array argument (used internally during
    # recursion) collects descendant results.
    enumerateFileSystemInfosSync: (opts = {}) =>
      throw 'Path does not exist and hence cannot be enumerated' if not @exists
      throw 'Invalid opts argument' if not isObject(opts)
      defaultfnFilter = -> true
      opts.fnFilter ?= defaultfnFilter
      opts.recursive ?= false
      recursive = if isBoolean(opts.recursive) then opts.recursive else false
      fnFilter = if isFunction(opts.fnFilter) then opts.fnFilter else defaultfnFilter
      rArg = Array::slice.call(arguments).slice(-1).shift()
      resultsArr = if Array.isArray(rArg) then rArg else []
      fileSystemInfosArr = []
      _fileSystemInfosArr = _fs.readdirSync(@fullName)
      _fileSystemInfosArr.forEach ((fsname) =>
        if fnFilter(fsname)
          path = _path.join(@fullName, fsname)
          fileSystemInfoObj = new exports.FileSystemInfo(path)
          if (fileSystemInfoObj.flags.isDirectory)
            fileSystemInfoObj = new DirectoryInfo(path)
          else if (fileSystemInfoObj.flags.isFile)
            fileSystemInfoObj = new exports.FileInfo(path)
          fileSystemInfosArr.push(fileSystemInfoObj)
      )
      if recursive
        fileSystemInfosArr.forEach (fsinfo) ->
          if fsinfo.flags.isDirectory
            resultsArr = fsinfo.enumerateFileSystemInfosSync(opts, resultsArr)
          return
        fileSystemInfosArr = fileSystemInfosArr.concat(resultsArr)
      fileSystemInfosArr
    # Regular-file node: adds a lazy `parent` DirectoryInfo plus create and
    # delete operations in both async and sync forms.
    class exports.FileInfo extends exports.FileSystemInfo
        constructor: (originalPath) ->
            throw 'Path is null or undefined' if isNullOrUndefined(originalPath) or originalPath.equals('')
            super(originalPath)
            _exists = {name: 'exists', value: false}
            # Reading `exists` re-checks the filesystem via refresh().
            Object.defineProperty @, '' + _exists.name + '',
                get: ->
                    @refresh(_exists)
                    _exists.value
                configurable: true
                enumerable: true
            @exists = _exists.value
            # `parent` is materialized on first access.
            _parent = {name: 'parent', value: null}
            Object.defineProperty @, '' + _parent.name + '',
                get: ->
                    if isNullOrUndefined(_parent.value)
                        parentPath = _path.resolve("#{@fullName}", '..')
                        _parent.value = new exports.DirectoryInfo(parentPath)
                    _parent.value
                configurable: true
                enumerable: true
            @parent = _parent.value
        # Asynchronously creates an empty file, then cb(error).
        # opts: ensure (create a missing parent dir first), mode (octal
        # string), overwrite (false -> fail if file exists, via 'wx' flag).
        create: (opts = {}, cb) =>
            if arguments.length is 1 and isFunction(opts)
                cb = opts
                opts = {}
            return cb('Invalid opts argument') if not isObject(opts)
            opts.ensure ?= false
            opts.mode ?= ''
            opts.overwrite ?= true
            ensure = if isBoolean(opts.ensure) then opts.ensure else false
            mode = opts.mode
            modeOctal = toIntegerFromOctalRepresenation(mode)
            overwrite = if isBoolean(opts.overwrite) then opts.overwrite else true
            writeflag = if overwrite then 'w' else 'wx'
            ensureCreateParent = false
            fnContinue = ((error) ->
                _fs.writeFile @fullName, '', { encoding: 'utf8', mode: modeOctal, flag: writeflag }, ((error) -> # note that since the file contains zero bytes, the encoding doesn't actually matter at this point.
                    if isNullOrUndefined(error) then @refresh()
                    if ensureCreateParent
                        # Parent was force-created with 777; restore the
                        # requested (or default) mode afterwards.
                        _fs.chmod @parent.fullName, toIntegerFromOctalRepresenation(mode || DEFAULT_FILESYSTEM_CREATION_MODE), ((error) ->
                            @parent.refresh()
                            return cb.call(@, error) if isFunction(cb)
                        ).bind(@)
                    else
                        return cb.call(@, error) if isFunction(cb)
                ).bind(@)
            ).bind(@)
            if ensure and not @parent.exists
                ensureCreateParent = true
                # NOTE(review): DirectoryInfo::create has signature
                # (mode..., cb), so mode becomes ['777', cb] and fnContinue is
                # the callback; parsing relies on string coercion of the
                # array — fragile, confirm.
                @parent.create('777', cb, fnContinue)
            else
                fnContinue()
            return
        # Synchronous variant of create(); returns @.
        createSync: (opts = {}) =>
            throw 'Invalid opts argument' if not isObject(opts)
            opts.ensure ?= false
            opts.mode ?= ''
            opts.overwrite ?= true
            ensure = if isBoolean(opts.ensure) then opts.ensure else false
            mode = opts.mode
            modeOctal = toIntegerFromOctalRepresenation(mode)
            overwrite = if isBoolean(opts.overwrite) then opts.overwrite else true
            writeflag = if overwrite then 'w' else 'wx'
            ensureCreateParent = false
            success = true
            if ensure and not @parent.exists
                ensureCreateParent = true
                @parent.createSync('777')
            try
                _fs.writeFileSync(@fullName, '', { encoding: 'utf8', mode: modeOctal, flag: writeflag }) # note that since the file contains zero bytes, the encoding doesn't actually matter at this point.
            catch ex
                success = false
            finally
                if success
                    @refresh()
                    if ensureCreateParent
                        _fs.chmodSync(@parent.fullName, toIntegerFromOctalRepresenation(mode || DEFAULT_FILESYSTEM_CREATION_MODE))
                        @parent.refresh()
                else
                    throw ex if ex
            return @
        # Asynchronously unlinks the file: temporarily opens up parent and
        # file permissions, unlinks, restores the parent's mode, cb(error).
        'delete': (cb) =>
            cb = if arguments.length > 1 then arguments[arguments.length - 1] else cb
            parent = @parent
            parentFullName = parent.fullName
            parentFSPermissions = parent.fileSystemPermissions
            parentFSPermissionsOctalFileSystemModeString = parentFSPermissions.octalFileSystemModeString or DEFAULT_FILESYSTEM_CREATION_MODE
            thisFullName = @fullName
            _fs.chmod parentFullName, toIntegerFromOctalRepresenation('777'), ((error1) ->
                _fs.chmod thisFullName, toIntegerFromOctalRepresenation('777'), ((error2) ->
                    _fs.unlink thisFullName, ((error3) ->
                        _fs.chmod parentFullName, toIntegerFromOctalRepresenation(parentFSPermissionsOctalFileSystemModeString), ((error4) ->
                            return cb.call(@, error4) if isFunction(cb)
                        ).bind(@)
                    ).bind(@)
                ).bind(@)
            ).bind(@)
        # Synchronous variant of 'delete'.
        deleteSync: () =>
            parent = @parent
            parentFullName = parent.fullName
            parentFSPermissions = parent.fileSystemPermissions
            parentFSPermissionsOctalFileSystemModeString = parentFSPermissions.octalFileSystemModeString or DEFAULT_FILESYSTEM_CREATION_MODE
            thisFullName = @fullName
            _fs.chmodSync(parentFullName, toIntegerFromOctalRepresenation('777'))
            _fs.chmodSync(thisFullName, toIntegerFromOctalRepresenation('777'))
            _fs.unlinkSync(thisFullName)
            _fs.chmodSync(parentFullName, toIntegerFromOctalRepresenation(parentFSPermissionsOctalFileSystemModeString))
            return
return _root[k] = v for k, v of thisNamespaceObjectContainer
| 130868 | ###
* fs-filesysteminfo
* https://github.com/cookch10/node-fs-filesysteminfo
*
* Copyright (c) 2016 <NAME>
* Licensed under the MIT license.
###
# Module-scope wiring: `_root` is the export target (CommonJS `exports` when
# present, otherwise the global object).
_root = exports ? this
_path = require('path')
_fs = require('fs')
_exec = require('child_process').exec
# Holds the most recent child process spawned by refresh() (see FileSystemInfo).
_child = null
# Clear the process umask so modes passed to mkdir/writeFile/chmod apply
# literally; the previous mask is kept in `oldmask`.
# NOTE(review): the umask is changed process-wide and never restored — confirm
# this is intentional for library consumers.
oldmask = undefined
newmask = 0
oldmask = process.umask(newmask)
# Staging container populated by namespace() and copied onto _root at the
# bottom of the file.
thisNamespaceObjectContainer = {}
#region ************* internal lib: extension methods ********************
# String::equals polyfill: strict equality against another string, optionally
# case-insensitive. Returns false for any non-string argument.
# BUGFIX: the original embedded-JS comparison `this === str` compared the
# boxed String receiver against a primitive (the compiled output is not in
# strict mode), so the case-sensitive path could never return true and guards
# like `originalPath.equals('')` were dead; compare primitive values instead.
unless String::equals
    String::equals = (str, ignoreCase) ->
        return false unless typeof str is 'string'
        ignoreCase = if typeof ignoreCase is 'boolean' then ignoreCase else false
        if ignoreCase then @toLowerCase() is str.toLowerCase() else @toString() is str
#endregion
#region ************* internal lib: utility methods ********************
# Type-test helpers (loose, typeof-based).
# BUGFIX: `typeof null` is 'object', so the original isObject accepted null
# and let a null opts argument slip past validation only to blow up later;
# reject null explicitly. Arrays still count as objects, as before.
isObject = (obj) ->
    obj isnt null and typeof obj is 'object'
isFunction = (obj) ->
    typeof obj is 'function'
isBoolean = (obj) ->
    typeof obj is 'boolean'
isString = (obj) ->
    typeof obj is 'string'
isNullOrUndefined = (obj) ->
    typeof obj is 'undefined' or obj is null
# Parse an octal mode string (e.g. '777') into its integer value; null when
# the input is not parseable as octal. (Names keep the original spelling —
# "Represenation" — for compatibility with existing callers.)
toIntegerFromOctalRepresenation = (obj) ->
    obj ?= ''
    integerFromOctal = parseInt(obj, 8)
    if isNaN(integerFromOctal) then null else integerFromOctal
# Inverse of the above: format an integer mode as a 0-prefixed octal string
# (e.g. 493 -> '0755'); '' when the masked value is zero.
toOctalStringFromIntegerRepresenation = (obj) ->
    obj ?= ''
    octalStringFromInteger = '0' + (obj & parseInt('07777', 8)).toString(8)
    if octalStringFromInteger is '00' then '' else octalStringFromInteger
# Create (or reuse) a dot-delimited namespace object chain under `target` and
# invoke `block` with the innermost namespace object plus the root. Afterwards
# every class published on the namespace gets a ___typeName___ prototype
# marker equal to its key, which Base::getType reports.
namespace = (target, name, block) ->
    # NOTE(review): with 4 arguments this captures the first one but the
    # result is never used beyond this local — appears vestigial; confirm
    # before removing.
    objectNamespaceContainer = (if arguments.length is 4 then Array::slice.call(arguments).shift() else null)
    # Called with fewer than 3 args: treat the args as (name, block) and
    # default the target to the module exports (or the global root).
    [target, name, block] = [(if typeof exports isnt 'undefined' then exports else _root), arguments...] if arguments.length < 3
    top = target
    # Walk/create each path segment of e.g. 'Util.System.IO', descending as
    # we go (the assignment re-binds `target` every iteration).
    target = target[item] or= {} for item in name.split '.'
    block target, top
    # Tag each exported class with its published name for getType().
    for own key, value of target
        value::___typeName___ = key
#endregion
namespace thisNamespaceObjectContainer, 'Util.System.IO', (exports) ->
DEFAULT_FILESYSTEM_CREATION_MODE = '777'
class exports.Base
getType: -> @___typeName___ or @constructor.name
toString: ->
@getType().toString()
class FileSystemPermissions extends exports.Base
constructor: (@octalFileSystemModeString = '') ->
CONST_LETTERFORMAT_ARR = ['---', '--x', '-w-', '-wx', 'r--', 'r-x', 'rw-', 'rwx']
CONST_BINARYFORMAT_ARR = ['000', '001', '010', '011', '100', '101', '110', '111']
fnGetPermissionObj = (intPermissionString) ->
intPermission = parseInt(intPermissionString) or 0
defaultObj = { canRead: false, canWrite: false, canExecute: false, letterFormat: CONST_LETTERFORMAT_ARR[intPermission], binaryFormat: CONST_BINARYFORMAT_ARR[intPermission] }
if intPermission
switch intPermission
when 7
defaultObj.canRead = true
defaultObj.canWrite = true
defaultObj.canExecute = true
when 6
defaultObj.canRead = true
defaultObj.canWrite = true
when 5
defaultObj.canRead = true
defaultObj.canExecute = true
when 4
defaultObj.canRead = true
when 3
defaultObj.canWrite = true
defaultObj.canExecute = true
when 2
defaultObj.canWrite = true
when 1
defaultObj.canExecute = true
else
Object.freeze(defaultObj)
_owner = {name: 'owner', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _owner.name + '',
get: ->
_owner.value
configurable: true
enumerable: true
@owner = _owner.value
_group = {name: 'group', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _group.name + '',
get: ->
_group.value
configurable: true
enumerable: true
@group = _group.value
_otherUsers = {name: 'otherUsers', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _otherUsers.name + '',
get: ->
_otherUsers.value
configurable: true
enumerable: true
@otherUsers = _otherUsers.value
updateRolePermissions = (octalString) ->
octalString = if octalString then octalString.toString() else '000'
octalString = if octalString.length is 4 then octalString.substring(1) else octalString
octalArr = octalString.split('')
for octal, i in octalArr
role = if i is 0 then _owner else if i is 1 then _group else _otherUsers
permission = fnGetPermissionObj(octal)
role.value = permission
octalString
_octalFileSystemModeString = {name: 'octalFileSystemModeString', value: @octalFileSystemModeString}
Object.defineProperty @, '' + _octalFileSystemModeString.name + '',
get: ->
_octalFileSystemModeString.value
set: (value) ->
_value = if value and isString(value) and /^(?:0|0?[0-7]{3})$/g.test(value) then value else ''
updateRolePermissions(_value)
_octalFileSystemModeString.value = value
configurable: true
enumerable: true
@octalFileSystemModeString = _octalFileSystemModeString.value
toRoleLetterRepresentationString: ->
@owner.letterFormat + @group.letterFormat + @otherUsers.letterFormat
toRoleBinaryRepresentationString: ->
@owner.binaryFormat + @group.binaryFormat + @otherUsers.binaryFormat
    # Wraps a single filesystem path: resolves it, exposes originalPath /
    # fullName / name / extension, node-type flags, permissions, existence
    # and fs.Stats status, all kept current via refresh().
    class exports.FileSystemInfo extends exports.Base
        constructor: (@originalPath) ->
            _initialized = false
            # Template "empty" stats object used when the path cannot be
            # stat'ed: numeric attributes zeroed, *time attributes reset.
            _emptyStatusObj = (->
                #If the path used to construct the FileSystemInfo object does not exist, all time-related status attributes will be set to 12:00 midnight, January 1, 1601 A.D. (C.E.) Coordinated Universal Time (UTC), adjusted to local time.
                DEFAULT_DATETIME_ATTRIBUTE_VALUE = new Date(-11644412400000)
                if not _initialized
                    emptyStatsObj = {}
                    try
                        # Stat the cwd purely to learn the fs.Stats key set.
                        emptyStatsObj = _fs.statSync(process.cwd())
                    catch ex
                        emptyStatsObj = {}
                    finally
                        for k of emptyStatsObj
                            kValue = if (/time$/i).test(k) then DEFAULT_DATETIME_ATTRIBUTE_VALUE else 0
                            emptyStatsObj[k] = kValue
                        emptyStatsObj
            )()
            _originalPath = {name: 'originalPath', value: @originalPath}
            # A bare drive-letter path like 'C:' is normalized to '.'.
            Object.defineProperty @, '' + _originalPath.name + '',
                writable: false #When the writable property attribute is set to false, the property is said to be 'non-writable'. It cannot be reassigned.
                value: `_originalPath.value.length === 2 && _originalPath.value.charAt(1) === ':' ? '.' : _originalPath.value`
                configurable: false #true if and only if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. Defaults to false.
                enumerable: true #true if and only if this property shows up during enumeration of the properties on the corresponding object. Defaults to false.
            # NOTE(review): silent no-op — the property above is non-writable
            # (sloppy mode); kept for symmetry with accessor-backed members.
            @originalPath = _originalPath.value
            _fullName = {name: 'fullName', value: _path.resolve(@originalPath)}
            Object.defineProperty @, '' + _fullName.name + '',
                writable: false
                value: _fullName.value
                configurable: false
                enumerable: true
            @fullName = _fullName.value
            _name = {name: 'name', value: _path.basename(@originalPath)}
            Object.defineProperty @, '' + _name.name + '',
                writable: false
                value: _name.value
                configurable: false
                enumerable: true
            @name = _name.value
            _extension = {name: 'extension', value: _path.extname(@originalPath)}
            Object.defineProperty @, '' + _extension.name + '',
                writable: false
                value: _extension.value
                configurable: false
                enumerable: true
            @extension = _extension.value
            # Node-type flags mirroring the fs.Stats is*() probes; filled in
            # by refresh().
            _flags = {name: 'flags', value: {
                isDirectory: false,
                isFile: false,
                isBlockDevice: false,
                isCharacterDevice: false,
                isFIFO: false,
                isSocket: false,
                isSymbolicLink: false
            }}
            Object.defineProperty @, '' + _flags.name + '',
                writable: false
                value: _flags.value
                configurable: false
                enumerable: true
            @flags = _flags.value
            _fileSystemPermissions = {name: 'fileSystemPermissions', value: new FileSystemPermissions()}
            Object.defineProperty @, '' + _fileSystemPermissions.name + '',
                get: ->
                    _fileSystemPermissions.value
                configurable: true
                enumerable: true
            @fileSystemPermissions = _fileSystemPermissions.value
            _exists = {name: 'exists', value: false}
            # Reading `exists` re-checks the filesystem via refresh().
            Object.defineProperty @, '' + _exists.name + '',
                get: ->
                    @refresh(_exists)
                    _exists.value
                configurable: true
                enumerable: true
            @exists = _exists.value
            _status = {name: 'status', value: null}
            Object.defineProperty @, '' + _status.name + '',
                get: ->
                    _status.value
                set: (value) ->
                    # Assigning null/undefined resets to the empty template.
                    value ?= _emptyStatusObj
                    _status.value = value
                configurable: true
                enumerable: true
            @status = _status.value
            # Initial population of existence / flags / permissions.
            init = (->
                @refresh(_exists) if not _initialized
                _initialized = true if not _initialized
            ).apply(@)
        # Re-reads the path's state from disk. When `i` is the backing record
        # of the `exists` property its value is updated first; then status,
        # flags and permissions are refreshed from fs.stat.
        refresh: (i) =>
            if i?.value?
                exists = _fs.existsSync(@fullName)
                if exists
                    i.value = true
                else
                    i.value = false
                    try
                        # existsSync can report false for entries we lack
                        # permission to stat; double-check via the parent
                        # directory listing (skipped at a filesystem root).
                        parentPath = _path.resolve("#{@fullName}", '..')
                        if not parentPath.equals(@fullName, true)
                            parentContentsArr = _fs.readdirSync(parentPath)
                            i.value = true if parentContentsArr.indexOf(@name) >= 0
                    catch ex0
            try
                status = @status = _fs.statSync(@fullName)
                isWin = process.platform is 'win32'
                isFile = status.isFile()
                mode = status.mode
                if isWin then mode = mode | ((mode & 146) >> 1) # workaround for Windows OS permissions issues, see https://github.com/joyent/node/issues/4812 and https://github.com/joyent/node/issues/6381
                # Invoked either as an exec() callback (error, stdout,
                # stderr) carrying stat(1) output, or directly with the
                # numeric mode as its single argument — distinguished by
                # arguments.length below.
                fnContinue = ((error, stdout, stderr) ->
                    fsPermissions = @fileSystemPermissions
                    flags = @flags
                    status = @status
                    parent = @parent
                    parentFSPermissions = parent?.fileSystemPermissions
                    parentFSPermissionsOctalFileSystemModeString = if parentFSPermissions then parentFSPermissions.octalFileSystemModeString else ''
                    if arguments.length is 3
                        fsPermissions.octalFileSystemModeString = '0' + stdout.trim() unless error
                    else
                        # Single-argument call: `error` actually carries the
                        # numeric mode from fs.stat.
                        fsPermissions.octalFileSystemModeString = toOctalStringFromIntegerRepresenation(error)
                    for k of flags
                        kAttrib = status[k]
                        flags[k] = kAttrib.apply(status) if isFunction(kAttrib)
                    if parent and not parentFSPermissionsOctalFileSystemModeString
                        parent.refresh()
                    @
                ).bind(@)
                if !mode
                    # Mode unavailable from fs.stat: shell out to stat(1).
                    # NOTE(review): POSIX-only fallback, and it queries the
                    # parent's mode rather than this node's — confirm intent.
                    _child = _exec('stat -c "%a" ' + @parent.fullName, fnContinue)
                else
                    fnContinue(mode)
            catch ex1
                # Path not stat'able: reset status to the empty template (the
                # `status` setter maps null to it).
                @status = null
            return
    # Directory node: adds a lazy `parent`, recursive create/delete and
    # (a)synchronous enumeration of directory contents.
    class exports.DirectoryInfo extends exports.FileSystemInfo
        constructor: (originalPath) ->
            throw 'Path is null or undefined' if isNullOrUndefined(originalPath) or originalPath.equals('')
            super(originalPath)
            _exists = {name: 'exists', value: false}
            # Reading `exists` re-checks the filesystem via refresh().
            Object.defineProperty @, '' + _exists.name + '',
                get: ->
                    @refresh(_exists)
                    _exists.value
                configurable: true
                enumerable: true
            @exists = _exists.value
            # `parent` is materialized on first access.
            _parent = {name: 'parent', value: null}
            Object.defineProperty @, '' + _parent.name + '',
                get: ->
                    if isNullOrUndefined(_parent.value)
                        parentPath = _path.resolve("#{@fullName}", '..')
                        _parent.value = new DirectoryInfo(parentPath)
                    _parent.value
                configurable: true
                enumerable: true
            @parent = _parent.value
        # Asynchronously creates this directory (after recursively creating
        # missing parents), then cb(error, @). `mode` is an optional octal
        # string such as '777'.
        create: (mode..., cb) =>
            # NOTE(review): the recursive parent create is not awaited before
            # the mkdir below, and it passes `mode` as an array (which
            # toIntegerFromOctalRepresenation parses only via string
            # coercion) — confirm both behaviors.
            if not @parent.exists then @parent.create(mode, cb)
            if not @exists
                _fs.mkdir @fullName, toIntegerFromOctalRepresenation(mode), ((error) ->
                    if isNullOrUndefined(error) then @refresh()
                    return cb.call(@, error, @) if isFunction(cb)
                ).bind(@)
            else
                return cb.call(@, null, @) if isFunction(cb)
            return
        # Synchronous variant of create(); returns @.
        createSync: (mode) =>
            if not @parent.exists then @parent.createSync(mode)
            if not @exists
                success = true
                try
                    _fs.mkdirSync(@fullName, toIntegerFromOctalRepresenation(mode))
                catch ex
                    success = false
                finally
                    if success
                        @refresh()
                    else
                        throw ex if ex
            return @
        # Creates (if needed) a direct subdirectory `path` and yields it via
        # cb(error, subdirectory).
        createSubdirectory: (path, mode..., cb) =>
            throw 'Path is null or undefined' if isNullOrUndefined(path) or path.equals('')
            path = _path.join(@fullName, path)
            # NOTE(review): this condition tests the Array.isArray function
            # itself (always truthy) rather than Array.isArray(mode); it is
            # harmless only because the splat guarantees an array — confirm
            # and fix upstream.
            mode = if Array.isArray then mode[0] else mode
            subdirectory = new DirectoryInfo(path)
            if not subdirectory.exists
                subdirectory.create mode, ((context, error, result) ->
                    if context is result
                        return cb.call(this, error, result) if isFunction(cb)
                    else
                        return result
                ).bind(@, subdirectory)
            else
                cb.call(@, null, subdirectory) if isFunction(cb)
            return
        # Synchronous variant; returns the subdirectory's DirectoryInfo.
        createSubdirectorySync: (path, mode) =>
            throw 'Path is null or undefined' if isNullOrUndefined(path) or path.equals('')
            path = _path.join(@fullName, path)
            subdirectory = new DirectoryInfo(path)
            subdirectory.createSync(mode) if not subdirectory.exists
            subdirectory
        # Asynchronously deletes this directory; with recursive=true its
        # contents are enumerated and removed first (deepest entries first).
        # cb(error) is invoked on completion.
        'delete': (recursive = false, cb) =>
            # Single-argument call: allow delete(cb) with recursive defaulted.
            if arguments.length is 1
                rArg = Array::slice.call(arguments).slice(-1).shift()
                if isFunction(rArg)
                    recursive = false
                    cb = rArg
            recursive = if isBoolean(recursive) then recursive else false
            # Non-recursive delete enumerates nothing (filter rejects all).
            fnFilter = if recursive then null else -> false
            self = @
            fnIterator = ((dir, done) ->
                context = @
                results = []
                context.enumerateFileSystemInfos({ fnFilter: fnFilter, recursive: recursive }, (error1, list) ->
                    return done.call(self, error1) if error1
                    ###
                    * NOTE:
                    * A big assumption is being made here in that we assume directories will always appear ahead of files in the array from enumerateFileSystemInfos().
                    * To prevent exceptions from being thrown by attempting to delete a non-empty directory, we are going to reverse() the array before continuing.
                    * This means all files will be deleted ahead of their respective parent directories.
                    * This also means that all subdirectories will be deleted ahead of their parent directories.
                    *
                    * **If this assumption proves false (possibly for other operating systems), this method logic can be revisited.
                    *
                    * **Possible alternative logic (just putting here for note purposes)
                    * -> One alternative (which I am not sure I like) would be to perform an array.sort(), which might be more expensive resource-wise.
                    * -> Another alternative would be to queue directories during iteration to be deleted after all files have been deleted.
                    * -> Something better than the previous two.
                    ###
                    # Delete deepest entries first, then this directory itself.
                    list.reverse().push(self)
                    i = 0
                    (next = ->
                        fsinfo = list[i++]
                        return done.call(self, null) unless fsinfo
                        # Lift permissions so unlink/rmdir cannot fail on a
                        # read-only entry.
                        _fs.chmod fsinfo.fullName, toIntegerFromOctalRepresenation('777'), ((error2) ->
                            return done.call(self, error2) if error2
                            ftype = @getType()
                            if ftype is 'FileSystemInfo' then @refresh()
                            if ftype is 'FileInfo' or ftype is 'DirectoryInfo'
                                fsMethod = _fs[if ftype is 'FileInfo' then 'unlink' else 'rmdir']
                                fsMethod.call @, @fullName, ((error3) ->
                                    return done.call(self, error3) if error3
                                    return next.call(@)
                                )
                            else
                                return done('Unhandled exception for delete of ambiguous ' + ftype) # This could happen in some edge cases where I specific file or directory has non-read permissions, but was found to exist by looking at the parent directory contents.
                        ).bind(fsinfo)
                    ).call(context)
                    return
                )
                return
            )
            fnIterator.call(@, @fullName, cb)
        # Synchronously deletes the directory; recursive=true removes its
        # contents first, otherwise a non-empty directory throws.
        deleteSync: (recursive) =>
            recursive = if isBoolean(recursive) then recursive else false
            _fs.chmodSync(@fullName, toIntegerFromOctalRepresenation('777'))
            if recursive
                children = @enumerateFileSystemInfosSync({ fnFilter: null, recursive: false })
                if (children.length is 0)
                    _fs.rmdirSync(@fullName)
                else
                    children.forEach (fsinfo) ->
                        ftype = fsinfo.getType()
                        if ftype is 'FileSystemInfo' then fsinfo.refresh()
                        if ftype is 'FileInfo' or ftype is 'DirectoryInfo'
                            fsinfo.deleteSync(recursive)
                        else
                            throw 'Unhandled exception for deleteSync of ambiguous ' + ftype # This could happen in some edge cases where I specific file or directory has non-read permissions, but was found to exist by looking at the parent directory contents.
                    _fs.rmdirSync(@fullName)
            else
                _fs.rmdirSync(@fullName) # this will (and should) throw an exception if the directory is not empty. To delete a non-empty directory, set recursive equal to true.
            return
        # Asynchronously lists this directory's contents as FileSystemInfo /
        # FileInfo / DirectoryInfo objects; opts.fnFilter(name) selects
        # entries and opts.recursive descends into subdirectories. Results
        # are delivered via cb(error, array).
        enumerateFileSystemInfos: (opts = {}, cb) =>
            if arguments.length is 1 and isFunction(opts)
                cb = opts
                opts = {}
            return cb('Invalid opts argument') if not isObject(opts)
            defaultfnFilter = -> true
            opts.fnFilter ?= defaultfnFilter
            opts.recursive ?= false
            recursive = if isBoolean(opts.recursive) then opts.recursive else false
            fnFilter = if isFunction(opts.fnFilter) then opts.fnFilter else defaultfnFilter
            fileSystemInfosArr = []
            self = @
            fnIterator = ((dir, done) ->
                context = @
                _fs.readdir dir, ((error, list) ->
                    return done.call(self, error) if error
                    i = 0
                    (next = ->
                        fsname = list[i++]
                        # End of this directory's listing: report everything
                        # accumulated so far.
                        return done.call(self, null, fileSystemInfosArr) unless fsname
                        if fnFilter(fsname)
                            path = _path.join(context.fullName, fsname)
                            fileSystemInfoObj = new exports.FileSystemInfo(path)
                            isDirectory = fileSystemInfoObj.flags.isDirectory
                            isFile = fileSystemInfoObj.flags.isFile
                            # Re-wrap each entry in its most specific type.
                            if (isDirectory)
                                fileSystemInfoObj = new DirectoryInfo(path)
                            else if (isFile)
                                fileSystemInfoObj = new exports.FileInfo(path)
                            fileSystemInfosArr.push(fileSystemInfoObj)
                            if (recursive and isDirectory)
                                fnIterator.call fileSystemInfoObj, fileSystemInfoObj.fullName, (error, results) ->
                                    next.call(fileSystemInfoObj)
                                    return
                            else
                                next.call(context)
                        else
                            next.call(context)
                    ).call(context)
                    return
                ).bind(context)
                return
            )
            fnIterator.call(@, @fullName, cb)
        # Synchronous variant of enumerateFileSystemInfos(); returns an array.
        enumerateFileSystemInfosSync: (opts = {}) =>
            throw 'Path does not exist and hence cannot be enumerated' if not @exists
            throw 'Invalid opts argument' if not isObject(opts)
            defaultfnFilter = -> true
            opts.fnFilter ?= defaultfnFilter
            opts.recursive ?= false
            recursive = if isBoolean(opts.recursive) then opts.recursive else false
            fnFilter = if isFunction(opts.fnFilter) then opts.fnFilter else defaultfnFilter
            # Undeclared extra argument: recursive calls below pass a shared
            # accumulator array as a hidden second parameter.
            rArg = Array::slice.call(arguments).slice(-1).shift()
            resultsArr = if Array.isArray(rArg) then rArg else []
            fileSystemInfosArr = []
            _fileSystemInfosArr = _fs.readdirSync(@fullName)
            _fileSystemInfosArr.forEach ((fsname) =>
                if fnFilter(fsname)
                    path = _path.join(@fullName, fsname)
                    fileSystemInfoObj = new exports.FileSystemInfo(path)
                    if (fileSystemInfoObj.flags.isDirectory)
                        fileSystemInfoObj = new DirectoryInfo(path)
                    else if (fileSystemInfoObj.flags.isFile)
                        fileSystemInfoObj = new exports.FileInfo(path)
                    fileSystemInfosArr.push(fileSystemInfoObj)
            )
            if recursive
                fileSystemInfosArr.forEach (fsinfo) ->
                    if fsinfo.flags.isDirectory
                        resultsArr = fsinfo.enumerateFileSystemInfosSync(opts, resultsArr)
                    return
                fileSystemInfosArr = fileSystemInfosArr.concat(resultsArr)
            fileSystemInfosArr
    # Regular-file node: adds a lazy `parent` DirectoryInfo plus create and
    # delete operations in both async and sync forms.
    class exports.FileInfo extends exports.FileSystemInfo
        constructor: (originalPath) ->
            throw 'Path is null or undefined' if isNullOrUndefined(originalPath) or originalPath.equals('')
            super(originalPath)
            _exists = {name: 'exists', value: false}
            # Reading `exists` re-checks the filesystem via refresh().
            Object.defineProperty @, '' + _exists.name + '',
                get: ->
                    @refresh(_exists)
                    _exists.value
                configurable: true
                enumerable: true
            @exists = _exists.value
            # `parent` is materialized on first access.
            _parent = {name: 'parent', value: null}
            Object.defineProperty @, '' + _parent.name + '',
                get: ->
                    if isNullOrUndefined(_parent.value)
                        parentPath = _path.resolve("#{@fullName}", '..')
                        _parent.value = new exports.DirectoryInfo(parentPath)
                    _parent.value
                configurable: true
                enumerable: true
            @parent = _parent.value
        # Asynchronously creates an empty file, then cb(error).
        # opts: ensure (create a missing parent dir first), mode (octal
        # string), overwrite (false -> fail if file exists, via 'wx' flag).
        create: (opts = {}, cb) =>
            if arguments.length is 1 and isFunction(opts)
                cb = opts
                opts = {}
            return cb('Invalid opts argument') if not isObject(opts)
            opts.ensure ?= false
            opts.mode ?= ''
            opts.overwrite ?= true
            ensure = if isBoolean(opts.ensure) then opts.ensure else false
            mode = opts.mode
            modeOctal = toIntegerFromOctalRepresenation(mode)
            overwrite = if isBoolean(opts.overwrite) then opts.overwrite else true
            writeflag = if overwrite then 'w' else 'wx'
            ensureCreateParent = false
            fnContinue = ((error) ->
                _fs.writeFile @fullName, '', { encoding: 'utf8', mode: modeOctal, flag: writeflag }, ((error) -> # note that since the file contains zero bytes, the encoding doesn't actually matter at this point.
                    if isNullOrUndefined(error) then @refresh()
                    if ensureCreateParent
                        # Parent was force-created with 777; restore the
                        # requested (or default) mode afterwards.
                        _fs.chmod @parent.fullName, toIntegerFromOctalRepresenation(mode || DEFAULT_FILESYSTEM_CREATION_MODE), ((error) ->
                            @parent.refresh()
                            return cb.call(@, error) if isFunction(cb)
                        ).bind(@)
                    else
                        return cb.call(@, error) if isFunction(cb)
                ).bind(@)
            ).bind(@)
            if ensure and not @parent.exists
                ensureCreateParent = true
                # NOTE(review): DirectoryInfo::create has signature
                # (mode..., cb), so mode becomes ['777', cb] and fnContinue is
                # the callback; parsing relies on string coercion of the
                # array — fragile, confirm.
                @parent.create('777', cb, fnContinue)
            else
                fnContinue()
            return
        # Synchronous variant of create(); returns @.
        createSync: (opts = {}) =>
            throw 'Invalid opts argument' if not isObject(opts)
            opts.ensure ?= false
            opts.mode ?= ''
            opts.overwrite ?= true
            ensure = if isBoolean(opts.ensure) then opts.ensure else false
            mode = opts.mode
            modeOctal = toIntegerFromOctalRepresenation(mode)
            overwrite = if isBoolean(opts.overwrite) then opts.overwrite else true
            writeflag = if overwrite then 'w' else 'wx'
            ensureCreateParent = false
            success = true
            if ensure and not @parent.exists
                ensureCreateParent = true
                @parent.createSync('777')
            try
                _fs.writeFileSync(@fullName, '', { encoding: 'utf8', mode: modeOctal, flag: writeflag }) # note that since the file contains zero bytes, the encoding doesn't actually matter at this point.
            catch ex
                success = false
            finally
                if success
                    @refresh()
                    if ensureCreateParent
                        _fs.chmodSync(@parent.fullName, toIntegerFromOctalRepresenation(mode || DEFAULT_FILESYSTEM_CREATION_MODE))
                        @parent.refresh()
                else
                    throw ex if ex
            return @
        # Asynchronously unlinks the file: temporarily opens up parent and
        # file permissions, unlinks, restores the parent's mode, cb(error).
        'delete': (cb) =>
            cb = if arguments.length > 1 then arguments[arguments.length - 1] else cb
            parent = @parent
            parentFullName = parent.fullName
            parentFSPermissions = parent.fileSystemPermissions
            parentFSPermissionsOctalFileSystemModeString = parentFSPermissions.octalFileSystemModeString or DEFAULT_FILESYSTEM_CREATION_MODE
            thisFullName = @fullName
            _fs.chmod parentFullName, toIntegerFromOctalRepresenation('777'), ((error1) ->
                _fs.chmod thisFullName, toIntegerFromOctalRepresenation('777'), ((error2) ->
                    _fs.unlink thisFullName, ((error3) ->
                        _fs.chmod parentFullName, toIntegerFromOctalRepresenation(parentFSPermissionsOctalFileSystemModeString), ((error4) ->
                            return cb.call(@, error4) if isFunction(cb)
                        ).bind(@)
                    ).bind(@)
                ).bind(@)
            ).bind(@)
        # Synchronous variant of 'delete'.
        deleteSync: () =>
            parent = @parent
            parentFullName = parent.fullName
            parentFSPermissions = parent.fileSystemPermissions
            parentFSPermissionsOctalFileSystemModeString = parentFSPermissions.octalFileSystemModeString or DEFAULT_FILESYSTEM_CREATION_MODE
            thisFullName = @fullName
            _fs.chmodSync(parentFullName, toIntegerFromOctalRepresenation('777'))
            _fs.chmodSync(thisFullName, toIntegerFromOctalRepresenation('777'))
            _fs.unlinkSync(thisFullName)
            _fs.chmodSync(parentFullName, toIntegerFromOctalRepresenation(parentFSPermissionsOctalFileSystemModeString))
            return
return _root[k] = v for k, v of thisNamespaceObjectContainer
| true | ###
* fs-filesysteminfo
* https://github.com/cookch10/node-fs-filesysteminfo
*
* Copyright (c) 2016 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
# Module-scope wiring: `_root` is the export target (CommonJS `exports` when
# present, otherwise the global object).
_root = exports ? this
_path = require('path')
_fs = require('fs')
_exec = require('child_process').exec
# Holds the most recent child process spawned by refresh() (see FileSystemInfo).
_child = null
# Clear the process umask so modes passed to mkdir/writeFile/chmod apply
# literally; the previous mask is kept in `oldmask`.
# NOTE(review): the umask is changed process-wide and never restored — confirm
# this is intentional for library consumers.
oldmask = undefined
newmask = 0
oldmask = process.umask(newmask)
# Staging container populated by namespace() and copied onto _root at the
# bottom of the file.
thisNamespaceObjectContainer = {}
#region ************* internal lib: extension methods ********************
# String::equals polyfill: strict equality against another string, optionally
# case-insensitive. Returns false for any non-string argument.
# BUGFIX: the original embedded-JS comparison `this === str` compared the
# boxed String receiver against a primitive (the compiled output is not in
# strict mode), so the case-sensitive path could never return true and guards
# like `originalPath.equals('')` were dead; compare primitive values instead.
unless String::equals
    String::equals = (str, ignoreCase) ->
        return false unless typeof str is 'string'
        ignoreCase = if typeof ignoreCase is 'boolean' then ignoreCase else false
        if ignoreCase then @toLowerCase() is str.toLowerCase() else @toString() is str
#endregion
#region ************* internal lib: utility methods ********************
# Type-test helpers (loose, typeof-based).
# BUGFIX: `typeof null` is 'object', so the original isObject accepted null
# and let a null opts argument slip past validation only to blow up later;
# reject null explicitly. Arrays still count as objects, as before.
isObject = (obj) ->
    obj isnt null and typeof obj is 'object'
isFunction = (obj) ->
    typeof obj is 'function'
isBoolean = (obj) ->
    typeof obj is 'boolean'
isString = (obj) ->
    typeof obj is 'string'
isNullOrUndefined = (obj) ->
    typeof obj is 'undefined' or obj is null
# Parse an octal mode string (e.g. '777') into its integer value; null when
# the input is not parseable as octal. (Names keep the original spelling —
# "Represenation" — for compatibility with existing callers.)
toIntegerFromOctalRepresenation = (obj) ->
    obj ?= ''
    integerFromOctal = parseInt(obj, 8)
    if isNaN(integerFromOctal) then null else integerFromOctal
# Inverse of the above: format an integer mode as a 0-prefixed octal string
# (e.g. 493 -> '0755'); '' when the masked value is zero.
toOctalStringFromIntegerRepresenation = (obj) ->
    obj ?= ''
    octalStringFromInteger = '0' + (obj & parseInt('07777', 8)).toString(8)
    if octalStringFromInteger is '00' then '' else octalStringFromInteger
# Create (or reuse) a dot-delimited namespace object chain under `target` and
# invoke `block` with the innermost namespace object plus the root. Afterwards
# every class published on the namespace gets a ___typeName___ prototype
# marker equal to its key, which Base::getType reports.
namespace = (target, name, block) ->
    # NOTE(review): with 4 arguments this captures the first one but the
    # result is never used beyond this local — appears vestigial; confirm
    # before removing.
    objectNamespaceContainer = (if arguments.length is 4 then Array::slice.call(arguments).shift() else null)
    # Called with fewer than 3 args: treat the args as (name, block) and
    # default the target to the module exports (or the global root).
    [target, name, block] = [(if typeof exports isnt 'undefined' then exports else _root), arguments...] if arguments.length < 3
    top = target
    # Walk/create each path segment of e.g. 'Util.System.IO', descending as
    # we go (the assignment re-binds `target` every iteration).
    target = target[item] or= {} for item in name.split '.'
    block target, top
    # Tag each exported class with its published name for getType().
    for own key, value of target
        value::___typeName___ = key
#endregion
namespace thisNamespaceObjectContainer, 'Util.System.IO', (exports) ->
DEFAULT_FILESYSTEM_CREATION_MODE = '777'
class exports.Base
getType: -> @___typeName___ or @constructor.name
toString: ->
@getType().toString()
class FileSystemPermissions extends exports.Base
constructor: (@octalFileSystemModeString = '') ->
CONST_LETTERFORMAT_ARR = ['---', '--x', '-w-', '-wx', 'r--', 'r-x', 'rw-', 'rwx']
CONST_BINARYFORMAT_ARR = ['000', '001', '010', '011', '100', '101', '110', '111']
fnGetPermissionObj = (intPermissionString) ->
intPermission = parseInt(intPermissionString) or 0
defaultObj = { canRead: false, canWrite: false, canExecute: false, letterFormat: CONST_LETTERFORMAT_ARR[intPermission], binaryFormat: CONST_BINARYFORMAT_ARR[intPermission] }
if intPermission
switch intPermission
when 7
defaultObj.canRead = true
defaultObj.canWrite = true
defaultObj.canExecute = true
when 6
defaultObj.canRead = true
defaultObj.canWrite = true
when 5
defaultObj.canRead = true
defaultObj.canExecute = true
when 4
defaultObj.canRead = true
when 3
defaultObj.canWrite = true
defaultObj.canExecute = true
when 2
defaultObj.canWrite = true
when 1
defaultObj.canExecute = true
else
Object.freeze(defaultObj)
_owner = {name: 'owner', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _owner.name + '',
get: ->
_owner.value
configurable: true
enumerable: true
@owner = _owner.value
_group = {name: 'group', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _group.name + '',
get: ->
_group.value
configurable: true
enumerable: true
@group = _group.value
_otherUsers = {name: 'otherUsers', value: fnGetPermissionObj()}
Object.defineProperty @, '' + _otherUsers.name + '',
get: ->
_otherUsers.value
configurable: true
enumerable: true
@otherUsers = _otherUsers.value
updateRolePermissions = (octalString) ->
octalString = if octalString then octalString.toString() else '000'
octalString = if octalString.length is 4 then octalString.substring(1) else octalString
octalArr = octalString.split('')
for octal, i in octalArr
role = if i is 0 then _owner else if i is 1 then _group else _otherUsers
permission = fnGetPermissionObj(octal)
role.value = permission
octalString
_octalFileSystemModeString = {name: 'octalFileSystemModeString', value: @octalFileSystemModeString}
Object.defineProperty @, '' + _octalFileSystemModeString.name + '',
get: ->
_octalFileSystemModeString.value
set: (value) ->
_value = if value and isString(value) and /^(?:0|0?[0-7]{3})$/g.test(value) then value else ''
updateRolePermissions(_value)
_octalFileSystemModeString.value = value
configurable: true
enumerable: true
@octalFileSystemModeString = _octalFileSystemModeString.value
toRoleLetterRepresentationString: ->
@owner.letterFormat + @group.letterFormat + @otherUsers.letterFormat
toRoleBinaryRepresentationString: ->
@owner.binaryFormat + @group.binaryFormat + @otherUsers.binaryFormat
# FileSystemInfo mirrors .NET's System.IO.FileSystemInfo: it wraps a path and
# exposes metadata about the entry it names (fullName, name, extension, type
# flags, permissions, existence, and the raw fs.Stats "status").
# NOTE(review): depends on module-level helpers declared elsewhere in this file
# (_fs, _path, _exec, FileSystemPermissions, isFunction,
# toOctalStringFromIntegerRepresenation).
class exports.FileSystemInfo extends exports.Base
  # originalPath - the path string this info object describes (may be relative).
  constructor: (@originalPath) ->
    _initialized = false
    # Template stats object used when the path does not exist: every numeric
    # fs.Stats attribute is zeroed and every *time attribute gets the
    # 1601-epoch sentinel below.
    _emptyStatusObj = (->
      #If the path used to construct the FileSystemInfo object does not exist, all time-related status attributes will be set to 12:00 midnight, January 1, 1601 A.D. (C.E.) Coordinated Universal Time (UTC), adjusted to local time.
      DEFAULT_DATETIME_ATTRIBUTE_VALUE = new Date(-11644412400000)
      if not _initialized
        emptyStatsObj = {}
        try
          # Stat the cwd purely to learn the set of keys an fs.Stats carries.
          emptyStatsObj = _fs.statSync(process.cwd())
        catch ex
          emptyStatsObj = {}
        finally
          for k of emptyStatsObj
            kValue = if (/time$/i).test(k) then DEFAULT_DATETIME_ATTRIBUTE_VALUE else 0
            emptyStatsObj[k] = kValue
        emptyStatsObj
    )()
    # Each public attribute is backed by a {name, value} closure record exposed
    # via Object.defineProperty; the plain assignment after each definition
    # writes a shadow copy (a no-op for the non-writable properties).
    _originalPath = {name: 'originalPath', value: @originalPath}
    Object.defineProperty @, '' + _originalPath.name + '',
      writable: false #When the writable property attribute is set to false, the property is said to be 'non-writable'. It cannot be reassigned.
      # Bare two-character drive designators like "C:" are normalized to ".".
      value: `_originalPath.value.length === 2 && _originalPath.value.charAt(1) === ':' ? '.' : _originalPath.value`
      configurable: false #true if and only if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. Defaults to false.
      enumerable: true #true if and only if this property shows up during enumeration of the properties on the corresponding object. Defaults to false.
    @originalPath = _originalPath.value
    # Absolute, resolved form of the original path.
    _fullName = {name: 'fullName', value: _path.resolve(@originalPath)}
    Object.defineProperty @, '' + _fullName.name + '',
      writable: false
      value: _fullName.value
      configurable: false
      enumerable: true
    @fullName = _fullName.value
    # Last path segment (file or directory name).
    _name = {name: 'name', value: _path.basename(@originalPath)}
    Object.defineProperty @, '' + _name.name + '',
      writable: false
      value: _name.value
      configurable: false
      enumerable: true
    @name = _name.value
    # Extension including the leading dot ('' when none).
    _extension = {name: 'extension', value: _path.extname(@originalPath)}
    Object.defineProperty @, '' + _extension.name + '',
      writable: false
      value: _extension.value
      configurable: false
      enumerable: true
    @extension = _extension.value
    # Entry-type flags, populated by refresh() from the fs.Stats is*() methods.
    _flags = {name: 'flags', value: {
      isDirectory: false,
      isFile: false,
      isBlockDevice: false,
      isCharacterDevice: false,
      isFIFO: false,
      isSocket: false,
      isSymbolicLink: false
    }}
    Object.defineProperty @, '' + _flags.name + '',
      writable: false
      value: _flags.value
      configurable: false
      enumerable: true
    @flags = _flags.value
    # Permission info container, filled in asynchronously by refresh().
    _fileSystemPermissions = {name: 'fileSystemPermissions', value: new FileSystemPermissions()}
    Object.defineProperty @, '' + _fileSystemPermissions.name + '',
      get: ->
        _fileSystemPermissions.value
      configurable: true
      enumerable: true
    @fileSystemPermissions = _fileSystemPermissions.value
    # 'exists' re-checks the disk on every read via refresh().
    _exists = {name: 'exists', value: false}
    Object.defineProperty @, '' + _exists.name + '',
      get: ->
        @refresh(_exists)
        _exists.value
      configurable: true
      enumerable: true
    @exists = _exists.value
    # Raw fs.Stats; setting null/undefined substitutes the empty template.
    _status = {name: 'status', value: null}
    Object.defineProperty @, '' + _status.name + '',
      get: ->
        _status.value
      set: (value) ->
        value ?= _emptyStatusObj
        _status.value = value
      configurable: true
      enumerable: true
    @status = _status.value
    # Prime exists/status/flags once at construction time.
    init = (->
      @refresh(_exists) if not _initialized
      _initialized = true if not _initialized
    ).apply(@)
  # Re-read existence, stats, flags and permissions from disk.
  # i - optional closure record whose .value receives the existence result.
  refresh: (i) =>
    if i?.value?
      exists = _fs.existsSync(@fullName)
      if exists
        i.value = true
      else
        i.value = false
        # Fallback: a non-readable entry can still show up in its parent's
        # directory listing; treat that as existing too.
        try
          parentPath = _path.resolve("#{@fullName}", '..')
          if not parentPath.equals(@fullName, true)
            parentContentsArr = _fs.readdirSync(parentPath)
            i.value = true if parentContentsArr.indexOf(@name) >= 0
        catch ex0
    try
      status = @status = _fs.statSync(@fullName)
      isWin = process.platform is 'win32'
      isFile = status.isFile()
      mode = status.mode
      if isWin then mode = mode | ((mode & 146) >> 1) # workaround for Windows OS permissions issues, see https://github.com/joyent/node/issues/4812 and https://github.com/joyent/node/issues/6381
      # fnContinue is invoked either directly with the numeric mode (1 arg) or
      # as a child_process.exec callback with (error, stdout, stderr).
      fnContinue = ((error, stdout, stderr) ->
        fsPermissions = @fileSystemPermissions
        flags = @flags
        status = @status
        parent = @parent
        parentFSPermissions = parent?.fileSystemPermissions
        parentFSPermissionsOctalFileSystemModeString = if parentFSPermissions then parentFSPermissions.octalFileSystemModeString else ''
        if arguments.length is 3
          # exec path: stdout holds the octal mode string from `stat -c "%a"`.
          fsPermissions.octalFileSystemModeString = '0' + stdout.trim() unless error
        else
          # direct path: first argument is the numeric mode.
          fsPermissions.octalFileSystemModeString = toOctalStringFromIntegerRepresenation(error)
        # Evaluate each fs.Stats is*() method into the flags map.
        for k of flags
          kAttrib = status[k]
          flags[k] = kAttrib.apply(status) if isFunction(kAttrib)
        if parent and not parentFSPermissionsOctalFileSystemModeString
          parent.refresh()
        @
      ).bind(@)
      if !mode
        # No mode from stat: shell out to `stat` on the parent directory.
        _child = _exec('stat -c "%a" ' + @parent.fullName, fnContinue)
      else
        fnContinue(mode)
    catch ex1
      # stat failed (entry gone / unreadable): reset status to the empty
      # template via the 'status' setter.
      @status = null
    return
# DirectoryInfo mirrors .NET's System.IO.DirectoryInfo: creation, deletion and
# enumeration of a directory and its children, in async and sync flavors.
class exports.DirectoryInfo extends exports.FileSystemInfo
  # originalPath - directory path; must be a non-empty string.
  constructor: (originalPath) ->
    throw 'Path is null or undefined' if isNullOrUndefined(originalPath) or originalPath.equals('')
    super(originalPath)
    # Re-expose 'exists' so the getter closes over this subclass instance.
    _exists = {name: 'exists', value: false}
    Object.defineProperty @, '' + _exists.name + '',
      get: ->
        @refresh(_exists)
        _exists.value
      configurable: true
      enumerable: true
    @exists = _exists.value
    # Lazily constructed DirectoryInfo for the parent directory.
    _parent = {name: 'parent', value: null}
    Object.defineProperty @, '' + _parent.name + '',
      get: ->
        if isNullOrUndefined(_parent.value)
          parentPath = _path.resolve("#{@fullName}", '..')
          _parent.value = new DirectoryInfo(parentPath)
        _parent.value
      configurable: true
      enumerable: true
    @parent = _parent.value
  # Asynchronously create this directory (and, if needed, its parent) with the
  # given octal mode. cb(error, this) is invoked on completion.
  # NOTE(review): the parent creation is asynchronous but the child mkdir below
  # is not gated on its completion, and cb fires once per ancestor level —
  # callers should not rely on strict ordering here; confirm intended contract.
  create: (mode..., cb) =>
    # Spread the mode splat so the recursive call receives the same argument
    # shape as a direct invocation (previously the whole array was passed as a
    # single positional argument, nesting it one level deeper per ancestor).
    if not @parent.exists then @parent.create(mode..., cb)
    if not @exists
      _fs.mkdir @fullName, toIntegerFromOctalRepresenation(mode), ((error) ->
        if isNullOrUndefined(error) then @refresh()
        return cb.call(@, error, @) if isFunction(cb)
      ).bind(@)
    else
      return cb.call(@, null, @) if isFunction(cb)
    return
  # Synchronously create this directory (and its parent) with the given mode.
  # Returns this. Re-throws the mkdir error on failure.
  createSync: (mode) =>
    if not @parent.exists then @parent.createSync(mode)
    if not @exists
      success = true
      try
        _fs.mkdirSync(@fullName, toIntegerFromOctalRepresenation(mode))
      catch ex
        success = false
      finally
        if success
          @refresh()
        else
          throw ex if ex
    return @
  # Asynchronously create a child directory at `path` (relative to this
  # directory). cb(error, subdirectory) fires when creation completes.
  createSubdirectory: (path, mode..., cb) =>
    throw 'Path is null or undefined' if isNullOrUndefined(path) or path.equals('')
    path = _path.join(@fullName, path)
    # Unwrap the splat: mode arrives as an array, so take its first element.
    # (The original tested the Array.isArray function reference, which is
    # always truthy; the result is the same since a splat is always an array.)
    mode = if Array.isArray(mode) then mode[0] else mode
    subdirectory = new DirectoryInfo(path)
    if not subdirectory.exists
      subdirectory.create mode, ((context, error, result) ->
        if context is result
          return cb.call(this, error, result) if isFunction(cb)
        else
          return result
      ).bind(@, subdirectory)
    else
      cb.call(@, null, subdirectory) if isFunction(cb)
    return
  # Synchronous variant of createSubdirectory; returns the new DirectoryInfo.
  createSubdirectorySync: (path, mode) =>
    throw 'Path is null or undefined' if isNullOrUndefined(path) or path.equals('')
    path = _path.join(@fullName, path)
    subdirectory = new DirectoryInfo(path)
    subdirectory.createSync(mode) if not subdirectory.exists
    subdirectory
  # Asynchronously delete this directory. With recursive=true all contents are
  # removed first; otherwise only an empty directory can be deleted.
  # cb(error) is invoked when done. ('delete' is quoted: reserved word.)
  'delete': (recursive = false, cb) =>
    # Allow delete(cb) with recursive defaulted to false.
    if arguments.length is 1
      rArg = Array::slice.call(arguments).slice(-1).shift()
      if isFunction(rArg)
        recursive = false
        cb = rArg
    recursive = if isBoolean(recursive) then recursive else false
    # Non-recursive: filter out every child so only this directory is visited.
    fnFilter = if recursive then null else -> false
    self = @
    fnIterator = ((dir, done) ->
      context = @
      results = []
      context.enumerateFileSystemInfos({ fnFilter: fnFilter, recursive: recursive }, (error1, list) ->
        return done.call(self, error1) if error1
        ###
        * NOTE:
        * A big assumption is being made here in that we assume directories will always appear ahead of files in the array from enumerateFileSystemInfos().
        * To prevent exceptions from being thrown by attempting to delete a non-empty directory, we are going to reverse() the array before continuing.
        * This means all files will be deleted ahead of their respective parent directories.
        * This also means that all subdirectories will be deleted ahead of their parent directories.
        *
        * **If this assumption proves false (possibly for other operating systems), this method logic can be revisited.
        *
        * **Possible alternative logic (just putting here for note purposes)
        *   -> One alternative (which I am not sure I like) would be to perform an array.sort(), which might be more expensive resource-wise.
        *   -> Another alternative would be to queue directories during iteration to be deleted after all files have been deleted.
        *   -> Something better than the previous two.
        ###
        # Deepest entries first, and this directory itself last.
        list.reverse().push(self)
        i = 0
        # Sequential async iteration: each unlink/rmdir triggers the next.
        (next = ->
          fsinfo = list[i++]
          return done.call(self, null) unless fsinfo
          # Force full permissions so the removal cannot fail on mode bits.
          _fs.chmod fsinfo.fullName, toIntegerFromOctalRepresenation('777'), ((error2) ->
            return done.call(self, error2) if error2
            ftype = @getType()
            if ftype is 'FileSystemInfo' then @refresh()
            if ftype is 'FileInfo' or ftype is 'DirectoryInfo'
              fsMethod = _fs[if ftype is 'FileInfo' then 'unlink' else 'rmdir']
              fsMethod.call @, @fullName, ((error3) ->
                return done.call(self, error3) if error3
                return next.call(@)
              )
            else
              return done('Unhandled exception for delete of ambiguous ' + ftype) # This could happen in some edge cases where I specific file or directory has non-read permissions, but was found to exist by looking at the parent directory contents.
          ).bind(fsinfo)
        ).call(context)
        return
      )
      return
    )
    fnIterator.call(@, @fullName, cb)
  # Synchronously delete this directory; recursive=true removes contents first.
  # Throws if the directory is non-empty and recursive is false.
  deleteSync: (recursive) =>
    recursive = if isBoolean(recursive) then recursive else false
    _fs.chmodSync(@fullName, toIntegerFromOctalRepresenation('777'))
    if recursive
      children = @enumerateFileSystemInfosSync({ fnFilter: null, recursive: false })
      if (children.length is 0)
        _fs.rmdirSync(@fullName)
      else
        children.forEach (fsinfo) ->
          ftype = fsinfo.getType()
          if ftype is 'FileSystemInfo' then fsinfo.refresh()
          if ftype is 'FileInfo' or ftype is 'DirectoryInfo'
            fsinfo.deleteSync(recursive)
          else
            throw 'Unhandled exception for deleteSync of ambiguous ' + ftype # This could happen in some edge cases where I specific file or directory has non-read permissions, but was found to exist by looking at the parent directory contents.
        _fs.rmdirSync(@fullName)
    else
      _fs.rmdirSync(@fullName) # this will (and should) throw an exception if the directory is not empty. To delete a non-empty directory, set recursive equal to true.
    return
  # Asynchronously enumerate children as FileSystemInfo/FileInfo/DirectoryInfo
  # instances. opts: fnFilter(name) -> bool, recursive. cb(error, array).
  enumerateFileSystemInfos: (opts = {}, cb) =>
    if arguments.length is 1 and isFunction(opts)
      cb = opts
      opts = {}
    return cb('Invalid opts argument') if not isObject(opts)
    defaultfnFilter = -> true
    opts.fnFilter ?= defaultfnFilter
    opts.recursive ?= false
    recursive = if isBoolean(opts.recursive) then opts.recursive else false
    fnFilter = if isFunction(opts.fnFilter) then opts.fnFilter else defaultfnFilter
    fileSystemInfosArr = []
    self = @
    fnIterator = ((dir, done) ->
      context = @
      _fs.readdir dir, ((error, list) ->
        return done.call(self, error) if error
        i = 0
        # Sequential async walk over this directory's entries.
        (next = ->
          fsname = list[i++]
          return done.call(self, null, fileSystemInfosArr) unless fsname
          if fnFilter(fsname)
            path = _path.join(context.fullName, fsname)
            # Probe with the base class, then specialize by flags.
            fileSystemInfoObj = new exports.FileSystemInfo(path)
            isDirectory = fileSystemInfoObj.flags.isDirectory
            isFile = fileSystemInfoObj.flags.isFile
            if (isDirectory)
              fileSystemInfoObj = new DirectoryInfo(path)
            else if (isFile)
              fileSystemInfoObj = new exports.FileInfo(path)
            fileSystemInfosArr.push(fileSystemInfoObj)
            if (recursive and isDirectory)
              # Depth-first descent before continuing with siblings.
              fnIterator.call fileSystemInfoObj, fileSystemInfoObj.fullName, (error, results) ->
                next.call(fileSystemInfoObj)
                return
            else
              next.call(context)
          else
            next.call(context)
        ).call(context)
        return
      ).bind(context)
      return
    )
    fnIterator.call(@, @fullName, cb)
  # Synchronous variant of enumerateFileSystemInfos; returns the array.
  # (A trailing array argument, used internally for recursion, accumulates
  # nested results.)
  enumerateFileSystemInfosSync: (opts = {}) =>
    throw 'Path does not exist and hence cannot be enumerated' if not @exists
    throw 'Invalid opts argument' if not isObject(opts)
    defaultfnFilter = -> true
    opts.fnFilter ?= defaultfnFilter
    opts.recursive ?= false
    recursive = if isBoolean(opts.recursive) then opts.recursive else false
    fnFilter = if isFunction(opts.fnFilter) then opts.fnFilter else defaultfnFilter
    rArg = Array::slice.call(arguments).slice(-1).shift()
    resultsArr = if Array.isArray(rArg) then rArg else []
    fileSystemInfosArr = []
    _fileSystemInfosArr = _fs.readdirSync(@fullName)
    _fileSystemInfosArr.forEach ((fsname) =>
      if fnFilter(fsname)
        path = _path.join(@fullName, fsname)
        fileSystemInfoObj = new exports.FileSystemInfo(path)
        if (fileSystemInfoObj.flags.isDirectory)
          fileSystemInfoObj = new DirectoryInfo(path)
        else if (fileSystemInfoObj.flags.isFile)
          fileSystemInfoObj = new exports.FileInfo(path)
        fileSystemInfosArr.push(fileSystemInfoObj)
    )
    if recursive
      fileSystemInfosArr.forEach (fsinfo) ->
        if fsinfo.flags.isDirectory
          resultsArr = fsinfo.enumerateFileSystemInfosSync(opts, resultsArr)
        return
      fileSystemInfosArr = fileSystemInfosArr.concat(resultsArr)
    fileSystemInfosArr
# FileInfo mirrors .NET's System.IO.FileInfo: creation and deletion of a
# single file, in async and sync flavors.
class exports.FileInfo extends exports.FileSystemInfo
  # originalPath - file path; must be a non-empty string.
  constructor: (originalPath) ->
    throw 'Path is null or undefined' if isNullOrUndefined(originalPath) or originalPath.equals('')
    super(originalPath)
    # Re-expose 'exists' so the getter closes over this subclass instance.
    _exists = {name: 'exists', value: false}
    Object.defineProperty @, '' + _exists.name + '',
      get: ->
        @refresh(_exists)
        _exists.value
      configurable: true
      enumerable: true
    @exists = _exists.value
    # Lazily constructed DirectoryInfo for the containing directory.
    _parent = {name: 'parent', value: null}
    Object.defineProperty @, '' + _parent.name + '',
      get: ->
        if isNullOrUndefined(_parent.value)
          parentPath = _path.resolve("#{@fullName}", '..')
          _parent.value = new exports.DirectoryInfo(parentPath)
        _parent.value
      configurable: true
      enumerable: true
    @parent = _parent.value
  # Asynchronously create this file (zero bytes).
  # opts: ensure (create missing parent dir), mode (octal string),
  # overwrite (false -> fail if the file exists). cb(error) when done.
  create: (opts = {}, cb) =>
    if arguments.length is 1 and isFunction(opts)
      cb = opts
      opts = {}
    return cb('Invalid opts argument') if not isObject(opts)
    opts.ensure ?= false
    opts.mode ?= ''
    opts.overwrite ?= true
    ensure = if isBoolean(opts.ensure) then opts.ensure else false
    mode = opts.mode
    modeOctal = toIntegerFromOctalRepresenation(mode)
    overwrite = if isBoolean(opts.overwrite) then opts.overwrite else true
    # 'w' truncates/creates; 'wx' fails if the path already exists.
    writeflag = if overwrite then 'w' else 'wx'
    ensureCreateParent = false
    fnContinue = ((error) ->
      _fs.writeFile @fullName, '', { encoding: 'utf8', mode: modeOctal, flag: writeflag }, ((error) -> # note that since the file contains zero bytes, the encoding doesn't actually matter at this point.
        if isNullOrUndefined(error) then @refresh()
        if ensureCreateParent
          # Parent was just created wide-open; restore the requested mode.
          _fs.chmod @parent.fullName, toIntegerFromOctalRepresenation(mode || DEFAULT_FILESYSTEM_CREATION_MODE), ((error) ->
            @parent.refresh()
            return cb.call(@, error) if isFunction(cb)
          ).bind(@)
        else
          return cb.call(@, error) if isFunction(cb)
      ).bind(@)
    ).bind(@)
    if ensure and not @parent.exists
      ensureCreateParent = true
      # NOTE(review): DirectoryInfo::create has signature (mode..., cb), so
      # here the splat collects ['777', cb] and fnContinue becomes cb —
      # confirm this argument order is intended.
      @parent.create('777', cb, fnContinue)
    else
      fnContinue()
    return
  # Synchronous variant of create; returns this. Re-throws write errors.
  createSync: (opts = {}) =>
    throw 'Invalid opts argument' if not isObject(opts)
    opts.ensure ?= false
    opts.mode ?= ''
    opts.overwrite ?= true
    ensure = if isBoolean(opts.ensure) then opts.ensure else false
    mode = opts.mode
    modeOctal = toIntegerFromOctalRepresenation(mode)
    overwrite = if isBoolean(opts.overwrite) then opts.overwrite else true
    writeflag = if overwrite then 'w' else 'wx'
    ensureCreateParent = false
    success = true
    if ensure and not @parent.exists
      ensureCreateParent = true
      @parent.createSync('777')
    try
      _fs.writeFileSync(@fullName, '', { encoding: 'utf8', mode: modeOctal, flag: writeflag }) # note that since the file contains zero bytes, the encoding doesn't actually matter at this point.
    catch ex
      success = false
    finally
      if success
        @refresh()
        if ensureCreateParent
          _fs.chmodSync(@parent.fullName, toIntegerFromOctalRepresenation(mode || DEFAULT_FILESYSTEM_CREATION_MODE))
          @parent.refresh()
      else
        throw ex if ex
    return @
  # Asynchronously delete this file. Temporarily opens up permissions on the
  # parent and the file, unlinks, then restores the parent's previous mode.
  # cb(error) receives only the final chmod's error. ('delete': reserved word.)
  'delete': (cb) =>
    cb = if arguments.length > 1 then arguments[arguments.length - 1] else cb
    parent = @parent
    parentFullName = parent.fullName
    parentFSPermissions = parent.fileSystemPermissions
    parentFSPermissionsOctalFileSystemModeString = parentFSPermissions.octalFileSystemModeString or DEFAULT_FILESYSTEM_CREATION_MODE
    thisFullName = @fullName
    # NOTE(review): errors from the intermediate chmod/unlink steps
    # (error1..error3) are not propagated to cb — confirm this is intended.
    _fs.chmod parentFullName, toIntegerFromOctalRepresenation('777'), ((error1) ->
      _fs.chmod thisFullName, toIntegerFromOctalRepresenation('777'), ((error2) ->
        _fs.unlink thisFullName, ((error3) ->
          _fs.chmod parentFullName, toIntegerFromOctalRepresenation(parentFSPermissionsOctalFileSystemModeString), ((error4) ->
            return cb.call(@, error4) if isFunction(cb)
          ).bind(@)
        ).bind(@)
      ).bind(@)
    ).bind(@)
  # Synchronous variant of delete.
  deleteSync: () =>
    parent = @parent
    parentFullName = parent.fullName
    parentFSPermissions = parent.fileSystemPermissions
    parentFSPermissionsOctalFileSystemModeString = parentFSPermissions.octalFileSystemModeString or DEFAULT_FILESYSTEM_CREATION_MODE
    thisFullName = @fullName
    _fs.chmodSync(parentFullName, toIntegerFromOctalRepresenation('777'))
    _fs.chmodSync(thisFullName, toIntegerFromOctalRepresenation('777'))
    _fs.unlinkSync(thisFullName)
    _fs.chmodSync(parentFullName, toIntegerFromOctalRepresenation(parentFSPermissionsOctalFileSystemModeString))
    return
return _root[k] = v for k, v of thisNamespaceObjectContainer
|
[
{
"context": "egy(\n# usernameField: 'email'\n# passwordField: 'password'\n#, (email, password, done) ->\n# User.findOne\n# ",
"end": 3823,
"score": 0.9993168115615845,
"start": 3815,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "OD = 'DoExpressCheckoutPayment'\n# params.TOKEN = nvp_res.TOKEN\n# params.PAYERID = nvp_res.PAYERID\n",
"end": 16824,
"score": 0.5588901042938232,
"start": 16823,
"tag": "PASSWORD",
"value": "n"
}
] | server/app.coffee | MichaelMelkonian/TriggerRally | 176 | "use strict"
_ = require 'underscore'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
connect = require 'connect'
compression = require 'compression'
cookie = require 'cookie'
express = require 'express'
expressSession = require 'express-session'
http = require 'http'
logger = require 'morgan'
methodOverride = require 'method-override'
mongoose = require 'mongoose'
mongoskin = require 'mongoskin'
session_mongoose = require 'session-mongoose'
socketio = require 'socket.io'
stylus = require 'stylus'
passport = require 'passport'
FacebookStrategy = require('passport-facebook').Strategy
GoogleStrategy = require('passport-google-oauth').OAuth2Strategy
TwitterStrategy = require('passport-twitter').Strategy
LocalStrategy = require('passport-local').Strategy
# This has to come first to set up Mongoose schemas.
objects = require './objects'
api = require './api'
config = require './config'
{ makePubId } = require './objects/common'
routes = require './routes'
# stripe = require('stripe')(config.stripe.API_KEY)
# Timestamp helpers: ISO-8601 "now", a morgan :isodate token, and a console
# logger that prefixes each message with the bracketed timestamp.
getIsodate = -> new Date().toISOString()
logger.format 'isodate', (req, res) -> getIsodate()
log = (msg) ->
  console.log "[#{getIsodate()}] #{msg}"
# --- Database and application bootstrap (order-dependent side effects) ---
# Log every mongoose query.
mongoose.set 'debug', true
mongoose.connection.on "error", (err) ->
  log "Could not connect to mongo server!"
  log err.message
# Mongo-backed session store for express-session / socket.io handshakes.
SessionStore = session_mongoose(connect)
sessionStore = new SessionStore(
  url: "mongodb://#{config.MONGODB_HOST}/sessions"
  # Expiration check worker run interval in millisec (default: 60000)
  interval: 120000
)
# Models registered by ./objects (required above).
User = mongoose.model('User')
UserPassport = mongoose.model('UserPassport')
Car = mongoose.model('Car')
Track = mongoose.model('Track')
Run = mongoose.model('Run')
mongoose.connect config.MONGOOSE_URL
# Alternate DB connection
# Mongoskin handle with direct collection bindings (used alongside mongoose).
dbUrl = "#{config.db.host}:#{config.db.port}/#{config.db.name}?auto_reconnect"
db = mongoskin.db dbUrl, { safe: true }
db.bind 'cars'
db.bind 'runs'
db.bind 'tracks'
db.bind 'users'
log "Base directory: #{__dirname}"
# The express app doubles as this module's export.
app = module.exports = express()
# Environment-derived constants used to build absolute callback URLs.
DOMAIN = process.env.DOMAIN or 'triggerrally.com'
NODE_ENV = process.env.NODE_ENV
PORT = process.env.PORT or 80
PROTOCOL = process.env.PROTOCOL or 'http'
PUBLIC_PORT = if NODE_ENV is 'production' then 80 else PORT
PORT_SUFFIX = if PUBLIC_PORT is 80 then "" else ":#{PUBLIC_PORT}"
URL_PREFIX = "#{PROTOCOL}://#{DOMAIN}#{PORT_SUFFIX}"
# Find-or-create flow shared by all passport strategies.
# profile - provider profile (OpenID 'identifier' or provider+id key it).
# done(error, userPassport) - receives the UserPassport with .user populated.
authenticateUser = (profile, done) ->
  passport_id = profile.identifier or (profile.provider + profile.id)
  UserPassport
    .findOne(passport_id: passport_id)
    .populate('user')
    .exec (error, userPassport) ->
      return done error if error
      user = userPassport?.user
      # Existing account: hand it straight back.
      return done null, userPassport if user
      # First login (or passport without a user): create both records.
      # NOTE(review): the new UserPassport is saved without passport_id being
      # set explicitly here — presumably profile carries it; verify.
      userPassport ?= new UserPassport()
      # Create new user from passport profile.
      user = new User
        name: profile.displayName or profile.username
      # user.email = profile.emails[0].value if profile.emails?[0]
      user.save (error) ->
        return done error if error
        userPassport.profile = profile
        userPassport.user = user._id
        userPassport.save (error) ->
          done error, userPassport
  #res.redirect('/user/' + user.pub_id + '/edit');
# Post-login handlers. Passport should leave a single user passport on
# req.user; an array indicates a session mix-up, which we fail loudly on.

# Popup/API flow: just close the auth window.
authenticationSuccessfulAPI = (req, res) ->
  if Array.isArray req.user
    throw new Error('authenticationSuccessfulAPI: req.user array')
  res.redirect '/closeme'

# Full-page flow: back to the home page.
authenticationSuccessful = (req, res) ->
  if Array.isArray req.user
    throw new Error('authenticationSuccessful: req.user array')
  res.redirect '/'
#passport.use new LocalStrategy(
# usernameField: 'email'
# passwordField: 'password'
#, (email, password, done) ->
# User.findOne
# _email: email
# , (err, user) ->
# return done(err) if err
# return done(null, false) unless user
# return done(null, false) unless user.authenticate(password)
# done null, user
#)
# for i in ["", "/v1"]
# passport.use "facebook#{i}", new FacebookStrategy(
# clientID: config.FACEBOOK_APP_ID
# clientSecret: config.FACEBOOK_APP_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/facebook/callback"
# , (accessToken, refreshToken, profile, done) ->
# profile.auth = { accessToken, refreshToken }
# authenticateUser profile, done
# )
# passport.use "google#{i}", new GoogleStrategy(
# clientID: config.GOOGLE_CLIENT_ID
# clientSecret: config.GOOGLE_CLIENT_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/google/callback"
# , (token, refreshToken, profile, done) ->
# profile.auth = { token, refreshToken }
# authenticateUser profile, done
# )
# passport.use "twitter#{i}", new TwitterStrategy(
# consumerKey: config.TWITTER_APP_KEY
# consumerSecret: config.TWITTER_APP_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/twitter/callback"
# , (token, tokenSecret, profile, done) ->
# profile.auth = { token, tokenSecret }
# authenticateUser profile, done
# )
# Session (de)serialization: only the UserPassport _id is stored in the
# session; each request rehydrates the full document with its user populated.
passport.serializeUser (passportDoc, next) ->
  next null, passportDoc.id
passport.deserializeUser (passportId, next) ->
  query = UserPassport.findOne(_id: passportId)
  query.populate('user').exec (err, passportDoc) ->
    next err, passportDoc
# --- Middleware chain (registration order matters) ---
# Request logging with the custom :isodate token defined above.
app.use logger('[:isodate] :status :response-time ms :res[content-length] :method :url :referrer', format: '[:isodate] :status :response-time ms :res[content-length] :method :url :referrer')
app.disable 'x-powered-by'
app.use compression()
# Compile .styl sources on demand into /public.
app.use stylus.middleware(
  src: __dirname + '/stylus'
  dest: __dirname + '/public'
)
app.use express.static(__dirname + '/public')
app.set 'views', __dirname + '/views'
app.set 'view engine', 'jade'
# Capture the raw request body (before body parsers consume the stream).
app.use (req, res, next) ->
  req.rawBody = ''
  # req.setEncoding('utf8')
  req.on 'data', (chunk) -> req.rawBody += chunk
  next()
app.use bodyParser.urlencoded({
  extended: true
})
app.use bodyParser.json();
app.use cookieParser(config.SESSION_SECRET)
# NOTE(review): the session secret here is the hard-coded string 'asecret'
# while cookieParser above uses config.SESSION_SECRET — confirm this mismatch
# is intended; a hard-coded secret is a security concern.
app.use expressSession(
  secret: 'asecret'
  saveUninitialized: true
  resave: true
  cookie:
    # Four weeks.
    maxAge: 4 * 7 * 24 * 60 * 60 * 1000
  store: sessionStore
)
app.use passport.initialize()
app.use passport.session()
app.use methodOverride()
app.use (req, res, next) ->
  # Enable Chrome Frame if installed.
  res.setHeader 'X-UA-Compatible', 'chrome=1'
  next()
app.use routes.defaultParams
#
#// We can delay certain resources for debugging purposes.
#app.use(function(req, res, next) {
#  var delay = 0;
#  if (req.path.match('nice.png')) delay = 3000;
#  if (req.path.match('heightdetail1.jpg')) delay = 6000;
#  setTimeout(function() {
#    next();
#  }, delay);
#});
#
app.use app.router
# Send any path not otherwise handled to the unified app.
# TODO: Make the app show a 404 as appropriate.
app.use routes.unified
# Error handlers: both log and answer 500; development additionally exposes
# the call stack in the JSON body.
if app.get('env') is 'development'
  app.use (err, req, res, next) ->
    console.error err
    res.json 500,
      error: "Internal Server Error"
      call_stack: err.stack?.split('\n')
if app.get('env') is 'production'
  app.use (err, req, res, next) ->
    console.error err
    res.json 500,
      error: "Internal Server Error"
# --- Routes ---
# /v1 auth routes: popup-style flow used by the API client (redirects to
# /closeme on success so the popup window can close itself).
app.get '/v1/auth/facebook', passport.authenticate('facebook/v1')
app.get '/v1/auth/facebook/callback', passport.authenticate('facebook/v1',
  failureRedirect: '/login?popup=1'
), authenticationSuccessfulAPI
app.get '/v1/auth/google', passport.authenticate('google/v1', { scope : ['profile', 'email'] })
app.get '/v1/auth/google/callback', passport.authenticate('google/v1',
  failureRedirect: '/login?popup=1'
), authenticationSuccessfulAPI
app.get '/v1/auth/twitter', passport.authenticate('twitter/v1')
app.get '/v1/auth/twitter/callback', passport.authenticate('twitter/v1'), authenticationSuccessfulAPI
app.get '/v1/auth/logout', (req, res) ->
  req.logOut()
  res.json status: "ok"
# Mount the REST API routes.
api.setup app, passport
# Full-page auth routes: redirect back to the home page on success.
app.get '/auth/facebook', passport.authenticate('facebook')
app.get '/auth/facebook/callback', passport.authenticate('facebook',
  failureRedirect: '/login'
), authenticationSuccessful
app.get '/auth/google', passport.authenticate('google', { scope : ['profile', 'email'] })
app.get '/auth/google/callback', passport.authenticate('google',
  failureRedirect: '/login'
), authenticationSuccessful
app.get '/auth/twitter', passport.authenticate('twitter')
app.get '/auth/twitter/callback', passport.authenticate('twitter',
  failureRedirect: '/login'
), authenticationSuccessful
app.get '/logout', (req, res) ->
  req.logOut()
  res.redirect '/'
# Development backdoor: log in as the passport mapped to a code in
# config.autologin (401 when the code is unknown).
app.get '/autologin', (req, res, next) ->
  code = req.query.code
  passport_id = config.autologin[code]
  return res.send 401 unless passport_id
  UserPassport
    .findOne({ passport_id })
    .populate('user')
    .exec (error, userPassport) ->
      return next error if error
      return res.send 500 unless userPassport
      req.login userPassport, (error) ->
        return next error if error
        res.redirect '/'
app.get '/closeme', routes.closeme
# Backward compatibility.
app.get '/drive', (req, res) ->
  res.redirect '/', 301
app.get '/x/Preview/Arbusu/drive', (req, res) ->
  # req.params.idTrack = 'Preview'
  # req.params.idCar = 'Arbusu'
  # loadUrlTrack req, res, ->
  #   loadUrlCar req, res, ->
  #     routes.drive req, res
  # Preview is broken, so just redirect to home.
  res.redirect '/', 301
app.get '/x/:idTrack/:idCar/drive', (req, res) ->
  res.redirect "/track/#{req.params.idTrack}/drive", 301
# app.get '/track/:idTrack', (req, res) ->
#   res.redirect "/track/#{req.params.idTrack}/drive", 301
app.get '/login', routes.login
# ppec = require './paypal/expresscheckout'
qs = require 'querystring'
# Catalogue of purchasable packs. 'credits'-currency packs grant product
# unlocks and are bought with in-game credits; USD credit packs are appended
# below by addCredits.
availablePacks =
  ignition:
    cost: '750'
    currency: 'credits'
    # name: 'Trigger Rally: Icarus Ignition'
    # description: 'A new car for Trigger Rally.'
    # url: 'https://triggerrally.com/ignition'
    products: [ 'ignition' ]
  mayhem:
    cost: '400'
    currency: 'credits'
    # name: 'Trigger Rally: Mayhem Monster Truck'
    # description: 'The Mayhem Monster Truck for Trigger Rally.'
    # url: 'https://triggerrally.com/mayhem'
    products: [ 'mayhem' ]
  # full:
  #   name: 'Trigger Rally: Full Game'
  #   description: 'Access all tracks, the Arbusu, Mayhem and Icarus cars, and more!'
  #   url: 'https://triggerrally.com/purchase'
  #   products: [ 'packa', 'ignition', 'mayhem', 'paid' ]
# Register a purchasable USD credit pack under the key "credits<amount>".
# credits - credit amount as a string; cost - USD price as a string.
addCredits = (credits, cost) ->
  pack =
    name: "#{credits} Credits - Trigger Rally"
    description: "A package of #{credits} credits for your Trigger Rally account."
    url: "https://triggerrally.com/"
    cost: cost
    credits: credits
    currency: 'USD'
  availablePacks["credits#{credits}"] = pack
# Current USD credit-pack price points (older pricing kept commented below).
addCredits '80', '0.99'
addCredits '200', '1.99'
addCredits '550', '4.99'
addCredits '1200', '9.99'
addCredits '2000', '14.99'
# # addCredits '80', '0.29'
# addCredits '200', '0.59'
# addCredits '550', '1.49'
# addCredits '1200', '2.99'
# addCredits '2000', '4.49'
# addCredits '200', '0.59'
# addCredits '400', '1.15'
# addCredits '750', '1.95'
# addCredits '1150', '2.95'
# addCredits '2000', '4.49'
# Add an 'id' field matching the pack key.
pack.id = id for own id, pack of availablePacks
# Apply a purchased pack to a Backbone user model: merge product unlocks,
# add credits, and append a payment-history entry, then persist.
# method - payment method label recorded in the history (e.g. 'stripe').
grantPackToUser = (pack, bbUser, method, res) ->
  saveData = {}
  if pack.products
    saveData.products = _.union (bbUser.products ? []), pack.products
  if pack.credits
    # NOTE(review): assumes bbUser.credits is a number; if it can be
    # undefined this yields NaN — confirm the model guarantees a default.
    saveData.credits = bbUser.credits + parseInt(pack.credits)
  saveData.pay_history = bbUser.pay_history ? []
  saveData.pay_history.push [ Date.now(), method, pack.currency, pack.cost, pack.id ]
  console.log saveData
  bbUser.save saveData,
    success: ->
      log "PURCHASE COMPLETE for user #{bbUser.id} using #{method}"
      res.redirect '/closeme'
    error: ->
      # Payment went through but we failed to record it — needs manual fixup.
      log "user: #{JSON.stringify bbUser}"
      failure res, 500, "COMPLETE BUT FAILED TO RECORD - VERY BAD!!"
# Checkout entry point: validates the requested pack, blocks re-purchase of
# already-owned products, and dispatches to the matching payment flow.
app.get '/checkout', (req, res) ->
  return res.send 401 unless req.user
  packId = req.query.pack
  pack = availablePacks[packId]
  return res.send 404 unless pack
  if pack.products
    # Check that user doesn't already have this pack. Prevents accidental double-purchase.
    newProducts = _.difference pack.products, req.user.user.products
    return res.send 409 if _.isEmpty newProducts
  switch pack.currency
    # Real currency payments are disabled.
    # when 'USD'
    #   switch req.query.method
    #     when 'paypal' then paypalCheckout pack, req, res
    #     when 'stripe' then stripeCheckout pack, req, res
    #     else res.send 400
    when 'credits' then creditsCheckout pack, req, res
    else res.send 400
# freeCheckout = (pack, req, res) ->
# return res.send 402 unless pack.cost in [ 0, '0' ]
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure 500 unless bbUser
# products = bbUser.products ? []
# products = _.union products, pack.products
# bbUser.save { products },
# success: ->
# res.redirect '/closeme'
# error: ->
# res.send 500
# Purchase a pack with in-game credits: verify the balance covers pack.cost,
# then atomically save the unlocked products and the reduced credit balance.
# Responds 401 (not logged in), 402 (insufficient credits), 200/redirect on
# success, 500 on save failure.
creditsCheckout = (pack, req, res) ->
  return failure res, 401 unless req.user
  api.findUser req.user.user.pub_id, (bbUser) ->
    # Fix: failure's signature is (res, code, msg) — the original called
    # `failure 500`, passing 500 as `res` and crashing on res.send.
    return failure res, 500 unless bbUser
    cost = parseInt(pack.cost)
    return res.send 402 unless bbUser.credits >= cost
    log "user #{bbUser.id} purchased #{pack.id} for #{cost} credits"
    products = bbUser.products ? []
    products = _.union products, pack.products
    bbUser.save { products, credits: bbUser.credits - cost },
      success: ->
        log "saved user #{JSON.stringify bbUser}"
        if req.query.popup
          res.redirect '/closeme'
        else
          res.send 200
      error: ->
        res.send 500
# stripeCheckout = (pack, req, res) ->
# return failure res, 401 unless req.user
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure res, 500 unless bbUser
# charge = stripe.charges.create
# amount: Math.round(pack.cost * 100) # amount in cents
# currency: "usd"
# card: req.query.token
# description: "Charge for user ID #{bbUser.id}"
# , (err, charge) =>
# if err
# console.error err
# return res.send 500
# grantPackToUser pack, bbUser, 'stripe', res
# Build the PayPal Express Checkout NVP parameter set for a pack.
# Digital-goods sale: shipping is suppressed and the pack id rides along in
# PAYMENTREQUEST_0_CUSTOM so the return handler can look the pack up again.
getPaymentParams = (pack) ->
  amount = pack.cost
  params =
    PAYMENTREQUEST_0_CUSTOM: pack.id
    PAYMENTREQUEST_0_PAYMENTACTION: 'Sale'
    PAYMENTREQUEST_0_AMT: amount
    PAYMENTREQUEST_0_ITEMAMT: amount # Required for digital goods.
    RETURNURL: "#{URL_PREFIX}/checkout/return"
    CANCELURL: "#{URL_PREFIX}/closeme"
    REQCONFIRMSHIPPING: 0
    NOSHIPPING: 1
    ALLOWNOTE: 0
    # Optional branding/identity fields left unset: HDRIMG, HDRBORDERCOLOR,
    # HDRBACKCOLOR, PAYFLOWCOLOR, EMAIL, LANDINGPAGE, BUYERUSERNAME,
    # BUYERREGISTRATIONDATE.
    BUYEREMAILOPTINENABLE: 1
    L_PAYMENTREQUEST_0_ITEMCATEGORY0: 'Digital'
    L_PAYMENTREQUEST_0_ITEMURL0: pack.url
    L_PAYMENTREQUEST_0_QTY0: 1
    L_PAYMENTREQUEST_0_AMT0: amount
    L_PAYMENTREQUEST_0_DESC0: pack.description
    L_PAYMENTREQUEST_0_NAME0: pack.name
  params
# paypalCheckout = (pack, req, res) ->
# params = getPaymentParams pack
# return res.send 404 unless params
# params.METHOD = 'SetExpressCheckout'
# log "Calling: #{JSON.stringify params}"
# ppec.request params, (err, nvp_res) ->
# if err
# console.error "#{params.METHOD} error: #{err}"
# return res.send 500
# log "#{params.METHOD} response: #{JSON.stringify nvp_res}"
# return res.send 500 if nvp_res.ACK isnt 'Success'
# TOKEN = nvp_res.TOKEN
# return res.send 500 unless TOKEN
# res.redirect ppec.redirectUrl TOKEN
# Log a failed purchase and answer the request with the given HTTP status.
failure = (res, code, msg) ->
  message = "PURCHASE FAILED: (#{code}) #{msg}"
  console.error message
  res.send code
# app.get '/checkout/return', (req, res) ->
# return failure res, 401 unless req.user
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure res, 500 unless bbUser
# params =
# METHOD: 'GetExpressCheckoutDetails'
# TOKEN: req.query.token
# log "Calling: #{JSON.stringify params}"
# ppec.request params, paypalResponse_GetExpressCheckoutDetails.bind null, bbUser, req, res
# paypalResponse_GetExpressCheckoutDetails = (bbUser, req, res, err, nvp_res) ->
# method = 'GetExpressCheckoutDetails'
# return failure res, 500, "#{method} error: #{err}" if err
# log "#{method} response: #{nvp_res}"
# return failure res, 500 if nvp_res.ACK isnt 'Success'
# packId = nvp_res.PAYMENTREQUEST_0_CUSTOM
# pack = availablePacks[packId]
# # TODO: Check that price and description match what we expect?
# params = getPaymentParams pack
# return failure res, 500 unless params
# params.METHOD = 'DoExpressCheckoutPayment'
# params.TOKEN = nvp_res.TOKEN
# params.PAYERID = nvp_res.PAYERID
# params.RETURNFMFDETAILS = 1
# log "Calling: #{JSON.stringify params}"
# ppec.request params, paypalResponse_DoExpressCheckoutPayment.bind null, bbUser, req, res
# paypalResponse_DoExpressCheckoutPayment = (bbUser, req, res, err, nvp_res) ->
# method = 'DoExpressCheckoutPayment'
# return failure res, 500, "#{method} error: #{err}" if err
# log "#{method} response: #{JSON.stringify nvp_res}"
# return failure res, 500 if nvp_res.ACK isnt 'Success'
# grantPackToUser pack, bbUser,'paypal', res
#
#app.post('/login',
# passport.authenticate('local', { failureRedirect: '/login?status=failed' }),
# authenticationSuccessful
#);
#
server = http.createServer(app)
io = socketio.listen(server)
server.listen PORT
log "Server listening on port #{PORT} in #{app.settings.env} mode"
# TODO: Mirror http api over socket.io.
if NODE_ENV is 'production'
io.set 'log level', 1
else
io.set 'log level', 2
# Report the current number of connected sockets via the timestamped logger.
showNumberConnected = ->
  log "Connected sockets: #{io.sockets.clients().length}"
# socket.io handshake authorization: rebuilds the Express session from the
# handshake cookie so socket handlers can see the logged-in user.
io.set 'authorization', (data, accept) ->
  # http://www.danielbaulig.de/socket-ioexpress/
  return accept('No cookie transmitted.', false) unless data.headers.cookie
  data.cookie = cookie.parse(data.headers.cookie)
  sid = data.cookie['connect.sid']
  return accept('No session id found.', false) unless sid
  # Strip the signature wrapper from the signed cookie value.
  # NOTE(review): assumes the raw session id occupies chars 2..25 exactly —
  # confirm against the cookie format produced by the session middleware.
  data.sessionID = sid.substring(2, 26)
  # save the session store to the data object
  # (as required by the Session constructor)
  data.sessionStore = sessionStore
  sessionStore.get data.sessionID, (err, session) ->
    return accept err, false if err
    return accept 'No session', false unless session
    # create a session object, passing data as request and our
    # just acquired session data
    Session = connect.middleware.session.Session
    data.session = new Session(data, session)
    # TODO: accept fast, before deserialization?
    passport.deserializeUser data.session.passport.user, (err, userPassport) ->
      return accept 'passport error: ' + err, false if err
      user = data.session.user = userPassport.user
      data.session.userPassport = userPassport
      # Sockets without a logged-in user are still accepted.
      return accept null, true unless user
      api.findUser data.session.user.pub_id, (bbUser) ->
        return accept 'failed to load backbone user' unless bbUser
        # Cache the Backbone user on the session for the /drive namespace.
        data.session.bbUser = bbUser
        accept null, true
# Log the connection count whenever a socket connects or disconnects.
io.on 'connection', (socket) ->
  showNumberConnected()
  socket.on 'disconnect', ->
    showNumberConnected()
# Shared write callback: DB errors are logged but otherwise ignored.
dbCallback = (err) ->
  console.error err if err
# Per-socket state machine recording gameplay runs on the /drive namespace.
io.of('/drive').on 'connection', (socket) ->
  session = socket.handshake.session
  user = session.user
  bbUser = session.bbUser
  # Current run document plus the input/position sample buffers for it.
  run = record_i_timeline = record_p_timeline = null
  do resetRun = ->
    run = null
    record_i_timeline = []
    record_p_timeline = []
  # Flush the buffered timelines (and any recorded times) to the run document.
  completeRun = ->
    return unless run
    console.log "Finalizing records for run: #{run.pub_id}"
    newValues =
      "record_i.timeline": record_i_timeline
      "record_p.timeline": record_p_timeline
    newValues.times = run.times if run.times
    newValues.time = run.time if run.time?
    db.runs.update { _id: run._id }, $set: newValues, dbCallback
    resetRun()
  socket.on 'disconnect', completeRun
  # TODO: Resume connections, or notify user if recording has stopped.
  socket.on 'start', (data) ->
    # Starting a new run implicitly finalizes the previous one.
    completeRun()
    resetRun()
    car = track = null
    # Fires after both the car and track lookups below have completed.
    done = _.after 2, ->
      return unless car and track
      # This is why I should have a model layer.
      db.tracks.update { _id: track._id }, { $inc: { count_drive: 1 } }, dbCallback
      # return # Disable run recording
      # Only logged-in users get their runs recorded.
      return unless user
      newRun =
        car: car._id
        pub_id: makePubId()
        record_i: { keyMap: data.keyMap_i, timeline: [] }
        record_p: { keyMap: data.keyMap_p, timeline: [] }
        status: 'Unverified'
        track: track._id
        user: user._id
      console.log "Started run: #{newRun.pub_id}"
      db.runs.insert newRun, (err) ->
        return console.error 'Run insert error: ' + err if err
        return if run # Another run was already started. Discard this one.
        run = newRun
    db.cars.findOne pub_id: data.car, (err, doc) -> car = doc; done()
    db.tracks.findOne pub_id: data.track, (err, doc) -> track = doc; done()
  # Append incoming input/position samples to the in-memory buffers.
  socket.on 'record_i', (data) ->
    Array::push.apply record_i_timeline, data.samples
  socket.on 'record_p', (data) ->
    Array::push.apply record_p_timeline, data.samples
  socket.on 'times', (data) ->
    # TODO: Also buffer times in the event that the run isn't ready yet.
    return unless run
    # TODO: Verification!
    run.times = data.times
    run.time = data.times[data.times.length - 1]
  # Grant one credit and push the new balance to the client.
  awardCredit = ->
    credits = bbUser.credits + 1
    bbUser.save { credits }
    # db.users.update { _id: user._id }, { $set: { credits: bbUser.credits } }, dbCallback
    socket.emit 'updateuser',
      id: user.pub_id
      credits: credits
    return
  # awardCreditThrottled = _.throttle awardCredit, 1500, leading: no
  # Probabilistic throttle: the longer since the last checkpoint, the more
  # likely a credit is awarded (x^2/(x^2+k^2) CDF over elapsed seconds).
  lastCall = Date.now()
  awardCreditThrottled = ->
    now = Date.now()
    elapsed = (now - lastCall) / 1000
    lastCall = now
    k = 4
    k2 = k * k
    x2 = elapsed * elapsed
    cdf = x2 / (x2 + k2)
    # cdf = Math.min 1, Math.pow(elapsed / 5000, 2)
    if Math.random() < cdf
      setTimeout awardCredit, 800
  socket.on 'advance', (data) ->
    return unless user
    return unless data.cp > 0
    awardCreditThrottled()
| 168863 | "use strict"
_ = require 'underscore'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
connect = require 'connect'
compression = require 'compression'
cookie = require 'cookie'
express = require 'express'
expressSession = require 'express-session'
http = require 'http'
logger = require 'morgan'
methodOverride = require 'method-override'
mongoose = require 'mongoose'
mongoskin = require 'mongoskin'
session_mongoose = require 'session-mongoose'
socketio = require 'socket.io'
stylus = require 'stylus'
passport = require 'passport'
FacebookStrategy = require('passport-facebook').Strategy
GoogleStrategy = require('passport-google-oauth').OAuth2Strategy
TwitterStrategy = require('passport-twitter').Strategy
LocalStrategy = require('passport-local').Strategy
# This has to come first to set up Mongoose schemas.
objects = require './objects'
api = require './api'
config = require './config'
{ makePubId } = require './objects/common'
routes = require './routes'
# stripe = require('stripe')(config.stripe.API_KEY)
getIsodate = -> new Date().toISOString()
logger.format 'isodate', (req, res) -> getIsodate()
# Timestamped console logging: prefix msg with the current ISO-8601 time.
log = (msg) ->
  console.log "[#{getIsodate()}] #{msg}"
mongoose.set 'debug', true
mongoose.connection.on "error", (err) ->
log "Could not connect to mongo server!"
log err.message
SessionStore = session_mongoose(connect)
sessionStore = new SessionStore(
url: "mongodb://#{config.MONGODB_HOST}/sessions"
# Expiration check worker run interval in millisec (default: 60000)
interval: 120000
)
User = mongoose.model('User')
UserPassport = mongoose.model('UserPassport')
Car = mongoose.model('Car')
Track = mongoose.model('Track')
Run = mongoose.model('Run')
mongoose.connect config.MONGOOSE_URL
# Alternate DB connection
dbUrl = "#{config.db.host}:#{config.db.port}/#{config.db.name}?auto_reconnect"
db = mongoskin.db dbUrl, { safe: true }
db.bind 'cars'
db.bind 'runs'
db.bind 'tracks'
db.bind 'users'
log "Base directory: #{__dirname}"
app = module.exports = express()
DOMAIN = process.env.DOMAIN or 'triggerrally.com'
NODE_ENV = process.env.NODE_ENV
PORT = process.env.PORT or 80
PROTOCOL = process.env.PROTOCOL or 'http'
PUBLIC_PORT = if NODE_ENV is 'production' then 80 else PORT
PORT_SUFFIX = if PUBLIC_PORT is 80 then "" else ":#{PUBLIC_PORT}"
URL_PREFIX = "#{PROTOCOL}://#{DOMAIN}#{PORT_SUFFIX}"
# Find-or-create a local user for an OAuth/OpenID passport profile.
# Looks up the UserPassport by a provider-scoped id; if a linked user exists
# it is returned immediately, otherwise a new User and UserPassport link are
# created. `done(error, userPassport)` follows node callback convention.
authenticateUser = (profile, done) ->
  # OpenID profiles carry `identifier`; OAuth profiles get provider + id.
  passport_id = profile.identifier or (profile.provider + profile.id)
  UserPassport
    .findOne(passport_id: passport_id)
    .populate('user')
    .exec (error, userPassport) ->
      return done error if error
      user = userPassport?.user
      return done null, userPassport if user
      # NOTE(review): passport_id is never assigned on a newly created
      # UserPassport here — confirm the schema derives it from `profile`.
      userPassport ?= new UserPassport()
      # Create new user from passport profile.
      user = new User
        name: profile.displayName or profile.username
      # user.email = profile.emails[0].value if profile.emails?[0]
      user.save (error) ->
        return done error if error
        userPassport.profile = profile
        userPassport.user = user._id
        userPassport.save (error) ->
          done error, userPassport
#res.redirect('/user/' + user.pub_id + '/edit');
# Post-login handler for popup (API) auth flows: close the popup window.
authenticationSuccessfulAPI = (req, res) ->
  if Array.isArray req.user
    throw new Error('authenticationSuccessfulAPI: req.user array')
  res.redirect '/closeme'
# Post-login handler for full-page auth flows: back to the home page.
authenticationSuccessful = (req, res) ->
  if Array.isArray req.user
    throw new Error('authenticationSuccessful: req.user array')
  res.redirect '/'
#passport.use new LocalStrategy(
# usernameField: 'email'
# passwordField: 'password'
#, (email, password, done) ->
# User.findOne
# _email: email
# , (err, user) ->
# return done(err) if err
# return done(null, false) unless user
# return done(null, false) unless user.authenticate(password)
# done null, user
#)
# for i in ["", "/v1"]
# passport.use "facebook#{i}", new FacebookStrategy(
# clientID: config.FACEBOOK_APP_ID
# clientSecret: config.FACEBOOK_APP_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/facebook/callback"
# , (accessToken, refreshToken, profile, done) ->
# profile.auth = { accessToken, refreshToken }
# authenticateUser profile, done
# )
# passport.use "google#{i}", new GoogleStrategy(
# clientID: config.GOOGLE_CLIENT_ID
# clientSecret: config.GOOGLE_CLIENT_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/google/callback"
# , (token, refreshToken, profile, done) ->
# profile.auth = { token, refreshToken }
# authenticateUser profile, done
# )
# passport.use "twitter#{i}", new TwitterStrategy(
# consumerKey: config.TWITTER_APP_KEY
# consumerSecret: config.TWITTER_APP_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/twitter/callback"
# , (token, tokenSecret, profile, done) ->
# profile.auth = { token, tokenSecret }
# authenticateUser profile, done
# )
passport.serializeUser (userPassport, done) ->
done null, userPassport.id
passport.deserializeUser (id, done) ->
UserPassport
.findOne(_id: id)
.populate('user')
.exec (error, userPassport) ->
done error, userPassport
app.use logger('[:isodate] :status :response-time ms :res[content-length] :method :url :referrer', format: '[:isodate] :status :response-time ms :res[content-length] :method :url :referrer')
app.disable 'x-powered-by'
app.use compression()
app.use stylus.middleware(
src: __dirname + '/stylus'
dest: __dirname + '/public'
)
app.use express.static(__dirname + '/public')
app.set 'views', __dirname + '/views'
app.set 'view engine', 'jade'
app.use (req, res, next) ->
req.rawBody = ''
# req.setEncoding('utf8')
req.on 'data', (chunk) -> req.rawBody += chunk
next()
app.use bodyParser.urlencoded({
extended: true
})
app.use bodyParser.json();
app.use cookieParser(config.SESSION_SECRET)
app.use expressSession(
secret: 'asecret'
saveUninitialized: true
resave: true
cookie:
maxAge: 4 * 7 * 24 * 60 * 60 * 1000
store: sessionStore
)
app.use passport.initialize()
app.use passport.session()
app.use methodOverride()
app.use (req, res, next) ->
# Enable Chrome Frame if installed.
res.setHeader 'X-UA-Compatible', 'chrome=1'
next()
app.use routes.defaultParams
#
#// We can delay certain resources for debugging purposes.
#app.use(function(req, res, next) {
# var delay = 0;
# if (req.path.match('nice.png')) delay = 3000;
# if (req.path.match('heightdetail1.jpg')) delay = 6000;
# setTimeout(function() {
# next();
# }, delay);
#});
#
app.use app.router
# Send any path not otherwise handled to the unified app.
# TODO: Make the app show a 404 as appropriate.
app.use routes.unified
if app.get('env') is 'development'
app.use (err, req, res, next) ->
console.error err
res.json 500,
error: "Internal Server Error"
call_stack: err.stack?.split('\n')
if app.get('env') is 'production'
app.use (err, req, res, next) ->
console.error err
res.json 500,
error: "Internal Server Error"
app.get '/v1/auth/facebook', passport.authenticate('facebook/v1')
app.get '/v1/auth/facebook/callback', passport.authenticate('facebook/v1',
failureRedirect: '/login?popup=1'
), authenticationSuccessfulAPI
app.get '/v1/auth/google', passport.authenticate('google/v1', { scope : ['profile', 'email'] })
app.get '/v1/auth/google/callback', passport.authenticate('google/v1',
failureRedirect: '/login?popup=1'
), authenticationSuccessfulAPI
app.get '/v1/auth/twitter', passport.authenticate('twitter/v1')
app.get '/v1/auth/twitter/callback', passport.authenticate('twitter/v1'), authenticationSuccessfulAPI
app.get '/v1/auth/logout', (req, res) ->
req.logOut()
res.json status: "ok"
api.setup app, passport
app.get '/auth/facebook', passport.authenticate('facebook')
app.get '/auth/facebook/callback', passport.authenticate('facebook',
failureRedirect: '/login'
), authenticationSuccessful
app.get '/auth/google', passport.authenticate('google', { scope : ['profile', 'email'] })
app.get '/auth/google/callback', passport.authenticate('google',
failureRedirect: '/login'
), authenticationSuccessful
app.get '/auth/twitter', passport.authenticate('twitter')
app.get '/auth/twitter/callback', passport.authenticate('twitter',
failureRedirect: '/login'
), authenticationSuccessful
app.get '/logout', (req, res) ->
req.logOut()
res.redirect '/'
app.get '/autologin', (req, res, next) ->
code = req.query.code
passport_id = config.autologin[code]
return res.send 401 unless passport_id
UserPassport
.findOne({ passport_id })
.populate('user')
.exec (error, userPassport) ->
return next error if error
return res.send 500 unless userPassport
req.login userPassport, (error) ->
return next error if error
res.redirect '/'
app.get '/closeme', routes.closeme
# Backward compatibility.
app.get '/drive', (req, res) ->
res.redirect '/', 301
app.get '/x/Preview/Arbusu/drive', (req, res) ->
# req.params.idTrack = 'Preview'
# req.params.idCar = 'Arbusu'
# loadUrlTrack req, res, ->
# loadUrlCar req, res, ->
# routes.drive req, res
# Preview is broken, so just redirect to home.
res.redirect '/', 301
app.get '/x/:idTrack/:idCar/drive', (req, res) ->
res.redirect "/track/#{req.params.idTrack}/drive", 301
# app.get '/track/:idTrack', (req, res) ->
# res.redirect "/track/#{req.params.idTrack}/drive", 301
app.get '/login', routes.login
# ppec = require './paypal/expresscheckout'
qs = require 'querystring'
availablePacks =
ignition:
cost: '750'
currency: 'credits'
# name: 'Trigger Rally: Icarus Ignition'
# description: 'A new car for Trigger Rally.'
# url: 'https://triggerrally.com/ignition'
products: [ 'ignition' ]
mayhem:
cost: '400'
currency: 'credits'
# name: 'Trigger Rally: Mayhem Monster Truck'
# description: 'The Mayhem Monster Truck for Trigger Rally.'
# url: 'https://triggerrally.com/mayhem'
products: [ 'mayhem' ]
# full:
# name: 'Trigger Rally: Full Game'
# description: 'Access all tracks, the Arbusu, Mayhem and Icarus cars, and more!'
# url: 'https://triggerrally.com/purchase'
# products: [ 'packa', 'ignition', 'mayhem', 'paid' ]
# Register a purchasable USD credit pack, keyed as "credits<amount>".
addCredits = (credits, cost) ->
  pack =
    name: "#{credits} Credits - Trigger Rally"
    description: "A package of #{credits} credits for your Trigger Rally account."
    url: "https://triggerrally.com/"
    cost: cost
    credits: credits
    currency: 'USD'
  availablePacks["credits#{credits}"] = pack
addCredits '80', '0.99'
addCredits '200', '1.99'
addCredits '550', '4.99'
addCredits '1200', '9.99'
addCredits '2000', '14.99'
# # addCredits '80', '0.29'
# addCredits '200', '0.59'
# addCredits '550', '1.49'
# addCredits '1200', '2.99'
# addCredits '2000', '4.49'
# addCredits '200', '0.59'
# addCredits '400', '1.15'
# addCredits '750', '1.95'
# addCredits '1150', '2.95'
# addCredits '2000', '4.49'
# Add an 'id' field matching the pack key.
pack.id = id for own id, pack of availablePacks
# Apply a purchased pack to a user: merge product grants, add credits, append
# a pay_history entry, then persist via the Backbone model. On success the
# response closes the purchase popup; on save failure a 500 is sent via
# `failure` (defined later in the file; in scope at call time).
grantPackToUser = (pack, bbUser, method, res) ->
  saveData = {}
  if pack.products
    saveData.products = _.union (bbUser.products ? []), pack.products
  if pack.credits
    saveData.credits = bbUser.credits + parseInt(pack.credits)
  # Record [timestamp, payment method, currency, cost, pack id].
  saveData.pay_history = bbUser.pay_history ? []
  saveData.pay_history.push [ Date.now(), method, pack.currency, pack.cost, pack.id ]
  console.log saveData
  bbUser.save saveData,
    success: ->
      log "PURCHASE COMPLETE for user #{bbUser.id} using #{method}"
      res.redirect '/closeme'
    error: ->
      # The payment went through but we failed to record it — log everything.
      log "user: #{JSON.stringify bbUser}"
      failure res, 500, "COMPLETE BUT FAILED TO RECORD - VERY BAD!!"
# Purchase entry point. Validates the requested pack and dispatches to the
# payment handler for its currency (only in-game credits are enabled).
app.get '/checkout', (req, res) ->
  return res.send 401 unless req.user
  packId = req.query.pack
  pack = availablePacks[packId]
  return res.send 404 unless pack
  if pack.products
    # Check that user doesn't already have this pack. Prevents accidental double-purchase.
    newProducts = _.difference pack.products, req.user.user.products
    # 409 Conflict: every product in the pack is already owned.
    return res.send 409 if _.isEmpty newProducts
  switch pack.currency
    # Real currency payments are disabled.
    # when 'USD'
    #   switch req.query.method
    #     when 'paypal' then paypalCheckout pack, req, res
    #     when 'stripe' then stripeCheckout pack, req, res
    #     else res.send 400
    when 'credits' then creditsCheckout pack, req, res
    else res.send 400
# freeCheckout = (pack, req, res) ->
# return res.send 402 unless pack.cost in [ 0, '0' ]
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure 500 unless bbUser
# products = bbUser.products ? []
# products = _.union products, pack.products
# bbUser.save { products },
# success: ->
# res.redirect '/closeme'
# error: ->
# res.send 500
# Purchase `pack` with the user's in-game credit balance.
# 401 if not logged in, 402 if the balance is insufficient; on success the
# cost is deducted, the pack's products are granted, and the popup is closed
# (or a bare 200 is sent for non-popup requests).
creditsCheckout = (pack, req, res) ->
  return failure res, 401 unless req.user
  api.findUser req.user.user.pub_id, (bbUser) ->
    # BUG FIX: was `failure 500`, which passed 500 as the `res` argument of
    # failure(res, code, msg) and left the request hanging.
    return failure res, 500, 'failed to load backbone user' unless bbUser
    cost = parseInt(pack.cost)
    return res.send 402 unless bbUser.credits >= cost
    log "user #{bbUser.id} purchased #{pack.id} for #{cost} credits"
    # Merge the pack's products into whatever the user already owns.
    products = bbUser.products ? []
    products = _.union products, pack.products
    bbUser.save { products, credits: bbUser.credits - cost },
      success: ->
        log "saved user #{JSON.stringify bbUser}"
        if req.query.popup
          res.redirect '/closeme'
        else
          res.send 200
      error: ->
        res.send 500
# stripeCheckout = (pack, req, res) ->
# return failure res, 401 unless req.user
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure res, 500 unless bbUser
# charge = stripe.charges.create
# amount: Math.round(pack.cost * 100) # amount in cents
# currency: "usd"
# card: req.query.token
# description: "Charge for user ID #{bbUser.id}"
# , (err, charge) =>
# if err
# console.error err
# return res.send 500
# grantPackToUser pack, bbUser, 'stripe', res
# Build the PayPal Express Checkout NVP parameter set for selling `pack` as
# a single digital-goods line item. The trailing implicit object literal is
# the return value; callers add METHOD / TOKEN / PAYERID before sending.
getPaymentParams = (pack) ->
  cost = pack.cost
  PAYMENTREQUEST_0_CUSTOM: pack.id
  PAYMENTREQUEST_0_PAYMENTACTION: 'Sale'
  PAYMENTREQUEST_0_AMT: cost
  PAYMENTREQUEST_0_ITEMAMT: cost # Required for digital goods.
  RETURNURL: "#{URL_PREFIX}/checkout/return"
  CANCELURL: "#{URL_PREFIX}/closeme"
  REQCONFIRMSHIPPING: 0
  NOSHIPPING: 1
  ALLOWNOTE: 0
  # HDRIMG: "https://triggerrally.com/images/TODO-750x90.png" # TODO
  # HDRBORDERCOLOR
  # HDRBACKCOLOR
  # PAYFLOWCOLOR
  # EMAIL: req.user.user.email
  # LANDINGPAGE # should test results of this
  BUYEREMAILOPTINENABLE: 1
  # BUYERUSERNAME # May be useful to increase user confidence?
  # BUYERREGISTRATIONDATE
  L_PAYMENTREQUEST_0_ITEMCATEGORY0: 'Digital'
  L_PAYMENTREQUEST_0_ITEMURL0: pack.url
  L_PAYMENTREQUEST_0_QTY0: 1
  L_PAYMENTREQUEST_0_AMT0: cost
  L_PAYMENTREQUEST_0_DESC0: pack.description
  L_PAYMENTREQUEST_0_NAME0: pack.name
# paypalCheckout = (pack, req, res) ->
# params = getPaymentParams pack
# return res.send 404 unless params
# params.METHOD = 'SetExpressCheckout'
# log "Calling: #{JSON.stringify params}"
# ppec.request params, (err, nvp_res) ->
# if err
# console.error "#{params.METHOD} error: #{err}"
# return res.send 500
# log "#{params.METHOD} response: #{JSON.stringify nvp_res}"
# return res.send 500 if nvp_res.ACK isnt 'Success'
# TOKEN = nvp_res.TOKEN
# return res.send 500 unless TOKEN
# res.redirect ppec.redirectUrl TOKEN
# Log a purchase failure and answer the request with a bare status code.
failure = (res, code, msg) ->
  message = "PURCHASE FAILED: (#{code}) #{msg}"
  console.error message
  res.send code
# app.get '/checkout/return', (req, res) ->
# return failure res, 401 unless req.user
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure res, 500 unless bbUser
# params =
# METHOD: 'GetExpressCheckoutDetails'
# TOKEN: req.query.token
# log "Calling: #{JSON.stringify params}"
# ppec.request params, paypalResponse_GetExpressCheckoutDetails.bind null, bbUser, req, res
# paypalResponse_GetExpressCheckoutDetails = (bbUser, req, res, err, nvp_res) ->
# method = 'GetExpressCheckoutDetails'
# return failure res, 500, "#{method} error: #{err}" if err
# log "#{method} response: #{nvp_res}"
# return failure res, 500 if nvp_res.ACK isnt 'Success'
# packId = nvp_res.PAYMENTREQUEST_0_CUSTOM
# pack = availablePacks[packId]
# # TODO: Check that price and description match what we expect?
# params = getPaymentParams pack
# return failure res, 500 unless params
# params.METHOD = 'DoExpressCheckoutPayment'
# params.TOKEN = nvp_res.TOKEN
# params.PAYERID = nvp_res.PAYERID
# params.RETURNFMFDETAILS = 1
# log "Calling: #{JSON.stringify params}"
# ppec.request params, paypalResponse_DoExpressCheckoutPayment.bind null, bbUser, req, res
# paypalResponse_DoExpressCheckoutPayment = (bbUser, req, res, err, nvp_res) ->
# method = 'DoExpressCheckoutPayment'
# return failure res, 500, "#{method} error: #{err}" if err
# log "#{method} response: #{JSON.stringify nvp_res}"
# return failure res, 500 if nvp_res.ACK isnt 'Success'
# grantPackToUser pack, bbUser,'paypal', res
#
#app.post('/login',
# passport.authenticate('local', { failureRedirect: '/login?status=failed' }),
# authenticationSuccessful
#);
#
server = http.createServer(app)
io = socketio.listen(server)
server.listen PORT
log "Server listening on port #{PORT} in #{app.settings.env} mode"
# TODO: Mirror http api over socket.io.
if NODE_ENV is 'production'
io.set 'log level', 1
else
io.set 'log level', 2
# Report the current number of connected sockets via the timestamped logger.
showNumberConnected = ->
  log "Connected sockets: #{io.sockets.clients().length}"
# socket.io handshake authorization: rebuilds the Express session from the
# handshake cookie so socket handlers can see the logged-in user.
io.set 'authorization', (data, accept) ->
  # http://www.danielbaulig.de/socket-ioexpress/
  return accept('No cookie transmitted.', false) unless data.headers.cookie
  data.cookie = cookie.parse(data.headers.cookie)
  sid = data.cookie['connect.sid']
  return accept('No session id found.', false) unless sid
  # Strip the signature wrapper from the signed cookie value.
  # NOTE(review): assumes the raw session id occupies chars 2..25 exactly —
  # confirm against the cookie format produced by the session middleware.
  data.sessionID = sid.substring(2, 26)
  # save the session store to the data object
  # (as required by the Session constructor)
  data.sessionStore = sessionStore
  sessionStore.get data.sessionID, (err, session) ->
    return accept err, false if err
    return accept 'No session', false unless session
    # create a session object, passing data as request and our
    # just acquired session data
    Session = connect.middleware.session.Session
    data.session = new Session(data, session)
    # TODO: accept fast, before deserialization?
    passport.deserializeUser data.session.passport.user, (err, userPassport) ->
      return accept 'passport error: ' + err, false if err
      user = data.session.user = userPassport.user
      data.session.userPassport = userPassport
      # Sockets without a logged-in user are still accepted.
      return accept null, true unless user
      api.findUser data.session.user.pub_id, (bbUser) ->
        return accept 'failed to load backbone user' unless bbUser
        # Cache the Backbone user on the session for the /drive namespace.
        data.session.bbUser = bbUser
        accept null, true
# Log the connection count whenever a socket connects or disconnects.
io.on 'connection', (socket) ->
  showNumberConnected()
  socket.on 'disconnect', ->
    showNumberConnected()
# Shared write callback: DB errors are logged but otherwise ignored.
dbCallback = (err) ->
  console.error err if err
# Per-socket state machine recording gameplay runs on the /drive namespace.
io.of('/drive').on 'connection', (socket) ->
  session = socket.handshake.session
  user = session.user
  bbUser = session.bbUser
  # Current run document plus the input/position sample buffers for it.
  run = record_i_timeline = record_p_timeline = null
  do resetRun = ->
    run = null
    record_i_timeline = []
    record_p_timeline = []
  # Flush the buffered timelines (and any recorded times) to the run document.
  completeRun = ->
    return unless run
    console.log "Finalizing records for run: #{run.pub_id}"
    newValues =
      "record_i.timeline": record_i_timeline
      "record_p.timeline": record_p_timeline
    newValues.times = run.times if run.times
    newValues.time = run.time if run.time?
    db.runs.update { _id: run._id }, $set: newValues, dbCallback
    resetRun()
  socket.on 'disconnect', completeRun
  # TODO: Resume connections, or notify user if recording has stopped.
  socket.on 'start', (data) ->
    # Starting a new run implicitly finalizes the previous one.
    completeRun()
    resetRun()
    car = track = null
    # Fires after both the car and track lookups below have completed.
    done = _.after 2, ->
      return unless car and track
      # This is why I should have a model layer.
      db.tracks.update { _id: track._id }, { $inc: { count_drive: 1 } }, dbCallback
      # return # Disable run recording
      # Only logged-in users get their runs recorded.
      return unless user
      newRun =
        car: car._id
        pub_id: makePubId()
        record_i: { keyMap: data.keyMap_i, timeline: [] }
        record_p: { keyMap: data.keyMap_p, timeline: [] }
        status: 'Unverified'
        track: track._id
        user: user._id
      console.log "Started run: #{newRun.pub_id}"
      db.runs.insert newRun, (err) ->
        return console.error 'Run insert error: ' + err if err
        return if run # Another run was already started. Discard this one.
        run = newRun
    db.cars.findOne pub_id: data.car, (err, doc) -> car = doc; done()
    db.tracks.findOne pub_id: data.track, (err, doc) -> track = doc; done()
  # Append incoming input/position samples to the in-memory buffers.
  socket.on 'record_i', (data) ->
    Array::push.apply record_i_timeline, data.samples
  socket.on 'record_p', (data) ->
    Array::push.apply record_p_timeline, data.samples
  socket.on 'times', (data) ->
    # TODO: Also buffer times in the event that the run isn't ready yet.
    return unless run
    # TODO: Verification!
    run.times = data.times
    run.time = data.times[data.times.length - 1]
  # Grant one credit and push the new balance to the client.
  awardCredit = ->
    credits = bbUser.credits + 1
    bbUser.save { credits }
    # db.users.update { _id: user._id }, { $set: { credits: bbUser.credits } }, dbCallback
    socket.emit 'updateuser',
      id: user.pub_id
      credits: credits
    return
  # awardCreditThrottled = _.throttle awardCredit, 1500, leading: no
  # Probabilistic throttle: the longer since the last checkpoint, the more
  # likely a credit is awarded (x^2/(x^2+k^2) CDF over elapsed seconds).
  lastCall = Date.now()
  awardCreditThrottled = ->
    now = Date.now()
    elapsed = (now - lastCall) / 1000
    lastCall = now
    k = 4
    k2 = k * k
    x2 = elapsed * elapsed
    cdf = x2 / (x2 + k2)
    # cdf = Math.min 1, Math.pow(elapsed / 5000, 2)
    if Math.random() < cdf
      setTimeout awardCredit, 800
  socket.on 'advance', (data) ->
    return unless user
    return unless data.cp > 0
    awardCreditThrottled()
| true | "use strict"
_ = require 'underscore'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
connect = require 'connect'
compression = require 'compression'
cookie = require 'cookie'
express = require 'express'
expressSession = require 'express-session'
http = require 'http'
logger = require 'morgan'
methodOverride = require 'method-override'
mongoose = require 'mongoose'
mongoskin = require 'mongoskin'
session_mongoose = require 'session-mongoose'
socketio = require 'socket.io'
stylus = require 'stylus'
passport = require 'passport'
FacebookStrategy = require('passport-facebook').Strategy
GoogleStrategy = require('passport-google-oauth').OAuth2Strategy
TwitterStrategy = require('passport-twitter').Strategy
LocalStrategy = require('passport-local').Strategy
# This has to come first to set up Mongoose schemas.
objects = require './objects'
api = require './api'
config = require './config'
{ makePubId } = require './objects/common'
routes = require './routes'
# stripe = require('stripe')(config.stripe.API_KEY)
getIsodate = -> new Date().toISOString()
logger.format 'isodate', (req, res) -> getIsodate()
# Timestamped console logging: prefix msg with the current ISO-8601 time.
log = (msg) ->
  console.log "[#{getIsodate()}] #{msg}"
mongoose.set 'debug', true
mongoose.connection.on "error", (err) ->
log "Could not connect to mongo server!"
log err.message
SessionStore = session_mongoose(connect)
sessionStore = new SessionStore(
url: "mongodb://#{config.MONGODB_HOST}/sessions"
# Expiration check worker run interval in millisec (default: 60000)
interval: 120000
)
User = mongoose.model('User')
UserPassport = mongoose.model('UserPassport')
Car = mongoose.model('Car')
Track = mongoose.model('Track')
Run = mongoose.model('Run')
mongoose.connect config.MONGOOSE_URL
# Alternate DB connection
dbUrl = "#{config.db.host}:#{config.db.port}/#{config.db.name}?auto_reconnect"
db = mongoskin.db dbUrl, { safe: true }
db.bind 'cars'
db.bind 'runs'
db.bind 'tracks'
db.bind 'users'
log "Base directory: #{__dirname}"
app = module.exports = express()
DOMAIN = process.env.DOMAIN or 'triggerrally.com'
NODE_ENV = process.env.NODE_ENV
PORT = process.env.PORT or 80
PROTOCOL = process.env.PROTOCOL or 'http'
PUBLIC_PORT = if NODE_ENV is 'production' then 80 else PORT
PORT_SUFFIX = if PUBLIC_PORT is 80 then "" else ":#{PUBLIC_PORT}"
URL_PREFIX = "#{PROTOCOL}://#{DOMAIN}#{PORT_SUFFIX}"
# Find-or-create a local user for an OAuth/OpenID passport profile.
# Looks up the UserPassport by a provider-scoped id; if a linked user exists
# it is returned immediately, otherwise a new User and UserPassport link are
# created. `done(error, userPassport)` follows node callback convention.
authenticateUser = (profile, done) ->
  # OpenID profiles carry `identifier`; OAuth profiles get provider + id.
  passport_id = profile.identifier or (profile.provider + profile.id)
  UserPassport
    .findOne(passport_id: passport_id)
    .populate('user')
    .exec (error, userPassport) ->
      return done error if error
      user = userPassport?.user
      return done null, userPassport if user
      # NOTE(review): passport_id is never assigned on a newly created
      # UserPassport here — confirm the schema derives it from `profile`.
      userPassport ?= new UserPassport()
      # Create new user from passport profile.
      user = new User
        name: profile.displayName or profile.username
      # user.email = profile.emails[0].value if profile.emails?[0]
      user.save (error) ->
        return done error if error
        userPassport.profile = profile
        userPassport.user = user._id
        userPassport.save (error) ->
          done error, userPassport
#res.redirect('/user/' + user.pub_id + '/edit');
# Post-login handler for popup (API) auth flows: close the popup window.
authenticationSuccessfulAPI = (req, res) ->
  if Array.isArray req.user
    throw new Error('authenticationSuccessfulAPI: req.user array')
  res.redirect '/closeme'
# Post-login handler for full-page auth flows: back to the home page.
authenticationSuccessful = (req, res) ->
  if Array.isArray req.user
    throw new Error('authenticationSuccessful: req.user array')
  res.redirect '/'
#passport.use new LocalStrategy(
# usernameField: 'email'
# passwordField: 'password'
#, (email, password, done) ->
# User.findOne
# _email: email
# , (err, user) ->
# return done(err) if err
# return done(null, false) unless user
# return done(null, false) unless user.authenticate(password)
# done null, user
#)
# for i in ["", "/v1"]
# passport.use "facebook#{i}", new FacebookStrategy(
# clientID: config.FACEBOOK_APP_ID
# clientSecret: config.FACEBOOK_APP_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/facebook/callback"
# , (accessToken, refreshToken, profile, done) ->
# profile.auth = { accessToken, refreshToken }
# authenticateUser profile, done
# )
# passport.use "google#{i}", new GoogleStrategy(
# clientID: config.GOOGLE_CLIENT_ID
# clientSecret: config.GOOGLE_CLIENT_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/google/callback"
# , (token, refreshToken, profile, done) ->
# profile.auth = { token, refreshToken }
# authenticateUser profile, done
# )
# passport.use "twitter#{i}", new TwitterStrategy(
# consumerKey: config.TWITTER_APP_KEY
# consumerSecret: config.TWITTER_APP_SECRET
# callbackURL: "#{URL_PREFIX}#{i}/auth/twitter/callback"
# , (token, tokenSecret, profile, done) ->
# profile.auth = { token, tokenSecret }
# authenticateUser profile, done
# )
passport.serializeUser (userPassport, done) ->
done null, userPassport.id
passport.deserializeUser (id, done) ->
UserPassport
.findOne(_id: id)
.populate('user')
.exec (error, userPassport) ->
done error, userPassport
app.use logger('[:isodate] :status :response-time ms :res[content-length] :method :url :referrer', format: '[:isodate] :status :response-time ms :res[content-length] :method :url :referrer')
app.disable 'x-powered-by'
app.use compression()
app.use stylus.middleware(
src: __dirname + '/stylus'
dest: __dirname + '/public'
)
app.use express.static(__dirname + '/public')
app.set 'views', __dirname + '/views'
app.set 'view engine', 'jade'
app.use (req, res, next) ->
req.rawBody = ''
# req.setEncoding('utf8')
req.on 'data', (chunk) -> req.rawBody += chunk
next()
app.use bodyParser.urlencoded({
extended: true
})
app.use bodyParser.json();
app.use cookieParser(config.SESSION_SECRET)
app.use expressSession(
secret: 'asecret'
saveUninitialized: true
resave: true
cookie:
maxAge: 4 * 7 * 24 * 60 * 60 * 1000
store: sessionStore
)
app.use passport.initialize()
app.use passport.session()
app.use methodOverride()
app.use (req, res, next) ->
# Enable Chrome Frame if installed.
res.setHeader 'X-UA-Compatible', 'chrome=1'
next()
app.use routes.defaultParams
#
#// We can delay certain resources for debugging purposes.
#app.use(function(req, res, next) {
# var delay = 0;
# if (req.path.match('nice.png')) delay = 3000;
# if (req.path.match('heightdetail1.jpg')) delay = 6000;
# setTimeout(function() {
# next();
# }, delay);
#});
#
app.use app.router
# Send any path not otherwise handled to the unified app.
# TODO: Make the app show a 404 as appropriate.
app.use routes.unified
if app.get('env') is 'development'
app.use (err, req, res, next) ->
console.error err
res.json 500,
error: "Internal Server Error"
call_stack: err.stack?.split('\n')
if app.get('env') is 'production'
app.use (err, req, res, next) ->
console.error err
res.json 500,
error: "Internal Server Error"
app.get '/v1/auth/facebook', passport.authenticate('facebook/v1')
app.get '/v1/auth/facebook/callback', passport.authenticate('facebook/v1',
failureRedirect: '/login?popup=1'
), authenticationSuccessfulAPI
app.get '/v1/auth/google', passport.authenticate('google/v1', { scope : ['profile', 'email'] })
app.get '/v1/auth/google/callback', passport.authenticate('google/v1',
failureRedirect: '/login?popup=1'
), authenticationSuccessfulAPI
app.get '/v1/auth/twitter', passport.authenticate('twitter/v1')
app.get '/v1/auth/twitter/callback', passport.authenticate('twitter/v1'), authenticationSuccessfulAPI
app.get '/v1/auth/logout', (req, res) ->
req.logOut()
res.json status: "ok"
api.setup app, passport
app.get '/auth/facebook', passport.authenticate('facebook')
app.get '/auth/facebook/callback', passport.authenticate('facebook',
failureRedirect: '/login'
), authenticationSuccessful
app.get '/auth/google', passport.authenticate('google', { scope : ['profile', 'email'] })
app.get '/auth/google/callback', passport.authenticate('google',
failureRedirect: '/login'
), authenticationSuccessful
app.get '/auth/twitter', passport.authenticate('twitter')
app.get '/auth/twitter/callback', passport.authenticate('twitter',
failureRedirect: '/login'
), authenticationSuccessful
app.get '/logout', (req, res) ->
req.logOut()
res.redirect '/'
app.get '/autologin', (req, res, next) ->
code = req.query.code
passport_id = config.autologin[code]
return res.send 401 unless passport_id
UserPassport
.findOne({ passport_id })
.populate('user')
.exec (error, userPassport) ->
return next error if error
return res.send 500 unless userPassport
req.login userPassport, (error) ->
return next error if error
res.redirect '/'
app.get '/closeme', routes.closeme
# Backward compatibility.
app.get '/drive', (req, res) ->
res.redirect '/', 301
app.get '/x/Preview/Arbusu/drive', (req, res) ->
# req.params.idTrack = 'Preview'
# req.params.idCar = 'Arbusu'
# loadUrlTrack req, res, ->
# loadUrlCar req, res, ->
# routes.drive req, res
# Preview is broken, so just redirect to home.
res.redirect '/', 301
app.get '/x/:idTrack/:idCar/drive', (req, res) ->
res.redirect "/track/#{req.params.idTrack}/drive", 301
# app.get '/track/:idTrack', (req, res) ->
# res.redirect "/track/#{req.params.idTrack}/drive", 301
app.get '/login', routes.login
# ppec = require './paypal/expresscheckout'
qs = require 'querystring'
availablePacks =
ignition:
cost: '750'
currency: 'credits'
# name: 'Trigger Rally: Icarus Ignition'
# description: 'A new car for Trigger Rally.'
# url: 'https://triggerrally.com/ignition'
products: [ 'ignition' ]
mayhem:
cost: '400'
currency: 'credits'
# name: 'Trigger Rally: Mayhem Monster Truck'
# description: 'The Mayhem Monster Truck for Trigger Rally.'
# url: 'https://triggerrally.com/mayhem'
products: [ 'mayhem' ]
# full:
# name: 'Trigger Rally: Full Game'
# description: 'Access all tracks, the Arbusu, Mayhem and Icarus cars, and more!'
# url: 'https://triggerrally.com/purchase'
# products: [ 'packa', 'ignition', 'mayhem', 'paid' ]
addCredits = (credits, cost) ->
availablePacks["credits#{credits}"] =
name: "#{credits} Credits - Trigger Rally"
description: "A package of #{credits} credits for your Trigger Rally account."
url: "https://triggerrally.com/"
cost: cost
credits: credits
currency: 'USD'
addCredits '80', '0.99'
addCredits '200', '1.99'
addCredits '550', '4.99'
addCredits '1200', '9.99'
addCredits '2000', '14.99'
# # addCredits '80', '0.29'
# addCredits '200', '0.59'
# addCredits '550', '1.49'
# addCredits '1200', '2.99'
# addCredits '2000', '4.49'
# addCredits '200', '0.59'
# addCredits '400', '1.15'
# addCredits '750', '1.95'
# addCredits '1150', '2.95'
# addCredits '2000', '4.49'
# Add an 'id' field matching the pack key.
pack.id = id for own id, pack of availablePacks
grantPackToUser = (pack, bbUser, method, res) ->
saveData = {}
if pack.products
saveData.products = _.union (bbUser.products ? []), pack.products
if pack.credits
saveData.credits = bbUser.credits + parseInt(pack.credits)
saveData.pay_history = bbUser.pay_history ? []
saveData.pay_history.push [ Date.now(), method, pack.currency, pack.cost, pack.id ]
console.log saveData
bbUser.save saveData,
success: ->
log "PURCHASE COMPLETE for user #{bbUser.id} using #{method}"
res.redirect '/closeme'
error: ->
log "user: #{JSON.stringify bbUser}"
failure res, 500, "COMPLETE BUT FAILED TO RECORD - VERY BAD!!"
app.get '/checkout', (req, res) ->
return res.send 401 unless req.user
packId = req.query.pack
pack = availablePacks[packId]
return res.send 404 unless pack
if pack.products
# Check that user doesn't already have this pack. Prevents accidental double-purchase.
newProducts = _.difference pack.products, req.user.user.products
return res.send 409 if _.isEmpty newProducts
switch pack.currency
# Real currency payments are disabled.
# when 'USD'
# switch req.query.method
# when 'paypal' then paypalCheckout pack, req, res
# when 'stripe' then stripeCheckout pack, req, res
# else res.send 400
when 'credits' then creditsCheckout pack, req, res
else res.send 400
# freeCheckout = (pack, req, res) ->
# return res.send 402 unless pack.cost in [ 0, '0' ]
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure 500 unless bbUser
# products = bbUser.products ? []
# products = _.union products, pack.products
# bbUser.save { products },
# success: ->
# res.redirect '/closeme'
# error: ->
# res.send 500
creditsCheckout = (pack, req, res) ->
return failure res, 401 unless req.user
api.findUser req.user.user.pub_id, (bbUser) ->
return failure 500 unless bbUser
cost = parseInt(pack.cost)
return res.send 402 unless bbUser.credits >= cost
log "user #{bbUser.id} purchased #{pack.id} for #{cost} credits"
products = bbUser.products ? []
products = _.union products, pack.products
bbUser.save { products, credits: bbUser.credits - cost },
success: ->
log "saved user #{JSON.stringify bbUser}"
if req.query.popup
res.redirect '/closeme'
else
res.send 200
error: ->
res.send 500
# stripeCheckout = (pack, req, res) ->
# return failure res, 401 unless req.user
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure res, 500 unless bbUser
# charge = stripe.charges.create
# amount: Math.round(pack.cost * 100) # amount in cents
# currency: "usd"
# card: req.query.token
# description: "Charge for user ID #{bbUser.id}"
# , (err, charge) =>
# if err
# console.error err
# return res.send 500
# grantPackToUser pack, bbUser, 'stripe', res
getPaymentParams = (pack) ->
cost = pack.cost
PAYMENTREQUEST_0_CUSTOM: pack.id
PAYMENTREQUEST_0_PAYMENTACTION: 'Sale'
PAYMENTREQUEST_0_AMT: cost
PAYMENTREQUEST_0_ITEMAMT: cost # Required for digital goods.
RETURNURL: "#{URL_PREFIX}/checkout/return"
CANCELURL: "#{URL_PREFIX}/closeme"
REQCONFIRMSHIPPING: 0
NOSHIPPING: 1
ALLOWNOTE: 0
# HDRIMG: "https://triggerrally.com/images/TODO-750x90.png" # TODO
# HDRBORDERCOLOR
# HDRBACKCOLOR
# PAYFLOWCOLOR
# EMAIL: req.user.user.email
# LANDINGPAGE # should test results of this
BUYEREMAILOPTINENABLE: 1
# BUYERUSERNAME # May be useful to increase user confidence?
# BUYERREGISTRATIONDATE
L_PAYMENTREQUEST_0_ITEMCATEGORY0: 'Digital'
L_PAYMENTREQUEST_0_ITEMURL0: pack.url
L_PAYMENTREQUEST_0_QTY0: 1
L_PAYMENTREQUEST_0_AMT0: cost
L_PAYMENTREQUEST_0_DESC0: pack.description
L_PAYMENTREQUEST_0_NAME0: pack.name
# paypalCheckout = (pack, req, res) ->
# params = getPaymentParams pack
# return res.send 404 unless params
# params.METHOD = 'SetExpressCheckout'
# log "Calling: #{JSON.stringify params}"
# ppec.request params, (err, nvp_res) ->
# if err
# console.error "#{params.METHOD} error: #{err}"
# return res.send 500
# log "#{params.METHOD} response: #{JSON.stringify nvp_res}"
# return res.send 500 if nvp_res.ACK isnt 'Success'
# TOKEN = nvp_res.TOKEN
# return res.send 500 unless TOKEN
# res.redirect ppec.redirectUrl TOKEN
failure = (res, code, msg) ->
console.error "PURCHASE FAILED: (#{code}) #{msg}"
res.send code
# app.get '/checkout/return', (req, res) ->
# return failure res, 401 unless req.user
# api.findUser req.user.user.pub_id, (bbUser) ->
# return failure res, 500 unless bbUser
# params =
# METHOD: 'GetExpressCheckoutDetails'
# TOKEN: req.query.token
# log "Calling: #{JSON.stringify params}"
# ppec.request params, paypalResponse_GetExpressCheckoutDetails.bind null, bbUser, req, res
# paypalResponse_GetExpressCheckoutDetails = (bbUser, req, res, err, nvp_res) ->
# method = 'GetExpressCheckoutDetails'
# return failure res, 500, "#{method} error: #{err}" if err
# log "#{method} response: #{nvp_res}"
# return failure res, 500 if nvp_res.ACK isnt 'Success'
# packId = nvp_res.PAYMENTREQUEST_0_CUSTOM
# pack = availablePacks[packId]
# # TODO: Check that price and description match what we expect?
# params = getPaymentParams pack
# return failure res, 500 unless params
# params.METHOD = 'DoExpressCheckoutPayment'
# params.TOKEN = PI:PASSWORD:<PASSWORD>END_PIvp_res.TOKEN
# params.PAYERID = nvp_res.PAYERID
# params.RETURNFMFDETAILS = 1
# log "Calling: #{JSON.stringify params}"
# ppec.request params, paypalResponse_DoExpressCheckoutPayment.bind null, bbUser, req, res
# paypalResponse_DoExpressCheckoutPayment = (bbUser, req, res, err, nvp_res) ->
# method = 'DoExpressCheckoutPayment'
# return failure res, 500, "#{method} error: #{err}" if err
# log "#{method} response: #{JSON.stringify nvp_res}"
# return failure res, 500 if nvp_res.ACK isnt 'Success'
# grantPackToUser pack, bbUser,'paypal', res
#
#app.post('/login',
# passport.authenticate('local', { failureRedirect: '/login?status=failed' }),
# authenticationSuccessful
#);
#
server = http.createServer(app)
io = socketio.listen(server)
server.listen PORT
log "Server listening on port #{PORT} in #{app.settings.env} mode"
# TODO: Mirror http api over socket.io.
if NODE_ENV is 'production'
io.set 'log level', 1
else
io.set 'log level', 2
showNumberConnected = ->
clients = io.sockets.clients()
numConnected = clients.length
log "Connected sockets: #{numConnected}"
io.set 'authorization', (data, accept) ->
# http://www.danielbaulig.de/socket-ioexpress/
return accept('No cookie transmitted.', false) unless data.headers.cookie
data.cookie = cookie.parse(data.headers.cookie)
sid = data.cookie['connect.sid']
return accept('No session id found.', false) unless sid
data.sessionID = sid.substring(2, 26)
# save the session store to the data object
# (as required by the Session constructor)
data.sessionStore = sessionStore
sessionStore.get data.sessionID, (err, session) ->
return accept err, false if err
return accept 'No session', false unless session
# create a session object, passing data as request and our
# just acquired session data
Session = connect.middleware.session.Session
data.session = new Session(data, session)
# TODO: accept fast, before deserialization?
passport.deserializeUser data.session.passport.user, (err, userPassport) ->
return accept 'passport error: ' + err, false if err
user = data.session.user = userPassport.user
data.session.userPassport = userPassport
return accept null, true unless user
api.findUser data.session.user.pub_id, (bbUser) ->
return accept 'failed to load backbone user' unless bbUser
data.session.bbUser = bbUser
accept null, true
io.on 'connection', (socket) ->
showNumberConnected()
socket.on 'disconnect', ->
showNumberConnected()
dbCallback = (err) ->
console.error err if err
io.of('/drive').on 'connection', (socket) ->
session = socket.handshake.session
user = session.user
bbUser = session.bbUser
run = record_i_timeline = record_p_timeline = null
do resetRun = ->
run = null
record_i_timeline = []
record_p_timeline = []
completeRun = ->
return unless run
console.log "Finalizing records for run: #{run.pub_id}"
newValues =
"record_i.timeline": record_i_timeline
"record_p.timeline": record_p_timeline
newValues.times = run.times if run.times
newValues.time = run.time if run.time?
db.runs.update { _id: run._id }, $set: newValues, dbCallback
resetRun()
socket.on 'disconnect', completeRun
# TODO: Resume connections, or notify user if recording has stopped.
socket.on 'start', (data) ->
completeRun()
resetRun()
car = track = null
done = _.after 2, ->
return unless car and track
# This is why I should have a model layer.
db.tracks.update { _id: track._id }, { $inc: { count_drive: 1 } }, dbCallback
# return # Disable run recording
return unless user
newRun =
car: car._id
pub_id: makePubId()
record_i: { keyMap: data.keyMap_i, timeline: [] }
record_p: { keyMap: data.keyMap_p, timeline: [] }
status: 'Unverified'
track: track._id
user: user._id
console.log "Started run: #{newRun.pub_id}"
db.runs.insert newRun, (err) ->
return console.error 'Run insert error: ' + err if err
return if run # Another run was already started. Discard this one.
run = newRun
db.cars.findOne pub_id: data.car, (err, doc) -> car = doc; done()
db.tracks.findOne pub_id: data.track, (err, doc) -> track = doc; done()
socket.on 'record_i', (data) ->
Array::push.apply record_i_timeline, data.samples
socket.on 'record_p', (data) ->
Array::push.apply record_p_timeline, data.samples
socket.on 'times', (data) ->
# TODO: Also buffer times in the event that the run isn't ready yet.
return unless run
# TODO: Verification!
run.times = data.times
run.time = data.times[data.times.length - 1]
awardCredit = ->
credits = bbUser.credits + 1
bbUser.save { credits }
# db.users.update { _id: user._id }, { $set: { credits: bbUser.credits } }, dbCallback
socket.emit 'updateuser',
id: user.pub_id
credits: credits
return
# awardCreditThrottled = _.throttle awardCredit, 1500, leading: no
lastCall = Date.now()
awardCreditThrottled = ->
now = Date.now()
elapsed = (now - lastCall) / 1000
lastCall = now
k = 4
k2 = k * k
x2 = elapsed * elapsed
cdf = x2 / (x2 + k2)
# cdf = Math.min 1, Math.pow(elapsed / 5000, 2)
if Math.random() < cdf
setTimeout awardCredit, 800
socket.on 'advance', (data) ->
return unless user
return unless data.cp > 0
awardCreditThrottled()
|
[
{
"context": "ion only\n * <get name> == Aiden => Your name is Aiden!\n\n + recursion\n - {@recursion}\n\n + impos",
"end": 2826,
"score": 0.9992562532424927,
"start": 2821,
"tag": "NAME",
"value": "Aiden"
}
] | test/test-rivescript.coffee | cemali/myChatBots | 1 | TestCase = require("./test-base")
################################################################################
# RiveScript API Tests
################################################################################
exports.test_load_directory_recursively = (test) ->
bot = new TestCase(test, """
+ *
- No, this failed.
""")
bot.rs.loadDirectory('./test/fixtures', ->
bot.rs.sortReplies()
bot.reply("Did the root directory rivescript load?", "Yes, the root directory rivescript loaded.")
bot.reply("Did the recursive directory rivescript load?", "Yes, the recursive directory rivescript loaded.")
test.done()
, ->
test.equal(true, false) # Throw error
)
exports.test_default_error_messages = (test) ->
bot = new TestCase(test, """
+ condition only
* <get name> == Aiden => Your name is Aiden!
+ recursion
- {@recursion}
+ impossible object
- Here we go: <call>unhandled</call>
> object unhandled rust
return "Hello world"
< object
""")
DEF_NOT_FOUND = "ERR: No Reply Found"
DEF_NOT_MATCH = "ERR: No Reply Matched"
DEF_NO_OBJECT = "[ERR: Object Not Found]"
DEF_RECURSION = "ERR: Deep Recursion Detected"
bot.reply("condition only", DEF_NOT_FOUND)
bot.reply("hello bot", DEF_NOT_MATCH)
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
# Set some error handlers manually, one at a time.
bot.rs.errors.replyNotFound = "I didn't find a reply!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", DEF_NOT_MATCH)
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.replyNotMatched = "I don't even know what to say to that!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.objectNotFound = "I can't handle this object!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.deepRecursion = "I'm going too far down the rabbit hole."
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", "I'm going too far down the rabbit hole.")
test.done()
exports.test_error_constructor_configuration = (test) ->
bot = new TestCase(test, """
+ condition only
* <get name> == Aiden => Your name is Aiden!
+ recursion
- {@recursion}
+ impossible object
- Here we go: <call>unhandled</call>
> object unhandled rust
return "Hello world"
< object
""", {
errors:
replyNotFound: "I didn't find a reply!"
replyNotMatched: "I don't even know what to say to that!"
objectNotFound: "I can't handle this object!"
deepRecursion: "I'm going too far down the rabbit hole."
})
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", "I'm going too far down the rabbit hole.")
test.done()
exports.test_redirect_with_undefined_input = (test) ->
# <@> test
bot = new TestCase(test, """
+ test
- {topic=test}{@hi}
> topic test
+ hi
- hello
+ *
- {topic=random}<@>
< topic
+ *
- Wildcard "<star>"!
""")
bot.reply("test", "hello")
bot.reply("?", "Wildcard \"\"!")
# empty variable test
bot = new TestCase(test, """
! var globaltest = set test name test
+ test
- {topic=test}{@<get test_name>}
+ test without redirect
- {topic=test}<get test_name>
+ set test name *
- <set test_name=<star>>{@test}
+ get global test
@ <bot globaltest>
+ get bad global test
@ <bot badglobaltest>
> topic test
+ test
- hello <get test_name>!{topic=random}
+ *
- {topic=random}<@>
< topic
+ *
- Wildcard "<star>"!
""")
# No variable set, should go through wildcard
bot.reply("test", "Wildcard \"undefined\"!")
bot.reply("test without redirect", "undefined")
# Variable set, should respond with text
bot.reply("set test name test", "hello test!")
# Different variable set, should get wildcard
bot.reply("set test name newtest", "Wildcard \"newtest\"!")
# Test redirects using bot variable.
bot.reply("get global test", "hello test!")
bot.reply("get bad global test", "Wildcard \"undefined\"!")
test.done()
exports.test_initialmatch = (test) ->
bot = new TestCase(test, """
! array thanks = thanks|thank you
+ (hello|ni hao)
@ hi
+ hi
- Oh hi. {@phrase}
+ phrase
- How are you?
+ good
- That's great.
+ @thanks{weight=2}
- No problem. {@phrase}
+ *
- I don't know.
""")
bot.reply("Hello?", "Oh hi. How are you?")
bot.uservar("__lastmatch__", "phrase")
bot.uservar("__initialmatch__", "(hello|ni hao)")
bot.reply("Good!", "That's great.")
bot.uservar("__lastmatch__", "good")
bot.uservar("__initialmatch__", "good")
bot.reply("Thanks!", "No problem. How are you?")
bot.uservar("__lastmatch__", "phrase")
bot.uservar("__initialmatch__", "@thanks{weight=2}")
test.done()
exports.test_valid_history = (test) ->
bot = new TestCase(test, """
+ hello
- Hi!
+ bye
- Goodbye!
""")
bot.reply("Hello", "Hi!")
# Intentionally set a bad history.
bot.rs.setUservar(bot.username, "__history__", {"input": ["Hello"]})
bot.reply("Bye!", "Goodbye!")
test.done()
| 171661 | TestCase = require("./test-base")
################################################################################
# RiveScript API Tests
################################################################################
exports.test_load_directory_recursively = (test) ->
bot = new TestCase(test, """
+ *
- No, this failed.
""")
bot.rs.loadDirectory('./test/fixtures', ->
bot.rs.sortReplies()
bot.reply("Did the root directory rivescript load?", "Yes, the root directory rivescript loaded.")
bot.reply("Did the recursive directory rivescript load?", "Yes, the recursive directory rivescript loaded.")
test.done()
, ->
test.equal(true, false) # Throw error
)
exports.test_default_error_messages = (test) ->
bot = new TestCase(test, """
+ condition only
* <get name> == Aiden => Your name is Aiden!
+ recursion
- {@recursion}
+ impossible object
- Here we go: <call>unhandled</call>
> object unhandled rust
return "Hello world"
< object
""")
DEF_NOT_FOUND = "ERR: No Reply Found"
DEF_NOT_MATCH = "ERR: No Reply Matched"
DEF_NO_OBJECT = "[ERR: Object Not Found]"
DEF_RECURSION = "ERR: Deep Recursion Detected"
bot.reply("condition only", DEF_NOT_FOUND)
bot.reply("hello bot", DEF_NOT_MATCH)
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
# Set some error handlers manually, one at a time.
bot.rs.errors.replyNotFound = "I didn't find a reply!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", DEF_NOT_MATCH)
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.replyNotMatched = "I don't even know what to say to that!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.objectNotFound = "I can't handle this object!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.deepRecursion = "I'm going too far down the rabbit hole."
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", "I'm going too far down the rabbit hole.")
test.done()
exports.test_error_constructor_configuration = (test) ->
bot = new TestCase(test, """
+ condition only
* <get name> == Aiden => Your name is <NAME>!
+ recursion
- {@recursion}
+ impossible object
- Here we go: <call>unhandled</call>
> object unhandled rust
return "Hello world"
< object
""", {
errors:
replyNotFound: "I didn't find a reply!"
replyNotMatched: "I don't even know what to say to that!"
objectNotFound: "I can't handle this object!"
deepRecursion: "I'm going too far down the rabbit hole."
})
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", "I'm going too far down the rabbit hole.")
test.done()
exports.test_redirect_with_undefined_input = (test) ->
# <@> test
bot = new TestCase(test, """
+ test
- {topic=test}{@hi}
> topic test
+ hi
- hello
+ *
- {topic=random}<@>
< topic
+ *
- Wildcard "<star>"!
""")
bot.reply("test", "hello")
bot.reply("?", "Wildcard \"\"!")
# empty variable test
bot = new TestCase(test, """
! var globaltest = set test name test
+ test
- {topic=test}{@<get test_name>}
+ test without redirect
- {topic=test}<get test_name>
+ set test name *
- <set test_name=<star>>{@test}
+ get global test
@ <bot globaltest>
+ get bad global test
@ <bot badglobaltest>
> topic test
+ test
- hello <get test_name>!{topic=random}
+ *
- {topic=random}<@>
< topic
+ *
- Wildcard "<star>"!
""")
# No variable set, should go through wildcard
bot.reply("test", "Wildcard \"undefined\"!")
bot.reply("test without redirect", "undefined")
# Variable set, should respond with text
bot.reply("set test name test", "hello test!")
# Different variable set, should get wildcard
bot.reply("set test name newtest", "Wildcard \"newtest\"!")
# Test redirects using bot variable.
bot.reply("get global test", "hello test!")
bot.reply("get bad global test", "Wildcard \"undefined\"!")
test.done()
exports.test_initialmatch = (test) ->
bot = new TestCase(test, """
! array thanks = thanks|thank you
+ (hello|ni hao)
@ hi
+ hi
- Oh hi. {@phrase}
+ phrase
- How are you?
+ good
- That's great.
+ @thanks{weight=2}
- No problem. {@phrase}
+ *
- I don't know.
""")
bot.reply("Hello?", "Oh hi. How are you?")
bot.uservar("__lastmatch__", "phrase")
bot.uservar("__initialmatch__", "(hello|ni hao)")
bot.reply("Good!", "That's great.")
bot.uservar("__lastmatch__", "good")
bot.uservar("__initialmatch__", "good")
bot.reply("Thanks!", "No problem. How are you?")
bot.uservar("__lastmatch__", "phrase")
bot.uservar("__initialmatch__", "@thanks{weight=2}")
test.done()
exports.test_valid_history = (test) ->
bot = new TestCase(test, """
+ hello
- Hi!
+ bye
- Goodbye!
""")
bot.reply("Hello", "Hi!")
# Intentionally set a bad history.
bot.rs.setUservar(bot.username, "__history__", {"input": ["Hello"]})
bot.reply("Bye!", "Goodbye!")
test.done()
| true | TestCase = require("./test-base")
################################################################################
# RiveScript API Tests
################################################################################
exports.test_load_directory_recursively = (test) ->
bot = new TestCase(test, """
+ *
- No, this failed.
""")
bot.rs.loadDirectory('./test/fixtures', ->
bot.rs.sortReplies()
bot.reply("Did the root directory rivescript load?", "Yes, the root directory rivescript loaded.")
bot.reply("Did the recursive directory rivescript load?", "Yes, the recursive directory rivescript loaded.")
test.done()
, ->
test.equal(true, false) # Throw error
)
exports.test_default_error_messages = (test) ->
bot = new TestCase(test, """
+ condition only
* <get name> == Aiden => Your name is Aiden!
+ recursion
- {@recursion}
+ impossible object
- Here we go: <call>unhandled</call>
> object unhandled rust
return "Hello world"
< object
""")
DEF_NOT_FOUND = "ERR: No Reply Found"
DEF_NOT_MATCH = "ERR: No Reply Matched"
DEF_NO_OBJECT = "[ERR: Object Not Found]"
DEF_RECURSION = "ERR: Deep Recursion Detected"
bot.reply("condition only", DEF_NOT_FOUND)
bot.reply("hello bot", DEF_NOT_MATCH)
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
# Set some error handlers manually, one at a time.
bot.rs.errors.replyNotFound = "I didn't find a reply!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", DEF_NOT_MATCH)
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.replyNotMatched = "I don't even know what to say to that!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: #{DEF_NO_OBJECT}")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.objectNotFound = "I can't handle this object!"
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", DEF_RECURSION)
bot.rs.errors.deepRecursion = "I'm going too far down the rabbit hole."
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", "I'm going too far down the rabbit hole.")
test.done()
exports.test_error_constructor_configuration = (test) ->
bot = new TestCase(test, """
+ condition only
* <get name> == Aiden => Your name is PI:NAME:<NAME>END_PI!
+ recursion
- {@recursion}
+ impossible object
- Here we go: <call>unhandled</call>
> object unhandled rust
return "Hello world"
< object
""", {
errors:
replyNotFound: "I didn't find a reply!"
replyNotMatched: "I don't even know what to say to that!"
objectNotFound: "I can't handle this object!"
deepRecursion: "I'm going too far down the rabbit hole."
})
bot.reply("condition only", "I didn't find a reply!")
bot.reply("hello bot", "I don't even know what to say to that!")
bot.reply("impossible object", "Here we go: I can't handle this object!")
bot.reply("recursion", "I'm going too far down the rabbit hole.")
test.done()
exports.test_redirect_with_undefined_input = (test) ->
# <@> test
bot = new TestCase(test, """
+ test
- {topic=test}{@hi}
> topic test
+ hi
- hello
+ *
- {topic=random}<@>
< topic
+ *
- Wildcard "<star>"!
""")
bot.reply("test", "hello")
bot.reply("?", "Wildcard \"\"!")
# empty variable test
bot = new TestCase(test, """
! var globaltest = set test name test
+ test
- {topic=test}{@<get test_name>}
+ test without redirect
- {topic=test}<get test_name>
+ set test name *
- <set test_name=<star>>{@test}
+ get global test
@ <bot globaltest>
+ get bad global test
@ <bot badglobaltest>
> topic test
+ test
- hello <get test_name>!{topic=random}
+ *
- {topic=random}<@>
< topic
+ *
- Wildcard "<star>"!
""")
# No variable set, should go through wildcard
bot.reply("test", "Wildcard \"undefined\"!")
bot.reply("test without redirect", "undefined")
# Variable set, should respond with text
bot.reply("set test name test", "hello test!")
# Different variable set, should get wildcard
bot.reply("set test name newtest", "Wildcard \"newtest\"!")
# Test redirects using bot variable.
bot.reply("get global test", "hello test!")
bot.reply("get bad global test", "Wildcard \"undefined\"!")
test.done()
exports.test_initialmatch = (test) ->
bot = new TestCase(test, """
! array thanks = thanks|thank you
+ (hello|ni hao)
@ hi
+ hi
- Oh hi. {@phrase}
+ phrase
- How are you?
+ good
- That's great.
+ @thanks{weight=2}
- No problem. {@phrase}
+ *
- I don't know.
""")
bot.reply("Hello?", "Oh hi. How are you?")
bot.uservar("__lastmatch__", "phrase")
bot.uservar("__initialmatch__", "(hello|ni hao)")
bot.reply("Good!", "That's great.")
bot.uservar("__lastmatch__", "good")
bot.uservar("__initialmatch__", "good")
bot.reply("Thanks!", "No problem. How are you?")
bot.uservar("__lastmatch__", "phrase")
bot.uservar("__initialmatch__", "@thanks{weight=2}")
test.done()
exports.test_valid_history = (test) ->
bot = new TestCase(test, """
+ hello
- Hi!
+ bye
- Goodbye!
""")
bot.reply("Hello", "Hi!")
# Intentionally set a bad history.
bot.rs.setUservar(bot.username, "__history__", {"input": ["Hello"]})
bot.reply("Bye!", "Goodbye!")
test.done()
|
[
{
"context": "ocess.env?.AWS_AKI?\n\t_CONFIG.aws.secretAccessKey = process.env.AWS_SAK if process.env?.AWS_SAK?\n\t_CONFIG.aws.region = pr",
"end": 378,
"score": 0.8504992723464966,
"start": 359,
"tag": "KEY",
"value": "process.env.AWS_SAK"
}
] | test/tests/_incl/crud_test.coffee | mpneuried/simple-dynamo | 1 | module.exports = ( testTitle, _basicTable, _overwriteTable, _logTable1, _logTable2, _setTable )->
# read configuration
_CONFIG = require "../../config.js"
_ = require("underscore")
should = require('should')
# read replace AWS keys from environment
_CONFIG.aws.accessKeyId = process.env.AWS_AKI if process.env?.AWS_AKI?
_CONFIG.aws.secretAccessKey = process.env.AWS_SAK if process.env?.AWS_SAK?
_CONFIG.aws.region = process.env.AWS_REGION if process.env?.AWS_REGION?
_CONFIG.aws.tablePrefix = process.env.AWS_TABLEPREFIX if process.env?.AWS_TABLEPREFIX?
# import module to test
SimpleDynamo = require "../../../lib/dynamo/"
_utils = SimpleDynamo.utils
_DATA = require "../../testdata.js"
dynDB = null
tableG = null
describe "----- #{ testTitle } TESTS -----", ->
before ( done )->
done()
describe 'Initialization', ->
it 'init manager', ( done )->
dynDB = new SimpleDynamo( _CONFIG.aws, _CONFIG.tables )
done()
return
it 'pre connect', ( done )->
dynDB.fetched.should.be.false
dynDB.connected.should.be.false
done()
return
it 'init table objects', ( done )->
dynDB.connect ( err )->
throw err if err
tableG = dynDB.get( _basicTable )
should.exist( tableG )
done()
return
return
it 'post connect', ( done )->
dynDB.fetched.should.be.true
done()
return
return
describe "#{ testTitle } CRUD Tests", ->
_C = _CONFIG.tables[ _basicTable ]
_D = _DATA[ _basicTable ]
_G = {}
_ItemCount = 0
it "list existing items", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
_ItemCount = items.length
done()
return
return
it "create an item", ( done )->
tableG.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
_ItemCount++
_G[ "insert1" ] = item
item.id.should.equal( _D[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
done()
return
return
it "try to get the item and check the content", ( done )->
tableG.get _G[ "insert1" ][ _C.hashKey ], ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
item.id.should.equal( _D[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
done()
return
return
it "create a second item", ( done )->
tableG.set _.clone( _D[ "insert2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
should.exist( item.additional )
_ItemCount++
_G[ "insert2" ] = item
item.name.should.equal( _D[ "insert2" ].name )
item.email.should.equal( _D[ "insert2" ].email )
item.age.should.equal( _D[ "insert2" ].age )
item.boolean.should.equal( _D[ "insert2" ].boolean )
item.additional.should.equal( _D[ "insert2" ].additional )
item.obj.should.eql( _D[ "insert2" ].obj )
done()
return
return
it "create a third item", ( done )->
tableG.set _.clone( _D[ "insert3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
_ItemCount++
_G[ "insert3" ] = item
item.name.should.equal( _D[ "insert3" ].name )
item.email.should.equal( _D[ "insert3" ].email )
item.boolean.should.equal( _D[ "insert3" ].boolean )
item.age.should.equal( _D[ "insert3" ].age )
item.obj.should.eql( _D[ "insert3" ].obj )
done()
return
return
if _basicTable.slice( 0,2 ) is "C_"
it "insert a invalid item to combined table", ( done )->
tableG.set _.clone( _D[ "insert4" ] ), ( err, item )->
should.exist( err )
err.name.should.equal( "combined-hash-invalid" )
should.not.exist( item )
done()
return
return
it "list existing items after insert(s)", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
it "try to get two items at once (mget)", ( done )->
tableG.mget [ _G[ "insert1" ][ _C.hashKey ], _G[ "insert2" ][ _C.hashKey ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G[ "insert1" ], _G[ "insert2" ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "try to get two items plus a unkown at once (mget)", ( done )->
tableG.mget [ _G[ "insert1" ][ _C.hashKey ], _G[ "insert2" ][ _C.hashKey ], "xxxxxx" ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G[ "insert1" ], _G[ "insert2" ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "update first item with empty string attribute", ( done )->
tableG.set _G[ "insert1" ][ _C.hashKey ], _D[ "update1" ], ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
_G[ "insert1" ] = item
done()
return
return
it "delete the first inserted item", ( done )->
tableG.del _G[ "insert1" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "try to get deleted item", ( done )->
tableG.get _G[ "insert1" ][ _C.hashKey ], ( err, item )->
throw err if err
should.not.exist( item )
done()
return
return
it "update second item", ( done )->
tableG.set _G[ "insert2" ][ _C.hashKey ], _D[ "update2" ], fields: [ "id", "name", "age", "obj" ], ( err, item )->
throw err if err
_G[ "insert2" ] = item
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.obj )
should.not.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert2" ].id )
item.name.should.equal( _D[ "update2" ].name )
item.age.should.equal( _D[ "update2" ].age )
item.obj.should.eql( _D[ "insert2" ].obj )
done()
return
return
it "update third item with successfull conditonal", ( done )->
_opt =
fields: [ "id", "name", "age", "obj" ]
conditionals:
"age": { "==": 78 }
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3" ], _opt, ( err, item )->
throw err if err
_G[ "insert3" ] = item
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.obj )
should.not.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert3" ].id )
item.name.should.equal( _D[ "update3" ].name )
item.age.should.equal( _D[ "update3" ].age )
item.obj.should.eql( _D[ "update3" ].obj )
done()
return
return
it "update third item with failing conditonal", ( done )->
_opt =
fields: [ "id", "name", "age" ]
conditionals:
"age": { "==": 123 }
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3" ], _opt, ( err, item )->
should.exist( err )
err.name.should.equal( "conditional-check-failed" )
done()
return
return
it "update third item with `number` field = `null`", ( done )->
_opt =
fields: [ "id", "name", "age" ]
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3_2" ], _opt, ( err, item )->
should.not.exist( err )
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.age )
item.id.should.equal( _G[ "insert3" ].id )
item.name.should.equal( _G[ "insert3" ].name )
_G[ "insert3" ] = item
done()
return
return
it "delete the second inserted item", ( done )->
tableG.del _G[ "insert2" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "delete the third inserted item", ( done )->
tableG.del _G[ "insert3" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "check item count after update(s) and delete(s)", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
return
describe "#{ testTitle } Overwrite Tests", ->
table = null
_C = _CONFIG.tables[ _overwriteTable ]
_D = _DATA[ _overwriteTable ]
_G = {}
_ItemCount = 0
it "get table", ( done )->
table = dynDB.get( _overwriteTable )
should.exist( table )
done()
return
it "create item", ( done )->
table.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.title )
should.not.exist( item.done )
item.id.should.equal( _D[ "insert1" ].id )
item.title.should.equal( _D[ "insert1" ].title )
#item.done.should.equal( _D[ "insert1" ].done )
_ItemCount++
_G[ "insert1" ] = item
done()
return
return
it "try second insert with the same hash", ( done )->
table.set _D[ "insert2" ], ( err, item )->
should.exist( err )
err.name.should.equal( "conditional-check-failed" )
should.not.exist( item )
done()
return
return
it "list items", ( done )->
table.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
it "delete the first inserted item", ( done )->
table.del _G[ "insert1" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
describe "#{ testTitle } Range Tests", ->
table1 = null
table2 = null
_D1 = _DATA[ _logTable1 ]
_D2 = _DATA[ _logTable2 ]
_C1 = _CONFIG.tables[ _logTable1 ]
_C2 = _CONFIG.tables[ _logTable2 ]
_G1 = []
_G2 = []
_ItemCount1 = 0
_ItemCount2 = 0
last = null
pre_last = null
it "get table 1", ( done )->
table1 = dynDB.get( _logTable1 )
should.exist( table1 )
done()
return
it "get table 2", ( done )->
table2 = dynDB.get( _logTable2 )
should.exist( table2 )
done()
return
it "insert #{ _D1.inserts.length } items to range list of table 1", ( done )->
aFns = []
for insert in _D1.inserts
_throtteldSet = _.throttle( table1.set, 250 )
aFns.push _.bind( ( insert, cba )->
tbl = @
_throtteldSet _.clone( insert ), ( err, item )->
throw err if err
if tbl.isCombinedTable
item.id.should.equal( tbl.name + tbl.combinedHashDelimiter + insert.user )
else
item.id.should.equal( insert.user )
item.t.should.equal( insert.t )
item.user.should.equal( insert.user )
item.title.should.equal( insert.title )
_ItemCount1++
_G1.push( item )
cba( item )
, table1, insert )
_utils.runSeries aFns, ( err )->
done()
it "insert #{ _D2.inserts.length } items to range list of table 2", ( done )->
aFns = []
for insert in _D2.inserts
_throtteldSet = _.throttle( table2.set, 250 )
aFns.push _.bind( ( insert, cba )->
tbl = @
_throtteldSet _.clone( insert ), ( err, item )->
throw err if err
if tbl.isCombinedTable
item.id.should.equal( tbl.name + tbl.combinedHashDelimiter + insert.user )
else
item.id.should.equal( insert.user)
item.t.should.equal( insert.t )
item.user.should.equal( insert.user )
item.title.should.equal( insert.title )
_ItemCount2++
_G2.push( item )
cba( item )
, table2, insert )
_utils.runSeries aFns, ( err )->
done()
it "try to get two items at once (mget)", ( done )->
table1.mget [ [ _G1[ 1 ][ _C1.hashKey ],_G1[ 1 ][ _C1.rangeKey ] ] , [ _G1[ 5 ][ _C1.hashKey ],_G1[ 5 ][ _C1.rangeKey ] ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G1[ 1 ], _G1[ 5 ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "try to get two items plus a unkown at once (mget)", ( done )->
table2.mget [ [ _G2[ 1 ][ _C2.hashKey ],_G2[ 1 ][ _C2.rangeKey ] ] , [ _G2[ 5 ][ _C2.hashKey ],_G2[ 5 ][ _C2.rangeKey ] ], [ _G2[ 3 ][ _C2.hashKey ], 999 ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G2[ 1 ], _G2[ 5 ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "get a range of table 1", ( done )->
if _logTable1.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C1.name }A" }
t: { ">=": 5 }
else
_q =
id: { "==": "A" }
t: { ">=": 5 }
table1.find _q, ( err, items )->
throw err if err
items.length.should.equal( 3 )
done()
it "get a range of table 2", ( done )->
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }D" }
t: { ">=": 3 }
else
_q =
id: { "==": "D" }
t: { ">=": 3 }
table2.find _q, ( err, items )->
throw err if err
items.length.should.equal( 1 )
done()
it "get a single item of table 1", ( done )->
_item = _G1[ 4 ]
table1.get [ _item.id, _item.t ], ( err, item )->
throw err if err
item.should.eql( _item )
done()
it "should return only 3 items", (done) ->
_count = 3
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }A" }
t: { ">=": 0 }
else
_q =
id: { "==": "A" }
t: { ">=": 0 }
_o =
limit: _count
table2.find _q, _o, ( err, items )->
throw err if err
should.exist items
items.length.should.equal _count
last = items[_count - 1]
pre_last = items[_count - 2]
done()
it "should return the next 3 by `startAt`", (done) ->
_count = 3
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }A" }
t: { ">=": 0 }
else
_q =
id: { "==": "A" }
t: { ">=": 0 }
_o =
limit: _count
_c = [ pre_last.id, pre_last.t ]
table2.find _q, _c, _o, ( err, items )->
throw err if err
predicted_first = items[0]
predicted_first.should.eql last
items.length.should.equal _count
last = items[_count - 1]
pre_last = items[_count - 2]
done()
it "delete whole data from table 1", ( done )->
aFns = []
for item in _G1
_throtteldDel = _.throttle( table1.del, 250 )
aFns.push _.bind( ( item, cba )->
_throtteldDel [ item.id, item.t ], ( err )->
throw err if err
_ItemCount1--
cba()
, table1, item )
_utils.runSeries aFns, ( err )->
done()
it "delete whole data from table 2", ( done )->
aFns = []
for item in _G2
_throtteldDel = _.throttle( table2.del, 250 )
aFns.push _.bind( ( item, cba )->
_throtteldDel [ item.id, item.t ], ( err )->
throw err if err
_ItemCount2--
cba()
, table2, item )
_utils.runSeries aFns, ( err )->
done()
it "check for empty table 1", ( done )->
_q = {}
table1.find _q, ( err, items )->
throw err if err
items.length.should.equal( _ItemCount1 )
done()
it "check for empty table 2", ( done )->
_q = {}
table2.find _q, ( err, items )->
throw err if err
items.length.should.equal( _ItemCount2 )
done()
describe "#{ testTitle } Set Tests", ->
_C = _CONFIG.tables[ _setTable ]
_D = _DATA[ _setTable ]
_G = {}
_ItemCount = 0
table = null
it "get table", ( done )->
table = dynDB.get( _setTable )
should.exist( table )
done()
return
it "create the test item", ( done )->
table.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a" ] )
_ItemCount++
_G[ "insert1" ] = item
done()
return
return
it "test raw reset", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a", "b" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a", "b", "c" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $rem action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "b", "c" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $reset action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update4" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "x", "y" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update5" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "x", "y", "z" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $rem action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update6" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y", "z" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $reset action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update7" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action with empty array", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update8" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "test $rem action with empty array", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update9" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "update set to null should remove attribute", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update10" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "create the test item2 with empty array as set", ( done )->
_.delay( =>
table.set _.clone( _D[ "insert2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert2" ].name )
_ItemCount++
_G[ "insert2" ] = item
done()
return
return
, 250 )
return
it "create the test item3 with empty array as set", ( done )->
_.delay( =>
table.set _.clone( _D[ "insert3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert3" ].name )
_ItemCount++
_G[ "insert3" ] = item
done()
return
return
, 250 )
return
it "delete test item. ( Has delay of 250ms to prevent from throughput error )", ( done )->
_.delay( =>
table.del _G[ "insert1" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
it "delete test item 2", ( done )->
_.delay( =>
table.del _G[ "insert2" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
it "delete test item 3", ( done )->
_.delay( =>
table.del _G[ "insert3" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
return
| 28324 | module.exports = ( testTitle, _basicTable, _overwriteTable, _logTable1, _logTable2, _setTable )->
# read configuration
_CONFIG = require "../../config.js"
_ = require("underscore")
should = require('should')
# read replace AWS keys from environment
_CONFIG.aws.accessKeyId = process.env.AWS_AKI if process.env?.AWS_AKI?
_CONFIG.aws.secretAccessKey = <KEY> if process.env?.AWS_SAK?
_CONFIG.aws.region = process.env.AWS_REGION if process.env?.AWS_REGION?
_CONFIG.aws.tablePrefix = process.env.AWS_TABLEPREFIX if process.env?.AWS_TABLEPREFIX?
# import module to test
SimpleDynamo = require "../../../lib/dynamo/"
_utils = SimpleDynamo.utils
_DATA = require "../../testdata.js"
dynDB = null
tableG = null
describe "----- #{ testTitle } TESTS -----", ->
before ( done )->
done()
describe 'Initialization', ->
it 'init manager', ( done )->
dynDB = new SimpleDynamo( _CONFIG.aws, _CONFIG.tables )
done()
return
it 'pre connect', ( done )->
dynDB.fetched.should.be.false
dynDB.connected.should.be.false
done()
return
it 'init table objects', ( done )->
dynDB.connect ( err )->
throw err if err
tableG = dynDB.get( _basicTable )
should.exist( tableG )
done()
return
return
it 'post connect', ( done )->
dynDB.fetched.should.be.true
done()
return
return
describe "#{ testTitle } CRUD Tests", ->
_C = _CONFIG.tables[ _basicTable ]
_D = _DATA[ _basicTable ]
_G = {}
_ItemCount = 0
it "list existing items", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
_ItemCount = items.length
done()
return
return
it "create an item", ( done )->
tableG.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
_ItemCount++
_G[ "insert1" ] = item
item.id.should.equal( _D[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
done()
return
return
it "try to get the item and check the content", ( done )->
tableG.get _G[ "insert1" ][ _C.hashKey ], ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
item.id.should.equal( _D[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
done()
return
return
it "create a second item", ( done )->
tableG.set _.clone( _D[ "insert2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
should.exist( item.additional )
_ItemCount++
_G[ "insert2" ] = item
item.name.should.equal( _D[ "insert2" ].name )
item.email.should.equal( _D[ "insert2" ].email )
item.age.should.equal( _D[ "insert2" ].age )
item.boolean.should.equal( _D[ "insert2" ].boolean )
item.additional.should.equal( _D[ "insert2" ].additional )
item.obj.should.eql( _D[ "insert2" ].obj )
done()
return
return
it "create a third item", ( done )->
tableG.set _.clone( _D[ "insert3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
_ItemCount++
_G[ "insert3" ] = item
item.name.should.equal( _D[ "insert3" ].name )
item.email.should.equal( _D[ "insert3" ].email )
item.boolean.should.equal( _D[ "insert3" ].boolean )
item.age.should.equal( _D[ "insert3" ].age )
item.obj.should.eql( _D[ "insert3" ].obj )
done()
return
return
if _basicTable.slice( 0,2 ) is "C_"
it "insert a invalid item to combined table", ( done )->
tableG.set _.clone( _D[ "insert4" ] ), ( err, item )->
should.exist( err )
err.name.should.equal( "combined-hash-invalid" )
should.not.exist( item )
done()
return
return
it "list existing items after insert(s)", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
it "try to get two items at once (mget)", ( done )->
tableG.mget [ _G[ "insert1" ][ _C.hashKey ], _G[ "insert2" ][ _C.hashKey ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G[ "insert1" ], _G[ "insert2" ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "try to get two items plus a unkown at once (mget)", ( done )->
tableG.mget [ _G[ "insert1" ][ _C.hashKey ], _G[ "insert2" ][ _C.hashKey ], "xxxxxx" ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G[ "insert1" ], _G[ "insert2" ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "update first item with empty string attribute", ( done )->
tableG.set _G[ "insert1" ][ _C.hashKey ], _D[ "update1" ], ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
_G[ "insert1" ] = item
done()
return
return
it "delete the first inserted item", ( done )->
tableG.del _G[ "insert1" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "try to get deleted item", ( done )->
tableG.get _G[ "insert1" ][ _C.hashKey ], ( err, item )->
throw err if err
should.not.exist( item )
done()
return
return
it "update second item", ( done )->
tableG.set _G[ "insert2" ][ _C.hashKey ], _D[ "update2" ], fields: [ "id", "name", "age", "obj" ], ( err, item )->
throw err if err
_G[ "insert2" ] = item
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.obj )
should.not.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert2" ].id )
item.name.should.equal( _D[ "update2" ].name )
item.age.should.equal( _D[ "update2" ].age )
item.obj.should.eql( _D[ "insert2" ].obj )
done()
return
return
it "update third item with successfull conditonal", ( done )->
_opt =
fields: [ "id", "name", "age", "obj" ]
conditionals:
"age": { "==": 78 }
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3" ], _opt, ( err, item )->
throw err if err
_G[ "insert3" ] = item
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.obj )
should.not.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert3" ].id )
item.name.should.equal( _D[ "update3" ].name )
item.age.should.equal( _D[ "update3" ].age )
item.obj.should.eql( _D[ "update3" ].obj )
done()
return
return
it "update third item with failing conditonal", ( done )->
_opt =
fields: [ "id", "name", "age" ]
conditionals:
"age": { "==": 123 }
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3" ], _opt, ( err, item )->
should.exist( err )
err.name.should.equal( "conditional-check-failed" )
done()
return
return
it "update third item with `number` field = `null`", ( done )->
_opt =
fields: [ "id", "name", "age" ]
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3_2" ], _opt, ( err, item )->
should.not.exist( err )
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.age )
item.id.should.equal( _G[ "insert3" ].id )
item.name.should.equal( _G[ "insert3" ].name )
_G[ "insert3" ] = item
done()
return
return
it "delete the second inserted item", ( done )->
tableG.del _G[ "insert2" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "delete the third inserted item", ( done )->
tableG.del _G[ "insert3" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "check item count after update(s) and delete(s)", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
return
describe "#{ testTitle } Overwrite Tests", ->
table = null
_C = _CONFIG.tables[ _overwriteTable ]
_D = _DATA[ _overwriteTable ]
_G = {}
_ItemCount = 0
it "get table", ( done )->
table = dynDB.get( _overwriteTable )
should.exist( table )
done()
return
it "create item", ( done )->
table.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.title )
should.not.exist( item.done )
item.id.should.equal( _D[ "insert1" ].id )
item.title.should.equal( _D[ "insert1" ].title )
#item.done.should.equal( _D[ "insert1" ].done )
_ItemCount++
_G[ "insert1" ] = item
done()
return
return
it "try second insert with the same hash", ( done )->
table.set _D[ "insert2" ], ( err, item )->
should.exist( err )
err.name.should.equal( "conditional-check-failed" )
should.not.exist( item )
done()
return
return
it "list items", ( done )->
table.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
it "delete the first inserted item", ( done )->
table.del _G[ "insert1" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
describe "#{ testTitle } Range Tests", ->
table1 = null
table2 = null
_D1 = _DATA[ _logTable1 ]
_D2 = _DATA[ _logTable2 ]
_C1 = _CONFIG.tables[ _logTable1 ]
_C2 = _CONFIG.tables[ _logTable2 ]
_G1 = []
_G2 = []
_ItemCount1 = 0
_ItemCount2 = 0
last = null
pre_last = null
it "get table 1", ( done )->
table1 = dynDB.get( _logTable1 )
should.exist( table1 )
done()
return
it "get table 2", ( done )->
table2 = dynDB.get( _logTable2 )
should.exist( table2 )
done()
return
it "insert #{ _D1.inserts.length } items to range list of table 1", ( done )->
aFns = []
for insert in _D1.inserts
_throtteldSet = _.throttle( table1.set, 250 )
aFns.push _.bind( ( insert, cba )->
tbl = @
_throtteldSet _.clone( insert ), ( err, item )->
throw err if err
if tbl.isCombinedTable
item.id.should.equal( tbl.name + tbl.combinedHashDelimiter + insert.user )
else
item.id.should.equal( insert.user )
item.t.should.equal( insert.t )
item.user.should.equal( insert.user )
item.title.should.equal( insert.title )
_ItemCount1++
_G1.push( item )
cba( item )
, table1, insert )
_utils.runSeries aFns, ( err )->
done()
it "insert #{ _D2.inserts.length } items to range list of table 2", ( done )->
aFns = []
for insert in _D2.inserts
_throtteldSet = _.throttle( table2.set, 250 )
aFns.push _.bind( ( insert, cba )->
tbl = @
_throtteldSet _.clone( insert ), ( err, item )->
throw err if err
if tbl.isCombinedTable
item.id.should.equal( tbl.name + tbl.combinedHashDelimiter + insert.user )
else
item.id.should.equal( insert.user)
item.t.should.equal( insert.t )
item.user.should.equal( insert.user )
item.title.should.equal( insert.title )
_ItemCount2++
_G2.push( item )
cba( item )
, table2, insert )
_utils.runSeries aFns, ( err )->
done()
it "try to get two items at once (mget)", ( done )->
table1.mget [ [ _G1[ 1 ][ _C1.hashKey ],_G1[ 1 ][ _C1.rangeKey ] ] , [ _G1[ 5 ][ _C1.hashKey ],_G1[ 5 ][ _C1.rangeKey ] ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G1[ 1 ], _G1[ 5 ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "try to get two items plus a unkown at once (mget)", ( done )->
table2.mget [ [ _G2[ 1 ][ _C2.hashKey ],_G2[ 1 ][ _C2.rangeKey ] ] , [ _G2[ 5 ][ _C2.hashKey ],_G2[ 5 ][ _C2.rangeKey ] ], [ _G2[ 3 ][ _C2.hashKey ], 999 ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G2[ 1 ], _G2[ 5 ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "get a range of table 1", ( done )->
if _logTable1.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C1.name }A" }
t: { ">=": 5 }
else
_q =
id: { "==": "A" }
t: { ">=": 5 }
table1.find _q, ( err, items )->
throw err if err
items.length.should.equal( 3 )
done()
it "get a range of table 2", ( done )->
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }D" }
t: { ">=": 3 }
else
_q =
id: { "==": "D" }
t: { ">=": 3 }
table2.find _q, ( err, items )->
throw err if err
items.length.should.equal( 1 )
done()
it "get a single item of table 1", ( done )->
_item = _G1[ 4 ]
table1.get [ _item.id, _item.t ], ( err, item )->
throw err if err
item.should.eql( _item )
done()
it "should return only 3 items", (done) ->
_count = 3
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }A" }
t: { ">=": 0 }
else
_q =
id: { "==": "A" }
t: { ">=": 0 }
_o =
limit: _count
table2.find _q, _o, ( err, items )->
throw err if err
should.exist items
items.length.should.equal _count
last = items[_count - 1]
pre_last = items[_count - 2]
done()
it "should return the next 3 by `startAt`", (done) ->
_count = 3
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }A" }
t: { ">=": 0 }
else
_q =
id: { "==": "A" }
t: { ">=": 0 }
_o =
limit: _count
_c = [ pre_last.id, pre_last.t ]
table2.find _q, _c, _o, ( err, items )->
throw err if err
predicted_first = items[0]
predicted_first.should.eql last
items.length.should.equal _count
last = items[_count - 1]
pre_last = items[_count - 2]
done()
it "delete whole data from table 1", ( done )->
aFns = []
for item in _G1
_throtteldDel = _.throttle( table1.del, 250 )
aFns.push _.bind( ( item, cba )->
_throtteldDel [ item.id, item.t ], ( err )->
throw err if err
_ItemCount1--
cba()
, table1, item )
_utils.runSeries aFns, ( err )->
done()
it "delete whole data from table 2", ( done )->
aFns = []
for item in _G2
_throtteldDel = _.throttle( table2.del, 250 )
aFns.push _.bind( ( item, cba )->
_throtteldDel [ item.id, item.t ], ( err )->
throw err if err
_ItemCount2--
cba()
, table2, item )
_utils.runSeries aFns, ( err )->
done()
it "check for empty table 1", ( done )->
_q = {}
table1.find _q, ( err, items )->
throw err if err
items.length.should.equal( _ItemCount1 )
done()
it "check for empty table 2", ( done )->
_q = {}
table2.find _q, ( err, items )->
throw err if err
items.length.should.equal( _ItemCount2 )
done()
describe "#{ testTitle } Set Tests", ->
_C = _CONFIG.tables[ _setTable ]
_D = _DATA[ _setTable ]
_G = {}
_ItemCount = 0
table = null
it "get table", ( done )->
table = dynDB.get( _setTable )
should.exist( table )
done()
return
it "create the test item", ( done )->
table.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a" ] )
_ItemCount++
_G[ "insert1" ] = item
done()
return
return
it "test raw reset", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a", "b" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a", "b", "c" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $rem action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "b", "c" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $reset action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update4" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "x", "y" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update5" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "x", "y", "z" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $rem action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update6" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y", "z" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $reset action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update7" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action with empty array", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update8" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "test $rem action with empty array", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update9" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "update set to null should remove attribute", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update10" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "create the test item2 with empty array as set", ( done )->
_.delay( =>
table.set _.clone( _D[ "insert2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert2" ].name )
_ItemCount++
_G[ "insert2" ] = item
done()
return
return
, 250 )
return
it "create the test item3 with empty array as set", ( done )->
_.delay( =>
table.set _.clone( _D[ "insert3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert3" ].name )
_ItemCount++
_G[ "insert3" ] = item
done()
return
return
, 250 )
return
it "delete test item. ( Has delay of 250ms to prevent from throughput error )", ( done )->
_.delay( =>
table.del _G[ "insert1" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
it "delete test item 2", ( done )->
_.delay( =>
table.del _G[ "insert2" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
it "delete test item 3", ( done )->
_.delay( =>
table.del _G[ "insert3" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
return
| true | module.exports = ( testTitle, _basicTable, _overwriteTable, _logTable1, _logTable2, _setTable )->
# read configuration
_CONFIG = require "../../config.js"
_ = require("underscore")
should = require('should')
# read replace AWS keys from environment
_CONFIG.aws.accessKeyId = process.env.AWS_AKI if process.env?.AWS_AKI?
_CONFIG.aws.secretAccessKey = PI:KEY:<KEY>END_PI if process.env?.AWS_SAK?
_CONFIG.aws.region = process.env.AWS_REGION if process.env?.AWS_REGION?
_CONFIG.aws.tablePrefix = process.env.AWS_TABLEPREFIX if process.env?.AWS_TABLEPREFIX?
# import module to test
SimpleDynamo = require "../../../lib/dynamo/"
_utils = SimpleDynamo.utils
_DATA = require "../../testdata.js"
dynDB = null
tableG = null
describe "----- #{ testTitle } TESTS -----", ->
before ( done )->
done()
describe 'Initialization', ->
it 'init manager', ( done )->
dynDB = new SimpleDynamo( _CONFIG.aws, _CONFIG.tables )
done()
return
it 'pre connect', ( done )->
dynDB.fetched.should.be.false
dynDB.connected.should.be.false
done()
return
it 'init table objects', ( done )->
dynDB.connect ( err )->
throw err if err
tableG = dynDB.get( _basicTable )
should.exist( tableG )
done()
return
return
it 'post connect', ( done )->
dynDB.fetched.should.be.true
done()
return
return
describe "#{ testTitle } CRUD Tests", ->
_C = _CONFIG.tables[ _basicTable ]
_D = _DATA[ _basicTable ]
_G = {}
_ItemCount = 0
it "list existing items", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
_ItemCount = items.length
done()
return
return
it "create an item", ( done )->
tableG.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
_ItemCount++
_G[ "insert1" ] = item
item.id.should.equal( _D[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
done()
return
return
it "try to get the item and check the content", ( done )->
tableG.get _G[ "insert1" ][ _C.hashKey ], ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
item.id.should.equal( _D[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
done()
return
return
it "create a second item", ( done )->
tableG.set _.clone( _D[ "insert2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
should.exist( item.additional )
_ItemCount++
_G[ "insert2" ] = item
item.name.should.equal( _D[ "insert2" ].name )
item.email.should.equal( _D[ "insert2" ].email )
item.age.should.equal( _D[ "insert2" ].age )
item.boolean.should.equal( _D[ "insert2" ].boolean )
item.additional.should.equal( _D[ "insert2" ].additional )
item.obj.should.eql( _D[ "insert2" ].obj )
done()
return
return
it "create a third item", ( done )->
tableG.set _.clone( _D[ "insert3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.email )
should.exist( item.age )
_ItemCount++
_G[ "insert3" ] = item
item.name.should.equal( _D[ "insert3" ].name )
item.email.should.equal( _D[ "insert3" ].email )
item.boolean.should.equal( _D[ "insert3" ].boolean )
item.age.should.equal( _D[ "insert3" ].age )
item.obj.should.eql( _D[ "insert3" ].obj )
done()
return
return
if _basicTable.slice( 0,2 ) is "C_"
it "insert a invalid item to combined table", ( done )->
tableG.set _.clone( _D[ "insert4" ] ), ( err, item )->
should.exist( err )
err.name.should.equal( "combined-hash-invalid" )
should.not.exist( item )
done()
return
return
it "list existing items after insert(s)", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
it "try to get two items at once (mget)", ( done )->
tableG.mget [ _G[ "insert1" ][ _C.hashKey ], _G[ "insert2" ][ _C.hashKey ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G[ "insert1" ], _G[ "insert2" ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "try to get two items plus a unkown at once (mget)", ( done )->
tableG.mget [ _G[ "insert1" ][ _C.hashKey ], _G[ "insert2" ][ _C.hashKey ], "xxxxxx" ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G[ "insert1" ], _G[ "insert2" ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "update first item with empty string attribute", ( done )->
tableG.set _G[ "insert1" ][ _C.hashKey ], _D[ "update1" ], ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert1" ].id )
item.name.should.equal( _D[ "insert1" ].name )
item.email.should.equal( _D[ "insert1" ].email )
item.age.should.equal( _D[ "insert1" ].age )
_G[ "insert1" ] = item
done()
return
return
it "delete the first inserted item", ( done )->
tableG.del _G[ "insert1" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "try to get deleted item", ( done )->
tableG.get _G[ "insert1" ][ _C.hashKey ], ( err, item )->
throw err if err
should.not.exist( item )
done()
return
return
it "update second item", ( done )->
tableG.set _G[ "insert2" ][ _C.hashKey ], _D[ "update2" ], fields: [ "id", "name", "age", "obj" ], ( err, item )->
throw err if err
_G[ "insert2" ] = item
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.obj )
should.not.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert2" ].id )
item.name.should.equal( _D[ "update2" ].name )
item.age.should.equal( _D[ "update2" ].age )
item.obj.should.eql( _D[ "insert2" ].obj )
done()
return
return
it "update third item with successfull conditonal", ( done )->
_opt =
fields: [ "id", "name", "age", "obj" ]
conditionals:
"age": { "==": 78 }
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3" ], _opt, ( err, item )->
throw err if err
_G[ "insert3" ] = item
should.exist( item.id )
should.exist( item.name )
should.exist( item.age )
should.exist( item.obj )
should.not.exist( item.email )
should.not.exist( item.additional )
item.id.should.equal( _G[ "insert3" ].id )
item.name.should.equal( _D[ "update3" ].name )
item.age.should.equal( _D[ "update3" ].age )
item.obj.should.eql( _D[ "update3" ].obj )
done()
return
return
it "update third item with failing conditonal", ( done )->
_opt =
fields: [ "id", "name", "age" ]
conditionals:
"age": { "==": 123 }
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3" ], _opt, ( err, item )->
should.exist( err )
err.name.should.equal( "conditional-check-failed" )
done()
return
return
it "update third item with `number` field = `null`", ( done )->
_opt =
fields: [ "id", "name", "age" ]
tableG.set _G[ "insert3" ][ _C.hashKey ], _D[ "update3_2" ], _opt, ( err, item )->
should.not.exist( err )
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.age )
item.id.should.equal( _G[ "insert3" ].id )
item.name.should.equal( _G[ "insert3" ].name )
_G[ "insert3" ] = item
done()
return
return
it "delete the second inserted item", ( done )->
tableG.del _G[ "insert2" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "delete the third inserted item", ( done )->
tableG.del _G[ "insert3" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
it "check item count after update(s) and delete(s)", ( done )->
tableG.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
return
describe "#{ testTitle } Overwrite Tests", ->
table = null
_C = _CONFIG.tables[ _overwriteTable ]
_D = _DATA[ _overwriteTable ]
_G = {}
_ItemCount = 0
it "get table", ( done )->
table = dynDB.get( _overwriteTable )
should.exist( table )
done()
return
it "create item", ( done )->
table.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.title )
should.not.exist( item.done )
item.id.should.equal( _D[ "insert1" ].id )
item.title.should.equal( _D[ "insert1" ].title )
#item.done.should.equal( _D[ "insert1" ].done )
_ItemCount++
_G[ "insert1" ] = item
done()
return
return
it "try second insert with the same hash", ( done )->
table.set _D[ "insert2" ], ( err, item )->
should.exist( err )
err.name.should.equal( "conditional-check-failed" )
should.not.exist( item )
done()
return
return
it "list items", ( done )->
table.find ( err, items )->
throw err if err
items.should.an.instanceof( Array )
items.length.should.equal( _ItemCount )
done()
return
return
it "delete the first inserted item", ( done )->
table.del _G[ "insert1" ][ _C.hashKey ], ( err )->
throw err if err
_ItemCount--
done()
return
return
describe "#{ testTitle } Range Tests", ->
table1 = null
table2 = null
_D1 = _DATA[ _logTable1 ]
_D2 = _DATA[ _logTable2 ]
_C1 = _CONFIG.tables[ _logTable1 ]
_C2 = _CONFIG.tables[ _logTable2 ]
_G1 = []
_G2 = []
_ItemCount1 = 0
_ItemCount2 = 0
last = null
pre_last = null
it "get table 1", ( done )->
table1 = dynDB.get( _logTable1 )
should.exist( table1 )
done()
return
it "get table 2", ( done )->
table2 = dynDB.get( _logTable2 )
should.exist( table2 )
done()
return
it "insert #{ _D1.inserts.length } items to range list of table 1", ( done )->
aFns = []
for insert in _D1.inserts
_throtteldSet = _.throttle( table1.set, 250 )
aFns.push _.bind( ( insert, cba )->
tbl = @
_throtteldSet _.clone( insert ), ( err, item )->
throw err if err
if tbl.isCombinedTable
item.id.should.equal( tbl.name + tbl.combinedHashDelimiter + insert.user )
else
item.id.should.equal( insert.user )
item.t.should.equal( insert.t )
item.user.should.equal( insert.user )
item.title.should.equal( insert.title )
_ItemCount1++
_G1.push( item )
cba( item )
, table1, insert )
_utils.runSeries aFns, ( err )->
done()
it "insert #{ _D2.inserts.length } items to range list of table 2", ( done )->
aFns = []
for insert in _D2.inserts
_throtteldSet = _.throttle( table2.set, 250 )
aFns.push _.bind( ( insert, cba )->
tbl = @
_throtteldSet _.clone( insert ), ( err, item )->
throw err if err
if tbl.isCombinedTable
item.id.should.equal( tbl.name + tbl.combinedHashDelimiter + insert.user )
else
item.id.should.equal( insert.user)
item.t.should.equal( insert.t )
item.user.should.equal( insert.user )
item.title.should.equal( insert.title )
_ItemCount2++
_G2.push( item )
cba( item )
, table2, insert )
_utils.runSeries aFns, ( err )->
done()
it "try to get two items at once (mget)", ( done )->
table1.mget [ [ _G1[ 1 ][ _C1.hashKey ],_G1[ 1 ][ _C1.rangeKey ] ] , [ _G1[ 5 ][ _C1.hashKey ],_G1[ 5 ][ _C1.rangeKey ] ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G1[ 1 ], _G1[ 5 ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "try to get two items plus a unkown at once (mget)", ( done )->
table2.mget [ [ _G2[ 1 ][ _C2.hashKey ],_G2[ 1 ][ _C2.rangeKey ] ] , [ _G2[ 5 ][ _C2.hashKey ],_G2[ 5 ][ _C2.rangeKey ] ], [ _G2[ 3 ][ _C2.hashKey ], 999 ] ], ( err, items )->
throw err if err
items.should.have.length( 2 )
aPred = [ _G2[ 1 ], _G2[ 5 ] ]
for item in items
aPred.should.containEql( item )
done()
return
return
it "get a range of table 1", ( done )->
if _logTable1.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C1.name }A" }
t: { ">=": 5 }
else
_q =
id: { "==": "A" }
t: { ">=": 5 }
table1.find _q, ( err, items )->
throw err if err
items.length.should.equal( 3 )
done()
it "get a range of table 2", ( done )->
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }D" }
t: { ">=": 3 }
else
_q =
id: { "==": "D" }
t: { ">=": 3 }
table2.find _q, ( err, items )->
throw err if err
items.length.should.equal( 1 )
done()
it "get a single item of table 1", ( done )->
_item = _G1[ 4 ]
table1.get [ _item.id, _item.t ], ( err, item )->
throw err if err
item.should.eql( _item )
done()
it "should return only 3 items", (done) ->
_count = 3
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }A" }
t: { ">=": 0 }
else
_q =
id: { "==": "A" }
t: { ">=": 0 }
_o =
limit: _count
table2.find _q, _o, ( err, items )->
throw err if err
should.exist items
items.length.should.equal _count
last = items[_count - 1]
pre_last = items[_count - 2]
done()
it "should return the next 3 by `startAt`", (done) ->
_count = 3
if _logTable2.slice( 0,2 ) is "C_"
_q =
id: { "==": "#{ _C2.name }A" }
t: { ">=": 0 }
else
_q =
id: { "==": "A" }
t: { ">=": 0 }
_o =
limit: _count
_c = [ pre_last.id, pre_last.t ]
table2.find _q, _c, _o, ( err, items )->
throw err if err
predicted_first = items[0]
predicted_first.should.eql last
items.length.should.equal _count
last = items[_count - 1]
pre_last = items[_count - 2]
done()
it "delete whole data from table 1", ( done )->
aFns = []
for item in _G1
_throtteldDel = _.throttle( table1.del, 250 )
aFns.push _.bind( ( item, cba )->
_throtteldDel [ item.id, item.t ], ( err )->
throw err if err
_ItemCount1--
cba()
, table1, item )
_utils.runSeries aFns, ( err )->
done()
it "delete whole data from table 2", ( done )->
aFns = []
for item in _G2
_throtteldDel = _.throttle( table2.del, 250 )
aFns.push _.bind( ( item, cba )->
_throtteldDel [ item.id, item.t ], ( err )->
throw err if err
_ItemCount2--
cba()
, table2, item )
_utils.runSeries aFns, ( err )->
done()
it "check for empty table 1", ( done )->
_q = {}
table1.find _q, ( err, items )->
throw err if err
items.length.should.equal( _ItemCount1 )
done()
it "check for empty table 2", ( done )->
_q = {}
table2.find _q, ( err, items )->
throw err if err
items.length.should.equal( _ItemCount2 )
done()
describe "#{ testTitle } Set Tests", ->
_C = _CONFIG.tables[ _setTable ]
_D = _DATA[ _setTable ]
_G = {}
_ItemCount = 0
table = null
it "get table", ( done )->
table = dynDB.get( _setTable )
should.exist( table )
done()
return
it "create the test item", ( done )->
table.set _.clone( _D[ "insert1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a" ] )
_ItemCount++
_G[ "insert1" ] = item
done()
return
return
it "test raw reset", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update1" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a", "b" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "a", "b", "c" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $rem action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "b", "c" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $reset action", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update4" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "x", "y" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update5" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "x", "y", "z" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $rem action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update6" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y", "z" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $reset action with string", ( done )->
table.set _G[ "insert1" ].id, _.clone( _D[ "update7" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
it "test $add action with empty array", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update8" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "test $rem action with empty array", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update9" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
item.users.should.eql( [ "y" ] )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "update set to null should remove attribute", ( done )->
_.delay( =>
table.set _G[ "insert1" ].id, _.clone( _D[ "update10" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert1" ].name )
_G[ "insert1" ] = item
done()
return
return
, 250 )
return
it "create the test item2 with empty array as set", ( done )->
_.delay( =>
table.set _.clone( _D[ "insert2" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert2" ].name )
_ItemCount++
_G[ "insert2" ] = item
done()
return
return
, 250 )
return
it "create the test item3 with empty array as set", ( done )->
_.delay( =>
table.set _.clone( _D[ "insert3" ] ), ( err, item )->
throw err if err
should.exist( item.id )
should.exist( item.name )
should.not.exist( item.users )
item.name.should.equal( _D[ "insert3" ].name )
_ItemCount++
_G[ "insert3" ] = item
done()
return
return
, 250 )
return
it "delete test item. ( Has delay of 250ms to prevent from throughput error )", ( done )->
_.delay( =>
table.del _G[ "insert1" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
it "delete test item 2", ( done )->
_.delay( =>
table.del _G[ "insert2" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
it "delete test item 3", ( done )->
_.delay( =>
table.del _G[ "insert3" ].id, ( err )->
throw err if err
_ItemCount--
done()
return
return
, 250 )
return
return
|
[
{
"context": "###\nContent: tabs de maps.\n@autor Ronny Cabrera\n###\nyOSON.AppCore.addModule \"getMap\", (Sb) ->\n\tde",
"end": 47,
"score": 0.9998655319213867,
"start": 34,
"tag": "NAME",
"value": "Ronny Cabrera"
},
{
"context": "s.googleapis.com/maps/api/js?libraries=places&key=AIzaSyAi47sP6N9N9vcIQN-CvXvBZKo9ndlvzAU\"]\n",
"end": 1604,
"score": 0.9996458888053894,
"start": 1565,
"tag": "KEY",
"value": "AIzaSyAi47sP6N9N9vcIQN-CvXvBZKo9ndlvzAU"
}
] | frontend/resources/coffee/modules/inicio/inicio/getMap.coffee | ronnyfly2/openvios | 0 | ###
Content: tabs de maps.
@autor Ronny Cabrera
###
yOSON.AppCore.addModule "getMap", (Sb) ->
defaults =
btnTab : '.row_contact .list li'
zoomMap : 17
colorMapSilver : '#C0C0C0'
saturationMap : -98
lightnessMap : 2
st = {}
dom = {}
latGeo = -12.10419
longGeo = -76.939422
urlImgPin = '../img/pin.png'
catchDom = (st)->
dom.btnTab = $(st.btnTab)
return
suscribeEvents = ->
dom.btnTab.on "click", events.getMap
if $('.row_contact').length > 0
events.getMapBox()
return
events =
getMap:(e)->
ele = $(this)
dom.btnTab.removeClass('actived')
ele.addClass('actived')
if ele != undefined
latGeo = ele.data('lat')
longGeo = ele.data('long')
urlImgPin = ele.data('ping')
events.getMapBox()
else
console.log 'no hiciste nada'
return
getMapBox:(map)->
map = new google.maps.Map $("#map")[0],{
zoom: st.zoomMap
center: {
lat: latGeo
lng: longGeo
}
styles: [{
stylers: [{
hue: st.colorSilver
}
{
saturation: st.saturationMap
}
{
lightness: st.lightnessMap
}
]
}]
}
events.getMarker(map)
return
getMarker:(map)->
locationMarker = new google.maps.Marker {
zoom: st.zoomMap
position: {
lat: latGeo
lng: longGeo
}
map : map
icon: urlImgPin
}
return
functions = {}
initialize = (opts) ->
st = $.extend({}, defaults, opts)
catchDom(st)
suscribeEvents()
return
return {
init: initialize
}
,["https://maps.googleapis.com/maps/api/js?libraries=places&key=AIzaSyAi47sP6N9N9vcIQN-CvXvBZKo9ndlvzAU"]
| 92288 | ###
Content: tabs de maps.
@autor <NAME>
###
yOSON.AppCore.addModule "getMap", (Sb) ->
defaults =
btnTab : '.row_contact .list li'
zoomMap : 17
colorMapSilver : '#C0C0C0'
saturationMap : -98
lightnessMap : 2
st = {}
dom = {}
latGeo = -12.10419
longGeo = -76.939422
urlImgPin = '../img/pin.png'
catchDom = (st)->
dom.btnTab = $(st.btnTab)
return
suscribeEvents = ->
dom.btnTab.on "click", events.getMap
if $('.row_contact').length > 0
events.getMapBox()
return
events =
getMap:(e)->
ele = $(this)
dom.btnTab.removeClass('actived')
ele.addClass('actived')
if ele != undefined
latGeo = ele.data('lat')
longGeo = ele.data('long')
urlImgPin = ele.data('ping')
events.getMapBox()
else
console.log 'no hiciste nada'
return
getMapBox:(map)->
map = new google.maps.Map $("#map")[0],{
zoom: st.zoomMap
center: {
lat: latGeo
lng: longGeo
}
styles: [{
stylers: [{
hue: st.colorSilver
}
{
saturation: st.saturationMap
}
{
lightness: st.lightnessMap
}
]
}]
}
events.getMarker(map)
return
getMarker:(map)->
locationMarker = new google.maps.Marker {
zoom: st.zoomMap
position: {
lat: latGeo
lng: longGeo
}
map : map
icon: urlImgPin
}
return
functions = {}
initialize = (opts) ->
st = $.extend({}, defaults, opts)
catchDom(st)
suscribeEvents()
return
return {
init: initialize
}
,["https://maps.googleapis.com/maps/api/js?libraries=places&key=<KEY>"]
| true | ###
Content: tabs de maps.
@autor PI:NAME:<NAME>END_PI
###
yOSON.AppCore.addModule "getMap", (Sb) ->
defaults =
btnTab : '.row_contact .list li'
zoomMap : 17
colorMapSilver : '#C0C0C0'
saturationMap : -98
lightnessMap : 2
st = {}
dom = {}
latGeo = -12.10419
longGeo = -76.939422
urlImgPin = '../img/pin.png'
catchDom = (st)->
dom.btnTab = $(st.btnTab)
return
suscribeEvents = ->
dom.btnTab.on "click", events.getMap
if $('.row_contact').length > 0
events.getMapBox()
return
events =
getMap:(e)->
ele = $(this)
dom.btnTab.removeClass('actived')
ele.addClass('actived')
if ele != undefined
latGeo = ele.data('lat')
longGeo = ele.data('long')
urlImgPin = ele.data('ping')
events.getMapBox()
else
console.log 'no hiciste nada'
return
getMapBox:(map)->
map = new google.maps.Map $("#map")[0],{
zoom: st.zoomMap
center: {
lat: latGeo
lng: longGeo
}
styles: [{
stylers: [{
hue: st.colorSilver
}
{
saturation: st.saturationMap
}
{
lightness: st.lightnessMap
}
]
}]
}
events.getMarker(map)
return
getMarker:(map)->
locationMarker = new google.maps.Marker {
zoom: st.zoomMap
position: {
lat: latGeo
lng: longGeo
}
map : map
icon: urlImgPin
}
return
functions = {}
initialize = (opts) ->
st = $.extend({}, defaults, opts)
catchDom(st)
suscribeEvents()
return
return {
init: initialize
}
,["https://maps.googleapis.com/maps/api/js?libraries=places&key=PI:KEY:<KEY>END_PI"]
|
[
{
"context": "js\n\n PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.",
"end": 197,
"score": 0.9998756647109985,
"start": 180,
"tag": "NAME",
"value": "Benjamin Blundell"
},
{
"context": " PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.com\n\nThis softwa",
"end": 213,
"score": 0.9999307990074158,
"start": 200,
"tag": "EMAIL",
"value": "ben@pxljs.com"
}
] | test/mathTest.coffee | OniDaito/pxljs | 1 | ### ABOUT
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
Benjamin Blundell - ben@pxljs.com
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
Maths Tests
http://net.tutsplus.com/tutorials/javascript-ajax/better-coffeescript-testing-with-mocha/
http://visionmedia.github.com/mocha/
###
chai = require 'chai'
chai.should()
PXLMath = require '../src/math/math'
PXLCamera = require '../src/camera/camera'
PXLCurve = require '../src/math/curve'
PXLUberPath = require '../src/gl/uber_shader_paths'
describe 'Maths tests: ', ->
q = new PXLMath.Quaternion()
it 'Quaternion should not be undefined', ->
q.should.not.equal undefined
it 'Quaternion should not affect a matrix', ->
m = new PXLMath.Matrix4()
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(0))
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1])
it 'Quaternion should create a 90 degree rotation matrix and rotate back from axis angle', ->
m = new PXLMath.Matrix4()
m.translate(new PXLMath.Vec3(0,0,1))
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(90))
m.mult(q.getMatrix4())
theta = PXLMath.degToRad(90)
r = [Math.cos(theta),0,-Math.sin(theta),0, 0,1,0,0, Math.sin(theta),0, Math.cos(theta),0,0,0,1,1]
EPSILON = 1.0e-6
for n in [0..15]
a = m.a[n]
b = r[n]
(Math.abs(Math.abs(a) - Math.abs(b)) < EPSILON).should.be.true
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(-90))
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Quaternion 90 Degree should transform (2,0,0) to (0,0,-2)', ->
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(90))
v = new PXLMath.Vec3(2,0,0)
theta = PXLMath.degToRad(90)
q.transVec3(v)
EPSILON = 1.0e-6
a = [v.x,v.y,v.z]
b = [0,0,-2]
for n in [0..2]
c = a[n]
d = b[n]
(Math.abs(Math.abs(c) - Math.abs(d)) < EPSILON).should.be.true
it 'Quaternion should create a 90 degree rotation matrix and rotate back from 3 euler angles', ->
m = new PXLMath.Matrix4()
m.translate(new PXLMath.Vec3(0,0,1))
q.fromRotations 0,PXLMath.degToRad(90),0
m.mult(q.getMatrix4())
theta = PXLMath.degToRad(90)
r = [Math.cos(theta),0,-Math.sin(theta),0, 0,1,0,0, Math.sin(theta),0, Math.cos(theta),0,0,0,1,1]
EPSILON = 1.0e-6
for n in [0..15]
a = m.a[n]
b = r[n]
(Math.abs(Math.abs(a) - Math.abs(b)) < EPSILON).should.be.true
q.fromRotations 0,PXLMath.degToRad(-90),0
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Two Quaternions multiplied together should cancel out if they are opposite', ->
m = new PXLMath.Matrix4()
qa = new PXLMath.Quaternion()
qb = new PXLMath.Quaternion()
m.translate(new PXLMath.Vec3(0,0,1))
qa.fromRotations 0,PXLMath.degToRad(90),0
qb.fromRotations 0,PXLMath.degToRad(-90),0
qa.mult(qb).normalize()
m.mult(qa.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Quaternion should invert properly', ->
qa = new PXLMath.Quaternion()
qb = new PXLMath.Quaternion()
qa.fromRotations 0,PXLMath.degToRad(90),0
qb.fromRotations 0,PXLMath.degToRad(-90),0
qc = PXLMath.Quaternion.invert(qa)
(qb.x == qc.x && qb.y == qc.y && qb.z == qc.z && qb.w == qc.w).should.be.true
it 'Quaternion should rotate a vector properly', ->
qa = new PXLMath.Quaternion()
qa.fromRotations 0,PXLMath.degToRad(90),0
va = new PXLMath.Vec3(0,0,-1)
qa.transVec3 va
round = (n) ->
Math.round(n * 100) / 100
(round(va.x) == 1 && round(va.y) == 0 && round(va.z) == 0).should.be.true
it 'Matrix4x4 should be identity', ->
m = new PXLMath.Matrix4()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1])
it 'Matrix4x4 should inverse properly', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = PXLMath.Matrix4.invert(m)
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0, 0,1,0,0, 0,0,1,0, -2,-2,-2,1])
it 'Matrix4x4 should transpose properly', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = PXLMath.Matrix4.transpose(m)
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,2,0,1,0,2,0,0,1,2,0,0,0,1])
it 'Matrix4x4 should return a proper matrix3x3', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = m.getMatrix3()
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,1,0,0,0,1])
it 'Matrix3x3 should be identity', ->
m = new PXLMath.Matrix3()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,1,0,0,0,1])
it 'Matrix3x3 should inverse properly', ->
m = new PXLMath.Matrix3([1,0,5,2,1,6,3,4,0])
m.invert()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[-24,20,-5,18,-15,4,5,-4,1])
it 'Matrix3x3 should transpose properly', ->
m = new PXLMath.Matrix3([1,0,5,2,1,6,3,4,0])
m.transpose()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,2,3,0,1,4,5,6,0])
it 'Matrix3x3 should rotate properly', ->
m = new PXLMath.Matrix3()
m.rotate new PXLMath.Vec3(0,1,0), PXLMath.degToRad 90
n = new PXLMath.Vec3(1,0,0)
m.multVec n
r = n.flatten()
q = [0,0,-1]
EPSILON = 1.0e-6
for n in [0..2]
a = r[n]
b = q[n]
(a-b < EPSILON).should.be.true
it 'CatmullRomSpline of 4 points should interpolate nicely', ->
p0 = new PXLMath.Vec2(0,0)
p1 = new PXLMath.Vec2(1,0)
p2 = new PXLMath.Vec2(2,0)
p3 = new PXLMath.Vec2(3,0)
c = new PXLCurve.CatmullRomSpline([p0,p1,p2,p3] )
a = c.pointOnCurve(0.5)
b = new PXLMath.Vec2(1.5,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(0.0)
b = new PXLMath.Vec2(1.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(1.0)
b = new PXLMath.Vec2(2.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
it 'CatmullRomSpline of 5 points should interpolate nicely', ->
p0 = new PXLMath.Vec2(0,0)
p1 = new PXLMath.Vec2(1,0)
p2 = new PXLMath.Vec2(2,0)
p3 = new PXLMath.Vec2(3,0)
p4 = new PXLMath.Vec2(4,0)
c = new PXLCurve.CatmullRomSpline([p0,p1,p2,p3,p4] )
a = c.pointOnCurve(0.5)
b = new PXLMath.Vec2(2.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(0.0)
b = new PXLMath.Vec2(1.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(1.0)
b = new PXLMath.Vec2(3.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
###
it 'y value for a parabola with abitrary directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,1),-0.5,0.5,0)
y = p0.sample(0.0)
EPSILON = 1.0e-6
(Math.abs(y[0] - 0.5857864376) < EPSILON).should.be.true
(Math.abs(y[1] - 3.4142135623) < EPSILON).should.be.true
y = p0.sample(-2.5)
(Math.abs(y[0] - 1.035898384) < EPSILON).should.be.true
(Math.abs(y[1] - 7.964101615) < EPSILON).should.be.true
it 'y value for a parabola with an X axis directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),0,1.0,0.0)
y = p0.sample(0.0)
EPSILON = 1.0e-6
(Math.abs(y[0] - 1.0) < EPSILON).should.be.true
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),0,1.0,1.0)
y = p0.sample(0.0)
(Math.abs(y[0] - 1.5) < EPSILON).should.be.true
it 'y value for a parabola with an Y axis directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,0),1.0,0.0,0.0)
y = p0.sample(1.1)
EPSILON = 1.0e-6
(Math.abs(y[0] - 0.6324555320336761) < EPSILON).should.be.true
(Math.abs(y[1] + 0.6324555320336761) < EPSILON).should.be.true
#p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,0),1.0,0.0,1.0)
#y = p0.sample(0.0)
#console.log y
#(Math.abs(y[0] + 1.5) < EPSILON).should.be.true
#(Math.abs(y[1] - 1.5) < EPSILON).should.be.true
it 'Crossing point for a parabola', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,2),1.0,1.0,0.0)
[s0,s1] = p0.lineCrossing -1,-1,5
EPSILON = 1.0e-6
(Math.abs(s0.x - 0.05051025721682212) < EPSILON).should.be.true
(Math.abs(s0.y - 4.949489742783178) < EPSILON).should.be.true
(Math.abs(s1.x - 4.949489742783178) < EPSILON).should.be.true
(Math.abs(s1.y - 0.05051025721682212) < EPSILON).should.be.true
p1 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),3.0,-2.0,-1.0)
[s0,s1] = p1.lineCrossing -1,-1,5
(Math.abs(s0.x + 32.1245154965971) < EPSILON).should.be.true
(Math.abs(s0.y - 37.1245154965971) < EPSILON).should.be.true
(Math.abs(s1.x - 0.12451549659709826) < EPSILON).should.be.true
(Math.abs(s1.y - 4.875484503402902) < EPSILON).should.be.true
###
it 'Catmull Patch', ->
p0 = new PXLMath.Vec3(0,0,0)
p1 = new PXLMath.Vec3(0,0,1)
p2 = new PXLMath.Vec3(0,0,2)
p3 = new PXLMath.Vec3(0,0,3)
p4 = new PXLMath.Vec3(2,0,0)
p5 = new PXLMath.Vec3(2,1,1)
p6 = new PXLMath.Vec3(2,1,2)
p7 = new PXLMath.Vec3(2,0,3)
p8 = new PXLMath.Vec3(4,0,0)
p9 = new PXLMath.Vec3(4,1,1)
p10 = new PXLMath.Vec3(4,1,2)
p11 = new PXLMath.Vec3(4,0,3)
p12 = new PXLMath.Vec3(8,0,0)
p13 = new PXLMath.Vec3(8,0,1)
p14 = new PXLMath.Vec3(8,0,2)
p15 = new PXLMath.Vec3(8,0,3)
points = [p0,p1,p2,p3,p4,p5,p6,p6,p8,p9,p10,p11,p12,p13,p14,p15]
cm = new PXLCurve.CatmullPatch(points)
EPSILON = 1.0e-6
tp = cm.sample( new PXLMath.Vec2(0.0, 0.0) )
(Math.abs(tp.x - 2) < EPSILON).should.be.true
(Math.abs(tp.y - 1) < EPSILON).should.be.true
(Math.abs(tp.z - 1) < EPSILON).should.be.true
tp = cm.sample( new PXLMath.Vec2(0.5, 0.25) )
(Math.abs(tp.x - 2.453125) < EPSILON).should.be.true
(Math.abs(tp.y - 1.17626953125) < EPSILON).should.be.true
(Math.abs(tp.z - 1.55419921875) < EPSILON).should.be.true
tp = cm.sample( new PXLMath.Vec2(1.0, 1.0) )
(Math.abs(tp.x - 4) < EPSILON).should.be.true
(Math.abs(tp.y - 1) < EPSILON).should.be.true
(Math.abs(tp.z - 2) < EPSILON).should.be.true
it 'Matrix Project and Un-Project', ->
c = new PXLCamera.PerspCamera new PXLMath.Vec3 0,0,10
c.m.lookAt c.pos, c.look, c.up
c.p.makePerspective(50, 640 / 480, 0.1, 100.0 )
tp = new PXLMath.Vec4 1,1,1,1
tm = PXLMath.Matrix4.mult c.p, c.m
tm.multVec(tp)
tt = new PXLMath.Vec4 tp.x / tp.w, tp.y / tp.w, tp.z / tp.w, 1
#console.log tt
it 'Ray Cast', ->
EPSILON = 1.0e-6
c = new PXLCamera.PerspCamera new PXLMath.Vec3(0,0,4.0),
new PXLMath.Vec3(0,0,0), new PXLMath.Vec3(0,1,0), 55.0, 0.1, 10.0
c.update 640,480
tt = c.castRay 320,240,640,480
(Math.abs(tt.x ) < EPSILON).should.be.true
(Math.abs(tt.y ) < EPSILON).should.be.true
(Math.abs(tt.z + 1) < EPSILON).should.be.true
c = new PXLCamera.PerspCamera new PXLMath.Vec3(0,0,-4.0),
new PXLMath.Vec3(0,0,0), new PXLMath.Vec3(0,1,0), 55.0, 0.1, 10.0
c.update 640,480
#tt = c.castRay 640,480,640,480
#console.log (tt)
#(Math.abs(tt.x + 0.5242704794481596) < EPSILON).should.be.true
#(Math.abs(tt.y + 0.3932028797167508) < EPSILON).should.be.true
#(Math.abs(tt.z - 0.7553356603270172) < EPSILON).should.be.true
it 'bit operation on uber shader path', ->
(PXLUberPath.uber_clear_material(2) == 2).should.be.true
| 53704 | ### ABOUT
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
<NAME> - <EMAIL>
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
Maths Tests
http://net.tutsplus.com/tutorials/javascript-ajax/better-coffeescript-testing-with-mocha/
http://visionmedia.github.com/mocha/
###
chai = require 'chai'
chai.should()
PXLMath = require '../src/math/math'
PXLCamera = require '../src/camera/camera'
PXLCurve = require '../src/math/curve'
PXLUberPath = require '../src/gl/uber_shader_paths'
describe 'Maths tests: ', ->
q = new PXLMath.Quaternion()
it 'Quaternion should not be undefined', ->
q.should.not.equal undefined
it 'Quaternion should not affect a matrix', ->
m = new PXLMath.Matrix4()
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(0))
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1])
it 'Quaternion should create a 90 degree rotation matrix and rotate back from axis angle', ->
m = new PXLMath.Matrix4()
m.translate(new PXLMath.Vec3(0,0,1))
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(90))
m.mult(q.getMatrix4())
theta = PXLMath.degToRad(90)
r = [Math.cos(theta),0,-Math.sin(theta),0, 0,1,0,0, Math.sin(theta),0, Math.cos(theta),0,0,0,1,1]
EPSILON = 1.0e-6
for n in [0..15]
a = m.a[n]
b = r[n]
(Math.abs(Math.abs(a) - Math.abs(b)) < EPSILON).should.be.true
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(-90))
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Quaternion 90 Degree should transform (2,0,0) to (0,0,-2)', ->
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(90))
v = new PXLMath.Vec3(2,0,0)
theta = PXLMath.degToRad(90)
q.transVec3(v)
EPSILON = 1.0e-6
a = [v.x,v.y,v.z]
b = [0,0,-2]
for n in [0..2]
c = a[n]
d = b[n]
(Math.abs(Math.abs(c) - Math.abs(d)) < EPSILON).should.be.true
it 'Quaternion should create a 90 degree rotation matrix and rotate back from 3 euler angles', ->
m = new PXLMath.Matrix4()
m.translate(new PXLMath.Vec3(0,0,1))
q.fromRotations 0,PXLMath.degToRad(90),0
m.mult(q.getMatrix4())
theta = PXLMath.degToRad(90)
r = [Math.cos(theta),0,-Math.sin(theta),0, 0,1,0,0, Math.sin(theta),0, Math.cos(theta),0,0,0,1,1]
EPSILON = 1.0e-6
for n in [0..15]
a = m.a[n]
b = r[n]
(Math.abs(Math.abs(a) - Math.abs(b)) < EPSILON).should.be.true
q.fromRotations 0,PXLMath.degToRad(-90),0
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Two Quaternions multiplied together should cancel out if they are opposite', ->
m = new PXLMath.Matrix4()
qa = new PXLMath.Quaternion()
qb = new PXLMath.Quaternion()
m.translate(new PXLMath.Vec3(0,0,1))
qa.fromRotations 0,PXLMath.degToRad(90),0
qb.fromRotations 0,PXLMath.degToRad(-90),0
qa.mult(qb).normalize()
m.mult(qa.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Quaternion should invert properly', ->
qa = new PXLMath.Quaternion()
qb = new PXLMath.Quaternion()
qa.fromRotations 0,PXLMath.degToRad(90),0
qb.fromRotations 0,PXLMath.degToRad(-90),0
qc = PXLMath.Quaternion.invert(qa)
(qb.x == qc.x && qb.y == qc.y && qb.z == qc.z && qb.w == qc.w).should.be.true
it 'Quaternion should rotate a vector properly', ->
qa = new PXLMath.Quaternion()
qa.fromRotations 0,PXLMath.degToRad(90),0
va = new PXLMath.Vec3(0,0,-1)
qa.transVec3 va
round = (n) ->
Math.round(n * 100) / 100
(round(va.x) == 1 && round(va.y) == 0 && round(va.z) == 0).should.be.true
it 'Matrix4x4 should be identity', ->
m = new PXLMath.Matrix4()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1])
it 'Matrix4x4 should inverse properly', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = PXLMath.Matrix4.invert(m)
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0, 0,1,0,0, 0,0,1,0, -2,-2,-2,1])
it 'Matrix4x4 should transpose properly', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = PXLMath.Matrix4.transpose(m)
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,2,0,1,0,2,0,0,1,2,0,0,0,1])
it 'Matrix4x4 should return a proper matrix3x3', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = m.getMatrix3()
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,1,0,0,0,1])
it 'Matrix3x3 should be identity', ->
m = new PXLMath.Matrix3()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,1,0,0,0,1])
it 'Matrix3x3 should inverse properly', ->
m = new PXLMath.Matrix3([1,0,5,2,1,6,3,4,0])
m.invert()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[-24,20,-5,18,-15,4,5,-4,1])
it 'Matrix3x3 should transpose properly', ->
m = new PXLMath.Matrix3([1,0,5,2,1,6,3,4,0])
m.transpose()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,2,3,0,1,4,5,6,0])
it 'Matrix3x3 should rotate properly', ->
m = new PXLMath.Matrix3()
m.rotate new PXLMath.Vec3(0,1,0), PXLMath.degToRad 90
n = new PXLMath.Vec3(1,0,0)
m.multVec n
r = n.flatten()
q = [0,0,-1]
EPSILON = 1.0e-6
for n in [0..2]
a = r[n]
b = q[n]
(a-b < EPSILON).should.be.true
it 'CatmullRomSpline of 4 points should interpolate nicely', ->
p0 = new PXLMath.Vec2(0,0)
p1 = new PXLMath.Vec2(1,0)
p2 = new PXLMath.Vec2(2,0)
p3 = new PXLMath.Vec2(3,0)
c = new PXLCurve.CatmullRomSpline([p0,p1,p2,p3] )
a = c.pointOnCurve(0.5)
b = new PXLMath.Vec2(1.5,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(0.0)
b = new PXLMath.Vec2(1.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(1.0)
b = new PXLMath.Vec2(2.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
it 'CatmullRomSpline of 5 points should interpolate nicely', ->
p0 = new PXLMath.Vec2(0,0)
p1 = new PXLMath.Vec2(1,0)
p2 = new PXLMath.Vec2(2,0)
p3 = new PXLMath.Vec2(3,0)
p4 = new PXLMath.Vec2(4,0)
c = new PXLCurve.CatmullRomSpline([p0,p1,p2,p3,p4] )
a = c.pointOnCurve(0.5)
b = new PXLMath.Vec2(2.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(0.0)
b = new PXLMath.Vec2(1.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(1.0)
b = new PXLMath.Vec2(3.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
###
it 'y value for a parabola with abitrary directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,1),-0.5,0.5,0)
y = p0.sample(0.0)
EPSILON = 1.0e-6
(Math.abs(y[0] - 0.5857864376) < EPSILON).should.be.true
(Math.abs(y[1] - 3.4142135623) < EPSILON).should.be.true
y = p0.sample(-2.5)
(Math.abs(y[0] - 1.035898384) < EPSILON).should.be.true
(Math.abs(y[1] - 7.964101615) < EPSILON).should.be.true
it 'y value for a parabola with an X axis directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),0,1.0,0.0)
y = p0.sample(0.0)
EPSILON = 1.0e-6
(Math.abs(y[0] - 1.0) < EPSILON).should.be.true
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),0,1.0,1.0)
y = p0.sample(0.0)
(Math.abs(y[0] - 1.5) < EPSILON).should.be.true
it 'y value for a parabola with an Y axis directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,0),1.0,0.0,0.0)
y = p0.sample(1.1)
EPSILON = 1.0e-6
(Math.abs(y[0] - 0.6324555320336761) < EPSILON).should.be.true
(Math.abs(y[1] + 0.6324555320336761) < EPSILON).should.be.true
#p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,0),1.0,0.0,1.0)
#y = p0.sample(0.0)
#console.log y
#(Math.abs(y[0] + 1.5) < EPSILON).should.be.true
#(Math.abs(y[1] - 1.5) < EPSILON).should.be.true
it 'Crossing point for a parabola', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,2),1.0,1.0,0.0)
[s0,s1] = p0.lineCrossing -1,-1,5
EPSILON = 1.0e-6
(Math.abs(s0.x - 0.05051025721682212) < EPSILON).should.be.true
(Math.abs(s0.y - 4.949489742783178) < EPSILON).should.be.true
(Math.abs(s1.x - 4.949489742783178) < EPSILON).should.be.true
(Math.abs(s1.y - 0.05051025721682212) < EPSILON).should.be.true
p1 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),3.0,-2.0,-1.0)
[s0,s1] = p1.lineCrossing -1,-1,5
(Math.abs(s0.x + 32.1245154965971) < EPSILON).should.be.true
(Math.abs(s0.y - 37.1245154965971) < EPSILON).should.be.true
(Math.abs(s1.x - 0.12451549659709826) < EPSILON).should.be.true
(Math.abs(s1.y - 4.875484503402902) < EPSILON).should.be.true
###
it 'Catmull Patch', ->
p0 = new PXLMath.Vec3(0,0,0)
p1 = new PXLMath.Vec3(0,0,1)
p2 = new PXLMath.Vec3(0,0,2)
p3 = new PXLMath.Vec3(0,0,3)
p4 = new PXLMath.Vec3(2,0,0)
p5 = new PXLMath.Vec3(2,1,1)
p6 = new PXLMath.Vec3(2,1,2)
p7 = new PXLMath.Vec3(2,0,3)
p8 = new PXLMath.Vec3(4,0,0)
p9 = new PXLMath.Vec3(4,1,1)
p10 = new PXLMath.Vec3(4,1,2)
p11 = new PXLMath.Vec3(4,0,3)
p12 = new PXLMath.Vec3(8,0,0)
p13 = new PXLMath.Vec3(8,0,1)
p14 = new PXLMath.Vec3(8,0,2)
p15 = new PXLMath.Vec3(8,0,3)
points = [p0,p1,p2,p3,p4,p5,p6,p6,p8,p9,p10,p11,p12,p13,p14,p15]
cm = new PXLCurve.CatmullPatch(points)
EPSILON = 1.0e-6
tp = cm.sample( new PXLMath.Vec2(0.0, 0.0) )
(Math.abs(tp.x - 2) < EPSILON).should.be.true
(Math.abs(tp.y - 1) < EPSILON).should.be.true
(Math.abs(tp.z - 1) < EPSILON).should.be.true
tp = cm.sample( new PXLMath.Vec2(0.5, 0.25) )
(Math.abs(tp.x - 2.453125) < EPSILON).should.be.true
(Math.abs(tp.y - 1.17626953125) < EPSILON).should.be.true
(Math.abs(tp.z - 1.55419921875) < EPSILON).should.be.true
tp = cm.sample( new PXLMath.Vec2(1.0, 1.0) )
(Math.abs(tp.x - 4) < EPSILON).should.be.true
(Math.abs(tp.y - 1) < EPSILON).should.be.true
(Math.abs(tp.z - 2) < EPSILON).should.be.true
it 'Matrix Project and Un-Project', ->
c = new PXLCamera.PerspCamera new PXLMath.Vec3 0,0,10
c.m.lookAt c.pos, c.look, c.up
c.p.makePerspective(50, 640 / 480, 0.1, 100.0 )
tp = new PXLMath.Vec4 1,1,1,1
tm = PXLMath.Matrix4.mult c.p, c.m
tm.multVec(tp)
tt = new PXLMath.Vec4 tp.x / tp.w, tp.y / tp.w, tp.z / tp.w, 1
#console.log tt
it 'Ray Cast', ->
EPSILON = 1.0e-6
c = new PXLCamera.PerspCamera new PXLMath.Vec3(0,0,4.0),
new PXLMath.Vec3(0,0,0), new PXLMath.Vec3(0,1,0), 55.0, 0.1, 10.0
c.update 640,480
tt = c.castRay 320,240,640,480
(Math.abs(tt.x ) < EPSILON).should.be.true
(Math.abs(tt.y ) < EPSILON).should.be.true
(Math.abs(tt.z + 1) < EPSILON).should.be.true
c = new PXLCamera.PerspCamera new PXLMath.Vec3(0,0,-4.0),
new PXLMath.Vec3(0,0,0), new PXLMath.Vec3(0,1,0), 55.0, 0.1, 10.0
c.update 640,480
#tt = c.castRay 640,480,640,480
#console.log (tt)
#(Math.abs(tt.x + 0.5242704794481596) < EPSILON).should.be.true
#(Math.abs(tt.y + 0.3932028797167508) < EPSILON).should.be.true
#(Math.abs(tt.z - 0.7553356603270172) < EPSILON).should.be.true
it 'bit operation on uber shader path', ->
(PXLUberPath.uber_clear_material(2) == 2).should.be.true
| true | ### ABOUT
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
PI:NAME:<NAME>END_PI - PI:EMAIL:<EMAIL>END_PI
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
Maths Tests
http://net.tutsplus.com/tutorials/javascript-ajax/better-coffeescript-testing-with-mocha/
http://visionmedia.github.com/mocha/
###
chai = require 'chai'
chai.should()
PXLMath = require '../src/math/math'
PXLCamera = require '../src/camera/camera'
PXLCurve = require '../src/math/curve'
PXLUberPath = require '../src/gl/uber_shader_paths'
describe 'Maths tests: ', ->
q = new PXLMath.Quaternion()
it 'Quaternion should not be undefined', ->
q.should.not.equal undefined
it 'Quaternion should not affect a matrix', ->
m = new PXLMath.Matrix4()
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(0))
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1])
it 'Quaternion should create a 90 degree rotation matrix and rotate back from axis angle', ->
m = new PXLMath.Matrix4()
m.translate(new PXLMath.Vec3(0,0,1))
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(90))
m.mult(q.getMatrix4())
theta = PXLMath.degToRad(90)
r = [Math.cos(theta),0,-Math.sin(theta),0, 0,1,0,0, Math.sin(theta),0, Math.cos(theta),0,0,0,1,1]
EPSILON = 1.0e-6
for n in [0..15]
a = m.a[n]
b = r[n]
(Math.abs(Math.abs(a) - Math.abs(b)) < EPSILON).should.be.true
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(-90))
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Quaternion 90 Degree should transform (2,0,0) to (0,0,-2)', ->
q.fromAxisAngle(new PXLMath.Vec3(0,1,0),PXLMath.degToRad(90))
v = new PXLMath.Vec3(2,0,0)
theta = PXLMath.degToRad(90)
q.transVec3(v)
EPSILON = 1.0e-6
a = [v.x,v.y,v.z]
b = [0,0,-2]
for n in [0..2]
c = a[n]
d = b[n]
(Math.abs(Math.abs(c) - Math.abs(d)) < EPSILON).should.be.true
it 'Quaternion should create a 90 degree rotation matrix and rotate back from 3 euler angles', ->
m = new PXLMath.Matrix4()
m.translate(new PXLMath.Vec3(0,0,1))
q.fromRotations 0,PXLMath.degToRad(90),0
m.mult(q.getMatrix4())
theta = PXLMath.degToRad(90)
r = [Math.cos(theta),0,-Math.sin(theta),0, 0,1,0,0, Math.sin(theta),0, Math.cos(theta),0,0,0,1,1]
EPSILON = 1.0e-6
for n in [0..15]
a = m.a[n]
b = r[n]
(Math.abs(Math.abs(a) - Math.abs(b)) < EPSILON).should.be.true
q.fromRotations 0,PXLMath.degToRad(-90),0
m.mult(q.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Two Quaternions multiplied together should cancel out if they are opposite', ->
m = new PXLMath.Matrix4()
qa = new PXLMath.Quaternion()
qb = new PXLMath.Quaternion()
m.translate(new PXLMath.Vec3(0,0,1))
qa.fromRotations 0,PXLMath.degToRad(90),0
qb.fromRotations 0,PXLMath.degToRad(-90),0
qa.mult(qb).normalize()
m.mult(qa.getMatrix4())
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1])
it 'Quaternion should invert properly', ->
qa = new PXLMath.Quaternion()
qb = new PXLMath.Quaternion()
qa.fromRotations 0,PXLMath.degToRad(90),0
qb.fromRotations 0,PXLMath.degToRad(-90),0
qc = PXLMath.Quaternion.invert(qa)
(qb.x == qc.x && qb.y == qc.y && qb.z == qc.z && qb.w == qc.w).should.be.true
it 'Quaternion should rotate a vector properly', ->
qa = new PXLMath.Quaternion()
qa.fromRotations 0,PXLMath.degToRad(90),0
va = new PXLMath.Vec3(0,0,-1)
qa.transVec3 va
round = (n) ->
Math.round(n * 100) / 100
(round(va.x) == 1 && round(va.y) == 0 && round(va.z) == 0).should.be.true
it 'Matrix4x4 should be identity', ->
m = new PXLMath.Matrix4()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1])
it 'Matrix4x4 should inverse properly', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = PXLMath.Matrix4.invert(m)
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0, 0,1,0,0, 0,0,1,0, -2,-2,-2,1])
it 'Matrix4x4 should transpose properly', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = PXLMath.Matrix4.transpose(m)
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,2,0,1,0,2,0,0,1,2,0,0,0,1])
it 'Matrix4x4 should return a proper matrix3x3', ->
m = new PXLMath.Matrix4([1,0,0,0, 0,1,0,0, 0,0,1,0, 2,2,2,1])
i = m.getMatrix3()
b = []
for n in i.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,1,0,0,0,1])
it 'Matrix3x3 should be identity', ->
m = new PXLMath.Matrix3()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,0,0,0,1,0,0,0,1])
it 'Matrix3x3 should inverse properly', ->
m = new PXLMath.Matrix3([1,0,5,2,1,6,3,4,0])
m.invert()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[-24,20,-5,18,-15,4,5,-4,1])
it 'Matrix3x3 should transpose properly', ->
m = new PXLMath.Matrix3([1,0,5,2,1,6,3,4,0])
m.transpose()
b = []
for n in m.a
b.push n
chai.assert.deepEqual(b,[1,2,3,0,1,4,5,6,0])
it 'Matrix3x3 should rotate properly', ->
m = new PXLMath.Matrix3()
m.rotate new PXLMath.Vec3(0,1,0), PXLMath.degToRad 90
n = new PXLMath.Vec3(1,0,0)
m.multVec n
r = n.flatten()
q = [0,0,-1]
EPSILON = 1.0e-6
for n in [0..2]
a = r[n]
b = q[n]
(a-b < EPSILON).should.be.true
it 'CatmullRomSpline of 4 points should interpolate nicely', ->
p0 = new PXLMath.Vec2(0,0)
p1 = new PXLMath.Vec2(1,0)
p2 = new PXLMath.Vec2(2,0)
p3 = new PXLMath.Vec2(3,0)
c = new PXLCurve.CatmullRomSpline([p0,p1,p2,p3] )
a = c.pointOnCurve(0.5)
b = new PXLMath.Vec2(1.5,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(0.0)
b = new PXLMath.Vec2(1.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(1.0)
b = new PXLMath.Vec2(2.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
it 'CatmullRomSpline of 5 points should interpolate nicely', ->
p0 = new PXLMath.Vec2(0,0)
p1 = new PXLMath.Vec2(1,0)
p2 = new PXLMath.Vec2(2,0)
p3 = new PXLMath.Vec2(3,0)
p4 = new PXLMath.Vec2(4,0)
c = new PXLCurve.CatmullRomSpline([p0,p1,p2,p3,p4] )
a = c.pointOnCurve(0.5)
b = new PXLMath.Vec2(2.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(0.0)
b = new PXLMath.Vec2(1.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
a = c.pointOnCurve(1.0)
b = new PXLMath.Vec2(3.0,0)
EPSILON = 1.0e-6
(a.sub(b).length() < EPSILON).should.be.true
###
it 'y value for a parabola with abitrary directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,1),-0.5,0.5,0)
y = p0.sample(0.0)
EPSILON = 1.0e-6
(Math.abs(y[0] - 0.5857864376) < EPSILON).should.be.true
(Math.abs(y[1] - 3.4142135623) < EPSILON).should.be.true
y = p0.sample(-2.5)
(Math.abs(y[0] - 1.035898384) < EPSILON).should.be.true
(Math.abs(y[1] - 7.964101615) < EPSILON).should.be.true
it 'y value for a parabola with an X axis directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),0,1.0,0.0)
y = p0.sample(0.0)
EPSILON = 1.0e-6
(Math.abs(y[0] - 1.0) < EPSILON).should.be.true
p0 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),0,1.0,1.0)
y = p0.sample(0.0)
(Math.abs(y[0] - 1.5) < EPSILON).should.be.true
it 'y value for a parabola with an Y axis directrix', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,0),1.0,0.0,0.0)
y = p0.sample(1.1)
EPSILON = 1.0e-6
(Math.abs(y[0] - 0.6324555320336761) < EPSILON).should.be.true
(Math.abs(y[1] + 0.6324555320336761) < EPSILON).should.be.true
#p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,0),1.0,0.0,1.0)
#y = p0.sample(0.0)
#console.log y
#(Math.abs(y[0] + 1.5) < EPSILON).should.be.true
#(Math.abs(y[1] - 1.5) < EPSILON).should.be.true
it 'Crossing point for a parabola', ->
p0 = new PXLMath.Parabola(new PXLMath.Vec2(2,2),1.0,1.0,0.0)
[s0,s1] = p0.lineCrossing -1,-1,5
EPSILON = 1.0e-6
(Math.abs(s0.x - 0.05051025721682212) < EPSILON).should.be.true
(Math.abs(s0.y - 4.949489742783178) < EPSILON).should.be.true
(Math.abs(s1.x - 4.949489742783178) < EPSILON).should.be.true
(Math.abs(s1.y - 0.05051025721682212) < EPSILON).should.be.true
p1 = new PXLMath.Parabola(new PXLMath.Vec2(0,2),3.0,-2.0,-1.0)
[s0,s1] = p1.lineCrossing -1,-1,5
(Math.abs(s0.x + 32.1245154965971) < EPSILON).should.be.true
(Math.abs(s0.y - 37.1245154965971) < EPSILON).should.be.true
(Math.abs(s1.x - 0.12451549659709826) < EPSILON).should.be.true
(Math.abs(s1.y - 4.875484503402902) < EPSILON).should.be.true
###
it 'Catmull Patch', ->
p0 = new PXLMath.Vec3(0,0,0)
p1 = new PXLMath.Vec3(0,0,1)
p2 = new PXLMath.Vec3(0,0,2)
p3 = new PXLMath.Vec3(0,0,3)
p4 = new PXLMath.Vec3(2,0,0)
p5 = new PXLMath.Vec3(2,1,1)
p6 = new PXLMath.Vec3(2,1,2)
p7 = new PXLMath.Vec3(2,0,3)
p8 = new PXLMath.Vec3(4,0,0)
p9 = new PXLMath.Vec3(4,1,1)
p10 = new PXLMath.Vec3(4,1,2)
p11 = new PXLMath.Vec3(4,0,3)
p12 = new PXLMath.Vec3(8,0,0)
p13 = new PXLMath.Vec3(8,0,1)
p14 = new PXLMath.Vec3(8,0,2)
p15 = new PXLMath.Vec3(8,0,3)
points = [p0,p1,p2,p3,p4,p5,p6,p6,p8,p9,p10,p11,p12,p13,p14,p15]
cm = new PXLCurve.CatmullPatch(points)
EPSILON = 1.0e-6
tp = cm.sample( new PXLMath.Vec2(0.0, 0.0) )
(Math.abs(tp.x - 2) < EPSILON).should.be.true
(Math.abs(tp.y - 1) < EPSILON).should.be.true
(Math.abs(tp.z - 1) < EPSILON).should.be.true
tp = cm.sample( new PXLMath.Vec2(0.5, 0.25) )
(Math.abs(tp.x - 2.453125) < EPSILON).should.be.true
(Math.abs(tp.y - 1.17626953125) < EPSILON).should.be.true
(Math.abs(tp.z - 1.55419921875) < EPSILON).should.be.true
tp = cm.sample( new PXLMath.Vec2(1.0, 1.0) )
(Math.abs(tp.x - 4) < EPSILON).should.be.true
(Math.abs(tp.y - 1) < EPSILON).should.be.true
(Math.abs(tp.z - 2) < EPSILON).should.be.true
it 'Matrix Project and Un-Project', ->
c = new PXLCamera.PerspCamera new PXLMath.Vec3 0,0,10
c.m.lookAt c.pos, c.look, c.up
c.p.makePerspective(50, 640 / 480, 0.1, 100.0 )
tp = new PXLMath.Vec4 1,1,1,1
tm = PXLMath.Matrix4.mult c.p, c.m
tm.multVec(tp)
tt = new PXLMath.Vec4 tp.x / tp.w, tp.y / tp.w, tp.z / tp.w, 1
#console.log tt
it 'Ray Cast', ->
EPSILON = 1.0e-6
c = new PXLCamera.PerspCamera new PXLMath.Vec3(0,0,4.0),
new PXLMath.Vec3(0,0,0), new PXLMath.Vec3(0,1,0), 55.0, 0.1, 10.0
c.update 640,480
tt = c.castRay 320,240,640,480
(Math.abs(tt.x ) < EPSILON).should.be.true
(Math.abs(tt.y ) < EPSILON).should.be.true
(Math.abs(tt.z + 1) < EPSILON).should.be.true
c = new PXLCamera.PerspCamera new PXLMath.Vec3(0,0,-4.0),
new PXLMath.Vec3(0,0,0), new PXLMath.Vec3(0,1,0), 55.0, 0.1, 10.0
c.update 640,480
#tt = c.castRay 640,480,640,480
#console.log (tt)
#(Math.abs(tt.x + 0.5242704794481596) < EPSILON).should.be.true
#(Math.abs(tt.y + 0.3932028797167508) < EPSILON).should.be.true
#(Math.abs(tt.z - 0.7553356603270172) < EPSILON).should.be.true
it 'bit operation on uber shader path', ->
(PXLUberPath.uber_clear_material(2) == 2).should.be.true
|
[
{
"context": " it )\n it\n\n## for locally access\n\nauthors =\n 'zenithar':\n name: 'Thibault NORMAND'\n email: 'me@zen",
"end": 211,
"score": 0.9988731741905212,
"start": 203,
"tag": "USERNAME",
"value": "zenithar"
},
{
"context": "ocally access\n\nauthors =\n 'zenithar':\n name: 'Thibault NORMAND'\n email: 'me@zenithar.org'\n github: 'zenith",
"end": 241,
"score": 0.9998719096183777,
"start": 225,
"tag": "NAME",
"value": "Thibault NORMAND"
},
{
"context": "nithar':\n name: 'Thibault NORMAND'\n email: 'me@zenithar.org'\n github: 'zenithar'\n twitter: 'zenithar'\n ",
"end": 270,
"score": 0.9999297261238098,
"start": 255,
"tag": "EMAIL",
"value": "me@zenithar.org"
},
{
"context": "ORMAND'\n email: 'me@zenithar.org'\n github: 'zenithar'\n twitter: 'zenithar'\n gravata: '2694a5501e",
"end": 293,
"score": 0.998964786529541,
"start": 285,
"tag": "USERNAME",
"value": "zenithar"
},
{
"context": "enithar.org'\n github: 'zenithar'\n twitter: 'zenithar'\n gravata: '2694a5501ec37eab0c6d4bf98c30303a'\n",
"end": 317,
"score": 0.9992911219596863,
"start": 309,
"tag": "USERNAME",
"value": "zenithar"
},
{
"context": "r templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ\n\n templateData:\n\n # Spe",
"end": 700,
"score": 0.850388765335083,
"start": 695,
"tag": "USERNAME",
"value": "bevry"
},
{
"context": " # The website author's name\n author: \"Thibault NORMAND\"\n\n # The website author's email\n ",
"end": 1531,
"score": 0.9998734593391418,
"start": 1515,
"tag": "NAME",
"value": "Thibault NORMAND"
},
{
"context": " # The website author's email\n email: \"me@zenithar.org\"\n\n disqusShortName: \"zenithar\"\n\n ",
"end": 1610,
"score": 0.9999298453330994,
"start": 1595,
"tag": "EMAIL",
"value": "me@zenithar.org"
},
{
"context": " genAuthors: (name)->\n name = 'zenithar' unless name?\n\n names = name\n ",
"end": 2145,
"score": 0.9996376037597656,
"start": 2137,
"tag": "USERNAME",
"value": "zenithar"
},
{
"context": "mes) ->\n ret = []\n names = \"zenithar\" unless names?\n\n names = names.split '",
"end": 2621,
"score": 0.9997029304504395,
"start": 2613,
"tag": "USERNAME",
"value": "zenithar"
}
] | docpad.coffee | Zenithar/zenithar.docpad | 0 | moment = require 'moment'
_ = require 'underscore'
hl = require 'highlight.js'
moment.lang('fr')
to =
value: (it) ->
return it() if _.isFunction( it )
it
## for locally access
authors =
'zenithar':
name: 'Thibault NORMAND'
email: 'me@zenithar.org'
github: 'zenithar'
twitter: 'zenithar'
gravata: '2694a5501ec37eab0c6d4bf98c30303a'
# The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
# Specify some site properties
site:
# The production url of our website
url: "http://blog.zenithar.org"
# The default title of our website
title: "Il existe moins bien mais c'est plus cher !"
# The website description (for SEO)
description: """
Ancien ingénieur sécurité passionné, acteur dans le monde du logiciel 'libre'. Je suis en veille technologique permanente car toujours à la recherche du 'meilleur tournevis'.
"""
# The website keywords (for SEO) separated by commas
keywords: """
zenithar,toulouse,java,security,engineer
"""
# The website author's name
author: "Thibault NORMAND"
# The website author's email
email: "me@zenithar.org"
disqusShortName: "zenithar"
# helpers
helper:
formatDate: (date)->
moment( date ).format('YYYY MMM DD')
genTags: (tag)->
return '' if !tag
tags = tag
tags = tag.split ',' if _.isString tag
_.map(tags, (name)->
name = name.trim()
"""<a href="/site/tagmap.html##{name.toLowerCase()}" class="tag">#{name}</a>"""
).join ' '
genAuthors: (name)->
name = 'zenithar' unless name?
names = name
names = name.split ',' if _.isString name
_.map(names, (name)->
name = name.trim()
author = authors[ name ]
return to.value(author.page) if author.hasOwnProperty( 'page' )
"""<a href="https://twitter.com/#{author.twitter}/">#{author.name}</a>"""
).join ', '
genTwitter: (names) ->
ret = []
names = "zenithar" unless names?
names = names.split ','
for name in names
name = name.trim()
if authors.hasOwnProperty name
ret.push '@' + authors[ name ].twitter
ret.join ' '
# tools
tool:
'_': _
summary: (contentRendered) ->
splited = contentRendered.split(/<h[123456]>/)
splited[0]
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@document.title} | #{@site.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.tags or []).join(', ')
# =================================
# Collections
# These are special collections that our website makes available to us
collections:
# For instance, this one will fetch in all documents that have pageOrder set within their meta data
pages: (database) ->
database.findAllLive({pageOrder: $exists: true}, [pageOrder:1,title:1])
# This one, will fetch in all documents that have the tag "post" specified in their meta data
posts: (database) ->
database.findAllLive({relativeOutDirPath:'articles'}, [date:-1])
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Ammend our Template Data
renderBefore: ({collection, templateData}, next) ->
#sorting documents
collection.comparator = (model) ->
-model.get('date').getTime()
collection.sort()
# Continue onto the next plugin
next()
# =================================
# Plugins
# Enabled Plugins
enableUnlistedPlugins: true
# Configure Plugins
plugins:
# Enable NIB in the Stylus Plugin
stylus:
useNib: true
robotskirt:
inline: (src, hash, houdini)->
out = src
#for people
out = out.replace /(^|[ \t]+)@([a-zA-Z0-9]+)/g, (whole, m1, m2) ->
hash m1 + '<a href="https://twitter.com/' + m2 + '">@' + m2 + '</a>'
#for hash tag·
out = out.replace /(^|[ \t]+)#([a-zA-Z0-9]+)/g, (whole, m1, m2) ->
hash m1 + '<a href="/site/tagmap.html#' + m2 + '">#' + m2 + '</a>'
out
environments:
w:
ignoreCustomPatterns: /2005|2006|2007|2008|2009|2010|2011|2012/
}
# Export our DocPad Configuration
module.exports = docpadConfig | 224507 | moment = require 'moment'
_ = require 'underscore'
hl = require 'highlight.js'
moment.lang('fr')
to =
value: (it) ->
return it() if _.isFunction( it )
it
## for locally access
authors =
'zenithar':
name: '<NAME>'
email: '<EMAIL>'
github: 'zenithar'
twitter: 'zenithar'
gravata: '2694a5501ec37eab0c6d4bf98c30303a'
# The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
# Specify some site properties
site:
# The production url of our website
url: "http://blog.zenithar.org"
# The default title of our website
title: "Il existe moins bien mais c'est plus cher !"
# The website description (for SEO)
description: """
Ancien ingénieur sécurité passionné, acteur dans le monde du logiciel 'libre'. Je suis en veille technologique permanente car toujours à la recherche du 'meilleur tournevis'.
"""
# The website keywords (for SEO) separated by commas
keywords: """
zenithar,toulouse,java,security,engineer
"""
# The website author's name
author: "<NAME>"
# The website author's email
email: "<EMAIL>"
disqusShortName: "zenithar"
# helpers
helper:
formatDate: (date)->
moment( date ).format('YYYY MMM DD')
genTags: (tag)->
return '' if !tag
tags = tag
tags = tag.split ',' if _.isString tag
_.map(tags, (name)->
name = name.trim()
"""<a href="/site/tagmap.html##{name.toLowerCase()}" class="tag">#{name}</a>"""
).join ' '
genAuthors: (name)->
name = 'zenithar' unless name?
names = name
names = name.split ',' if _.isString name
_.map(names, (name)->
name = name.trim()
author = authors[ name ]
return to.value(author.page) if author.hasOwnProperty( 'page' )
"""<a href="https://twitter.com/#{author.twitter}/">#{author.name}</a>"""
).join ', '
genTwitter: (names) ->
ret = []
names = "zenithar" unless names?
names = names.split ','
for name in names
name = name.trim()
if authors.hasOwnProperty name
ret.push '@' + authors[ name ].twitter
ret.join ' '
# tools
tool:
'_': _
summary: (contentRendered) ->
splited = contentRendered.split(/<h[123456]>/)
splited[0]
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@document.title} | #{@site.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.tags or []).join(', ')
# =================================
# Collections
# These are special collections that our website makes available to us
collections:
# For instance, this one will fetch in all documents that have pageOrder set within their meta data
pages: (database) ->
database.findAllLive({pageOrder: $exists: true}, [pageOrder:1,title:1])
# This one, will fetch in all documents that have the tag "post" specified in their meta data
posts: (database) ->
database.findAllLive({relativeOutDirPath:'articles'}, [date:-1])
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Ammend our Template Data
renderBefore: ({collection, templateData}, next) ->
#sorting documents
collection.comparator = (model) ->
-model.get('date').getTime()
collection.sort()
# Continue onto the next plugin
next()
# =================================
# Plugins
# Enabled Plugins
enableUnlistedPlugins: true
# Configure Plugins
plugins:
# Enable NIB in the Stylus Plugin
stylus:
useNib: true
robotskirt:
inline: (src, hash, houdini)->
out = src
#for people
out = out.replace /(^|[ \t]+)@([a-zA-Z0-9]+)/g, (whole, m1, m2) ->
hash m1 + '<a href="https://twitter.com/' + m2 + '">@' + m2 + '</a>'
#for hash tag·
out = out.replace /(^|[ \t]+)#([a-zA-Z0-9]+)/g, (whole, m1, m2) ->
hash m1 + '<a href="/site/tagmap.html#' + m2 + '">#' + m2 + '</a>'
out
environments:
w:
ignoreCustomPatterns: /2005|2006|2007|2008|2009|2010|2011|2012/
}
# Export our DocPad Configuration
module.exports = docpadConfig | true | moment = require 'moment'
_ = require 'underscore'
hl = require 'highlight.js'
moment.lang('fr')
to =
value: (it) ->
return it() if _.isFunction( it )
it
## for locally access
authors =
'zenithar':
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
github: 'zenithar'
twitter: 'zenithar'
gravata: '2694a5501ec37eab0c6d4bf98c30303a'
# The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
# Specify some site properties
site:
# The production url of our website
url: "http://blog.zenithar.org"
# The default title of our website
title: "Il existe moins bien mais c'est plus cher !"
# The website description (for SEO)
description: """
Ancien ingénieur sécurité passionné, acteur dans le monde du logiciel 'libre'. Je suis en veille technologique permanente car toujours à la recherche du 'meilleur tournevis'.
"""
# The website keywords (for SEO) separated by commas
keywords: """
zenithar,toulouse,java,security,engineer
"""
# The website author's name
author: "PI:NAME:<NAME>END_PI"
# The website author's email
email: "PI:EMAIL:<EMAIL>END_PI"
disqusShortName: "zenithar"
# helpers
helper:
formatDate: (date)->
moment( date ).format('YYYY MMM DD')
genTags: (tag)->
return '' if !tag
tags = tag
tags = tag.split ',' if _.isString tag
_.map(tags, (name)->
name = name.trim()
"""<a href="/site/tagmap.html##{name.toLowerCase()}" class="tag">#{name}</a>"""
).join ' '
genAuthors: (name)->
name = 'zenithar' unless name?
names = name
names = name.split ',' if _.isString name
_.map(names, (name)->
name = name.trim()
author = authors[ name ]
return to.value(author.page) if author.hasOwnProperty( 'page' )
"""<a href="https://twitter.com/#{author.twitter}/">#{author.name}</a>"""
).join ', '
genTwitter: (names) ->
ret = []
names = "zenithar" unless names?
names = names.split ','
for name in names
name = name.trim()
if authors.hasOwnProperty name
ret.push '@' + authors[ name ].twitter
ret.join ' '
# tools
tool:
'_': _
summary: (contentRendered) ->
splited = contentRendered.split(/<h[123456]>/)
splited[0]
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@document.title} | #{@site.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.tags or []).join(', ')
# =================================
# Collections
# These are special collections that our website makes available to us
collections:
# For instance, this one will fetch in all documents that have pageOrder set within their meta data
pages: (database) ->
database.findAllLive({pageOrder: $exists: true}, [pageOrder:1,title:1])
# This one, will fetch in all documents that have the tag "post" specified in their meta data
posts: (database) ->
database.findAllLive({relativeOutDirPath:'articles'}, [date:-1])
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Ammend our Template Data
renderBefore: ({collection, templateData}, next) ->
#sorting documents
collection.comparator = (model) ->
-model.get('date').getTime()
collection.sort()
# Continue onto the next plugin
next()
# =================================
# Plugins
# Enabled Plugins
enableUnlistedPlugins: true
# Configure Plugins
plugins:
# Enable NIB in the Stylus Plugin
stylus:
useNib: true
robotskirt:
inline: (src, hash, houdini)->
out = src
#for people
out = out.replace /(^|[ \t]+)@([a-zA-Z0-9]+)/g, (whole, m1, m2) ->
hash m1 + '<a href="https://twitter.com/' + m2 + '">@' + m2 + '</a>'
#for hash tag·
out = out.replace /(^|[ \t]+)#([a-zA-Z0-9]+)/g, (whole, m1, m2) ->
hash m1 + '<a href="/site/tagmap.html#' + m2 + '">#' + m2 + '</a>'
out
environments:
w:
ignoreCustomPatterns: /2005|2006|2007|2008|2009|2010|2011|2012/
}
# Export our DocPad Configuration
module.exports = docpadConfig |
[
{
"context": "string for alignment test\n text = \"\"\"\n foo = \"olá\"\n foobar = {\"key\": 1, \"value\": \"hello, world\"}",
"end": 209,
"score": 0.991783618927002,
"start": 206,
"tag": "NAME",
"value": "olá"
},
{
"context": "t(text)).to.be.equal \"\"\"\n foo = \"olá\"\n foobar = {\"key\": 1, \"value\": \"hel",
"end": 677,
"score": 0.9795883893966675,
"start": 674,
"tag": "NAME",
"value": "olá"
}
] | test/002-formatter-test.coffee | paulodiovani/atom-align-tab | 0 | expect = require("chai").expect
Parser = require("../lib/parser.coffee")
Formatter = require("../lib/formatter.coffee")
describe "Formatter", ->
# string for alignment test
text = """
foo = "olá"
foobar = {"key": 1, "value": "hello, world"}
myTinyLittleVar = {"key": 999, "value": "hello, world, again"}
"""
parser = new Parser '=/f'
formatter = new Formatter
formatter.setParser parser
it "is a Formatter", ->
expect(formatter).to.be.instanceof Formatter
it "has a parser", ->
expect(formatter.parser).to.be.instanceof Parser
it "parse equals", ->
expect(formatter.format(text)).to.be.equal """
foo = "olá"
foobar = {"key": 1, "value": "hello, world"}
myTinyLittleVar = {"key": 999, "value": "hello, world, again"}
""" | 139045 | expect = require("chai").expect
Parser = require("../lib/parser.coffee")
Formatter = require("../lib/formatter.coffee")
describe "Formatter", ->
# string for alignment test
text = """
foo = "<NAME>"
foobar = {"key": 1, "value": "hello, world"}
myTinyLittleVar = {"key": 999, "value": "hello, world, again"}
"""
parser = new Parser '=/f'
formatter = new Formatter
formatter.setParser parser
it "is a Formatter", ->
expect(formatter).to.be.instanceof Formatter
it "has a parser", ->
expect(formatter.parser).to.be.instanceof Parser
it "parse equals", ->
expect(formatter.format(text)).to.be.equal """
foo = "<NAME>"
foobar = {"key": 1, "value": "hello, world"}
myTinyLittleVar = {"key": 999, "value": "hello, world, again"}
""" | true | expect = require("chai").expect
Parser = require("../lib/parser.coffee")
Formatter = require("../lib/formatter.coffee")
describe "Formatter", ->
# string for alignment test
text = """
foo = "PI:NAME:<NAME>END_PI"
foobar = {"key": 1, "value": "hello, world"}
myTinyLittleVar = {"key": 999, "value": "hello, world, again"}
"""
parser = new Parser '=/f'
formatter = new Formatter
formatter.setParser parser
it "is a Formatter", ->
expect(formatter).to.be.instanceof Formatter
it "has a parser", ->
expect(formatter.parser).to.be.instanceof Parser
it "parse equals", ->
expect(formatter.format(text)).to.be.equal """
foo = "PI:NAME:<NAME>END_PI"
foobar = {"key": 1, "value": "hello, world"}
myTinyLittleVar = {"key": 999, "value": "hello, world, again"}
""" |
[
{
"context": "e 'uuid').v4\nmd5 = require 'blueimp-md5'\n\nmKey = 'key'\npullUrl = 'http://xxx'\npushUrl = 'rtmp://xxx'\nff",
"end": 129,
"score": 0.9944922924041748,
"start": 126,
"tag": "KEY",
"value": "key"
},
{
"context": "sh = md5 url\n \"#{pullUrl}/p/#{id}.flv?auth_key=#{time}-#{rand}-0-#{hash}\"\n\nEventEmitter = (require 'events').EventEmitter",
"end": 446,
"score": 0.9818089604377747,
"start": 424,
"tag": "KEY",
"value": "time}-#{rand}-0-#{hash"
}
] | src/push.coffee | Anillc/ytbpush | 2 | ytbdl = require 'youtube-dl'
ffmpeg = require 'fluent-ffmpeg'
uuid = (require 'uuid').v4
md5 = require 'blueimp-md5'
mKey = 'key'
pullUrl = 'http://xxx'
pushUrl = 'rtmp://xxx'
ffmpegPath = '/path/to/ffmpeg'
genUrl = (id) ->
time = (Math.round (new Date).getTime()/1000) + 60 * 60 * 4
rand = uuid().replace /-/g, ''
url = "/p/#{id}.flv-#{time}-#{rand}-0-#{mKey}"
hash = md5 url
"#{pullUrl}/p/#{id}.flv?auth_key=#{time}-#{rand}-0-#{hash}"
EventEmitter = (require 'events').EventEmitter
module.exports = (url) ->
emitter = new EventEmitter
ytbdl.exec url, ['-g','--format=best'], {}, (err, out) ->
if err then emitter.emit 'error', err
id = uuid().replace /-/g, ''
cmd = ffmpeg out[0]
.setFfmpegPath ffmpegPath
.videoCodec 'copy'
.audioCodec 'copy'
.inputOptions '-re'
.format 'flv'
.output "#{pullUrl}/p/#{id}"
.on 'start', ->
emitter.id = id
emitter.url = genUrl id
emitter.emit 'start', id
.on 'error', (e) -> emitter.emit 'error', e, id
.on 'end', -> emitter.emit 'end', id
emitter.stop = -> cmd.kill()
cmd.run()
emitter
| 213305 | ytbdl = require 'youtube-dl'
ffmpeg = require 'fluent-ffmpeg'
uuid = (require 'uuid').v4
md5 = require 'blueimp-md5'
mKey = '<KEY>'
pullUrl = 'http://xxx'
pushUrl = 'rtmp://xxx'
ffmpegPath = '/path/to/ffmpeg'
genUrl = (id) ->
time = (Math.round (new Date).getTime()/1000) + 60 * 60 * 4
rand = uuid().replace /-/g, ''
url = "/p/#{id}.flv-#{time}-#{rand}-0-#{mKey}"
hash = md5 url
"#{pullUrl}/p/#{id}.flv?auth_key=#{<KEY>}"
EventEmitter = (require 'events').EventEmitter
module.exports = (url) ->
emitter = new EventEmitter
ytbdl.exec url, ['-g','--format=best'], {}, (err, out) ->
if err then emitter.emit 'error', err
id = uuid().replace /-/g, ''
cmd = ffmpeg out[0]
.setFfmpegPath ffmpegPath
.videoCodec 'copy'
.audioCodec 'copy'
.inputOptions '-re'
.format 'flv'
.output "#{pullUrl}/p/#{id}"
.on 'start', ->
emitter.id = id
emitter.url = genUrl id
emitter.emit 'start', id
.on 'error', (e) -> emitter.emit 'error', e, id
.on 'end', -> emitter.emit 'end', id
emitter.stop = -> cmd.kill()
cmd.run()
emitter
| true | ytbdl = require 'youtube-dl'
ffmpeg = require 'fluent-ffmpeg'
uuid = (require 'uuid').v4
md5 = require 'blueimp-md5'
mKey = 'PI:KEY:<KEY>END_PI'
pullUrl = 'http://xxx'
pushUrl = 'rtmp://xxx'
ffmpegPath = '/path/to/ffmpeg'
genUrl = (id) ->
time = (Math.round (new Date).getTime()/1000) + 60 * 60 * 4
rand = uuid().replace /-/g, ''
url = "/p/#{id}.flv-#{time}-#{rand}-0-#{mKey}"
hash = md5 url
"#{pullUrl}/p/#{id}.flv?auth_key=#{PI:KEY:<KEY>END_PI}"
EventEmitter = (require 'events').EventEmitter
module.exports = (url) ->
emitter = new EventEmitter
ytbdl.exec url, ['-g','--format=best'], {}, (err, out) ->
if err then emitter.emit 'error', err
id = uuid().replace /-/g, ''
cmd = ffmpeg out[0]
.setFfmpegPath ffmpegPath
.videoCodec 'copy'
.audioCodec 'copy'
.inputOptions '-re'
.format 'flv'
.output "#{pullUrl}/p/#{id}"
.on 'start', ->
emitter.id = id
emitter.url = genUrl id
emitter.emit 'start', id
.on 'error', (e) -> emitter.emit 'error', e, id
.on 'end', -> emitter.emit 'end', id
emitter.stop = -> cmd.kill()
cmd.run()
emitter
|
[
{
"context": "= false\n\n @client = new Dropbox.Client({ key: \"hlzfj39a4cfzpri\" })\n if cordova?\n @client.authDriver(new ",
"end": 282,
"score": 0.9987335205078125,
"start": 267,
"tag": "KEY",
"value": "hlzfj39a4cfzpri"
}
] | app/scripts/services/dropbox_service.coffee | scalableminds/fivepad | 1 | ### define
dropbox : Dropbox
lodash : _
app : app
###
# if not window.Dropbox?
# Dropbox = window.Dropbox = nodereq("./scripts/lib/dropbox-datastores-1.1.0")
class DropboxService
constructor : ->
@isReady = false
@client = new Dropbox.Client({ key: "hlzfj39a4cfzpri" })
if cordova?
@client.authDriver(new Dropbox.AuthDriver.Cordova())
else if chrome?.storage?
@client.authDriver(new Dropbox.AuthDriver.ChromeApp())
else
@client.authDriver(
new Dropbox.AuthDriver.Popup({
receiverUrl: "https://scalableminds.github.io/fivepad/oauth_receiver.html"
}))
@client.authenticate({ interactive : false }, (error) =>
if error
console.error("dropboxService:authenticationError", error)
else if @isAuthenticated()
@initDatastore()
app.trigger("dropboxService:authenticated")
)
authenticate : ->
@client.authenticate((error) =>
if error
console.error("dropboxService:authenticationError", error)
else
app.trigger("dropboxService:authenticated")
@initDatastore()
)
return this
initDatastore : ->
datastoreManager = @client.getDatastoreManager()
datastoreManager.openDefaultDatastore((error, datastore) =>
if error
console.error("dropboxService:datastoreError", error)
@initDatastore()
else
@datastore = datastore
@notesTable = @datastore.getTable("notes")
@datastore.syncStatusChanged.addListener( =>
console.log("dropbox syncing", @datastore.getSyncStatus().uploading)
if @datastore.getSyncStatus().uploading
app.trigger("dropboxService:syncing")
else
app.trigger("dropboxService:synced")
return
)
@datastore.recordsChanged.addListener((changes) =>
if changes.isLocal()
app.trigger("dropboxService:recordsChangedLocal", changes)
else
app.trigger("dropboxService:recordsChangedRemote", changes)
app.trigger("dropboxService:recordsChanged", changes)
return
)
if not @isReady
@isReady = true
app.trigger("dropboxService:ready", this)
)
return this
isAuthenticated : ->
return @client.isAuthenticated()
isTransient : ->
return @datastore?.getSyncStatus().uploading
updateNote : (id, obj) ->
if @isReady
if record = @notesTable.get("note-#{id}")
record.update(obj)
else
@notesTable.getOrInsert("note-#{id}", obj)
getNote : (id) ->
if @isReady
return @notesTable.get("note-#{id}")?.getFields()
| 39813 | ### define
dropbox : Dropbox
lodash : _
app : app
###
# if not window.Dropbox?
# Dropbox = window.Dropbox = nodereq("./scripts/lib/dropbox-datastores-1.1.0")
class DropboxService
constructor : ->
@isReady = false
@client = new Dropbox.Client({ key: "<KEY>" })
if cordova?
@client.authDriver(new Dropbox.AuthDriver.Cordova())
else if chrome?.storage?
@client.authDriver(new Dropbox.AuthDriver.ChromeApp())
else
@client.authDriver(
new Dropbox.AuthDriver.Popup({
receiverUrl: "https://scalableminds.github.io/fivepad/oauth_receiver.html"
}))
@client.authenticate({ interactive : false }, (error) =>
if error
console.error("dropboxService:authenticationError", error)
else if @isAuthenticated()
@initDatastore()
app.trigger("dropboxService:authenticated")
)
authenticate : ->
@client.authenticate((error) =>
if error
console.error("dropboxService:authenticationError", error)
else
app.trigger("dropboxService:authenticated")
@initDatastore()
)
return this
initDatastore : ->
datastoreManager = @client.getDatastoreManager()
datastoreManager.openDefaultDatastore((error, datastore) =>
if error
console.error("dropboxService:datastoreError", error)
@initDatastore()
else
@datastore = datastore
@notesTable = @datastore.getTable("notes")
@datastore.syncStatusChanged.addListener( =>
console.log("dropbox syncing", @datastore.getSyncStatus().uploading)
if @datastore.getSyncStatus().uploading
app.trigger("dropboxService:syncing")
else
app.trigger("dropboxService:synced")
return
)
@datastore.recordsChanged.addListener((changes) =>
if changes.isLocal()
app.trigger("dropboxService:recordsChangedLocal", changes)
else
app.trigger("dropboxService:recordsChangedRemote", changes)
app.trigger("dropboxService:recordsChanged", changes)
return
)
if not @isReady
@isReady = true
app.trigger("dropboxService:ready", this)
)
return this
isAuthenticated : ->
return @client.isAuthenticated()
isTransient : ->
return @datastore?.getSyncStatus().uploading
updateNote : (id, obj) ->
if @isReady
if record = @notesTable.get("note-#{id}")
record.update(obj)
else
@notesTable.getOrInsert("note-#{id}", obj)
getNote : (id) ->
if @isReady
return @notesTable.get("note-#{id}")?.getFields()
| true | ### define
dropbox : Dropbox
lodash : _
app : app
###
# if not window.Dropbox?
# Dropbox = window.Dropbox = nodereq("./scripts/lib/dropbox-datastores-1.1.0")
class DropboxService
constructor : ->
@isReady = false
@client = new Dropbox.Client({ key: "PI:KEY:<KEY>END_PI" })
if cordova?
@client.authDriver(new Dropbox.AuthDriver.Cordova())
else if chrome?.storage?
@client.authDriver(new Dropbox.AuthDriver.ChromeApp())
else
@client.authDriver(
new Dropbox.AuthDriver.Popup({
receiverUrl: "https://scalableminds.github.io/fivepad/oauth_receiver.html"
}))
@client.authenticate({ interactive : false }, (error) =>
if error
console.error("dropboxService:authenticationError", error)
else if @isAuthenticated()
@initDatastore()
app.trigger("dropboxService:authenticated")
)
authenticate : ->
@client.authenticate((error) =>
if error
console.error("dropboxService:authenticationError", error)
else
app.trigger("dropboxService:authenticated")
@initDatastore()
)
return this
initDatastore : ->
datastoreManager = @client.getDatastoreManager()
datastoreManager.openDefaultDatastore((error, datastore) =>
if error
console.error("dropboxService:datastoreError", error)
@initDatastore()
else
@datastore = datastore
@notesTable = @datastore.getTable("notes")
@datastore.syncStatusChanged.addListener( =>
console.log("dropbox syncing", @datastore.getSyncStatus().uploading)
if @datastore.getSyncStatus().uploading
app.trigger("dropboxService:syncing")
else
app.trigger("dropboxService:synced")
return
)
@datastore.recordsChanged.addListener((changes) =>
if changes.isLocal()
app.trigger("dropboxService:recordsChangedLocal", changes)
else
app.trigger("dropboxService:recordsChangedRemote", changes)
app.trigger("dropboxService:recordsChanged", changes)
return
)
if not @isReady
@isReady = true
app.trigger("dropboxService:ready", this)
)
return this
isAuthenticated : ->
return @client.isAuthenticated()
isTransient : ->
return @datastore?.getSyncStatus().uploading
updateNote : (id, obj) ->
if @isReady
if record = @notesTable.get("note-#{id}")
record.update(obj)
else
@notesTable.getOrInsert("note-#{id}", obj)
getNote : (id) ->
if @isReady
return @notesTable.get("note-#{id}")?.getFields()
|
[
{
"context": "\n###\nTest CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)\n###\n\nrequire 'c",
"end": 37,
"score": 0.9998576641082764,
"start": 26,
"tag": "NAME",
"value": "David Worms"
},
{
"context": "\n###\nTest CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)\n###\n\nrequire 'coffee-script'\nfs =",
"end": 55,
"score": 0.9999299645423889,
"start": 39,
"tag": "EMAIL",
"value": "open@adaltas.com"
}
] | Tools/phantalyzer-master/node_modules/csv/test/quotes.coffee | pianomanx/watchdog | 377 |
###
Test CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)
###
require 'coffee-script'
fs = require 'fs'
should = require 'should'
csv = if process.env.CSV_COV then require '../lib-cov' else require '../src'
describe 'quotes', ->
it 'Test regular quotes', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.8017226E7,"ABC,45","2000-01-01"
28392898392,1974.0,"8.8392926E7",DEF,23,2050-11-27
""")
.to.string (data) ->
data.should.eql """
20322051544,1979.0,8.8017226E7,"ABC,45",2000-01-01
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should read quoted values containing delimiters and write around quote only the value containing delimiters', (next) ->
csv()
.from.string("""
20322051544,",1979.0,8.8017226E7,ABC,45,2000-01-01"
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"28392898392,1974.0","8.8392926E7","DEF,23,2050-11-27"
""")
.to.string (data) ->
data.should.eql """
20322051544,",1979.0,8.8017226E7,ABC,45,2000-01-01"
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"28392898392,1974.0",8.8392926E7,"DEF,23,2050-11-27"
"""
next()
it 'Test quotes inside field', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.801"7226E7,ABC,45,2000-01-01
28392898392,1974.0,8.8392926E7,DEF,2"3,2050-11-27
""")
.to.string (data) ->
data.should.eql """
20322051544,1979.0,"8.801""7226E7",ABC,45,2000-01-01
28392898392,1974.0,8.8392926E7,DEF,"2""3",2050-11-27
"""
next()
it 'Test empty value', (next) ->
csv()
.from.string("""
20322051544,"",8.8017226E7,45,""
"",1974,8.8392926E7,"",""
""", quote: '"', escape: '"')
.to.string (data) ->
data.should.eql """
20322051544,,8.8017226E7,45,
,1974,8.8392926E7,,
"""
next()
it 'should read values with quotes, escaped as double quotes, and write empty values as not quoted', (next) ->
csv()
.from.string("""
20322051544,\"\"\"\",8.8017226E7,45,\"\"\"ok\"\"\"
"",1974,8.8392926E7,"",""
""", quote: '"', escape: '"')
.on 'record', (record,index) ->
record.length.should.eql 5
if index is 0
record[1].should.eql '"'
record[4].should.eql '"ok"'
.to.string (data) ->
data.should.eql """
20322051544,\"\"\"\",8.8017226E7,45,\"\"\"ok\"\"\"
,1974,8.8392926E7,,
"""
next()
it 'should accept line breaks inside quotes', (next) ->
csv()
.from.string("""
20322051544,"
",8.8017226E7,45,"
ok
"
"
",1974,8.8392926E7,"","
"
""", quote: '"', escape: '"')
.on 'record', (record,index) ->
record.length.should.eql 5
.to.string (data) ->
data.should.eql """
20322051544,"
",8.8017226E7,45,"
ok
"
"
",1974,8.8392926E7,,"
"
"""
next()
it 'Test unclosed quote', (next) ->
csv()
.from.string("""
"",1974,8.8392926E7,"","
""", quote: '"', escape: '"')
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.eql 'Quoted field not terminated at line 1'
next()
it 'Test invalid quotes', (next) ->
csv()
.from.string("""
"" 1974 8.8392926E7 ""t ""
"" 1974 8.8392926E7 "" ""
""", quote: '"', escape: '"', delimiter: "\t")
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.eql 'Invalid closing quote at line 1; found " " instead of delimiter "\\t"'
next()
it 'Test invalid quotes from string', (next) ->
csv()
.from.string '"",1974,8.8392926E7,""t,""',
quote: '"'
escape: '"'
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.match /Invalid closing quote/
next()
it 'should quotes all fields', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.801"7226E7,ABC
"283928""98392",1974.0,8.8392926E7,DEF
""")
.on 'error', (e) ->
false.should.be.ok
.to.string( (data) ->
data.should.eql """
"20322051544","1979.0","8.801""7226E7","ABC"
"283928""98392","1974.0","8.8392926E7","DEF"
"""
next()
, quoted: true )
| 225622 |
###
Test CSV - Copyright <NAME> <<EMAIL>> (BSD Licensed)
###
require 'coffee-script'
fs = require 'fs'
should = require 'should'
csv = if process.env.CSV_COV then require '../lib-cov' else require '../src'
describe 'quotes', ->
it 'Test regular quotes', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.8017226E7,"ABC,45","2000-01-01"
28392898392,1974.0,"8.8392926E7",DEF,23,2050-11-27
""")
.to.string (data) ->
data.should.eql """
20322051544,1979.0,8.8017226E7,"ABC,45",2000-01-01
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should read quoted values containing delimiters and write around quote only the value containing delimiters', (next) ->
csv()
.from.string("""
20322051544,",1979.0,8.8017226E7,ABC,45,2000-01-01"
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"28392898392,1974.0","8.8392926E7","DEF,23,2050-11-27"
""")
.to.string (data) ->
data.should.eql """
20322051544,",1979.0,8.8017226E7,ABC,45,2000-01-01"
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"28392898392,1974.0",8.8392926E7,"DEF,23,2050-11-27"
"""
next()
it 'Test quotes inside field', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.801"7226E7,ABC,45,2000-01-01
28392898392,1974.0,8.8392926E7,DEF,2"3,2050-11-27
""")
.to.string (data) ->
data.should.eql """
20322051544,1979.0,"8.801""7226E7",ABC,45,2000-01-01
28392898392,1974.0,8.8392926E7,DEF,"2""3",2050-11-27
"""
next()
it 'Test empty value', (next) ->
csv()
.from.string("""
20322051544,"",8.8017226E7,45,""
"",1974,8.8392926E7,"",""
""", quote: '"', escape: '"')
.to.string (data) ->
data.should.eql """
20322051544,,8.8017226E7,45,
,1974,8.8392926E7,,
"""
next()
it 'should read values with quotes, escaped as double quotes, and write empty values as not quoted', (next) ->
csv()
.from.string("""
20322051544,\"\"\"\",8.8017226E7,45,\"\"\"ok\"\"\"
"",1974,8.8392926E7,"",""
""", quote: '"', escape: '"')
.on 'record', (record,index) ->
record.length.should.eql 5
if index is 0
record[1].should.eql '"'
record[4].should.eql '"ok"'
.to.string (data) ->
data.should.eql """
20322051544,\"\"\"\",8.8017226E7,45,\"\"\"ok\"\"\"
,1974,8.8392926E7,,
"""
next()
it 'should accept line breaks inside quotes', (next) ->
csv()
.from.string("""
20322051544,"
",8.8017226E7,45,"
ok
"
"
",1974,8.8392926E7,"","
"
""", quote: '"', escape: '"')
.on 'record', (record,index) ->
record.length.should.eql 5
.to.string (data) ->
data.should.eql """
20322051544,"
",8.8017226E7,45,"
ok
"
"
",1974,8.8392926E7,,"
"
"""
next()
it 'Test unclosed quote', (next) ->
csv()
.from.string("""
"",1974,8.8392926E7,"","
""", quote: '"', escape: '"')
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.eql 'Quoted field not terminated at line 1'
next()
it 'Test invalid quotes', (next) ->
csv()
.from.string("""
"" 1974 8.8392926E7 ""t ""
"" 1974 8.8392926E7 "" ""
""", quote: '"', escape: '"', delimiter: "\t")
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.eql 'Invalid closing quote at line 1; found " " instead of delimiter "\\t"'
next()
it 'Test invalid quotes from string', (next) ->
csv()
.from.string '"",1974,8.8392926E7,""t,""',
quote: '"'
escape: '"'
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.match /Invalid closing quote/
next()
it 'should quotes all fields', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.801"7226E7,ABC
"283928""98392",1974.0,8.8392926E7,DEF
""")
.on 'error', (e) ->
false.should.be.ok
.to.string( (data) ->
data.should.eql """
"20322051544","1979.0","8.801""7226E7","ABC"
"283928""98392","1974.0","8.8392926E7","DEF"
"""
next()
, quoted: true )
| true |
###
Test CSV - Copyright PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (BSD Licensed)
###
require 'coffee-script'
fs = require 'fs'
should = require 'should'
csv = if process.env.CSV_COV then require '../lib-cov' else require '../src'
describe 'quotes', ->
it 'Test regular quotes', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.8017226E7,"ABC,45","2000-01-01"
28392898392,1974.0,"8.8392926E7",DEF,23,2050-11-27
""")
.to.string (data) ->
data.should.eql """
20322051544,1979.0,8.8017226E7,"ABC,45",2000-01-01
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should read quoted values containing delimiters and write around quote only the value containing delimiters', (next) ->
csv()
.from.string("""
20322051544,",1979.0,8.8017226E7,ABC,45,2000-01-01"
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"28392898392,1974.0","8.8392926E7","DEF,23,2050-11-27"
""")
.to.string (data) ->
data.should.eql """
20322051544,",1979.0,8.8017226E7,ABC,45,2000-01-01"
28392898392,1974.0,8.8392926E7,DEF,23,2050-11-27
"28392898392,1974.0",8.8392926E7,"DEF,23,2050-11-27"
"""
next()
it 'Test quotes inside field', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.801"7226E7,ABC,45,2000-01-01
28392898392,1974.0,8.8392926E7,DEF,2"3,2050-11-27
""")
.to.string (data) ->
data.should.eql """
20322051544,1979.0,"8.801""7226E7",ABC,45,2000-01-01
28392898392,1974.0,8.8392926E7,DEF,"2""3",2050-11-27
"""
next()
it 'Test empty value', (next) ->
csv()
.from.string("""
20322051544,"",8.8017226E7,45,""
"",1974,8.8392926E7,"",""
""", quote: '"', escape: '"')
.to.string (data) ->
data.should.eql """
20322051544,,8.8017226E7,45,
,1974,8.8392926E7,,
"""
next()
it 'should read values with quotes, escaped as double quotes, and write empty values as not quoted', (next) ->
csv()
.from.string("""
20322051544,\"\"\"\",8.8017226E7,45,\"\"\"ok\"\"\"
"",1974,8.8392926E7,"",""
""", quote: '"', escape: '"')
.on 'record', (record,index) ->
record.length.should.eql 5
if index is 0
record[1].should.eql '"'
record[4].should.eql '"ok"'
.to.string (data) ->
data.should.eql """
20322051544,\"\"\"\",8.8017226E7,45,\"\"\"ok\"\"\"
,1974,8.8392926E7,,
"""
next()
it 'should accept line breaks inside quotes', (next) ->
csv()
.from.string("""
20322051544,"
",8.8017226E7,45,"
ok
"
"
",1974,8.8392926E7,"","
"
""", quote: '"', escape: '"')
.on 'record', (record,index) ->
record.length.should.eql 5
.to.string (data) ->
data.should.eql """
20322051544,"
",8.8017226E7,45,"
ok
"
"
",1974,8.8392926E7,,"
"
"""
next()
it 'Test unclosed quote', (next) ->
csv()
.from.string("""
"",1974,8.8392926E7,"","
""", quote: '"', escape: '"')
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.eql 'Quoted field not terminated at line 1'
next()
it 'Test invalid quotes', (next) ->
csv()
.from.string("""
"" 1974 8.8392926E7 ""t ""
"" 1974 8.8392926E7 "" ""
""", quote: '"', escape: '"', delimiter: "\t")
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.eql 'Invalid closing quote at line 1; found " " instead of delimiter "\\t"'
next()
it 'Test invalid quotes from string', (next) ->
csv()
.from.string '"",1974,8.8392926E7,""t,""',
quote: '"'
escape: '"'
.on 'close', ->
false.should.be.ok
.on 'error', (e) ->
e.message.should.match /Invalid closing quote/
next()
it 'should quotes all fields', (next) ->
csv()
.from.string("""
20322051544,"1979.0",8.801"7226E7,ABC
"283928""98392",1974.0,8.8392926E7,DEF
""")
.on 'error', (e) ->
false.should.be.ok
.to.string( (data) ->
data.should.eql """
"20322051544","1979.0","8.801""7226E7","ABC"
"283928""98392","1974.0","8.8392926E7","DEF"
"""
next()
, quoted: true )
|
[
{
"context": "### Copyright (c) 2015 Magnus Leo. All rights reserved. ###\n\nActor = require('./Act",
"end": 33,
"score": 0.9998639225959778,
"start": 23,
"tag": "NAME",
"value": "Magnus Leo"
}
] | src/classes/Player.coffee | magnusleo/Leo-Engine | 1 | ### Copyright (c) 2015 Magnus Leo. All rights reserved. ###
Actor = require('./Actor')
collision = require('../modules/collision')
environment = require('../modules/environment')
event = require('../modules/event')
io = require('../modules/io')
layers = require('../modules/layers')
util = require('../modules/util')
module.exports =
class Player extends Actor
constructor: (data) -> # Player::constructor
super
@accX = 0
@dirPhysical = 0
@dirVisual = 1
@state = new PlayerStateStanding(this)
@stateBefore = null
setState: (state) -> # Player::setState
if @state instanceof state
return
@stateBefore = @state
@state = new state(this)
stateIs: (state) -> # Player::stateIs
return @state instanceof state
handleInput: (e) -> # Player::handleInput
@state.handleInput(e)
update: (cycleLength) -> # Player::update
@speedY += environment.gravity * cycleLength
@speedX += @accX * cycleLength
@speedX = Math.min(@speedX, @speedXMax)
@speedX = Math.max(@speedX, -@speedXMax)
super(cycleLength)
@state.update(cycleLength)
collisions = collision.actorToLayer this, layers.get('ground'),
reposition: true
# Update player state
if collisions.bottom
if @dirPhysical == 0
@setState PlayerStateStanding
@decelerate('x', collisions.friction * @decelerationGround * cycleLength)
else
@setState PlayerStateRunning
else if not @stateIs PlayerStateJumping
@setState PlayerStateFalling
if @dirPhysical == 0
@decelerate('x', @decelerationAir * cycleLength)
# PlayerState
# |
# |__PlayerStateAir
# | |__PlayerStateJumping
# |
# |__PlayerStateGround
# |__PlayerStateStanding
# |__PlayerStateRunning
PlayerState =
class PlayerState
constructor: (@parent) -> # PlayerState::constructor
handleInput: (e) -> # PlayerState::handleInput
key = util.KEY_CODES
switch e.keyCode
when key.LEFT
@parent.dirPhysical = -1
@parent.dirVisual = -1
when key.RIGHT
@parent.dirPhysical = 1
@parent.dirVisual = 1
update: (cycleLength) -> # PlayerState::update
PlayerStateGround =
class PlayerStateGround extends PlayerState
constructor: (data) -> # PlayerStateGround::constructor
super(data)
handleInput: (e) -> # PlayerStateGround::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.UP, key.Z
@parent.setState PlayerStateJumping
PlayerStateStanding =
class PlayerStateStanding extends PlayerStateGround
constructor: (data) -> # PlayerStateStanding::constructor
super(data)
@parent.accX = 0
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'standingRight'
else
@parent.sprite.setAnimation 'standingLeft'
handleInput: (e) -> # PlayerStateStanding::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@parent.setState PlayerStateRunning
PlayerStateRunning =
class PlayerStateRunning extends PlayerStateGround
constructor: (data) -> # PlayerStateRunning::constructor
super(data)
@_setSpeedAndAnim()
if @parent.stateBefore instanceof PlayerStateAir
@parent.sprite.getCurrentAnimation().jumpToFrame(1)
handleInput: (e) -> # PlayerStateRunning::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@_setSpeedAndAnim()
else if e.type is 'keyup'
switch e.keyCode
when key.LEFT, key.RIGHT
rightPressed = io.isKeyPressed(key.RIGHT)
leftPressed = io.isKeyPressed(key.LEFT)
if not leftPressed and not rightPressed
@parent.setState PlayerStateStanding
@parent.dirPhysical = 0
@parent.accX = 0
else if leftPressed and not rightPressed
@parent.dirPhysical = -1
@parent.dirVisual = -1
@_setSpeedAndAnim { frameNum: 1 }
else # if not leftPressed and rightPressed
@parent.dirPhysical = 1
@parent.dirVisual = 1
@_setSpeedAndAnim { frameNum: 1 }
_setSpeedAndAnim: (options = {})-> # PlayerStateRunning::_setSpeedAndAnim
@parent.accX = @parent.accelerationGround * @parent.dirPhysical
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'runningRight', options.frameNum
else
@parent.sprite.setAnimation 'runningLeft', options.frameNum
PlayerStateAir =
class PlayerStateAir extends PlayerState
constructor: (data) -> # PlayerStateAir::constructor
super
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'jumpingRight'
else
@parent.sprite.setAnimation 'jumpingLeft'
handleInput: (e) -> # PlayerStateAir::handleInput
super
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@_setSpeedAndAnim()
else if e.type is 'keyup'
switch e.keyCode
when key.LEFT, key.RIGHT
rightPressed = io.isKeyPressed(key.RIGHT)
leftPressed = io.isKeyPressed(key.LEFT)
if not leftPressed and not rightPressed
@parent.dirPhysical = 0
@parent.accX = 0
else if leftPressed and not rightPressed
@parent.dirPhysical = -1
@parent.dirVisual = -1
@_setSpeedAndAnim { frameNum: 1 }
else # if not leftPressed and rightPressed
@parent.dirPhysical = 1
@parent.dirVisual = 1
@_setSpeedAndAnim { frameNum: 1 }
_setSpeedAndAnim: -> # PlayerStateAir::_setSpeedAndAnim
@parent.accX = @parent.accelerationAir * @parent.dirPhysical
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'jumpingRight'
else
@parent.sprite.setAnimation 'jumpingLeft'
update: (cycleLength) -> # PlayerStateAir::update
super
PlayerStateJumping =
class PlayerStateJumping extends PlayerStateAir
constructor: (data) -> # PlayerStateJumping::constructor
super
@parent.speedY = -21
handleInput: (e) -> # PlayerStateJumping::handleInput
super
key = util.KEY_CODES
if e.type is 'keyup'
switch e.keyCode
when key.UP, key.Z
@parent.speedY *= 0.5
@parent.setState PlayerStateFalling
update: (cycleLength) -> # PlayerStateJumping::update
if @parent.speedY >= 0
@parent.setState PlayerStateFalling
PlayerStateFalling =
class PlayerStateFalling extends PlayerStateAir
| 219157 | ### Copyright (c) 2015 <NAME>. All rights reserved. ###
Actor = require('./Actor')
collision = require('../modules/collision')
environment = require('../modules/environment')
event = require('../modules/event')
io = require('../modules/io')
layers = require('../modules/layers')
util = require('../modules/util')
module.exports =
class Player extends Actor
constructor: (data) -> # Player::constructor
super
@accX = 0
@dirPhysical = 0
@dirVisual = 1
@state = new PlayerStateStanding(this)
@stateBefore = null
setState: (state) -> # Player::setState
if @state instanceof state
return
@stateBefore = @state
@state = new state(this)
stateIs: (state) -> # Player::stateIs
return @state instanceof state
handleInput: (e) -> # Player::handleInput
@state.handleInput(e)
update: (cycleLength) -> # Player::update
@speedY += environment.gravity * cycleLength
@speedX += @accX * cycleLength
@speedX = Math.min(@speedX, @speedXMax)
@speedX = Math.max(@speedX, -@speedXMax)
super(cycleLength)
@state.update(cycleLength)
collisions = collision.actorToLayer this, layers.get('ground'),
reposition: true
# Update player state
if collisions.bottom
if @dirPhysical == 0
@setState PlayerStateStanding
@decelerate('x', collisions.friction * @decelerationGround * cycleLength)
else
@setState PlayerStateRunning
else if not @stateIs PlayerStateJumping
@setState PlayerStateFalling
if @dirPhysical == 0
@decelerate('x', @decelerationAir * cycleLength)
# PlayerState
# |
# |__PlayerStateAir
# | |__PlayerStateJumping
# |
# |__PlayerStateGround
# |__PlayerStateStanding
# |__PlayerStateRunning
PlayerState =
class PlayerState
constructor: (@parent) -> # PlayerState::constructor
handleInput: (e) -> # PlayerState::handleInput
key = util.KEY_CODES
switch e.keyCode
when key.LEFT
@parent.dirPhysical = -1
@parent.dirVisual = -1
when key.RIGHT
@parent.dirPhysical = 1
@parent.dirVisual = 1
update: (cycleLength) -> # PlayerState::update
PlayerStateGround =
class PlayerStateGround extends PlayerState
constructor: (data) -> # PlayerStateGround::constructor
super(data)
handleInput: (e) -> # PlayerStateGround::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.UP, key.Z
@parent.setState PlayerStateJumping
PlayerStateStanding =
class PlayerStateStanding extends PlayerStateGround
constructor: (data) -> # PlayerStateStanding::constructor
super(data)
@parent.accX = 0
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'standingRight'
else
@parent.sprite.setAnimation 'standingLeft'
handleInput: (e) -> # PlayerStateStanding::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@parent.setState PlayerStateRunning
PlayerStateRunning =
class PlayerStateRunning extends PlayerStateGround
constructor: (data) -> # PlayerStateRunning::constructor
super(data)
@_setSpeedAndAnim()
if @parent.stateBefore instanceof PlayerStateAir
@parent.sprite.getCurrentAnimation().jumpToFrame(1)
handleInput: (e) -> # PlayerStateRunning::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@_setSpeedAndAnim()
else if e.type is 'keyup'
switch e.keyCode
when key.LEFT, key.RIGHT
rightPressed = io.isKeyPressed(key.RIGHT)
leftPressed = io.isKeyPressed(key.LEFT)
if not leftPressed and not rightPressed
@parent.setState PlayerStateStanding
@parent.dirPhysical = 0
@parent.accX = 0
else if leftPressed and not rightPressed
@parent.dirPhysical = -1
@parent.dirVisual = -1
@_setSpeedAndAnim { frameNum: 1 }
else # if not leftPressed and rightPressed
@parent.dirPhysical = 1
@parent.dirVisual = 1
@_setSpeedAndAnim { frameNum: 1 }
_setSpeedAndAnim: (options = {})-> # PlayerStateRunning::_setSpeedAndAnim
@parent.accX = @parent.accelerationGround * @parent.dirPhysical
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'runningRight', options.frameNum
else
@parent.sprite.setAnimation 'runningLeft', options.frameNum
PlayerStateAir =
class PlayerStateAir extends PlayerState
constructor: (data) -> # PlayerStateAir::constructor
super
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'jumpingRight'
else
@parent.sprite.setAnimation 'jumpingLeft'
handleInput: (e) -> # PlayerStateAir::handleInput
super
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@_setSpeedAndAnim()
else if e.type is 'keyup'
switch e.keyCode
when key.LEFT, key.RIGHT
rightPressed = io.isKeyPressed(key.RIGHT)
leftPressed = io.isKeyPressed(key.LEFT)
if not leftPressed and not rightPressed
@parent.dirPhysical = 0
@parent.accX = 0
else if leftPressed and not rightPressed
@parent.dirPhysical = -1
@parent.dirVisual = -1
@_setSpeedAndAnim { frameNum: 1 }
else # if not leftPressed and rightPressed
@parent.dirPhysical = 1
@parent.dirVisual = 1
@_setSpeedAndAnim { frameNum: 1 }
_setSpeedAndAnim: -> # PlayerStateAir::_setSpeedAndAnim
@parent.accX = @parent.accelerationAir * @parent.dirPhysical
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'jumpingRight'
else
@parent.sprite.setAnimation 'jumpingLeft'
update: (cycleLength) -> # PlayerStateAir::update
super
PlayerStateJumping =
class PlayerStateJumping extends PlayerStateAir
constructor: (data) -> # PlayerStateJumping::constructor
super
@parent.speedY = -21
handleInput: (e) -> # PlayerStateJumping::handleInput
super
key = util.KEY_CODES
if e.type is 'keyup'
switch e.keyCode
when key.UP, key.Z
@parent.speedY *= 0.5
@parent.setState PlayerStateFalling
update: (cycleLength) -> # PlayerStateJumping::update
if @parent.speedY >= 0
@parent.setState PlayerStateFalling
PlayerStateFalling =
class PlayerStateFalling extends PlayerStateAir
| true | ### Copyright (c) 2015 PI:NAME:<NAME>END_PI. All rights reserved. ###
Actor = require('./Actor')
collision = require('../modules/collision')
environment = require('../modules/environment')
event = require('../modules/event')
io = require('../modules/io')
layers = require('../modules/layers')
util = require('../modules/util')
module.exports =
class Player extends Actor
constructor: (data) -> # Player::constructor
super
@accX = 0
@dirPhysical = 0
@dirVisual = 1
@state = new PlayerStateStanding(this)
@stateBefore = null
setState: (state) -> # Player::setState
if @state instanceof state
return
@stateBefore = @state
@state = new state(this)
stateIs: (state) -> # Player::stateIs
return @state instanceof state
handleInput: (e) -> # Player::handleInput
@state.handleInput(e)
update: (cycleLength) -> # Player::update
@speedY += environment.gravity * cycleLength
@speedX += @accX * cycleLength
@speedX = Math.min(@speedX, @speedXMax)
@speedX = Math.max(@speedX, -@speedXMax)
super(cycleLength)
@state.update(cycleLength)
collisions = collision.actorToLayer this, layers.get('ground'),
reposition: true
# Update player state
if collisions.bottom
if @dirPhysical == 0
@setState PlayerStateStanding
@decelerate('x', collisions.friction * @decelerationGround * cycleLength)
else
@setState PlayerStateRunning
else if not @stateIs PlayerStateJumping
@setState PlayerStateFalling
if @dirPhysical == 0
@decelerate('x', @decelerationAir * cycleLength)
# PlayerState
# |
# |__PlayerStateAir
# | |__PlayerStateJumping
# |
# |__PlayerStateGround
# |__PlayerStateStanding
# |__PlayerStateRunning
PlayerState =
class PlayerState
constructor: (@parent) -> # PlayerState::constructor
handleInput: (e) -> # PlayerState::handleInput
key = util.KEY_CODES
switch e.keyCode
when key.LEFT
@parent.dirPhysical = -1
@parent.dirVisual = -1
when key.RIGHT
@parent.dirPhysical = 1
@parent.dirVisual = 1
update: (cycleLength) -> # PlayerState::update
PlayerStateGround =
class PlayerStateGround extends PlayerState
constructor: (data) -> # PlayerStateGround::constructor
super(data)
handleInput: (e) -> # PlayerStateGround::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.UP, key.Z
@parent.setState PlayerStateJumping
PlayerStateStanding =
class PlayerStateStanding extends PlayerStateGround
constructor: (data) -> # PlayerStateStanding::constructor
super(data)
@parent.accX = 0
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'standingRight'
else
@parent.sprite.setAnimation 'standingLeft'
handleInput: (e) -> # PlayerStateStanding::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@parent.setState PlayerStateRunning
PlayerStateRunning =
class PlayerStateRunning extends PlayerStateGround
constructor: (data) -> # PlayerStateRunning::constructor
super(data)
@_setSpeedAndAnim()
if @parent.stateBefore instanceof PlayerStateAir
@parent.sprite.getCurrentAnimation().jumpToFrame(1)
handleInput: (e) -> # PlayerStateRunning::handleInput
super(e)
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@_setSpeedAndAnim()
else if e.type is 'keyup'
switch e.keyCode
when key.LEFT, key.RIGHT
rightPressed = io.isKeyPressed(key.RIGHT)
leftPressed = io.isKeyPressed(key.LEFT)
if not leftPressed and not rightPressed
@parent.setState PlayerStateStanding
@parent.dirPhysical = 0
@parent.accX = 0
else if leftPressed and not rightPressed
@parent.dirPhysical = -1
@parent.dirVisual = -1
@_setSpeedAndAnim { frameNum: 1 }
else # if not leftPressed and rightPressed
@parent.dirPhysical = 1
@parent.dirVisual = 1
@_setSpeedAndAnim { frameNum: 1 }
_setSpeedAndAnim: (options = {})-> # PlayerStateRunning::_setSpeedAndAnim
@parent.accX = @parent.accelerationGround * @parent.dirPhysical
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'runningRight', options.frameNum
else
@parent.sprite.setAnimation 'runningLeft', options.frameNum
PlayerStateAir =
class PlayerStateAir extends PlayerState
constructor: (data) -> # PlayerStateAir::constructor
super
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'jumpingRight'
else
@parent.sprite.setAnimation 'jumpingLeft'
handleInput: (e) -> # PlayerStateAir::handleInput
super
key = util.KEY_CODES
if e.type is 'keydown'
switch e.keyCode
when key.LEFT, key.RIGHT
@_setSpeedAndAnim()
else if e.type is 'keyup'
switch e.keyCode
when key.LEFT, key.RIGHT
rightPressed = io.isKeyPressed(key.RIGHT)
leftPressed = io.isKeyPressed(key.LEFT)
if not leftPressed and not rightPressed
@parent.dirPhysical = 0
@parent.accX = 0
else if leftPressed and not rightPressed
@parent.dirPhysical = -1
@parent.dirVisual = -1
@_setSpeedAndAnim { frameNum: 1 }
else # if not leftPressed and rightPressed
@parent.dirPhysical = 1
@parent.dirVisual = 1
@_setSpeedAndAnim { frameNum: 1 }
_setSpeedAndAnim: -> # PlayerStateAir::_setSpeedAndAnim
@parent.accX = @parent.accelerationAir * @parent.dirPhysical
if @parent.dirVisual > 0
@parent.sprite.setAnimation 'jumpingRight'
else
@parent.sprite.setAnimation 'jumpingLeft'
update: (cycleLength) -> # PlayerStateAir::update
super
PlayerStateJumping =
class PlayerStateJumping extends PlayerStateAir
constructor: (data) -> # PlayerStateJumping::constructor
super
@parent.speedY = -21
handleInput: (e) -> # PlayerStateJumping::handleInput
super
key = util.KEY_CODES
if e.type is 'keyup'
switch e.keyCode
when key.UP, key.Z
@parent.speedY *= 0.5
@parent.setState PlayerStateFalling
update: (cycleLength) -> # PlayerStateJumping::update
if @parent.speedY >= 0
@parent.setState PlayerStateFalling
PlayerStateFalling =
class PlayerStateFalling extends PlayerStateAir
|
[
{
"context": " miles. See the rest: 2013.artsy.net'\n 'Turns out @Artsy has a gene for Eye Contact, and it makes me uncom",
"end": 894,
"score": 0.9617022275924683,
"start": 888,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": "rtable: See the rest: 2013.artsy.net'\n 'Turns out @Artsy had over 10 million artworks viewed when it launc",
"end": 1003,
"score": 0.9891635179519653,
"start": 997,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": "w. See the rest: 2013.artsy.net'\n 'Turns out that @Artsy has partnered with over 200 institutions includin",
"end": 1128,
"score": 0.9780153036117554,
"start": 1122,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": ". See the rest: 2013.artsy.net'\n 'Turns out the @Artsy team ran 197 miles in 26 hours. But were beaten b",
"end": 1255,
"score": 0.9748406410217285,
"start": 1250,
"tag": "USERNAME",
"value": "Artsy"
},
{
"context": " 197 miles in 26 hours. But were beaten by team “Fanny Pack Gold.” See the rest: 2013.artsy.net'\n 'Turns out",
"end": 1323,
"score": 0.5314842462539673,
"start": 1314,
"tag": "NAME",
"value": "anny Pack"
},
{
"context": "See the rest: 2013.artsy.net'\n 'Turns out that JR (@JRart) made portraits of the entire @Artsy team and tur",
"end": 1389,
"score": 0.9944307208061218,
"start": 1382,
"tag": "USERNAME",
"value": "(@JRart"
},
{
"context": "ut that JR (@JRart) made portraits of the entire @Artsy team and turned their office into an artwork. See",
"end": 1426,
"score": 0.71938157081604,
"start": 1421,
"tag": "USERNAME",
"value": "Artsy"
},
{
"context": "k. See the rest: 2013.artsy.net'\n 'Turns out that @Artsy has released 37 open-source projects: See the res",
"end": 1527,
"score": 0.9423400163650513,
"start": 1521,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": "s: See the rest: 2013.artsy.net'\n 'Turns out 7 of @Artsy’s engineers are artists, and 1 is an artist on Ar",
"end": 1620,
"score": 0.8981146812438965,
"start": 1614,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": "\n 'Turns out over 120,000 people downloaded the @Artsy iPhone app. See the rest: 2013.artsy.net'\n 'Turn",
"end": 1770,
"score": 0.9348594546318054,
"start": 1765,
"tag": "USERNAME",
"value": "Artsy"
},
{
"context": "ne app. See the rest: 2013.artsy.net'\n 'Turns out @Artsy’s 90,000 artworks are now part of NYC’s Public Sc",
"end": 1832,
"score": 0.8960167169570923,
"start": 1826,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": "iculum. See the rest: 2013.artsy.net'\n \"Turns out @Artsy opened ICI's (@CuratorsINTL) benefit auction to t",
"end": 1966,
"score": 0.9641987085342407,
"start": 1960,
"tag": "USERNAME",
"value": "@Artsy"
},
{
"context": ": 2013.artsy.net'\n \"Turns out @Artsy opened ICI's (@CuratorsINTL) benefit auction to the whole world. See the rest",
"end": 1994,
"score": 0.9978049993515015,
"start": 1980,
"tag": "USERNAME",
"value": "(@CuratorsINTL"
},
{
"context": " world. See the rest: 2013.artsy.net\"\n 'Turns out @Artsy introed more collectors to galleries in the last ",
"end": 2081,
"score": 0.9444792866706848,
"start": 2075,
"tag": "USERNAME",
"value": "@Artsy"
}
] | src/scripts/index.coffee | MarcelRittershaus/2013.ARTSY.NET | 0 | _ = require 'underscore'
IScroll = require 'iscroll/build/iscroll-probe.js'
require './vendor/zepto.js'
require './vendor/zepto.touch.js'
morpheus = require 'morpheus'
easings = require './vendor/morpheus-easings.js'
# Constants
# ---------
MIXPANEL_ID = "297ce2530b6c87b16195b5fb6556b38f"
# The time it takes to scroll to an element with iscroll
SCROLL_TO_EL_TIME = 700
# The gap between items is based on the viewport size
GAP_PERCENT_OF_VIEWPORT = 0.6
# The gap between the content and the header
CONTENT_GAP_PERCENT_OF_VIEWPORT = 0.8
# The gap between fades of each item. e.g. 0.5 will mean the fade out of the first
# item will end right when the fade in of the next item starts.
FADE_GAP_OFFSET = 0.4
# Shre on Twitter texts ordered by the content.
TWITTER_TEXTS = [
'Turns out the average sale on Artsy travels over 2,000 miles. See the rest: 2013.artsy.net'
'Turns out @Artsy has a gene for Eye Contact, and it makes me uncomfortable: See the rest: 2013.artsy.net'
'Turns out @Artsy had over 10 million artworks viewed when it launched The Armory Show. See the rest: 2013.artsy.net'
'Turns out that @Artsy has partnered with over 200 institutions including The Getty and SFMOMA. See the rest: 2013.artsy.net'
'Turns out the @Artsy team ran 197 miles in 26 hours. But were beaten by team “Fanny Pack Gold.” See the rest: 2013.artsy.net'
'Turns out that JR (@JRart) made portraits of the entire @Artsy team and turned their office into an artwork. See the rest: 2013.artsy.net'
'Turns out that @Artsy has released 37 open-source projects: See the rest: 2013.artsy.net'
'Turns out 7 of @Artsy’s engineers are artists, and 1 is an artist on Artsy... so meta. See the rest: 2013.artsy.net'
'Turns out over 120,000 people downloaded the @Artsy iPhone app. See the rest: 2013.artsy.net'
'Turns out @Artsy’s 90,000 artworks are now part of NYC’s Public Schools Digital Literacy curriculum. See the rest: 2013.artsy.net'
"Turns out @Artsy opened ICI's (@CuratorsINTL) benefit auction to the whole world. See the rest: 2013.artsy.net"
'Turns out @Artsy introed more collectors to galleries in the last week of December, than all of 2012. See the rest: 2013.artsy.net'
]
IS_IPHONE = navigator.userAgent.match(/iPhone/i)
IS_IPAD = navigator.userAgent.match(/iPad/i)
IS_IOS6 = (IS_IPAD or IS_IPHONE) and navigator.userAgent.match('Version/6.0')
# Top-level variables
# -------------------
# Cached elements
$scroller = null
$backgroundItems = null
$foreground = null
$mainHeader = null
$content = null
$wrapper = null
$mainArrow = null
$foregroundItems = null
$footer = null
$background = null
$fgFacebookLink = null
$fgTwitterLink = null
$headerLogo = null
$headerBackgrounds = null
$firstForegroundItem = null
$viewportHeights = null
$halfViewportHeights = null
$codeMask = null
$code = null
$headerBackground = null
$headerGradient = null
$graphWrapper = null
$graphLine = null
$facebookLinks = null
$twitterLinks = null
$graphContainer = null
$window = null
$body = null
$imgs = null
# Cached values
totalHeaderBackgrounds = 0 # Used in slideshow
currentItemIndex = 0 # The current index of the item being viewed
graphLineLength = 0 # The total length of the graph line for SVG animation
slideshowTimeout = null # Timeout until next slide is show
stopSlideShow = false # Used to stop the slideshow after scrolling down
myScroll = null # Reference to iScroll instance
contentGap = 0 # The distance from the top of the page to the content
# Use a custom scrollTop & viewportHeight variable in place of window references for
# iScroll support.
scrollTop = 0
viewportHeight = 0
viewportWidth = null
# Setup functions
# ---------------
init = ->
cacheElements()
totalHeaderBackgrounds = $headerBackgrounds.length - 1
setupGraph()
$window.on 'resize', _.throttle onResize, 100
onResize()
adjustForDevices()
setContentGap()
renderSocialShares()
refreshIScrollOnImageLoads()
mixpanel.init MIXPANEL_ID
mixpanel.track "Viewed page"
copyForegroundContentToBackgroundForPhone()
attachClickHandlers()
revealOnFirstBannerLoad()
adjustForDevices = ->
# Use IScroll to handle scroll events on an IPad, otherwise normal scroll handlers.
# Phone uses a more responsive technique which will just toggle off the `onScroll`
# handler based on screen size.
if IS_IPAD
setupIScroll()
else if not IS_IPHONE
$window.on 'scroll', onScroll
onScroll()
$body.addClass 'ios6' if IS_IOS6
setupGraph = ->
graphLineLength = $graphLine[0].getTotalLength()
$graphLine.css 'stroke-dasharray': graphLineLength
revealOnFirstBannerLoad = ->
firstHeader = $headerBackgrounds.first().css('background-image')
firstHeader = firstHeader.replace('url(','').replace(')','')
onLoadImg 'images/logo.png', 500, ->
$('body').removeClass 'logo-loading'
onLoadImg firstHeader, 3000, ->
$('body').removeClass 'body-loading'
setTimeout ->
morpheus.tween 600, ((pos) =>
$mainArrow.css { bottom: -100 + (pos * 100) }
), (->), easings.swingTo
$mainArrow.css opacity: 1
nextHeaderSlide()
, 1000
# Invoke `callback` exactly once: when the image at `src` loads (or errors),
# or after `timeout` milliseconds — whichever happens first.
onLoadImg = (src, timeout, callback) ->
  done = _.once callback
  img = new Image
  img.onload = img.onerror = done
  img.src = src
  setTimeout done, timeout
renderSocialShares = ->
shareUrl = location.href
$.ajax
url: "http://api.facebook.com/restserver.php?method=links.getStats&urls[]=#{shareUrl}"
success: (res) ->
$('#social-button-facebook-count')
.html($(res).find('share_count').text() or 0).show()
window.twitterCountJSONPCallback = (res) ->
return unless res.count?
$('#social-button-twitter-count').html(res.count or 0).show()
$.ajax
url: "http://urls.api.twitter.com/1/urls/count.json?url=#{shareUrl}&callback=twitterCountJSONPCallback"
dataType: 'jsonp'
setupIScroll = ->
$body.addClass 'iscroll'
$wrapper.height viewportHeight
window.myScroll = myScroll = new IScroll '#wrapper',
probeType: 3
mouseWheel: true
scrollbars: true
interactiveScrollbars: true
myScroll.on('scroll', onScroll)
document.addEventListener 'touchmove', ((e) -> e.preventDefault()), false
copyForegroundContentToBackgroundForPhone = ->
$foregroundItems.each (i, el) ->
$container = $backgroundItems.eq(i).find('.phone-foreground-container')
$container.html(
"<div class='phone-foreground-content'>" +
$(el).html() +
"</div>"
)
cacheElements = ->
$scroller = $('#scroller')
$backgroundItems = $('#background-content > li')
$foreground = $('#foreground')
$foregroundItems = $("#foreground li")
$mainHeader = $('#main-header')
$content = $('#content')
$wrapper = $('#wrapper')
$mainArrow = $('#main-header-down-arrow')
$footer = $('#footer')
$background = $('#background')
$facebookLinks = $('.social-button-facebook')
$twitterLinks = $('.social-button-twitter')
$headerBackground = $('#header-background')
$headerBackgrounds = $('#header-background li')
$headerGradient = $('#header-background-gradient')
$firstForegroundItem = $('#foreground li:first-child')
$viewportHeights = $('.viewport-height')
$halfViewportHeights = $('.half-viewport-height')
$codeMask = $('#background-code-mask')
$code = $('#background-code')
$graphLine = $('#graph-line')
$graphWrapper = $('#graph-wrapper')
$graphContainer = $('#graph-container')
$window = $(window)
$body = $('body')
$imgs = $('img')
refreshIScrollOnImageLoads = ->
$('#background img').on 'load', _.debounce (-> myScroll?.refresh()), 1000
# Utility functions
# -----------------
# Used instead of $(el).offset to support IScroll
# iScroll-aware replacement for $(el).offset(): measures positions relative
# to the #scroller element instead of the window, so the same numbers work
# whether iScroll is driving the page or not.
offset = ($el) ->
  top = $el.offset()?.top - $scroller.offset()?.top
  top: top
  left: $el.offset()?.left
  bottom: top + $el.height()
# Returns how far between scrolling two points you are. e.g. If you're halway between
# the start point and end point this will return 0.5.
# Fraction of the way `scrollTop` has travelled from `start` to `end`,
# clamped to the [0, 1] range.
percentBetween = (start, end) ->
  Math.min 1, Math.max(0, 1 - (end - scrollTop) / (end - start))
# Get scroll top from iScroll or plain ol' window
# Refresh the module-level `scrollTop` cache from iScroll when present,
# otherwise from the window.
# NOTE(review): `-myScroll?.getComputedPosition().y` is falsy when the
# position is exactly 0 (and when myScroll is absent), so both cases fall
# back to $window.scrollTop() — confirm that is intended at the page top.
getScrollTop = ->
  scrollTop = -myScroll?.getComputedPosition().y or $window.scrollTop()
# Wrapper over IScroll's scrollToElement to use normal window animation.
# Smoothly scroll the page (or the iScroll instance on iPad) to `selector`.
scrollToElement = (selector) ->
  time = 1000
  if myScroll
    # iPad: delegate to iScroll's own animated scrolling.
    myScroll.scrollToElement selector, time, null, null, IScroll.utils.ease.quadratic
  else
    elTop = $(selector).offset().top
    # Phone has trouble animating
    if viewportWidth <= 640
      $body[0].scrollTop = elTop
    else
      # morpheus.tween(duration, step, complete, easing): the easing function
      # belongs in the 4th slot. The original passed easings.quadratic as the
      # 3rd (complete-callback) argument, so the tween ran with no easing —
      # cf. the correct call shape in revealOnFirstBannerLoad.
      morpheus.tween time, ((pos) =>
        $body[0].scrollTop = elTop * pos
      ), (->), easings.quadratic
# Click handlers
# --------------
attachClickHandlers = ->
$mainArrow.on 'tap click', onClickHeaderDownArrow
$facebookLinks.on 'tap click', shareOnFacebook
$twitterLinks.on 'tap click', shareOnTwitter
$('a').on 'tap', followLinksOnTap
onClickHeaderDownArrow = ->
scrollToElement '#intro-statement-inner'
false
shareOnFacebook = (e) ->
opts = "status=1,width=750,height=400,top=249.5,left=1462"
url = "https://www.facebook.com/sharer/sharer.php?u=#{location.href}"
open url, 'facebook', opts
mixpanel.track "Shared on Facebook"
false
shareOnTwitter = (e) ->
opts = "status=1,width=750,height=400,top=249.5,left=1462"
text = if $(e.target).hasClass('final-twitter-button')
"The Year in Artsy: 2013.artsy.net"
else
TWITTER_TEXTS[currentItemIndex]
url = "https://twitter.com/intent/tweet?" +
"original_referer=#{location.href}" +
"&text=#{text}"
open url, 'twitter', opts
mixpanel.track "Shared on Twitter", { text: text }
false
followLinksOnTap = (e) ->
e.preventDefault()
_.defer -> open $(e.target).attr('href'), '_blank'
false
# On scroll functions
# -------------------
onScroll = ->
return if viewportWidth <= 640 # For phone we ignore scroll transitions
getScrollTop()
toggleSlideShow()
animateGraphLine()
fadeOutHeaderImage()
fadeInFirstForegroundItem()
fadeBetweenBackgroundItems()
popLockForeground()
popLockCodeMask()
popLockGraph()
# Cross-fade the fixed foreground captions as the user scrolls between the
# corresponding background items. While fully inside an item its caption is
# shown solid; inside the gap between two items the current caption fades out
# and the next fades in (staggered by FADE_GAP_OFFSET). Also keeps the
# module-level `currentItemIndex` in sync with the item on screen.
fadeBetweenBackgroundItems = ->
  for el, index in $backgroundItems
    $el = $ el
    # Alias current and next foreground items
    $curItem = $foregroundItems.eq(index)
    $nextItem = $foregroundItems.eq(index + 1)
    # Alias common positions we'll be calculating
    elTop = offset($el).top
    elBottom = elTop + $el.height()
    nextTop = elBottom + (viewportHeight * GAP_PERCENT_OF_VIEWPORT)
    gapSize = nextTop - elBottom
    # Values pertaining to when to start fading and when to fade in the next one
    endFadeOutPoint = elBottom - (gapSize * FADE_GAP_OFFSET)
    startFadeInPoint = nextTop - (gapSize * FADE_GAP_OFFSET)
    endFadeOutPoint -= viewportHeight * FADE_GAP_OFFSET
    startFadeInPoint -= viewportHeight * FADE_GAP_OFFSET
    # In between an item so ensure that this item is at opacity 1.
    if scrollTop > elTop and (scrollTop + viewportHeight) < elBottom and
    currentItemIndex isnt index
      $foregroundItems.css opacity: 0
      $curItem.css opacity: 1
      currentItemIndex = index
      break
    # In the gap between items so transition opacities as you scroll
    else if (scrollTop + viewportHeight) > elBottom and scrollTop < nextTop
      percentCurItem = 1 - percentBetween (elBottom - viewportHeight), endFadeOutPoint
      percentNextItem = percentBetween startFadeInPoint, nextTop
      # Fade out the entire foreground if it's the last item
      if index is $backgroundItems.length - 1
        $foreground.css opacity: percentCurItem
        $curItem.css 'z-index': Math.round(percentCurItem)
      else
        $foreground.css opacity: 1
        $curItem.css opacity: percentCurItem, 'z-index': Math.ceil(percentCurItem)
        $nextItem.css opacity: percentNextItem, 'z-index': Math.ceil(percentNextItem)
      break
fadeOutHeaderImage = ->
return if scrollTop > viewportHeight
$headerBackground.css opacity: 1 - (scrollTop / viewportHeight)
$headerGradient.css opacity: (scrollTop / viewportHeight) * 2
# Pin ("pop-lock") the foreground captions while the background scrolls:
# the foreground's offset is clamped between the top of the content and the
# bottom of the background so the captions stay on screen the whole way.
popLockForeground = ->
  top = scrollTop - contentGap
  end = (offset($background).bottom - viewportHeight - contentGap)
  top = Math.round(Math.max 0, Math.min(top, end))
  # iPad/iScroll: reposition manually on every scroll tick.
  if myScroll?
    $foreground.css(top: top)
  # Because Safari can't handle manual fixing without jitters we do this
  # hacky use of plain ol' fixed position... ironic iPad's Safari choke on fixed
  # and desktop Safari chokes on fixed work-arounds.
  else if top > 0 and top < end
    $foreground.css(top: 0, position: 'fixed')
  else if top <= 0
    $foreground.css(top: 0, position: 'absolute')
  else if top >= end
    $foreground.css(bottom: 0, top: 'auto', position: 'absolute')
popLockCodeMask = ->
codeTop = offset($code).top
codeBottom = codeTop + $code.height()
return if scrollTop < codeTop or (scrollTop + viewportHeight) > codeBottom
maskTop = scrollTop - codeTop
$codeMask.css 'margin-top': maskTop
fadeInFirstForegroundItem = ->
return if $foreground.css('position') is 'fixed' or
$foreground.css('bottom') is '0px' or
parseInt($foreground.css('top')) > 0
if viewportWidth <= 1024 # iPad will see the text above fold
opacity = 1
else
end = offset($firstForegroundItem).top
start = end - (viewportHeight / 2)
opacity = (scrollTop - start) / (end - start)
$firstForegroundItem.css opacity: opacity
# Resume the header slideshow when the user is back at the very top of the
# page; pause it (and reset the slides) once they scroll away.
toggleSlideShow = ->
  if stopSlideShow and scrollTop <= 10
    stopSlideShow = false
    nextHeaderSlide()
  else if scrollTop > 10
    stopSlideShow = true
    clearTimeout slideshowTimeout
    # Clear the active slide either way; keep the first slide lit only while
    # the header is still (partially) on screen.
    $headerBackgrounds.removeClass 'active'
    unless scrollTop > viewportHeight
      $headerBackgrounds.first().addClass 'active'
nextHeaderSlide = ->
return if stopSlideShow
slideshowTimeout = setTimeout ->
slideshowTimeout = setTimeout ->
index = $($headerBackgrounds.filter(-> $(@).hasClass('active'))[0]).index()
nextIndex = if index + 1 > totalHeaderBackgrounds then 0 else index + 1
$cur = $ $headerBackgrounds.eq(index)
$next = $ $headerBackgrounds.eq(nextIndex)
$cur.removeClass 'active'
$next.addClass 'active'
nextHeaderSlide()
, 700
, 1500
# Progressively "draw" the SVG graph line as the last background item scrolls
# into view, via the stroke-dasharray / stroke-dashoffset technique (the
# dasharray is set up once in setupGraph).
animateGraphLine = ->
  start = offset($backgroundItems.last()).top
  progress = percentBetween start, start + (viewportHeight * 0.8)
  remaining = Math.max graphLineLength - (graphLineLength * progress), 0
  $graphLine.css 'stroke-dashoffset': remaining
popLockGraph = ->
graphContainerTop = offset($graphContainer).top
graphContainerBottom = graphContainerTop + $graphContainer.height()
return if scrollTop < graphContainerTop or scrollTop + viewportHeight >= graphContainerBottom
$graphWrapper.css 'margin-top': scrollTop - graphContainerTop
# On resize functions
# -------------------
onResize = ->
viewportHeight = $window.height()
viewportWidth = $window.width()
setBackgroundItemGap()
setContentGap()
setHeaderSize()
swapForHigherResImages()
setViewportHeights()
_.defer -> myScroll?.refresh()
setTimeout relockItems, 500
relockItems = ->
getScrollTop()
popLockForeground()
setViewportHeights = ->
$viewportHeights.height viewportHeight
$halfViewportHeights.height viewportHeight / 2
setHeaderSize = ->
$('#header-background').height viewportHeight
setContentGap = ->
contentGap = offset($content).top
setBackgroundItemGap = ->
$backgroundItems.css('margin-bottom': viewportHeight * GAP_PERCENT_OF_VIEWPORT)
$backgroundItems.last().css('margin-bottom': 0)
# Swap every img src between the "small" and "large" asset variants based on
# the current viewport width (640px breakpoint).
swapForHigherResImages = ->
  [from, to] = if viewportWidth >= 640 then ['small', 'large'] else ['large', 'small']
  $imgs.each -> $(@).attr 'src', $(@).attr('src').replace(from, to)
# Start your engines
# ------------------
$ init
| 134561 | _ = require 'underscore'
IScroll = require 'iscroll/build/iscroll-probe.js'
require './vendor/zepto.js'
require './vendor/zepto.touch.js'
morpheus = require 'morpheus'
easings = require './vendor/morpheus-easings.js'
# Constants
# ---------
MIXPANEL_ID = "297ce2530b6c87b16195b5fb6556b38f"
# The time it takes to scroll to an element with iscroll
SCROLL_TO_EL_TIME = 700
# The gap between items is based on the viewport size
GAP_PERCENT_OF_VIEWPORT = 0.6
# The gap between the content and the header
CONTENT_GAP_PERCENT_OF_VIEWPORT = 0.8
# The gap between fades of each item. e.g. 0.5 will mean the fade out of the first
# item will end right when the fade in of the next item starts.
FADE_GAP_OFFSET = 0.4
# Shre on Twitter texts ordered by the content.
TWITTER_TEXTS = [
'Turns out the average sale on Artsy travels over 2,000 miles. See the rest: 2013.artsy.net'
'Turns out @Artsy has a gene for Eye Contact, and it makes me uncomfortable: See the rest: 2013.artsy.net'
'Turns out @Artsy had over 10 million artworks viewed when it launched The Armory Show. See the rest: 2013.artsy.net'
'Turns out that @Artsy has partnered with over 200 institutions including The Getty and SFMOMA. See the rest: 2013.artsy.net'
  'Turns out the @Artsy team ran 197 miles in 26 hours. But were beaten by team “Fanny Pack Gold.” See the rest: 2013.artsy.net'
'Turns out that JR (@JRart) made portraits of the entire @Artsy team and turned their office into an artwork. See the rest: 2013.artsy.net'
'Turns out that @Artsy has released 37 open-source projects: See the rest: 2013.artsy.net'
'Turns out 7 of @Artsy’s engineers are artists, and 1 is an artist on Artsy... so meta. See the rest: 2013.artsy.net'
'Turns out over 120,000 people downloaded the @Artsy iPhone app. See the rest: 2013.artsy.net'
'Turns out @Artsy’s 90,000 artworks are now part of NYC’s Public Schools Digital Literacy curriculum. See the rest: 2013.artsy.net'
"Turns out @Artsy opened ICI's (@CuratorsINTL) benefit auction to the whole world. See the rest: 2013.artsy.net"
'Turns out @Artsy introed more collectors to galleries in the last week of December, than all of 2012. See the rest: 2013.artsy.net'
]
IS_IPHONE = navigator.userAgent.match(/iPhone/i)
IS_IPAD = navigator.userAgent.match(/iPad/i)
IS_IOS6 = (IS_IPAD or IS_IPHONE) and navigator.userAgent.match('Version/6.0')
# Top-level variables
# -------------------
# Cached elements
$scroller = null
$backgroundItems = null
$foreground = null
$mainHeader = null
$content = null
$wrapper = null
$mainArrow = null
$foregroundItems = null
$footer = null
$background = null
$fgFacebookLink = null
$fgTwitterLink = null
$headerLogo = null
$headerBackgrounds = null
$firstForegroundItem = null
$viewportHeights = null
$halfViewportHeights = null
$codeMask = null
$code = null
$headerBackground = null
$headerGradient = null
$graphWrapper = null
$graphLine = null
$facebookLinks = null
$twitterLinks = null
$graphContainer = null
$window = null
$body = null
$imgs = null
# Cached values
totalHeaderBackgrounds = 0 # Used in slideshow
currentItemIndex = 0 # The current index of the item being viewed
graphLineLength = 0 # The total length of the graph line for SVG animation
slideshowTimeout = null # Timeout until next slide is show
stopSlideShow = false # Used to stop the slideshow after scrolling down
myScroll = null # Reference to iScroll instance
contentGap = 0 # The distance from the top of the page to the content
# Use a custom scrollTop & viewportHeight variable in place of window references for
# iScroll support.
scrollTop = 0
viewportHeight = 0
viewportWidth = null
# Setup functions
# ---------------
init = ->
cacheElements()
totalHeaderBackgrounds = $headerBackgrounds.length - 1
setupGraph()
$window.on 'resize', _.throttle onResize, 100
onResize()
adjustForDevices()
setContentGap()
renderSocialShares()
refreshIScrollOnImageLoads()
mixpanel.init MIXPANEL_ID
mixpanel.track "Viewed page"
copyForegroundContentToBackgroundForPhone()
attachClickHandlers()
revealOnFirstBannerLoad()
adjustForDevices = ->
# Use IScroll to handle scroll events on an IPad, otherwise normal scroll handlers.
# Phone uses a more responsive technique which will just toggle off the `onScroll`
# handler based on screen size.
if IS_IPAD
setupIScroll()
else if not IS_IPHONE
$window.on 'scroll', onScroll
onScroll()
$body.addClass 'ios6' if IS_IOS6
setupGraph = ->
graphLineLength = $graphLine[0].getTotalLength()
$graphLine.css 'stroke-dasharray': graphLineLength
revealOnFirstBannerLoad = ->
firstHeader = $headerBackgrounds.first().css('background-image')
firstHeader = firstHeader.replace('url(','').replace(')','')
onLoadImg 'images/logo.png', 500, ->
$('body').removeClass 'logo-loading'
onLoadImg firstHeader, 3000, ->
$('body').removeClass 'body-loading'
setTimeout ->
morpheus.tween 600, ((pos) =>
$mainArrow.css { bottom: -100 + (pos * 100) }
), (->), easings.swingTo
$mainArrow.css opacity: 1
nextHeaderSlide()
, 1000
onLoadImg = (src, timeout, callback) ->
callback = _.once callback
image = new Image
image.src = src
image.onload = callback
image.onerror = callback
setTimeout callback, timeout
renderSocialShares = ->
shareUrl = location.href
$.ajax
url: "http://api.facebook.com/restserver.php?method=links.getStats&urls[]=#{shareUrl}"
success: (res) ->
$('#social-button-facebook-count')
.html($(res).find('share_count').text() or 0).show()
window.twitterCountJSONPCallback = (res) ->
return unless res.count?
$('#social-button-twitter-count').html(res.count or 0).show()
$.ajax
url: "http://urls.api.twitter.com/1/urls/count.json?url=#{shareUrl}&callback=twitterCountJSONPCallback"
dataType: 'jsonp'
setupIScroll = ->
$body.addClass 'iscroll'
$wrapper.height viewportHeight
window.myScroll = myScroll = new IScroll '#wrapper',
probeType: 3
mouseWheel: true
scrollbars: true
interactiveScrollbars: true
myScroll.on('scroll', onScroll)
document.addEventListener 'touchmove', ((e) -> e.preventDefault()), false
copyForegroundContentToBackgroundForPhone = ->
$foregroundItems.each (i, el) ->
$container = $backgroundItems.eq(i).find('.phone-foreground-container')
$container.html(
"<div class='phone-foreground-content'>" +
$(el).html() +
"</div>"
)
cacheElements = ->
$scroller = $('#scroller')
$backgroundItems = $('#background-content > li')
$foreground = $('#foreground')
$foregroundItems = $("#foreground li")
$mainHeader = $('#main-header')
$content = $('#content')
$wrapper = $('#wrapper')
$mainArrow = $('#main-header-down-arrow')
$footer = $('#footer')
$background = $('#background')
$facebookLinks = $('.social-button-facebook')
$twitterLinks = $('.social-button-twitter')
$headerBackground = $('#header-background')
$headerBackgrounds = $('#header-background li')
$headerGradient = $('#header-background-gradient')
$firstForegroundItem = $('#foreground li:first-child')
$viewportHeights = $('.viewport-height')
$halfViewportHeights = $('.half-viewport-height')
$codeMask = $('#background-code-mask')
$code = $('#background-code')
$graphLine = $('#graph-line')
$graphWrapper = $('#graph-wrapper')
$graphContainer = $('#graph-container')
$window = $(window)
$body = $('body')
$imgs = $('img')
refreshIScrollOnImageLoads = ->
$('#background img').on 'load', _.debounce (-> myScroll?.refresh()), 1000
# Utility functions
# -----------------
# Used instead of $(el).offset to support IScroll
offset = ($el) ->
top = -($scroller.offset()?.top - $el.offset()?.top)
{
top: top
left: $el.offset()?.left
bottom: top + $el.height()
}
# Returns how far between scrolling two points you are. e.g. If you're halway between
# the start point and end point this will return 0.5.
percentBetween = (start, end) ->
perc = 1 - (end - scrollTop) / (end - start)
perc = 0 if perc < 0
perc = 1 if perc > 1
perc
# Get scroll top from iScroll or plain ol' window
getScrollTop = ->
scrollTop = -myScroll?.getComputedPosition().y or $window.scrollTop()
# Wrapper over IScroll's scrollToElement to use normal window animation.
scrollToElement = (selector) ->
time = 1000
if myScroll
myScroll.scrollToElement selector, time, null, null, IScroll.utils.ease.quadratic
else
elTop = $(selector).offset().top
# Phone has trouble animating
if viewportWidth <= 640
$body[0].scrollTop = elTop
else
morpheus.tween time, ((pos) =>
$body[0].scrollTop = elTop * pos
), easings.quadratic
# Click handlers
# --------------
attachClickHandlers = ->
$mainArrow.on 'tap click', onClickHeaderDownArrow
$facebookLinks.on 'tap click', shareOnFacebook
$twitterLinks.on 'tap click', shareOnTwitter
$('a').on 'tap', followLinksOnTap
onClickHeaderDownArrow = ->
scrollToElement '#intro-statement-inner'
false
shareOnFacebook = (e) ->
opts = "status=1,width=750,height=400,top=249.5,left=1462"
url = "https://www.facebook.com/sharer/sharer.php?u=#{location.href}"
open url, 'facebook', opts
mixpanel.track "Shared on Facebook"
false
shareOnTwitter = (e) ->
opts = "status=1,width=750,height=400,top=249.5,left=1462"
text = if $(e.target).hasClass('final-twitter-button')
"The Year in Artsy: 2013.artsy.net"
else
TWITTER_TEXTS[currentItemIndex]
url = "https://twitter.com/intent/tweet?" +
"original_referer=#{location.href}" +
"&text=#{text}"
open url, 'twitter', opts
mixpanel.track "Shared on Twitter", { text: text }
false
followLinksOnTap = (e) ->
e.preventDefault()
_.defer -> open $(e.target).attr('href'), '_blank'
false
# On scroll functions
# -------------------
onScroll = ->
return if viewportWidth <= 640 # For phone we ignore scroll transitions
getScrollTop()
toggleSlideShow()
animateGraphLine()
fadeOutHeaderImage()
fadeInFirstForegroundItem()
fadeBetweenBackgroundItems()
popLockForeground()
popLockCodeMask()
popLockGraph()
fadeBetweenBackgroundItems = ->
for el, index in $backgroundItems
$el = $ el
# Alias current and next foreground items
$curItem = $foregroundItems.eq(index)
$nextItem = $foregroundItems.eq(index + 1)
# Alias common positions we'll be calculating
elTop = offset($el).top
elBottom = elTop + $el.height()
nextTop = elBottom + (viewportHeight * GAP_PERCENT_OF_VIEWPORT)
gapSize = nextTop - elBottom
# Values pertaining to when to start fading and when to fade in the next one
endFadeOutPoint = elBottom - (gapSize * FADE_GAP_OFFSET)
startFadeInPoint = nextTop - (gapSize * FADE_GAP_OFFSET)
endFadeOutPoint -= viewportHeight * FADE_GAP_OFFSET
startFadeInPoint -= viewportHeight * FADE_GAP_OFFSET
# In between an item so ensure that this item is at opacity 1.
if scrollTop > elTop and (scrollTop + viewportHeight) < elBottom and
currentItemIndex isnt index
$foregroundItems.css opacity: 0
$curItem.css opacity: 1
currentItemIndex = index
break
# In the gap between items so transition opacities as you scroll
else if (scrollTop + viewportHeight) > elBottom and scrollTop < nextTop
percentCurItem = 1 - percentBetween (elBottom - viewportHeight), endFadeOutPoint
percentNextItem = percentBetween startFadeInPoint, nextTop
# Fade out the entire foreground if it's the last item
if index is $backgroundItems.length - 1
$foreground.css opacity: percentCurItem
$curItem.css 'z-index': Math.round(percentCurItem)
else
$foreground.css opacity: 1
$curItem.css opacity: percentCurItem, 'z-index': Math.ceil(percentCurItem)
$nextItem.css opacity: percentNextItem, 'z-index': Math.ceil(percentNextItem)
break
fadeOutHeaderImage = ->
return if scrollTop > viewportHeight
$headerBackground.css opacity: 1 - (scrollTop / viewportHeight)
$headerGradient.css opacity: (scrollTop / viewportHeight) * 2
popLockForeground = ->
top = scrollTop - contentGap
end = (offset($background).bottom - viewportHeight - contentGap)
top = Math.round(Math.max 0, Math.min(top, end))
if myScroll?
$foreground.css(top: top)
# Because Safari can't handle manual fixing without jitters we do this
# hacky use of plain ol' fixed position... ironic iPad's Safari choke on fixed
# and desktop Safari chokes on fixed work-arounds.
else if top > 0 and top < end
$foreground.css(top: 0, position: 'fixed')
else if top <= 0
$foreground.css(top: 0, position: 'absolute')
else if top >= end
$foreground.css(bottom: 0, top: 'auto', position: 'absolute')
popLockCodeMask = ->
codeTop = offset($code).top
codeBottom = codeTop + $code.height()
return if scrollTop < codeTop or (scrollTop + viewportHeight) > codeBottom
maskTop = scrollTop - codeTop
$codeMask.css 'margin-top': maskTop
fadeInFirstForegroundItem = ->
return if $foreground.css('position') is 'fixed' or
$foreground.css('bottom') is '0px' or
parseInt($foreground.css('top')) > 0
if viewportWidth <= 1024 # iPad will see the text above fold
opacity = 1
else
end = offset($firstForegroundItem).top
start = end - (viewportHeight / 2)
opacity = (scrollTop - start) / (end - start)
$firstForegroundItem.css opacity: opacity
toggleSlideShow = ->
if stopSlideShow and scrollTop <= 10
stopSlideShow = false
nextHeaderSlide()
else if scrollTop > 10
stopSlideShow = true
clearTimeout slideshowTimeout
if scrollTop > viewportHeight
$headerBackgrounds.removeClass('active')
else
$headerBackgrounds.removeClass('active')
$headerBackgrounds.first().addClass('active')
nextHeaderSlide = ->
return if stopSlideShow
slideshowTimeout = setTimeout ->
slideshowTimeout = setTimeout ->
index = $($headerBackgrounds.filter(-> $(@).hasClass('active'))[0]).index()
nextIndex = if index + 1 > totalHeaderBackgrounds then 0 else index + 1
$cur = $ $headerBackgrounds.eq(index)
$next = $ $headerBackgrounds.eq(nextIndex)
$cur.removeClass 'active'
$next.addClass 'active'
nextHeaderSlide()
, 700
, 1500
animateGraphLine = ->
start = offset($backgroundItems.last()).top
end = start + (viewportHeight * 0.8)
pos = graphLineLength - (graphLineLength * percentBetween(start, end))
pos = Math.max pos, 0
$graphLine.css 'stroke-dashoffset': pos
popLockGraph = ->
graphContainerTop = offset($graphContainer).top
graphContainerBottom = graphContainerTop + $graphContainer.height()
return if scrollTop < graphContainerTop or scrollTop + viewportHeight >= graphContainerBottom
$graphWrapper.css 'margin-top': scrollTop - graphContainerTop
# On resize functions
# -------------------
onResize = ->
viewportHeight = $window.height()
viewportWidth = $window.width()
setBackgroundItemGap()
setContentGap()
setHeaderSize()
swapForHigherResImages()
setViewportHeights()
_.defer -> myScroll?.refresh()
setTimeout relockItems, 500
relockItems = ->
getScrollTop()
popLockForeground()
setViewportHeights = ->
$viewportHeights.height viewportHeight
$halfViewportHeights.height viewportHeight / 2
setHeaderSize = ->
$('#header-background').height viewportHeight
setContentGap = ->
contentGap = offset($content).top
setBackgroundItemGap = ->
$backgroundItems.css('margin-bottom': viewportHeight * GAP_PERCENT_OF_VIEWPORT)
$backgroundItems.last().css('margin-bottom': 0)
swapForHigherResImages = ->
if viewportWidth >= 640
$imgs.each -> $(@).attr 'src', $(@).attr('src').replace('small', 'large')
else
$imgs.each -> $(@).attr 'src', $(@).attr('src').replace('large', 'small')
# Start your engines
# ------------------
$ init
| true | _ = require 'underscore'
IScroll = require 'iscroll/build/iscroll-probe.js'
require './vendor/zepto.js'
require './vendor/zepto.touch.js'
morpheus = require 'morpheus'
easings = require './vendor/morpheus-easings.js'
# Constants
# ---------
MIXPANEL_ID = "297ce2530b6c87b16195b5fb6556b38f"
# The time it takes to scroll to an element with iscroll
SCROLL_TO_EL_TIME = 700
# The gap between items is based on the viewport size
GAP_PERCENT_OF_VIEWPORT = 0.6
# The gap between the content and the header
CONTENT_GAP_PERCENT_OF_VIEWPORT = 0.8
# The gap between fades of each item. e.g. 0.5 will mean the fade out of the first
# item will end right when the fade in of the next item starts.
FADE_GAP_OFFSET = 0.4
# Shre on Twitter texts ordered by the content.
TWITTER_TEXTS = [
'Turns out the average sale on Artsy travels over 2,000 miles. See the rest: 2013.artsy.net'
'Turns out @Artsy has a gene for Eye Contact, and it makes me uncomfortable: See the rest: 2013.artsy.net'
'Turns out @Artsy had over 10 million artworks viewed when it launched The Armory Show. See the rest: 2013.artsy.net'
'Turns out that @Artsy has partnered with over 200 institutions including The Getty and SFMOMA. See the rest: 2013.artsy.net'
'Turns out the @Artsy team ran 197 miles in 26 hours. But were beaten by team “FPI:NAME:<NAME>END_PI Gold.” See the rest: 2013.artsy.net'
'Turns out that JR (@JRart) made portraits of the entire @Artsy team and turned their office into an artwork. See the rest: 2013.artsy.net'
'Turns out that @Artsy has released 37 open-source projects: See the rest: 2013.artsy.net'
'Turns out 7 of @Artsy’s engineers are artists, and 1 is an artist on Artsy... so meta. See the rest: 2013.artsy.net'
'Turns out over 120,000 people downloaded the @Artsy iPhone app. See the rest: 2013.artsy.net'
'Turns out @Artsy’s 90,000 artworks are now part of NYC’s Public Schools Digital Literacy curriculum. See the rest: 2013.artsy.net'
"Turns out @Artsy opened ICI's (@CuratorsINTL) benefit auction to the whole world. See the rest: 2013.artsy.net"
'Turns out @Artsy introed more collectors to galleries in the last week of December, than all of 2012. See the rest: 2013.artsy.net'
]
IS_IPHONE = navigator.userAgent.match(/iPhone/i)
IS_IPAD = navigator.userAgent.match(/iPad/i)
IS_IOS6 = (IS_IPAD or IS_IPHONE) and navigator.userAgent.match('Version/6.0')
# Top-level variables
# -------------------
# Cached elements
$scroller = null
$backgroundItems = null
$foreground = null
$mainHeader = null
$content = null
$wrapper = null
$mainArrow = null
$foregroundItems = null
$footer = null
$background = null
$fgFacebookLink = null
$fgTwitterLink = null
$headerLogo = null
$headerBackgrounds = null
$firstForegroundItem = null
$viewportHeights = null
$halfViewportHeights = null
$codeMask = null
$code = null
$headerBackground = null
$headerGradient = null
$graphWrapper = null
$graphLine = null
$facebookLinks = null
$twitterLinks = null
$graphContainer = null
$window = null
$body = null
$imgs = null
# Cached values
totalHeaderBackgrounds = 0 # Used in slideshow
currentItemIndex = 0 # The current index of the item being viewed
graphLineLength = 0 # The total length of the graph line for SVG animation
slideshowTimeout = null # Timeout until next slide is show
stopSlideShow = false # Used to stop the slideshow after scrolling down
myScroll = null # Reference to iScroll instance
contentGap = 0 # The distance from the top of the page to the content
# Use a custom scrollTop & viewportHeight variable in place of window references for
# iScroll support.
scrollTop = 0
viewportHeight = 0
viewportWidth = null
# Setup functions
# ---------------
init = ->
cacheElements()
totalHeaderBackgrounds = $headerBackgrounds.length - 1
setupGraph()
$window.on 'resize', _.throttle onResize, 100
onResize()
adjustForDevices()
setContentGap()
renderSocialShares()
refreshIScrollOnImageLoads()
mixpanel.init MIXPANEL_ID
mixpanel.track "Viewed page"
copyForegroundContentToBackgroundForPhone()
attachClickHandlers()
revealOnFirstBannerLoad()
adjustForDevices = ->
# Use IScroll to handle scroll events on an IPad, otherwise normal scroll handlers.
# Phone uses a more responsive technique which will just toggle off the `onScroll`
# handler based on screen size.
if IS_IPAD
setupIScroll()
else if not IS_IPHONE
$window.on 'scroll', onScroll
onScroll()
$body.addClass 'ios6' if IS_IOS6
setupGraph = ->
graphLineLength = $graphLine[0].getTotalLength()
$graphLine.css 'stroke-dasharray': graphLineLength
revealOnFirstBannerLoad = ->
firstHeader = $headerBackgrounds.first().css('background-image')
firstHeader = firstHeader.replace('url(','').replace(')','')
onLoadImg 'images/logo.png', 500, ->
$('body').removeClass 'logo-loading'
onLoadImg firstHeader, 3000, ->
$('body').removeClass 'body-loading'
setTimeout ->
morpheus.tween 600, ((pos) =>
$mainArrow.css { bottom: -100 + (pos * 100) }
), (->), easings.swingTo
$mainArrow.css opacity: 1
nextHeaderSlide()
, 1000
onLoadImg = (src, timeout, callback) ->
callback = _.once callback
image = new Image
image.src = src
image.onload = callback
image.onerror = callback
setTimeout callback, timeout
renderSocialShares = ->
shareUrl = location.href
$.ajax
url: "http://api.facebook.com/restserver.php?method=links.getStats&urls[]=#{shareUrl}"
success: (res) ->
$('#social-button-facebook-count')
.html($(res).find('share_count').text() or 0).show()
window.twitterCountJSONPCallback = (res) ->
return unless res.count?
$('#social-button-twitter-count').html(res.count or 0).show()
$.ajax
url: "http://urls.api.twitter.com/1/urls/count.json?url=#{shareUrl}&callback=twitterCountJSONPCallback"
dataType: 'jsonp'
setupIScroll = ->
$body.addClass 'iscroll'
$wrapper.height viewportHeight
window.myScroll = myScroll = new IScroll '#wrapper',
probeType: 3
mouseWheel: true
scrollbars: true
interactiveScrollbars: true
myScroll.on('scroll', onScroll)
document.addEventListener 'touchmove', ((e) -> e.preventDefault()), false
copyForegroundContentToBackgroundForPhone = ->
$foregroundItems.each (i, el) ->
$container = $backgroundItems.eq(i).find('.phone-foreground-container')
$container.html(
"<div class='phone-foreground-content'>" +
$(el).html() +
"</div>"
)
cacheElements = ->
$scroller = $('#scroller')
$backgroundItems = $('#background-content > li')
$foreground = $('#foreground')
$foregroundItems = $("#foreground li")
$mainHeader = $('#main-header')
$content = $('#content')
$wrapper = $('#wrapper')
$mainArrow = $('#main-header-down-arrow')
$footer = $('#footer')
$background = $('#background')
$facebookLinks = $('.social-button-facebook')
$twitterLinks = $('.social-button-twitter')
$headerBackground = $('#header-background')
$headerBackgrounds = $('#header-background li')
$headerGradient = $('#header-background-gradient')
$firstForegroundItem = $('#foreground li:first-child')
$viewportHeights = $('.viewport-height')
$halfViewportHeights = $('.half-viewport-height')
$codeMask = $('#background-code-mask')
$code = $('#background-code')
$graphLine = $('#graph-line')
$graphWrapper = $('#graph-wrapper')
$graphContainer = $('#graph-container')
$window = $(window)
$body = $('body')
$imgs = $('img')
refreshIScrollOnImageLoads = ->
$('#background img').on 'load', _.debounce (-> myScroll?.refresh()), 1000
# Utility functions
# -----------------
# Used instead of $(el).offset to support IScroll
offset = ($el) ->
top = -($scroller.offset()?.top - $el.offset()?.top)
{
top: top
left: $el.offset()?.left
bottom: top + $el.height()
}
# Returns how far between scrolling two points you are. e.g. If you're halway between
# the start point and end point this will return 0.5.
percentBetween = (start, end) ->
perc = 1 - (end - scrollTop) / (end - start)
perc = 0 if perc < 0
perc = 1 if perc > 1
perc
# Get scroll top from iScroll or plain ol' window
getScrollTop = ->
scrollTop = -myScroll?.getComputedPosition().y or $window.scrollTop()
# Wrapper over IScroll's scrollToElement to use normal window animation.
scrollToElement = (selector) ->
time = 1000
if myScroll
myScroll.scrollToElement selector, time, null, null, IScroll.utils.ease.quadratic
else
elTop = $(selector).offset().top
# Phone has trouble animating
if viewportWidth <= 640
$body[0].scrollTop = elTop
else
morpheus.tween time, ((pos) =>
$body[0].scrollTop = elTop * pos
), easings.quadratic
# Click handlers
# --------------
attachClickHandlers = ->
$mainArrow.on 'tap click', onClickHeaderDownArrow
$facebookLinks.on 'tap click', shareOnFacebook
$twitterLinks.on 'tap click', shareOnTwitter
$('a').on 'tap', followLinksOnTap
onClickHeaderDownArrow = ->
scrollToElement '#intro-statement-inner'
false
shareOnFacebook = (e) ->
opts = "status=1,width=750,height=400,top=249.5,left=1462"
url = "https://www.facebook.com/sharer/sharer.php?u=#{location.href}"
open url, 'facebook', opts
mixpanel.track "Shared on Facebook"
false
shareOnTwitter = (e) ->
opts = "status=1,width=750,height=400,top=249.5,left=1462"
text = if $(e.target).hasClass('final-twitter-button')
"The Year in Artsy: 2013.artsy.net"
else
TWITTER_TEXTS[currentItemIndex]
url = "https://twitter.com/intent/tweet?" +
"original_referer=#{location.href}" +
"&text=#{text}"
open url, 'twitter', opts
mixpanel.track "Shared on Twitter", { text: text }
false
followLinksOnTap = (e) ->
e.preventDefault()
_.defer -> open $(e.target).attr('href'), '_blank'
false
# On scroll functions
# -------------------
onScroll = ->
return if viewportWidth <= 640 # For phone we ignore scroll transitions
getScrollTop()
toggleSlideShow()
animateGraphLine()
fadeOutHeaderImage()
fadeInFirstForegroundItem()
fadeBetweenBackgroundItems()
popLockForeground()
popLockCodeMask()
popLockGraph()
fadeBetweenBackgroundItems = ->
for el, index in $backgroundItems
$el = $ el
# Alias current and next foreground items
$curItem = $foregroundItems.eq(index)
$nextItem = $foregroundItems.eq(index + 1)
# Alias common positions we'll be calculating
elTop = offset($el).top
elBottom = elTop + $el.height()
nextTop = elBottom + (viewportHeight * GAP_PERCENT_OF_VIEWPORT)
gapSize = nextTop - elBottom
# Values pertaining to when to start fading and when to fade in the next one
endFadeOutPoint = elBottom - (gapSize * FADE_GAP_OFFSET)
startFadeInPoint = nextTop - (gapSize * FADE_GAP_OFFSET)
endFadeOutPoint -= viewportHeight * FADE_GAP_OFFSET
startFadeInPoint -= viewportHeight * FADE_GAP_OFFSET
# In between an item so ensure that this item is at opacity 1.
if scrollTop > elTop and (scrollTop + viewportHeight) < elBottom and
currentItemIndex isnt index
$foregroundItems.css opacity: 0
$curItem.css opacity: 1
currentItemIndex = index
break
# In the gap between items so transition opacities as you scroll
else if (scrollTop + viewportHeight) > elBottom and scrollTop < nextTop
percentCurItem = 1 - percentBetween (elBottom - viewportHeight), endFadeOutPoint
percentNextItem = percentBetween startFadeInPoint, nextTop
# Fade out the entire foreground if it's the last item
if index is $backgroundItems.length - 1
$foreground.css opacity: percentCurItem
$curItem.css 'z-index': Math.round(percentCurItem)
else
$foreground.css opacity: 1
$curItem.css opacity: percentCurItem, 'z-index': Math.ceil(percentCurItem)
$nextItem.css opacity: percentNextItem, 'z-index': Math.ceil(percentNextItem)
break
fadeOutHeaderImage = ->
return if scrollTop > viewportHeight
$headerBackground.css opacity: 1 - (scrollTop / viewportHeight)
$headerGradient.css opacity: (scrollTop / viewportHeight) * 2
popLockForeground = ->
top = scrollTop - contentGap
end = (offset($background).bottom - viewportHeight - contentGap)
top = Math.round(Math.max 0, Math.min(top, end))
if myScroll?
$foreground.css(top: top)
# Because Safari can't handle manual fixing without jitters we do this
# hacky use of plain ol' fixed position... ironic iPad's Safari choke on fixed
# and desktop Safari chokes on fixed work-arounds.
else if top > 0 and top < end
$foreground.css(top: 0, position: 'fixed')
else if top <= 0
$foreground.css(top: 0, position: 'absolute')
else if top >= end
$foreground.css(bottom: 0, top: 'auto', position: 'absolute')
popLockCodeMask = ->
codeTop = offset($code).top
codeBottom = codeTop + $code.height()
return if scrollTop < codeTop or (scrollTop + viewportHeight) > codeBottom
maskTop = scrollTop - codeTop
$codeMask.css 'margin-top': maskTop
fadeInFirstForegroundItem = ->
return if $foreground.css('position') is 'fixed' or
$foreground.css('bottom') is '0px' or
parseInt($foreground.css('top')) > 0
if viewportWidth <= 1024 # iPad will see the text above fold
opacity = 1
else
end = offset($firstForegroundItem).top
start = end - (viewportHeight / 2)
opacity = (scrollTop - start) / (end - start)
$firstForegroundItem.css opacity: opacity
toggleSlideShow = ->
if stopSlideShow and scrollTop <= 10
stopSlideShow = false
nextHeaderSlide()
else if scrollTop > 10
stopSlideShow = true
clearTimeout slideshowTimeout
if scrollTop > viewportHeight
$headerBackgrounds.removeClass('active')
else
$headerBackgrounds.removeClass('active')
$headerBackgrounds.first().addClass('active')
nextHeaderSlide = ->
return if stopSlideShow
slideshowTimeout = setTimeout ->
slideshowTimeout = setTimeout ->
index = $($headerBackgrounds.filter(-> $(@).hasClass('active'))[0]).index()
nextIndex = if index + 1 > totalHeaderBackgrounds then 0 else index + 1
$cur = $ $headerBackgrounds.eq(index)
$next = $ $headerBackgrounds.eq(nextIndex)
$cur.removeClass 'active'
$next.addClass 'active'
nextHeaderSlide()
, 700
, 1500
animateGraphLine = ->
start = offset($backgroundItems.last()).top
end = start + (viewportHeight * 0.8)
pos = graphLineLength - (graphLineLength * percentBetween(start, end))
pos = Math.max pos, 0
$graphLine.css 'stroke-dashoffset': pos
popLockGraph = ->
graphContainerTop = offset($graphContainer).top
graphContainerBottom = graphContainerTop + $graphContainer.height()
return if scrollTop < graphContainerTop or scrollTop + viewportHeight >= graphContainerBottom
$graphWrapper.css 'margin-top': scrollTop - graphContainerTop
# On resize functions
# -------------------
onResize = ->
viewportHeight = $window.height()
viewportWidth = $window.width()
setBackgroundItemGap()
setContentGap()
setHeaderSize()
swapForHigherResImages()
setViewportHeights()
_.defer -> myScroll?.refresh()
setTimeout relockItems, 500
relockItems = ->
getScrollTop()
popLockForeground()
setViewportHeights = ->
$viewportHeights.height viewportHeight
$halfViewportHeights.height viewportHeight / 2
setHeaderSize = ->
$('#header-background').height viewportHeight
setContentGap = ->
contentGap = offset($content).top
setBackgroundItemGap = ->
$backgroundItems.css('margin-bottom': viewportHeight * GAP_PERCENT_OF_VIEWPORT)
$backgroundItems.last().css('margin-bottom': 0)
swapForHigherResImages = ->
if viewportWidth >= 640
$imgs.each -> $(@).attr 'src', $(@).attr('src').replace('small', 'large')
else
$imgs.each -> $(@).attr 'src', $(@).attr('src').replace('large', 'small')
# Start your engines
# ------------------
$ init
|
[
{
"context": "rid: user.userid\n ip: user.ip\n name: user.name\n comment: log.comment\n timestamp: l",
"end": 933,
"score": 0.8481467366218567,
"start": 924,
"tag": "NAME",
"value": "user.name"
}
] | server/log.coffee | Davdegreat/werewolfweb | 0 | # logging feature
###
# speaklog =
# type: "speak"
# roomid: number
# logtype: "..."
# userid: "..."
# ip: "..."
# name: "..."
# comment: "..."
# timestamp: number
#
# lobbylog =
# type: "lobby"
# userid: "..."
# ip: "..."
# name: "..."
# comment: "..."
# timestamp: number
#
# loginlog =
# type: "login"
# userid: "..."
# ip: "..."
# timestamp: number
#
# makeroomlog =
# type: "makeroom"
# userid: "..."
# ip: "..."
# roomid: number
# name: "..."
# comment: "..."
# timestamp: number
###
saveInLogs = (log)->
M.logs.insert log
# speak in room log
exports.speakInRoom = (roomid, log, user)->
return unless Config.logging
return unless log?
return unless user?
log =
type: "speak"
roomid: roomid
logtype: log.mode
userid: user.userid
ip: user.ip
name: user.name
comment: log.comment
timestamp: log.time
saveInLogs log
# speak in lobby
exports.speakInLobby = (user, log)->
return unless Config.logging
return unless user?
return unless log?
log =
type: "lobby"
userid: user.userid
ip: user.ip
name: user.name
comment: log.comment
timestamp: log.time
saveInLogs log
# login
exports.login = (user)->
return unless Config.logging
return unless user?
log =
type: "login"
userid: user.userid
ip: user.ip
timestamp: Date.now()
saveInLogs log
# make room
exports.makeroom = (user, room)->
return unless Config.logging
return unless user?
return unless room?
log =
type: "makeroom"
userid: user.userid
ip: user.ip
roomid: room.id
name: room.name
comment: room.comment
timestamp: room.made
saveInLogs log
| 9951 | # logging feature
###
# speaklog =
# type: "speak"
# roomid: number
# logtype: "..."
# userid: "..."
# ip: "..."
# name: "..."
# comment: "..."
# timestamp: number
#
# lobbylog =
# type: "lobby"
# userid: "..."
# ip: "..."
# name: "..."
# comment: "..."
# timestamp: number
#
# loginlog =
# type: "login"
# userid: "..."
# ip: "..."
# timestamp: number
#
# makeroomlog =
# type: "makeroom"
# userid: "..."
# ip: "..."
# roomid: number
# name: "..."
# comment: "..."
# timestamp: number
###
saveInLogs = (log)->
M.logs.insert log
# speak in room log
exports.speakInRoom = (roomid, log, user)->
return unless Config.logging
return unless log?
return unless user?
log =
type: "speak"
roomid: roomid
logtype: log.mode
userid: user.userid
ip: user.ip
name: <NAME>
comment: log.comment
timestamp: log.time
saveInLogs log
# speak in lobby
exports.speakInLobby = (user, log)->
return unless Config.logging
return unless user?
return unless log?
log =
type: "lobby"
userid: user.userid
ip: user.ip
name: user.name
comment: log.comment
timestamp: log.time
saveInLogs log
# login
exports.login = (user)->
return unless Config.logging
return unless user?
log =
type: "login"
userid: user.userid
ip: user.ip
timestamp: Date.now()
saveInLogs log
# make room
exports.makeroom = (user, room)->
return unless Config.logging
return unless user?
return unless room?
log =
type: "makeroom"
userid: user.userid
ip: user.ip
roomid: room.id
name: room.name
comment: room.comment
timestamp: room.made
saveInLogs log
| true | # logging feature
###
# speaklog =
# type: "speak"
# roomid: number
# logtype: "..."
# userid: "..."
# ip: "..."
# name: "..."
# comment: "..."
# timestamp: number
#
# lobbylog =
# type: "lobby"
# userid: "..."
# ip: "..."
# name: "..."
# comment: "..."
# timestamp: number
#
# loginlog =
# type: "login"
# userid: "..."
# ip: "..."
# timestamp: number
#
# makeroomlog =
# type: "makeroom"
# userid: "..."
# ip: "..."
# roomid: number
# name: "..."
# comment: "..."
# timestamp: number
###
saveInLogs = (log)->
M.logs.insert log
# speak in room log
exports.speakInRoom = (roomid, log, user)->
return unless Config.logging
return unless log?
return unless user?
log =
type: "speak"
roomid: roomid
logtype: log.mode
userid: user.userid
ip: user.ip
name: PI:NAME:<NAME>END_PI
comment: log.comment
timestamp: log.time
saveInLogs log
# speak in lobby
exports.speakInLobby = (user, log)->
return unless Config.logging
return unless user?
return unless log?
log =
type: "lobby"
userid: user.userid
ip: user.ip
name: user.name
comment: log.comment
timestamp: log.time
saveInLogs log
# login
exports.login = (user)->
return unless Config.logging
return unless user?
log =
type: "login"
userid: user.userid
ip: user.ip
timestamp: Date.now()
saveInLogs log
# make room
exports.makeroom = (user, room)->
return unless Config.logging
return unless user?
return unless room?
log =
type: "makeroom"
userid: user.userid
ip: user.ip
roomid: room.id
name: room.name
comment: room.comment
timestamp: room.made
saveInLogs log
|
[
{
"context": "20,\n \"zoom\": 11\n },\n {\n \"Location name\": \"Washington, DC\",\n \"State\": \"DC\",\n \"Days At Lo",
"end": 308,
"score": 0.5587575435638428,
"start": 307,
"tag": "NAME",
"value": "W"
},
{
"context": "60,\n \"zoom\": 11\n },\n {\n \"Location name\": \"Raleigh\",\n \"State\": \"NC\",\n \"Days At Location\": 1,\n ",
"end": 513,
"score": 0.9422270655632019,
"start": 506,
"tag": "NAME",
"value": "Raleigh"
},
{
"context": " 20,\n \"zoom\": 9\n },\n {\n \"Location name\": \"Fayetteville\",\n \"State\": \"NC\",\n \"Days At Location\": 1,\n ",
"end": 710,
"score": 0.9203532338142395,
"start": 698,
"tag": "NAME",
"value": "Fayetteville"
},
{
"context": ": 0,\n \"zoom\": 8\n },\n {\n \"Location name\": \"Asheville\",\n \"State\": \"NC\",\n \"Days At Location\": 2,\n ",
"end": 902,
"score": 0.8863704204559326,
"start": 893,
"tag": "NAME",
"value": "Asheville"
},
{
"context": "60,\n \"zoom\": 10\n },\n {\n \"Location name\": \"Atlanta\",\n \"State\": \"GA\",\n \"Days At Location\": 2,\n ",
"end": 1308,
"score": 0.9996495246887207,
"start": 1301,
"tag": "NAME",
"value": "Atlanta"
},
{
"context": " 40,\n \"zoom\": 8\n },\n {\n \"Location name\": \"Savannah\",\n \"State\": \"GA\",\n \"Days At Location\": 4,\n ",
"end": 1501,
"score": 0.9997262358665466,
"start": 1493,
"tag": "NAME",
"value": "Savannah"
},
{
"context": "20,\n \"zoom\": 12\n },\n {\n \"Location name\": \"Montgomery\",\n \"State\": \"AL\",\n \"Days At Location\": 1,\n ",
"end": 1698,
"score": 0.9995289444923401,
"start": 1688,
"tag": "NAME",
"value": "Montgomery"
},
{
"context": " 10,\n \"zoom\": 9\n },\n {\n \"Location name\": \"Selma\",\n \"State\": \"AL\",\n \"Days At Location\": 1,\n ",
"end": 1886,
"score": 0.9996709823608398,
"start": 1881,
"tag": "NAME",
"value": "Selma"
},
{
"context": " 40,\n \"zoom\": 8\n },\n {\n \"Location name\": \"Dallas\",\n \"State\": \"TX\",\n \"Days At Location\": 0,\n ",
"end": 2687,
"score": 0.9854186177253723,
"start": 2681,
"tag": "NAME",
"value": "Dallas"
},
{
"context": " 30,\n \"zoom\": 7\n },\n {\n \"Location name\": \"Amarillo\",\n \"State\": \"TX\",\n \"Days At Location\": 1,\n ",
"end": 2880,
"score": 0.9990411400794983,
"start": 2872,
"tag": "NAME",
"value": "Amarillo"
},
{
"context": " 30,\n \"zoom\": 7\n },\n {\n \"Location name\": \"Colorado Springs\",\n \"State\": \"CO\",\n \"Days At Location\": 2,\n ",
"end": 3082,
"score": 0.7084198594093323,
"start": 3066,
"tag": "NAME",
"value": "Colorado Springs"
},
{
"context": "50,\n \"zoom\": 12\n },\n {\n \"Location name\": \"Denver\",\n \"State\": \"CO\",\n \"Days At Location\": 2,\n ",
"end": 3276,
"score": 0.9994885921478271,
"start": 3270,
"tag": "NAME",
"value": "Denver"
},
{
"context": "70,\n \"zoom\": 11\n },\n {\n \"Location name\": \"Grand Mesa\",\n \"State\": \"CO\",\n \"Days At Location\": 1,\n ",
"end": 3668,
"score": 0.965635359287262,
"start": 3658,
"tag": "NAME",
"value": "Grand Mesa"
},
{
"context": "90,\n \"zoom\": 10\n },\n {\n \"Location name\": \"Bryce\",\n \"State\": \"UT\",\n \"Days At Location\": 5,\n ",
"end": 3861,
"score": 0.9833077788352966,
"start": 3856,
"tag": "NAME",
"value": "Bryce"
},
{
"context": "70,\n \"zoom\": 11\n },\n {\n \"Location name\": \"Olympia\",\n \"State\": \"WA\",\n \"Days At Location\": 3,\n ",
"end": 6078,
"score": 0.8289291858673096,
"start": 6071,
"tag": "NAME",
"value": "Olympia"
},
{
"context": " 35,\n \"zoom\": 9\n },\n {\n \"Location name\": \"Detroit\",\n \"State\": \"MI\",\n \"Days At Location\": 3,\n ",
"end": 7498,
"score": 0.9996061325073242,
"start": 7491,
"tag": "NAME",
"value": "Detroit"
},
{
"context": " 35,\n \"zoom\": 9\n },\n {\n \"Location name\": \"Toronoto\",\n \"State\": \"ON\",\n \"Days At Location\": 1,\n ",
"end": 7691,
"score": 0.998560905456543,
"start": 7683,
"tag": "NAME",
"value": "Toronoto"
},
{
"context": " 85,\n \"zoom\": 9\n },\n {\n \"Location name\": \"Niagra Falls\",\n \"State\": \"NY\",\n \"Days At Location\": 1,\n ",
"end": 7888,
"score": 0.9990699887275696,
"start": 7876,
"tag": "NAME",
"value": "Niagra Falls"
},
{
"context": "GeoJsonPoints(locations)\n\nmapboxgl.accessToken = 'pk.eyJ1IjoiZWpmb3giLCJhIjoiY2lyZjd0bXltMDA4b2dma3JzNnA0ajh1bSJ9.iCmlE7gmJubz2RtL4RFzIw'\n\nuriStop = getParameterByName('stop')\n\nif uriSto",
"end": 9451,
"score": 0.999740719795227,
"start": 9365,
"tag": "KEY",
"value": "pk.eyJ1IjoiZWpmb3giLCJhIjoiY2lyZjd0bXltMDA4b2dma3JzNnA0ajh1bSJ9.iCmlE7gmJubz2RtL4RFzIw"
},
{
"context": "styles/mapbox/dark-v9'\n #style: 'mapbox://styles/ejfox/cirf7uxgm0001gwno49bk38eg'\n center: [locations[w",
"end": 10001,
"score": 0.999327540397644,
"start": 9996,
"tag": "USERNAME",
"value": "ejfox"
}
] | src/coffee/app.coffee | ejfox/roadtrip-site | 0 | $ = require('jquery')
URI = require('uri-js')
d3 = require('d3')
locations = [
{
"Location name": "New York City",
"State": "NY",
"Days At Location": 0,
"trip": 1,
"lat": 40.6957,
"lon": -73.9409,
"bearing": -50,
"pitch": 20,
"zoom": 11
},
{
"Location name": "Washington, DC",
"State": "DC",
"Days At Location": 2,
"trip": 1,
"lat": 38.9072,
"lon": -77.0369
"bearing": 0,
"pitch": 60,
"zoom": 11
},
{
"Location name": "Raleigh",
"State": "NC",
"Days At Location": 1,
"trip": 1,
"lat": 35.7796,
"lon": -78.6382
"bearing": 12,
"pitch": 20,
"zoom": 9
},
{
"Location name": "Fayetteville",
"State": "NC",
"Days At Location": 1,
"trip": 1,
"lat": 35.0527,
"lon": -78.8784
"bearing": 0,
"pitch": 0,
"zoom": 8
},
{
"Location name": "Asheville",
"State": "NC",
"Days At Location": 2,
"trip": 1,
"lat": 35.5951,
"lon": -82.5515
"bearing": -50,
"pitch": 80,
"zoom": 11
},
{
"Location name": "Nantahala National Forest",
"State": "NC",
"Days At Location": 3,
"trip": 1,
"lat": 35.2338,
"lon": -83.5593
"bearing": -30,
"pitch": 60,
"zoom": 10
},
{
"Location name": "Atlanta",
"State": "GA",
"Days At Location": 2,
"trip": 1,
"lat": 33.749,
"lon": -84.388,
"bearing": 120,
"pitch": 40,
"zoom": 8
},
{
"Location name": "Savannah",
"State": "GA",
"Days At Location": 4,
"trip": 1,
"lat": 32.0835,
"lon": -81.0998,
"bearing": 90,
"pitch": 20,
"zoom": 12
},
{
"Location name": "Montgomery",
"State": "AL",
"Days At Location": 1,
"trip": 1,
"lat": 32.3668,
"lon": -86.3,
"bearing": 40,
"pitch": 10,
"zoom": 9
},
{
"Location name": "Selma",
"State": "AL",
"Days At Location": 1,
"trip": 1,
"lat": 32.4074,
"lon": -87.0211
"bearing": 0,
"pitch": 0,
"zoom": 8
},
{
"Location name": "De Soto National Forest",
"State": "MS",
"Days At Location": 5,
"trip": 1,
"lat": 31.0669,
"lon": -88.9833
"bearing": 0,
"pitch": 50,
"zoom": 9
},
{
"Location name": "New Orleans",
"State": "LA",
"Days At Location": 3,
"trip": 1,
"lat": 29.9511,
"lon": -90.0715,
"bearing": -40,
"pitch": 50,
"zoom": 9
},
{
"Location name": "Sabine National Forest",
"State": "TX",
"Days At Location": 5,
"trip": 1,
"lat": 31.4439,
"lon": -93.77,
"bearing": -30,
"pitch": 40,
"zoom": 8
},
{
"Location name": "Dallas",
"State": "TX",
"Days At Location": 0,
"trip": 1,
"lat": 32.7767,
"lon": -96.797
"bearing": -20,
"pitch": 30,
"zoom": 7
},
{
"Location name": "Amarillo",
"State": "TX",
"Days At Location": 1,
"trip": 1,
"lat": 35.222,
"lon": -101.8313
"bearing": -20,
"pitch": 30,
"zoom": 7
},
{
"Location name": "Colorado Springs",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 38.8339,
"lon": -104.8214
"bearing": -20,
"pitch": 50,
"zoom": 12
},
{
"Location name": "Denver",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 39.7392,
"lon": -104.9903
"bearing": -40,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Boulder",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 40.015,
"lon": -105.2705
"bearing": -40,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Grand Mesa",
"State": "CO",
"Days At Location": 1,
"trip": 1,
"lat": 39.0403,
"lon": -107.9498
"bearing": -90,
"pitch": 90,
"zoom": 10
},
{
"Location name": "Bryce",
"State": "UT",
"Days At Location": 5,
"trip": 1,
"lat": 37.6728,
"lon": -112.1573
"bearing": 0,
"pitch": 30,
"zoom": 9
},
{
"Location name": "Zion National Park",
"State": "UT",
"Days At Location": 7,
"trip": 1,
"lat": 37.2982,
"lon": -113.0263
"bearing": -20,
"pitch": 60,
"zoom": 12
},
{
"Location name": "Las Vegas",
"State": "NV",
"Days At Location": 3,
"trip": 1,
"lat": 36.1699,
"lon": -115.1398
"bearing": -40,
"pitch": 30,
"zoom": 9
},
{
"Location name": "Mojave National Preserve",
"State": "CA",
"Days At Location": 3,
"trip": 1,
"lat": 35.011,
"lon": -115.4734
"bearing": -20,
"pitch": 25,
"zoom": 10
},
{
"Location name": "Los Angeles",
"State": "CA",
"Days At Location": 2,
"trip": 1,
"lat": 34.0522,
"lon": -118.2437
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Yosemite National Park",
"State": "CA",
"Days At Location": 5,
"trip": 1,
"lat": 37.8651,
"lon": -119.5383
"bearing": 0,
"pitch": 50,
"zoom": 12
},
{
"Location name": "San Francisco",
"State": "CA",
"Days At Location": 6,
"trip": 1,
"lat": 37.7749,
"lon": -122.4194
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Mendocino National Forest",
"State": "CA",
"Days At Location": 3,
"trip": 2,
"lat": 39.653,
"lon": -122.9496
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Redwoods National Forest",
"State": "CA",
"Days At Location": 3,
"trip": 2,
"lat": 41.2132,
"lon": -124.0046
"bearing": 50,
"pitch": 40,
"zoom": 10
},
{
"Location name": "Eugene",
"State": "OR",
"Days At Location": 1,
"trip": 2,
"lat": 44.0521,
"lon": -123.0868
"bearing": 25,
"pitch": 30,
"zoom": 10
},
{
"Location name": "Portland",
"State": "OR",
"Days At Location": 1,
"trip": 2,
"lat": 45.5231,
"lon": -122.6765
"bearing": 0,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Olympia",
"State": "WA",
"Days At Location": 3,
"trip": 2,
"lat": 47.0379,
"lon": -122.9007
"bearing": 35,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Mt Hood National Forest",
"State": "OR",
"Days At Location": 2,
"trip": 2,
"lat": 45.33,
"lon": -121.7089
"bearing": 35,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Grand Teton National Park",
"State": "WY",
"Days At Location": 3,
"trip": 2,
"lat": 43.7904,
"lon": -110.6818
"bearing": 45,
"pitch": 50,
"zoom": 10
},
{
"Location name": "Yellowstone National Park",
"State": "WY",
"Days At Location": 3,
"trip": 2,
"lat": 44.428,
"lon": -110.5885
"bearing": 55,
"pitch": 70,
"zoom": 9
},
{
"Location name": "Wind River Reservation",
"State": "WY",
"Days At Location": 1,
"trip": 2,
"lat": 43.2833,
"lon": -108.834
"bearing": 55,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Black Hills",
"State": "SD",
"Days At Location": 7,
"trip": 2,
"lat": 43.9939,
"lon": -103.7718
"bearing": 55,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Chicago",
"State": "IL",
"Days At Location": 2,
"trip": 2,
"lat": 41.8781,
"lon": -87.6298
"bearing": 25,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Detroit",
"State": "MI",
"Days At Location": 3,
"trip": 2,
"lat": 42.3314,
"lon": -83.0458
"bearing": 25,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Toronoto",
"State": "ON",
"Days At Location": 1,
"trip": 2,
"lat": 43.6532,
"lon": -79.3832
"bearing": 25,
"pitch": 85,
"zoom": 9
},
{
"Location name": "Niagra Falls",
"State": "NY",
"Days At Location": 1,
"trip": 2,
"lat": 43.0962,
"lon": -79.0377
"bearing": 25,
"pitch": 75,
"zoom": 9
},
{
"Location name": "New York City",
"State": "NY",
"Days At Location": 0,
"trip": 2,
"lat": 40.7128,
"lon": -74.0059
"bearing": 0,
"pitch": 0,
"zoom": 9
}
]
window.locations = locations
window.autoplay = false
getParameterByName = (name, url) ->
if !url
url = window.location.href
name = name.replace(/[\[\]]/g, '\\$&')
regex = new RegExp('[?&]' + name + '(=([^&#]*)|&|#|$)')
results = regex.exec(url)
if !results
return null
if !results[2]
return ''
decodeURIComponent results[2].replace(/\+/g, ' ')
route =
'type': 'FeatureCollection'
'features': [ {
'type': 'Feature'
'geometry':
'type': 'LineString'
'coordinates': []
} ]
listToGeoJson = (list) ->
for stop in list
route.features[0].geometry.coordinates.push [stop.lon,stop.lat]
#console.log 'route -->', route
listToGeoJson(locations)
points =
type: 'geojson'
data: {
type: 'FeatureCollection'
features: [
]
}
listToGeoJsonPoints = (list) ->
for stop in list
points.data.features.push {
type: 'Feature'
geometry: {
type: 'Point',
coordinates: [stop.lon,stop.lat]
},
properties: {
title: stop['Location name']
icon: 'circle'
}
}
listToGeoJsonPoints(locations)
mapboxgl.accessToken = 'pk.eyJ1IjoiZWpmb3giLCJhIjoiY2lyZjd0bXltMDA4b2dma3JzNnA0ajh1bSJ9.iCmlE7gmJubz2RtL4RFzIw'
uriStop = getParameterByName('stop')
if uriStop is null
window.currentStop = 0
else
window.currentStop = uriStop
map = new mapboxgl.Map {
container: 'map'
style: 'mapbox://styles/mapbox/light-v9'
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing
pitch: locations[window.currentStop].pitch
zoom: locations[window.currentStop].zoom
}
map2 = new mapboxgl.Map {
container: 'map2'
style: 'mapbox://styles/mapbox/dark-v9'
#style: 'mapbox://styles/ejfox/cirf7uxgm0001gwno49bk38eg'
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing / 10
pitch: locations[window.currentStop].pitch / 10
zoom: 4
}
map.on 'load', ->
map.addSource('route', {
"type": "geojson",
"data": route
})
map.addLayer({
"id": "route",
"source": "route",
"type": "line",
"layout": {
"line-join": "round"
"line-cap": "round"
}
"paint": {
"line-width": 8,
"line-color": "black"
"line-opacity": 0.5
"line-blur": 6
}
}, 'place-city-sm')
console.log 'points', points
map.addSource('points', points)
map.addLayer({
id: 'points'
type: 'symbol'
source: 'points'
layout: {
"icon-image": "{icon}-15",
#"icon-optional": true
#"icon-size": 2
"icon-padding": 4
#"text-field": "{title}",
"text-font": ["Open Sans Semibold", "Arial Unicode MS Bold"],
#"text-offset": [0.4, 0],
#"text-offset": [0, 1.3],
"text-anchor": "top"
"text-padding": 12
"text-optional": true
"symbol-avoid-edges": true
"text-allow-overlap": false
#{}"text-transform": "uppercase"
}
}, 'place-city-sm')
map2.addSource('points', points)
map2.addLayer({
id: 'points'
type: 'symbol'
source: 'points'
layout: {
"icon-image": "circle-11",
"icon-size": 0.75
}
}, 'water')
map2.on 'load', ->
map2.addSource('route', {
"type": "geojson",
"data": route
})
map2.addLayer({
"id": "route",
"source": "route",
"type": "line",
"layout": {
"line-join": "round"
"line-cap": "round"
}
"paint": {
"line-width": 2,
"line-color": "white"
"line-opacity": 1
"line-blur": 1
}
}, 'place-city-sm')
###
minimap = new mapboxgl.Minimap()
map.on 'style.load', ->
map.addControl minimap
###
$('#location-name').text locations[window.currentStop]['Location name']
$('#navigation #prev-button').hide()
flyToStop = ->
map.flyTo {
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing
pitch: locations[window.currentStop].pitch
zoom: locations[window.currentStop].zoom
speed: 0.8
}
map2.flyTo {
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing / 2.75
pitch: locations[window.currentStop].pitch / 10
#zoom: locations[window.currentStop].zoom / 1.8
zoom: locations[window.currentStop].zoom / 2.1
speed: 0.5
}
$('#stoplist li').removeClass('current-stop')
$('#stoplist [data-stop-id="'+window.currentStop+'"]').addClass('current-stop')
$('#location-name').text locations[window.currentStop]['Location name']
if window.currentStop > 0
$('#navigation #prev-button').show()
else
$('#navigation #prev-button').hide()
stoplist = d3.select('#stoplist')
stoplist.selectAll('li')
.data locations
.enter().append('li')
.text (d,i) ->
console.log 'd',d
return d['Location name']
.attr 'data-stop-id', (d,i) -> i
.attr 'class', (d,i) ->
if i is 0
'current-stop'
.style 'cursor', 'pointer'
.on 'click', (d,i) ->
window.currentStop = i
flyToStop()
flyToNext = ->
if window.currentStop < locations.length
window.currentStop++
else
window.currentStop = 0
flyToStop()
flyToPrev = ->
if window.currentStop > 0
window.currentStop--
else
window.currentStop = locations.length
flyToStop()
$('#navigation #next-button').click ->
flyToNext()
$('#navigation #prev-button').click ->
flyToPrev()
$('#autoplay-button').click ->
$(this).toggleClass('autoplay-active')
if window.autoplay is false
window.autoplay != false
flyToNext()
window.autoplay = setInterval ->
flyToNext()
, 3500
else
clearInterval window.autoplay
| 91125 | $ = require('jquery')
URI = require('uri-js')
d3 = require('d3')
locations = [
{
"Location name": "New York City",
"State": "NY",
"Days At Location": 0,
"trip": 1,
"lat": 40.6957,
"lon": -73.9409,
"bearing": -50,
"pitch": 20,
"zoom": 11
},
{
"Location name": "<NAME>ashington, DC",
"State": "DC",
"Days At Location": 2,
"trip": 1,
"lat": 38.9072,
"lon": -77.0369
"bearing": 0,
"pitch": 60,
"zoom": 11
},
{
"Location name": "<NAME>",
"State": "NC",
"Days At Location": 1,
"trip": 1,
"lat": 35.7796,
"lon": -78.6382
"bearing": 12,
"pitch": 20,
"zoom": 9
},
{
"Location name": "<NAME>",
"State": "NC",
"Days At Location": 1,
"trip": 1,
"lat": 35.0527,
"lon": -78.8784
"bearing": 0,
"pitch": 0,
"zoom": 8
},
{
"Location name": "<NAME>",
"State": "NC",
"Days At Location": 2,
"trip": 1,
"lat": 35.5951,
"lon": -82.5515
"bearing": -50,
"pitch": 80,
"zoom": 11
},
{
"Location name": "Nantahala National Forest",
"State": "NC",
"Days At Location": 3,
"trip": 1,
"lat": 35.2338,
"lon": -83.5593
"bearing": -30,
"pitch": 60,
"zoom": 10
},
{
"Location name": "<NAME>",
"State": "GA",
"Days At Location": 2,
"trip": 1,
"lat": 33.749,
"lon": -84.388,
"bearing": 120,
"pitch": 40,
"zoom": 8
},
{
"Location name": "<NAME>",
"State": "GA",
"Days At Location": 4,
"trip": 1,
"lat": 32.0835,
"lon": -81.0998,
"bearing": 90,
"pitch": 20,
"zoom": 12
},
{
"Location name": "<NAME>",
"State": "AL",
"Days At Location": 1,
"trip": 1,
"lat": 32.3668,
"lon": -86.3,
"bearing": 40,
"pitch": 10,
"zoom": 9
},
{
"Location name": "<NAME>",
"State": "AL",
"Days At Location": 1,
"trip": 1,
"lat": 32.4074,
"lon": -87.0211
"bearing": 0,
"pitch": 0,
"zoom": 8
},
{
"Location name": "De Soto National Forest",
"State": "MS",
"Days At Location": 5,
"trip": 1,
"lat": 31.0669,
"lon": -88.9833
"bearing": 0,
"pitch": 50,
"zoom": 9
},
{
"Location name": "New Orleans",
"State": "LA",
"Days At Location": 3,
"trip": 1,
"lat": 29.9511,
"lon": -90.0715,
"bearing": -40,
"pitch": 50,
"zoom": 9
},
{
"Location name": "Sabine National Forest",
"State": "TX",
"Days At Location": 5,
"trip": 1,
"lat": 31.4439,
"lon": -93.77,
"bearing": -30,
"pitch": 40,
"zoom": 8
},
{
"Location name": "<NAME>",
"State": "TX",
"Days At Location": 0,
"trip": 1,
"lat": 32.7767,
"lon": -96.797
"bearing": -20,
"pitch": 30,
"zoom": 7
},
{
"Location name": "<NAME>",
"State": "TX",
"Days At Location": 1,
"trip": 1,
"lat": 35.222,
"lon": -101.8313
"bearing": -20,
"pitch": 30,
"zoom": 7
},
{
"Location name": "<NAME>",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 38.8339,
"lon": -104.8214
"bearing": -20,
"pitch": 50,
"zoom": 12
},
{
"Location name": "<NAME>",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 39.7392,
"lon": -104.9903
"bearing": -40,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Boulder",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 40.015,
"lon": -105.2705
"bearing": -40,
"pitch": 70,
"zoom": 11
},
{
"Location name": "<NAME>",
"State": "CO",
"Days At Location": 1,
"trip": 1,
"lat": 39.0403,
"lon": -107.9498
"bearing": -90,
"pitch": 90,
"zoom": 10
},
{
"Location name": "<NAME>",
"State": "UT",
"Days At Location": 5,
"trip": 1,
"lat": 37.6728,
"lon": -112.1573
"bearing": 0,
"pitch": 30,
"zoom": 9
},
{
"Location name": "Zion National Park",
"State": "UT",
"Days At Location": 7,
"trip": 1,
"lat": 37.2982,
"lon": -113.0263
"bearing": -20,
"pitch": 60,
"zoom": 12
},
{
"Location name": "Las Vegas",
"State": "NV",
"Days At Location": 3,
"trip": 1,
"lat": 36.1699,
"lon": -115.1398
"bearing": -40,
"pitch": 30,
"zoom": 9
},
{
"Location name": "Mojave National Preserve",
"State": "CA",
"Days At Location": 3,
"trip": 1,
"lat": 35.011,
"lon": -115.4734
"bearing": -20,
"pitch": 25,
"zoom": 10
},
{
"Location name": "Los Angeles",
"State": "CA",
"Days At Location": 2,
"trip": 1,
"lat": 34.0522,
"lon": -118.2437
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Yosemite National Park",
"State": "CA",
"Days At Location": 5,
"trip": 1,
"lat": 37.8651,
"lon": -119.5383
"bearing": 0,
"pitch": 50,
"zoom": 12
},
{
"Location name": "San Francisco",
"State": "CA",
"Days At Location": 6,
"trip": 1,
"lat": 37.7749,
"lon": -122.4194
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Mendocino National Forest",
"State": "CA",
"Days At Location": 3,
"trip": 2,
"lat": 39.653,
"lon": -122.9496
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Redwoods National Forest",
"State": "CA",
"Days At Location": 3,
"trip": 2,
"lat": 41.2132,
"lon": -124.0046
"bearing": 50,
"pitch": 40,
"zoom": 10
},
{
"Location name": "Eugene",
"State": "OR",
"Days At Location": 1,
"trip": 2,
"lat": 44.0521,
"lon": -123.0868
"bearing": 25,
"pitch": 30,
"zoom": 10
},
{
"Location name": "Portland",
"State": "OR",
"Days At Location": 1,
"trip": 2,
"lat": 45.5231,
"lon": -122.6765
"bearing": 0,
"pitch": 70,
"zoom": 11
},
{
"Location name": "<NAME>",
"State": "WA",
"Days At Location": 3,
"trip": 2,
"lat": 47.0379,
"lon": -122.9007
"bearing": 35,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Mt Hood National Forest",
"State": "OR",
"Days At Location": 2,
"trip": 2,
"lat": 45.33,
"lon": -121.7089
"bearing": 35,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Grand Teton National Park",
"State": "WY",
"Days At Location": 3,
"trip": 2,
"lat": 43.7904,
"lon": -110.6818
"bearing": 45,
"pitch": 50,
"zoom": 10
},
{
"Location name": "Yellowstone National Park",
"State": "WY",
"Days At Location": 3,
"trip": 2,
"lat": 44.428,
"lon": -110.5885
"bearing": 55,
"pitch": 70,
"zoom": 9
},
{
"Location name": "Wind River Reservation",
"State": "WY",
"Days At Location": 1,
"trip": 2,
"lat": 43.2833,
"lon": -108.834
"bearing": 55,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Black Hills",
"State": "SD",
"Days At Location": 7,
"trip": 2,
"lat": 43.9939,
"lon": -103.7718
"bearing": 55,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Chicago",
"State": "IL",
"Days At Location": 2,
"trip": 2,
"lat": 41.8781,
"lon": -87.6298
"bearing": 25,
"pitch": 35,
"zoom": 9
},
{
"Location name": "<NAME>",
"State": "MI",
"Days At Location": 3,
"trip": 2,
"lat": 42.3314,
"lon": -83.0458
"bearing": 25,
"pitch": 35,
"zoom": 9
},
{
"Location name": "<NAME>",
"State": "ON",
"Days At Location": 1,
"trip": 2,
"lat": 43.6532,
"lon": -79.3832
"bearing": 25,
"pitch": 85,
"zoom": 9
},
{
"Location name": "<NAME>",
"State": "NY",
"Days At Location": 1,
"trip": 2,
"lat": 43.0962,
"lon": -79.0377
"bearing": 25,
"pitch": 75,
"zoom": 9
},
{
"Location name": "New York City",
"State": "NY",
"Days At Location": 0,
"trip": 2,
"lat": 40.7128,
"lon": -74.0059
"bearing": 0,
"pitch": 0,
"zoom": 9
}
]
window.locations = locations
window.autoplay = false
getParameterByName = (name, url) ->
if !url
url = window.location.href
name = name.replace(/[\[\]]/g, '\\$&')
regex = new RegExp('[?&]' + name + '(=([^&#]*)|&|#|$)')
results = regex.exec(url)
if !results
return null
if !results[2]
return ''
decodeURIComponent results[2].replace(/\+/g, ' ')
route =
'type': 'FeatureCollection'
'features': [ {
'type': 'Feature'
'geometry':
'type': 'LineString'
'coordinates': []
} ]
listToGeoJson = (list) ->
for stop in list
route.features[0].geometry.coordinates.push [stop.lon,stop.lat]
#console.log 'route -->', route
listToGeoJson(locations)
points =
type: 'geojson'
data: {
type: 'FeatureCollection'
features: [
]
}
listToGeoJsonPoints = (list) ->
for stop in list
points.data.features.push {
type: 'Feature'
geometry: {
type: 'Point',
coordinates: [stop.lon,stop.lat]
},
properties: {
title: stop['Location name']
icon: 'circle'
}
}
listToGeoJsonPoints(locations)
mapboxgl.accessToken = '<KEY>'
uriStop = getParameterByName('stop')
if uriStop is null
window.currentStop = 0
else
window.currentStop = uriStop
map = new mapboxgl.Map {
container: 'map'
style: 'mapbox://styles/mapbox/light-v9'
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing
pitch: locations[window.currentStop].pitch
zoom: locations[window.currentStop].zoom
}
map2 = new mapboxgl.Map {
container: 'map2'
style: 'mapbox://styles/mapbox/dark-v9'
#style: 'mapbox://styles/ejfox/cirf7uxgm0001gwno49bk38eg'
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing / 10
pitch: locations[window.currentStop].pitch / 10
zoom: 4
}
map.on 'load', ->
map.addSource('route', {
"type": "geojson",
"data": route
})
map.addLayer({
"id": "route",
"source": "route",
"type": "line",
"layout": {
"line-join": "round"
"line-cap": "round"
}
"paint": {
"line-width": 8,
"line-color": "black"
"line-opacity": 0.5
"line-blur": 6
}
}, 'place-city-sm')
console.log 'points', points
map.addSource('points', points)
map.addLayer({
id: 'points'
type: 'symbol'
source: 'points'
layout: {
"icon-image": "{icon}-15",
#"icon-optional": true
#"icon-size": 2
"icon-padding": 4
#"text-field": "{title}",
"text-font": ["Open Sans Semibold", "Arial Unicode MS Bold"],
#"text-offset": [0.4, 0],
#"text-offset": [0, 1.3],
"text-anchor": "top"
"text-padding": 12
"text-optional": true
"symbol-avoid-edges": true
"text-allow-overlap": false
#{}"text-transform": "uppercase"
}
}, 'place-city-sm')
map2.addSource('points', points)
map2.addLayer({
id: 'points'
type: 'symbol'
source: 'points'
layout: {
"icon-image": "circle-11",
"icon-size": 0.75
}
}, 'water')
map2.on 'load', ->
map2.addSource('route', {
"type": "geojson",
"data": route
})
map2.addLayer({
"id": "route",
"source": "route",
"type": "line",
"layout": {
"line-join": "round"
"line-cap": "round"
}
"paint": {
"line-width": 2,
"line-color": "white"
"line-opacity": 1
"line-blur": 1
}
}, 'place-city-sm')
###
minimap = new mapboxgl.Minimap()
map.on 'style.load', ->
map.addControl minimap
###
$('#location-name').text locations[window.currentStop]['Location name']
$('#navigation #prev-button').hide()
flyToStop = ->
map.flyTo {
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing
pitch: locations[window.currentStop].pitch
zoom: locations[window.currentStop].zoom
speed: 0.8
}
map2.flyTo {
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing / 2.75
pitch: locations[window.currentStop].pitch / 10
#zoom: locations[window.currentStop].zoom / 1.8
zoom: locations[window.currentStop].zoom / 2.1
speed: 0.5
}
$('#stoplist li').removeClass('current-stop')
$('#stoplist [data-stop-id="'+window.currentStop+'"]').addClass('current-stop')
$('#location-name').text locations[window.currentStop]['Location name']
if window.currentStop > 0
$('#navigation #prev-button').show()
else
$('#navigation #prev-button').hide()
stoplist = d3.select('#stoplist')
stoplist.selectAll('li')
.data locations
.enter().append('li')
.text (d,i) ->
console.log 'd',d
return d['Location name']
.attr 'data-stop-id', (d,i) -> i
.attr 'class', (d,i) ->
if i is 0
'current-stop'
.style 'cursor', 'pointer'
.on 'click', (d,i) ->
window.currentStop = i
flyToStop()
flyToNext = ->
if window.currentStop < locations.length
window.currentStop++
else
window.currentStop = 0
flyToStop()
flyToPrev = ->
if window.currentStop > 0
window.currentStop--
else
window.currentStop = locations.length
flyToStop()
$('#navigation #next-button').click ->
flyToNext()
$('#navigation #prev-button').click ->
flyToPrev()
$('#autoplay-button').click ->
$(this).toggleClass('autoplay-active')
if window.autoplay is false
window.autoplay != false
flyToNext()
window.autoplay = setInterval ->
flyToNext()
, 3500
else
clearInterval window.autoplay
| true | $ = require('jquery')
URI = require('uri-js')
d3 = require('d3')
locations = [
{
"Location name": "New York City",
"State": "NY",
"Days At Location": 0,
"trip": 1,
"lat": 40.6957,
"lon": -73.9409,
"bearing": -50,
"pitch": 20,
"zoom": 11
},
{
"Location name": "PI:NAME:<NAME>END_PIashington, DC",
"State": "DC",
"Days At Location": 2,
"trip": 1,
"lat": 38.9072,
"lon": -77.0369
"bearing": 0,
"pitch": 60,
"zoom": 11
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "NC",
"Days At Location": 1,
"trip": 1,
"lat": 35.7796,
"lon": -78.6382
"bearing": 12,
"pitch": 20,
"zoom": 9
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "NC",
"Days At Location": 1,
"trip": 1,
"lat": 35.0527,
"lon": -78.8784
"bearing": 0,
"pitch": 0,
"zoom": 8
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "NC",
"Days At Location": 2,
"trip": 1,
"lat": 35.5951,
"lon": -82.5515
"bearing": -50,
"pitch": 80,
"zoom": 11
},
{
"Location name": "Nantahala National Forest",
"State": "NC",
"Days At Location": 3,
"trip": 1,
"lat": 35.2338,
"lon": -83.5593
"bearing": -30,
"pitch": 60,
"zoom": 10
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "GA",
"Days At Location": 2,
"trip": 1,
"lat": 33.749,
"lon": -84.388,
"bearing": 120,
"pitch": 40,
"zoom": 8
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "GA",
"Days At Location": 4,
"trip": 1,
"lat": 32.0835,
"lon": -81.0998,
"bearing": 90,
"pitch": 20,
"zoom": 12
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "AL",
"Days At Location": 1,
"trip": 1,
"lat": 32.3668,
"lon": -86.3,
"bearing": 40,
"pitch": 10,
"zoom": 9
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "AL",
"Days At Location": 1,
"trip": 1,
"lat": 32.4074,
"lon": -87.0211
"bearing": 0,
"pitch": 0,
"zoom": 8
},
{
"Location name": "De Soto National Forest",
"State": "MS",
"Days At Location": 5,
"trip": 1,
"lat": 31.0669,
"lon": -88.9833
"bearing": 0,
"pitch": 50,
"zoom": 9
},
{
"Location name": "New Orleans",
"State": "LA",
"Days At Location": 3,
"trip": 1,
"lat": 29.9511,
"lon": -90.0715,
"bearing": -40,
"pitch": 50,
"zoom": 9
},
{
"Location name": "Sabine National Forest",
"State": "TX",
"Days At Location": 5,
"trip": 1,
"lat": 31.4439,
"lon": -93.77,
"bearing": -30,
"pitch": 40,
"zoom": 8
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "TX",
"Days At Location": 0,
"trip": 1,
"lat": 32.7767,
"lon": -96.797
"bearing": -20,
"pitch": 30,
"zoom": 7
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "TX",
"Days At Location": 1,
"trip": 1,
"lat": 35.222,
"lon": -101.8313
"bearing": -20,
"pitch": 30,
"zoom": 7
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 38.8339,
"lon": -104.8214
"bearing": -20,
"pitch": 50,
"zoom": 12
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 39.7392,
"lon": -104.9903
"bearing": -40,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Boulder",
"State": "CO",
"Days At Location": 2,
"trip": 1,
"lat": 40.015,
"lon": -105.2705
"bearing": -40,
"pitch": 70,
"zoom": 11
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "CO",
"Days At Location": 1,
"trip": 1,
"lat": 39.0403,
"lon": -107.9498
"bearing": -90,
"pitch": 90,
"zoom": 10
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "UT",
"Days At Location": 5,
"trip": 1,
"lat": 37.6728,
"lon": -112.1573
"bearing": 0,
"pitch": 30,
"zoom": 9
},
{
"Location name": "Zion National Park",
"State": "UT",
"Days At Location": 7,
"trip": 1,
"lat": 37.2982,
"lon": -113.0263
"bearing": -20,
"pitch": 60,
"zoom": 12
},
{
"Location name": "Las Vegas",
"State": "NV",
"Days At Location": 3,
"trip": 1,
"lat": 36.1699,
"lon": -115.1398
"bearing": -40,
"pitch": 30,
"zoom": 9
},
{
"Location name": "Mojave National Preserve",
"State": "CA",
"Days At Location": 3,
"trip": 1,
"lat": 35.011,
"lon": -115.4734
"bearing": -20,
"pitch": 25,
"zoom": 10
},
{
"Location name": "Los Angeles",
"State": "CA",
"Days At Location": 2,
"trip": 1,
"lat": 34.0522,
"lon": -118.2437
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Yosemite National Park",
"State": "CA",
"Days At Location": 5,
"trip": 1,
"lat": 37.8651,
"lon": -119.5383
"bearing": 0,
"pitch": 50,
"zoom": 12
},
{
"Location name": "San Francisco",
"State": "CA",
"Days At Location": 6,
"trip": 1,
"lat": 37.7749,
"lon": -122.4194
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Mendocino National Forest",
"State": "CA",
"Days At Location": 3,
"trip": 2,
"lat": 39.653,
"lon": -122.9496
"bearing": 0,
"pitch": 50,
"zoom": 11
},
{
"Location name": "Redwoods National Forest",
"State": "CA",
"Days At Location": 3,
"trip": 2,
"lat": 41.2132,
"lon": -124.0046
"bearing": 50,
"pitch": 40,
"zoom": 10
},
{
"Location name": "Eugene",
"State": "OR",
"Days At Location": 1,
"trip": 2,
"lat": 44.0521,
"lon": -123.0868
"bearing": 25,
"pitch": 30,
"zoom": 10
},
{
"Location name": "Portland",
"State": "OR",
"Days At Location": 1,
"trip": 2,
"lat": 45.5231,
"lon": -122.6765
"bearing": 0,
"pitch": 70,
"zoom": 11
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "WA",
"Days At Location": 3,
"trip": 2,
"lat": 47.0379,
"lon": -122.9007
"bearing": 35,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Mt Hood National Forest",
"State": "OR",
"Days At Location": 2,
"trip": 2,
"lat": 45.33,
"lon": -121.7089
"bearing": 35,
"pitch": 70,
"zoom": 11
},
{
"Location name": "Grand Teton National Park",
"State": "WY",
"Days At Location": 3,
"trip": 2,
"lat": 43.7904,
"lon": -110.6818
"bearing": 45,
"pitch": 50,
"zoom": 10
},
{
"Location name": "Yellowstone National Park",
"State": "WY",
"Days At Location": 3,
"trip": 2,
"lat": 44.428,
"lon": -110.5885
"bearing": 55,
"pitch": 70,
"zoom": 9
},
{
"Location name": "Wind River Reservation",
"State": "WY",
"Days At Location": 1,
"trip": 2,
"lat": 43.2833,
"lon": -108.834
"bearing": 55,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Black Hills",
"State": "SD",
"Days At Location": 7,
"trip": 2,
"lat": 43.9939,
"lon": -103.7718
"bearing": 55,
"pitch": 35,
"zoom": 9
},
{
"Location name": "Chicago",
"State": "IL",
"Days At Location": 2,
"trip": 2,
"lat": 41.8781,
"lon": -87.6298
"bearing": 25,
"pitch": 35,
"zoom": 9
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "MI",
"Days At Location": 3,
"trip": 2,
"lat": 42.3314,
"lon": -83.0458
"bearing": 25,
"pitch": 35,
"zoom": 9
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "ON",
"Days At Location": 1,
"trip": 2,
"lat": 43.6532,
"lon": -79.3832
"bearing": 25,
"pitch": 85,
"zoom": 9
},
{
"Location name": "PI:NAME:<NAME>END_PI",
"State": "NY",
"Days At Location": 1,
"trip": 2,
"lat": 43.0962,
"lon": -79.0377
"bearing": 25,
"pitch": 75,
"zoom": 9
},
{
"Location name": "New York City",
"State": "NY",
"Days At Location": 0,
"trip": 2,
"lat": 40.7128,
"lon": -74.0059
"bearing": 0,
"pitch": 0,
"zoom": 9
}
]
window.locations = locations
window.autoplay = false
getParameterByName = (name, url) ->
if !url
url = window.location.href
name = name.replace(/[\[\]]/g, '\\$&')
regex = new RegExp('[?&]' + name + '(=([^&#]*)|&|#|$)')
results = regex.exec(url)
if !results
return null
if !results[2]
return ''
decodeURIComponent results[2].replace(/\+/g, ' ')
route =
'type': 'FeatureCollection'
'features': [ {
'type': 'Feature'
'geometry':
'type': 'LineString'
'coordinates': []
} ]
listToGeoJson = (list) ->
for stop in list
route.features[0].geometry.coordinates.push [stop.lon,stop.lat]
#console.log 'route -->', route
listToGeoJson(locations)
points =
type: 'geojson'
data: {
type: 'FeatureCollection'
features: [
]
}
listToGeoJsonPoints = (list) ->
for stop in list
points.data.features.push {
type: 'Feature'
geometry: {
type: 'Point',
coordinates: [stop.lon,stop.lat]
},
properties: {
title: stop['Location name']
icon: 'circle'
}
}
listToGeoJsonPoints(locations)
mapboxgl.accessToken = 'PI:KEY:<KEY>END_PI'
uriStop = getParameterByName('stop')
if uriStop is null
window.currentStop = 0
else
window.currentStop = uriStop
map = new mapboxgl.Map {
container: 'map'
style: 'mapbox://styles/mapbox/light-v9'
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing
pitch: locations[window.currentStop].pitch
zoom: locations[window.currentStop].zoom
}
map2 = new mapboxgl.Map {
container: 'map2'
style: 'mapbox://styles/mapbox/dark-v9'
#style: 'mapbox://styles/ejfox/cirf7uxgm0001gwno49bk38eg'
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing / 10
pitch: locations[window.currentStop].pitch / 10
zoom: 4
}
map.on 'load', ->
map.addSource('route', {
"type": "geojson",
"data": route
})
map.addLayer({
"id": "route",
"source": "route",
"type": "line",
"layout": {
"line-join": "round"
"line-cap": "round"
}
"paint": {
"line-width": 8,
"line-color": "black"
"line-opacity": 0.5
"line-blur": 6
}
}, 'place-city-sm')
console.log 'points', points
map.addSource('points', points)
map.addLayer({
id: 'points'
type: 'symbol'
source: 'points'
layout: {
"icon-image": "{icon}-15",
#"icon-optional": true
#"icon-size": 2
"icon-padding": 4
#"text-field": "{title}",
"text-font": ["Open Sans Semibold", "Arial Unicode MS Bold"],
#"text-offset": [0.4, 0],
#"text-offset": [0, 1.3],
"text-anchor": "top"
"text-padding": 12
"text-optional": true
"symbol-avoid-edges": true
"text-allow-overlap": false
#{}"text-transform": "uppercase"
}
}, 'place-city-sm')
map2.addSource('points', points)
map2.addLayer({
id: 'points'
type: 'symbol'
source: 'points'
layout: {
"icon-image": "circle-11",
"icon-size": 0.75
}
}, 'water')
map2.on 'load', ->
map2.addSource('route', {
"type": "geojson",
"data": route
})
map2.addLayer({
"id": "route",
"source": "route",
"type": "line",
"layout": {
"line-join": "round"
"line-cap": "round"
}
"paint": {
"line-width": 2,
"line-color": "white"
"line-opacity": 1
"line-blur": 1
}
}, 'place-city-sm')
###
minimap = new mapboxgl.Minimap()
map.on 'style.load', ->
map.addControl minimap
###
$('#location-name').text locations[window.currentStop]['Location name']
$('#navigation #prev-button').hide()
flyToStop = ->
map.flyTo {
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing
pitch: locations[window.currentStop].pitch
zoom: locations[window.currentStop].zoom
speed: 0.8
}
map2.flyTo {
center: [locations[window.currentStop].lon, locations[window.currentStop].lat]
bearing: locations[window.currentStop].bearing / 2.75
pitch: locations[window.currentStop].pitch / 10
#zoom: locations[window.currentStop].zoom / 1.8
zoom: locations[window.currentStop].zoom / 2.1
speed: 0.5
}
$('#stoplist li').removeClass('current-stop')
$('#stoplist [data-stop-id="'+window.currentStop+'"]').addClass('current-stop')
$('#location-name').text locations[window.currentStop]['Location name']
if window.currentStop > 0
$('#navigation #prev-button').show()
else
$('#navigation #prev-button').hide()
stoplist = d3.select('#stoplist')
stoplist.selectAll('li')
.data locations
.enter().append('li')
.text (d,i) ->
console.log 'd',d
return d['Location name']
.attr 'data-stop-id', (d,i) -> i
.attr 'class', (d,i) ->
if i is 0
'current-stop'
.style 'cursor', 'pointer'
.on 'click', (d,i) ->
window.currentStop = i
flyToStop()
flyToNext = ->
if window.currentStop < locations.length
window.currentStop++
else
window.currentStop = 0
flyToStop()
flyToPrev = ->
if window.currentStop > 0
window.currentStop--
else
window.currentStop = locations.length
flyToStop()
$('#navigation #next-button').click ->
flyToNext()
$('#navigation #prev-button').click ->
flyToPrev()
$('#autoplay-button').click ->
$(this).toggleClass('autoplay-active')
if window.autoplay is false
window.autoplay != false
flyToNext()
window.autoplay = setInterval ->
flyToNext()
, 3500
else
clearInterval window.autoplay
|
[
{
"context": "ystem.execute\n cmd: \"\"\"\n echo ' bonjour '\n echo ' monde ' >&2\n \"\"\"\n ",
"end": 3687,
"score": 0.7303239107131958,
"start": 3683,
"tag": "NAME",
"value": "jour"
},
{
"context": "ystem.execute\n cmd: \"\"\"\n echo ' bonjour '\n echo ' monde ' >&2\n \"\"\"\n ",
"end": 4043,
"score": 0.5748874545097351,
"start": 4039,
"tag": "NAME",
"value": "jour"
}
] | packages/core/test/system.execute/index.coffee | DanielJohnHarty/node-nikita | 1 |
stream = require 'stream'
nikita = require '../../src'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.posix
describe 'system.execute', ->
they 'in option cmd or as a string', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: 'text=yes; echo $text'
, (err, {status, stdout}) ->
status.should.be.true() unless err
stdout.should.eql 'yes\n' unless err
.system.execute 'text=yes; echo $text', (err, {status, stdout}) ->
status.should.be.true() unless err
stdout.should.eql 'yes\n' unless err
.promise()
they 'cmd as a function', ({ssh}) ->
nikita
ssh: ssh
.call ->
@store['test:a_key'] = 'test context'
.system.execute
cmd: -> "text='#{@store['test:a_key']}'; echo $text"
, (err, {status, stdout}) ->
stdout.should.eql 'test context\n' unless err
.system.execute
a_key: 'test options'
cmd: ({options}) -> "text='#{options.a_key}'; echo $text"
, (err, {status, stdout}) ->
stdout.should.eql 'test options\n' unless err
.promise()
they 'stream stdout and unpipe', ({ssh}) ->
writer_done = callback_done = null
data = ''
out = new stream.Writable
out._write = (chunk, encoding, callback) ->
data += chunk.toString()
callback()
search1 = 'search_toto'
search2 = 'search_lulu'
unpiped = 0
out.on 'unpipe', ->
unpiped++
out.on 'finish', ->
false.should.be.true()
nikita
ssh: ssh
.system.execute
cmd: "cat #{__filename} | grep #{search1}"
stdout: out
.system.execute
cmd: "cat #{__filename} | grep #{search2}"
stdout: out
, (err) ->
unpiped.should.eql 2
data.should.containEql search1
data.should.containEql search2
.promise()
they 'stdout and stderr return empty', ({ssh}) -> #.skip 'remote',
nikita
ssh: ssh
.system.execute
cmd: "echo 'some text' | grep nothing"
relax: true
, (err, {stdout, stderr}) ->
stdout.should.eql '' unless err
stderr.should.eql '' unless err
.promise()
they 'validate exit code', ({ssh}) ->
# code undefined
nikita
ssh: ssh
.system.execute
cmd: "exit 42"
.next (err) ->
err.message.should.eql 'Invalid Exit Code: 42'
.system.execute
cmd: "exit 42"
code: [0, 42]
.promise()
they 'should honor code skipped', ({ssh}) ->
# code undefined
nikita
ssh: ssh
.system.execute
cmd: "mkdir #{scratch}/my_dir"
code: 0
code_skipped: 1
, (err, {status}) ->
status.should.be.true() unless err
.system.execute
cmd: "mkdir #{scratch}/my_dir"
code: 0
code_skipped: 1
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'should honor conditions', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: 'text=yes; echo $text'
if_exists: __dirname
, (err, {status, stdout}) ->
status.should.be.true()
stdout.should.eql 'yes\n'
.system.execute
cmd: 'text=yes; echo $text'
if_exists: "__dirname/toto"
, (err, {status, stdout}) ->
status.should.be.false()
should.not.exist stdout
.promise()
they 'honor unless_exists', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: "ls -l #{__dirname}"
unless_exists: __dirname
, (err, {status}) ->
status.should.be.false() unless err
.promise()
describe 'trim', ->
they 'both stdout and stderr', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ' bonjour '
echo ' monde ' >&2
"""
trim: true
, (err, {stdout, stderr}) ->
stdout.should.eql 'bonjour' unless err
stderr.should.eql 'monde' unless err
.promise()
they 'with trim_stdout and trim_stderr', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ' bonjour '
echo ' monde ' >&2
"""
stdout_trim: true
stderr_trim: true
, (err, {stdout, stderr}) ->
stdout.should.eql 'bonjour' unless err
stderr.should.eql 'monde' unless err
.promise()
describe 'log', ->
they 'stdin, stdout, stderr', ({ssh}) ->
stdin = stdout = stderr = undefined
nikita
ssh: ssh
.on 'stdin', (log) -> stdin = log
.on 'stdout', (log) -> stdout = log
.on 'stderr', (log) -> stderr = log
.system.execute
cmd: "echo 'to stderr' >&2; echo 'to stdout';"
, (err) ->
stdin.message.should.match /^echo.*;$/
stdout.message.should.eql 'to stdout\n'
stderr.message.should.eql 'to stderr\n'
.promise()
they 'disable logging', ({ssh}) ->
stdin = stdout = stderr = undefined
stdout_stream = stderr_stream = []
nikita
ssh: ssh
.on 'stdin', (log) -> stdin = log
.on 'stdout', (log) -> stdout = log
.on 'stdout_stream', (log) -> stdout_stream.push log
.on 'stderr', (log) -> stderr = log
.on 'stderr_stream', (log) -> stderr_stream.push log
.system.execute
cmd: "echo 'to stderr' >&2; echo 'to stdout';"
stdout_log: false
stderr_log: false
, (err) ->
stdin.message.should.match /^echo.*;$/
(stdout is undefined).should.be.true()
stdout_stream.length.should.eql 0
(stderr is undefined).should.be.true()
stderr_stream.length.should.eql 0
.promise()
describe 'error', ->
they 'provide `stdout` and `stderr`', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
sh -c '>&2 echo "Some Error"; exit 2'
"""
relax: true
, (err, {stdout, stderr}) ->
err.message.should.eql 'Invalid Exit Code: 2'
stdout.should.eql ''
stderr.should.eql 'Some Error\n'
.promise()
they 'provide `command`', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ohno && exit 1
"""
relax: true
, (err) ->
err.command.should.eql 'echo ohno && exit 1'
.promise()
they 'trap on error', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
sh -c '>&2 echo "exit 2'
echo 'ok'
"""
.system.execute
cmd: """
sh -c '>&2 echo "exit 2'
echo 'ok'
"""
trap: true
relax: true
, (err) ->
err.should.be.an.Error
.promise()
| 76752 |
stream = require 'stream'
nikita = require '../../src'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.posix
describe 'system.execute', ->
  # `cmd` may be given as an option or as the first positional string argument
  they 'in option cmd or as a string', ({ssh}) ->
    nikita
      ssh: ssh
    .system.execute
      cmd: 'text=yes; echo $text'
    , (err, {status, stdout}) ->
      status.should.be.true() unless err
      stdout.should.eql 'yes\n' unless err
    .system.execute 'text=yes; echo $text', (err, {status, stdout}) ->
      status.should.be.true() unless err
      stdout.should.eql 'yes\n' unless err
    .promise()
  # `cmd` may be a function; it is invoked with the action context (`@store`)
  # and with the action options destructured from its first argument
  they 'cmd as a function', ({ssh}) ->
    nikita
      ssh: ssh
    .call ->
      @store['test:a_key'] = 'test context'
    .system.execute
      cmd: -> "text='#{@store['test:a_key']}'; echo $text"
    , (err, {status, stdout}) ->
      stdout.should.eql 'test context\n' unless err
    .system.execute
      a_key: 'test options'
      cmd: ({options}) -> "text='#{options.a_key}'; echo $text"
    , (err, {status, stdout}) ->
      stdout.should.eql 'test options\n' unless err
    .promise()
they 'stream stdout and unpipe', ({ssh}) ->
writer_done = callback_done = null
data = ''
out = new stream.Writable
out._write = (chunk, encoding, callback) ->
data += chunk.toString()
callback()
search1 = 'search_toto'
search2 = 'search_lulu'
unpiped = 0
out.on 'unpipe', ->
unpiped++
out.on 'finish', ->
false.should.be.true()
nikita
ssh: ssh
.system.execute
cmd: "cat #{__filename} | grep #{search1}"
stdout: out
.system.execute
cmd: "cat #{__filename} | grep #{search2}"
stdout: out
, (err) ->
unpiped.should.eql 2
data.should.containEql search1
data.should.containEql search2
.promise()
  # A pipeline matching nothing yields empty strings (not undefined) for both channels
  they 'stdout and stderr return empty', ({ssh}) -> #.skip 'remote',
    nikita
      ssh: ssh
    .system.execute
      cmd: "echo 'some text' | grep nothing"
      relax: true
    , (err, {stdout, stderr}) ->
      stdout.should.eql '' unless err
      stderr.should.eql '' unless err
    .promise()
  # Without `code`, only exit 0 is accepted; `code` may whitelist extra values
  they 'validate exit code', ({ssh}) ->
    # code undefined
    nikita
      ssh: ssh
    .system.execute
      cmd: "exit 42"
    .next (err) ->
      err.message.should.eql 'Invalid Exit Code: 42'
    .system.execute
      cmd: "exit 42"
      code: [0, 42]
    .promise()
  # `code_skipped` maps an exit code to status=false instead of an error;
  # the second mkdir fails with 1 (dir exists) and is reported as skipped
  they 'should honor code skipped', ({ssh}) ->
    # code undefined
    nikita
      ssh: ssh
    .system.execute
      cmd: "mkdir #{scratch}/my_dir"
      code: 0
      code_skipped: 1
    , (err, {status}) ->
      status.should.be.true() unless err
    .system.execute
      cmd: "mkdir #{scratch}/my_dir"
      code: 0
      code_skipped: 1
    , (err, {status}) ->
      status.should.be.false() unless err
    .promise()
they 'should honor conditions', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: 'text=yes; echo $text'
if_exists: __dirname
, (err, {status, stdout}) ->
status.should.be.true()
stdout.should.eql 'yes\n'
.system.execute
cmd: 'text=yes; echo $text'
if_exists: "__dirname/toto"
, (err, {status, stdout}) ->
status.should.be.false()
should.not.exist stdout
.promise()
  # `unless_exists` skips the command when the given path exists
  they 'honor unless_exists', ({ssh}) ->
    nikita
      ssh: ssh
    .system.execute
      cmd: "ls -l #{__dirname}"
      unless_exists: __dirname
    , (err, {status}) ->
      status.should.be.false() unless err
    .promise()
describe 'trim', ->
they 'both stdout and stderr', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ' bon<NAME> '
echo ' monde ' >&2
"""
trim: true
, (err, {stdout, stderr}) ->
stdout.should.eql 'bonjour' unless err
stderr.should.eql 'monde' unless err
.promise()
they 'with trim_stdout and trim_stderr', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ' bon<NAME> '
echo ' monde ' >&2
"""
stdout_trim: true
stderr_trim: true
, (err, {stdout, stderr}) ->
stdout.should.eql 'bonjour' unless err
stderr.should.eql 'monde' unless err
.promise()
describe 'log', ->
    # The session emits one log event per channel: 'stdin' with the command,
    # 'stdout'/'stderr' with the captured output
    they 'stdin, stdout, stderr', ({ssh}) ->
      stdin = stdout = stderr = undefined
      nikita
        ssh: ssh
      .on 'stdin', (log) -> stdin = log
      .on 'stdout', (log) -> stdout = log
      .on 'stderr', (log) -> stderr = log
      .system.execute
        cmd: "echo 'to stderr' >&2; echo 'to stdout';"
      , (err) ->
        stdin.message.should.match /^echo.*;$/
        stdout.message.should.eql 'to stdout\n'
        stderr.message.should.eql 'to stderr\n'
      .promise()
they 'disable logging', ({ssh}) ->
stdin = stdout = stderr = undefined
stdout_stream = stderr_stream = []
nikita
ssh: ssh
.on 'stdin', (log) -> stdin = log
.on 'stdout', (log) -> stdout = log
.on 'stdout_stream', (log) -> stdout_stream.push log
.on 'stderr', (log) -> stderr = log
.on 'stderr_stream', (log) -> stderr_stream.push log
.system.execute
cmd: "echo 'to stderr' >&2; echo 'to stdout';"
stdout_log: false
stderr_log: false
, (err) ->
stdin.message.should.match /^echo.*;$/
(stdout is undefined).should.be.true()
stdout_stream.length.should.eql 0
(stderr is undefined).should.be.true()
stderr_stream.length.should.eql 0
.promise()
describe 'error', ->
    # With relax: true the exit-code error is passed to the callback,
    # and captured stdout/stderr are still provided
    they 'provide `stdout` and `stderr`', ({ssh}) ->
      nikita
        ssh: ssh
      .system.execute
        cmd: """
        sh -c '>&2 echo "Some Error"; exit 2'
        """
        relax: true
      , (err, {stdout, stderr}) ->
        err.message.should.eql 'Invalid Exit Code: 2'
        stdout.should.eql ''
        stderr.should.eql 'Some Error\n'
      .promise()
    # The failing command is attached to the error as `err.command`
    they 'provide `command`', ({ssh}) ->
      nikita
        ssh: ssh
      .system.execute
        cmd: """
        echo ohno && exit 1
        """
        relax: true
      , (err) ->
        err.command.should.eql 'echo ohno && exit 1'
      .promise()
    # Without `trap`, the script keeps running after the broken first line;
    # with trap: true the mid-script failure surfaces as an error
    # NOTE(review): `err.should.be.an.Error` is the property-style assertion of
    # older should.js; recent versions expect the call form `.Error()` — confirm
    # the pinned should.js version
    they 'trap on error', ({ssh}) ->
      nikita
        ssh: ssh
      .system.execute
        cmd: """
        sh -c '>&2 echo "exit 2'
        echo 'ok'
        """
      .system.execute
        cmd: """
        sh -c '>&2 echo "exit 2'
        echo 'ok'
        """
        trap: true
        relax: true
      , (err) ->
        err.should.be.an.Error
      .promise()
| true |
stream = require 'stream'
nikita = require '../../src'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.posix
describe 'system.execute', ->
they 'in option cmd or as a string', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: 'text=yes; echo $text'
, (err, {status, stdout}) ->
status.should.be.true() unless err
stdout.should.eql 'yes\n' unless err
.system.execute 'text=yes; echo $text', (err, {status, stdout}) ->
status.should.be.true() unless err
stdout.should.eql 'yes\n' unless err
.promise()
they 'cmd as a function', ({ssh}) ->
nikita
ssh: ssh
.call ->
@store['test:a_key'] = 'test context'
.system.execute
cmd: -> "text='#{@store['test:a_key']}'; echo $text"
, (err, {status, stdout}) ->
stdout.should.eql 'test context\n' unless err
.system.execute
a_key: 'test options'
cmd: ({options}) -> "text='#{options.a_key}'; echo $text"
, (err, {status, stdout}) ->
stdout.should.eql 'test options\n' unless err
.promise()
they 'stream stdout and unpipe', ({ssh}) ->
writer_done = callback_done = null
data = ''
out = new stream.Writable
out._write = (chunk, encoding, callback) ->
data += chunk.toString()
callback()
search1 = 'search_toto'
search2 = 'search_lulu'
unpiped = 0
out.on 'unpipe', ->
unpiped++
out.on 'finish', ->
false.should.be.true()
nikita
ssh: ssh
.system.execute
cmd: "cat #{__filename} | grep #{search1}"
stdout: out
.system.execute
cmd: "cat #{__filename} | grep #{search2}"
stdout: out
, (err) ->
unpiped.should.eql 2
data.should.containEql search1
data.should.containEql search2
.promise()
they 'stdout and stderr return empty', ({ssh}) -> #.skip 'remote',
nikita
ssh: ssh
.system.execute
cmd: "echo 'some text' | grep nothing"
relax: true
, (err, {stdout, stderr}) ->
stdout.should.eql '' unless err
stderr.should.eql '' unless err
.promise()
they 'validate exit code', ({ssh}) ->
# code undefined
nikita
ssh: ssh
.system.execute
cmd: "exit 42"
.next (err) ->
err.message.should.eql 'Invalid Exit Code: 42'
.system.execute
cmd: "exit 42"
code: [0, 42]
.promise()
they 'should honor code skipped', ({ssh}) ->
# code undefined
nikita
ssh: ssh
.system.execute
cmd: "mkdir #{scratch}/my_dir"
code: 0
code_skipped: 1
, (err, {status}) ->
status.should.be.true() unless err
.system.execute
cmd: "mkdir #{scratch}/my_dir"
code: 0
code_skipped: 1
, (err, {status}) ->
status.should.be.false() unless err
.promise()
they 'should honor conditions', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: 'text=yes; echo $text'
if_exists: __dirname
, (err, {status, stdout}) ->
status.should.be.true()
stdout.should.eql 'yes\n'
.system.execute
cmd: 'text=yes; echo $text'
if_exists: "__dirname/toto"
, (err, {status, stdout}) ->
status.should.be.false()
should.not.exist stdout
.promise()
they 'honor unless_exists', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: "ls -l #{__dirname}"
unless_exists: __dirname
, (err, {status}) ->
status.should.be.false() unless err
.promise()
describe 'trim', ->
they 'both stdout and stderr', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ' bonPI:NAME:<NAME>END_PI '
echo ' monde ' >&2
"""
trim: true
, (err, {stdout, stderr}) ->
stdout.should.eql 'bonjour' unless err
stderr.should.eql 'monde' unless err
.promise()
they 'with trim_stdout and trim_stderr', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ' bonPI:NAME:<NAME>END_PI '
echo ' monde ' >&2
"""
stdout_trim: true
stderr_trim: true
, (err, {stdout, stderr}) ->
stdout.should.eql 'bonjour' unless err
stderr.should.eql 'monde' unless err
.promise()
describe 'log', ->
they 'stdin, stdout, stderr', ({ssh}) ->
stdin = stdout = stderr = undefined
nikita
ssh: ssh
.on 'stdin', (log) -> stdin = log
.on 'stdout', (log) -> stdout = log
.on 'stderr', (log) -> stderr = log
.system.execute
cmd: "echo 'to stderr' >&2; echo 'to stdout';"
, (err) ->
stdin.message.should.match /^echo.*;$/
stdout.message.should.eql 'to stdout\n'
stderr.message.should.eql 'to stderr\n'
.promise()
they 'disable logging', ({ssh}) ->
stdin = stdout = stderr = undefined
stdout_stream = stderr_stream = []
nikita
ssh: ssh
.on 'stdin', (log) -> stdin = log
.on 'stdout', (log) -> stdout = log
.on 'stdout_stream', (log) -> stdout_stream.push log
.on 'stderr', (log) -> stderr = log
.on 'stderr_stream', (log) -> stderr_stream.push log
.system.execute
cmd: "echo 'to stderr' >&2; echo 'to stdout';"
stdout_log: false
stderr_log: false
, (err) ->
stdin.message.should.match /^echo.*;$/
(stdout is undefined).should.be.true()
stdout_stream.length.should.eql 0
(stderr is undefined).should.be.true()
stderr_stream.length.should.eql 0
.promise()
describe 'error', ->
they 'provide `stdout` and `stderr`', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
sh -c '>&2 echo "Some Error"; exit 2'
"""
relax: true
, (err, {stdout, stderr}) ->
err.message.should.eql 'Invalid Exit Code: 2'
stdout.should.eql ''
stderr.should.eql 'Some Error\n'
.promise()
they 'provide `command`', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
echo ohno && exit 1
"""
relax: true
, (err) ->
err.command.should.eql 'echo ohno && exit 1'
.promise()
they 'trap on error', ({ssh}) ->
nikita
ssh: ssh
.system.execute
cmd: """
sh -c '>&2 echo "exit 2'
echo 'ok'
"""
.system.execute
cmd: """
sh -c '>&2 echo "exit 2'
echo 'ok'
"""
trap: true
relax: true
, (err) ->
err.should.be.an.Error
.promise()
|
[
{
"context": " path = @download\n data = @data\n key = 'tpl'\n if key? and not data\n data = @data\n ",
"end": 4995,
"score": 0.9868595600128174,
"start": 4992,
"tag": "KEY",
"value": "tpl"
}
] | src/layer-controller.coffee | dkiyatkin/layer-controller | 2 | _ = require('lodash')
Promise = require('bluebird')
superagent = require('superagent')
Module = require('./module')
pasteHTML = require('./pasteHTML')
Log = require('./log')
class LayerController extends Module
  # Emit `event` and settle once every registered listener has reported back.
  # Each listener receives a done(abort) callback as its first argument:
  #   done(Error)               -> reject with that error
  #   done(defined falsy value) -> resolve(null), i.e. cancel
  #   anything else             -> counts the listener as finished
  # Resolves true when all listeners are done, or immediately if there are none.
  # @param {String} event
  # @param {*} args
  # @return {?Promise} layer
  emitAll: (event, args...) -> new Promise (resolve, reject) =>
    counter = @listeners(event).length
    # No listeners: nothing to wait for
    return resolve(true) if not counter
    stop = false
    args.unshift event, (abort) -> # done(abort)
      # A previous done() already settled the promise
      return if stop
      if abort? # true and other truthy values have no effect
        if abort instanceof Error
          stop = true
          return reject(abort)
        if not abort
          stop = true
          return resolve(null)
      # Resolve once the last listener checks in
      resolve(true) if --counter is 0
    @emit.apply(this, args)
  # Map of task name -> task names it conflicts with (must not run concurrently)
  _conflictTask:
    'stateAll': ['state', 'hideAll', 'hide', 'show', 'insert']
    'state': ['hideAll', 'hide', 'show', 'insert', 'reset']
    'hideAll': ['hide', 'show', 'insert']
    'hide': ['show', 'insert', 'reset']
    'show': ['hideAll', 'hide', 'insert', 'reset']
    'insert': ['hideAll', 'hide', 'reset']
    'load': ['reset']
    'parse': ['reset']
    'reset': ['hide', 'show', 'insert', 'load', 'parse']
  # Resolve incompatible tasks: defer task `name` until every currently-running
  # conflicting task has settled, then re-invoke it
  # @param {String} name  Function name, i.e. the task
  # @param {*} type  Argument for the function, i.e. the task type
  # @return {Object} task
  _afterConflictTask: (name, type) ->
    task = @task[name]
    return task if task.run # already running: do nothing
    return task if not @_conflictTask[name]?.length
    conflictTasks =
      (_task.run for own _name, _task of @task when (@_conflictTask[name].indexOf(_name) isnt -1) and _task.run)
    return task if not conflictTasks.length
    # Chain behind the conflicting runs; catch() swallows their failures so
    # this task still starts afterwards
    task.run = Promise.all(conflictTasks).catch().then =>
      @_deleteTask(task)
      @[name](type)
    task
  # Register a task; only tasks with different names may run concurrently.
  # Re-invoking a running task with the same type returns the running promise;
  # a different type is scheduled to run again after the current run finishes.
  # If conflicting tasks are running, the new task starts after they complete.
  # @param {String} name  Function name, i.e. the task
  # @param {*} type  Argument for the function, i.e. the task type
  # @return {Object} task  task[name].run is set while running; its type is task[name].type
  _task: (name, type) ->
    @task[name] = {} if not @task[name]
    task = @task[name]
    if task.run # a run is in progress
      return task if task.type is type
      # NOTE(review): the promise of this queued re-invocation is discarded, so
      # callers cannot await it — confirm this is intended
      task.run.then => @[name](type)
    task.type = type
    @_afterConflictTask(name, type)
  # Finish a task: clear its bookkeeping fields and optionally settle via fn(arg)
  # @param {Object} task
  # @param {?Function} fn  resolve/reject
  # @param {*} arg
  # @return {?Promise} fn(arg)
  _deleteTask: (task, fn, arg) ->
    delete task.type
    delete task.run
    delete task.err
    fn(arg) if fn?
  # Test method of the layer, does nothing useful.
  # Emits 'test'/'test.prop' before and 'tested'/'tested.prop' after
  # (the .prop variants only when testValue is 24); bumps @testCount.
  # Several simultaneous invocations share a single run.
  # NOTE(review): registers under the task name 'load', not 'test' — looks like
  # a copy/paste slip and would collide with the real load() task; confirm
  # @return {?Promise} layer
  test: (testValue) ->
    task = @_task('load', testValue)
    return task.run if task.run
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('test', testValue))
      emits.push(@emitAll('test.prop', testValue, 42)) if testValue is 24
      resolve Promise.all(emits).then (emits) =>
        # Any listener cancelling (emitAll -> null) aborts the run with null
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @testCount = 0 if not @testCount?
        @testCount++
        @log.debug('test action')
        emits = []
        emits.push(@emitAll('tested', testValue))
        emits.push(@emitAll('tested.prop', testValue, 42)) if testValue is 24
        Promise.all(emits).then (emits) =>
          return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Render a template with the layer as its data context
  # NOTE(review): lodash >= 3 treats the 2nd _.template argument as options,
  # not data — this call shape assumes lodash 2.x; confirm the pinned version
  # @param {String} tpl  Template
  # @return {String} text  Rendered text
  render: (tpl) ->
    _.template(tpl, this)
# Получить на основе неполного пути полный путь, который будет скачиваться и запишется в кэш
# @param {String} path
# @return {String} path
_originPath: (path) ->
if @request.origin and path.search('//') isnt 0 and path.search('/') is 0 # относительные пути не поддерживаются
path = @request.origin + path
path
# Загрузить данные для слоя
# @param {String|Array|Object} path данные для загрузки
# @param {Object} data Объект для сохранения
# @param {?String} key Ключ по которому будут сохранены данные
# @return {?Promise} data
_load: (path, key, data) ->
# @log.debug('_load', path, key, data)
@data = {} if not @data
@_data = {} if not @_data
if not path
path = @download
data = @data
key = 'tpl'
if key? and not data
data = @data
if _.isString(path)
path = @render(path)
path = @_originPath(path)
if @request.cache[path]
return Promise.resolve(@request.cache[path]) if not (key? and data)
data[key] = @request.cache[path]
return Promise.resolve(data)
if not @request.loading[path]
@request.loading[path] = @request.agent.get(path)
@request.loading[path].set(@request.headers) if @request.headers
@request.loading[path].set('x-layer-controller-proxy', 'true') # защита от рекурсии
@request.loading[path] = Promise.promisify(@request.loading[path].end, @request.loading[path])()
@request.loading[path].then (res) =>
delete @request.loading[path]
if res.error
@log.error("load #{path}:", res.error?.message or res.error)
return
if res.body and Object.keys(res.body).length
@request.cache[path] = res.body
else
@request.cache[path] = res.text
@_data[path] = @request.cache[path]
return @request.cache[path] if not (key? and data)
data[key] = @request.cache[path]
return data
else if _.isArray(path)
Promise.each path, (item, i, value) =>
@_load(item, i, data)
.then (results) ->
data
else if _.isObject(path)
paths = []
for own _key, _path of path
if _.isObject(_path)
data[_key] = {}
paths.push(@_load(_path, _key, data[_key]))
else
paths.push(@_load(_path, _key, data))
Promise.all(paths).then ->
data
  # Load the layer data (layer.data) from layer.download unless already loaded,
  # or download a single file; downloads are cached in layer.request.cache.
  # Emits 'load' before and 'loaded' after; listeners may cancel either phase.
  # @return {?Promise} layer
  load: ->
    task = @_task('load')
    return task.run if task.run
    # Data already present: nothing to do
    return Promise.resolve(this) if @data?
    return Promise.reject(new Error(@log.error('layer.download does not exist'))) if not @download
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('load'))
      resolve Promise.all(emits).then (emits) =>
        # Any listener cancelling (emitAll -> null) aborts the run with null
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_load().then =>
          emits = []
          emits.push(@emitAll('loaded'))
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @_deleteTask(task, Promise.resolve, this)
        # , (err) -> throw task.err = err # _load REVIEW
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Re-render this layer, then all of its child layers
  # @param {Boolean} force
  # @return {Promise} layer
  reparseAll: (force) ->
    @reparse(force).then (layer) =>
      # return null if not layer # XXX is this needed?
      # Children re-render regardless of individual failures (catch() swallows them)
      Promise.all(@childLayers.map (layer) -> layer.reparse(force)).catch().then =>
        this
  # Re-render the layer
  # @param {Boolean} force  Hide and re-show the layer when it cannot be refreshed in place
  # @return {Promise} layer
  reparse: (force) ->
    # Layer is not currently in the DOM / not shown
    if not @elementList?.length or not @isShown
      return Promise.resolve(null) if not force
      return @show(true).then (layer) =>
        return layer if layer
        # show() failed: roll back via hideAll, preserving the null result
        @hideAll().then => layer
    @_show(true).then (layer) =>
      return layer if layer or not force
      @hideAll().then => layer
# Распарсить шаблон (layer.data.tpl) слоя в html (layer.html)
# @param {Boolean} force Парсить даже если есть layer.html
# @return {Promise} layer
parse: (force = false) ->
task = @_task('parse', force)
return task.run if task.run
return Promise.resolve(this) if @html? and not force
return Promise.reject(new Error(@log.error('layer.data.tpl does not exist'))) if not @data?.tpl?
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('parse'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@html = @render(@data.tpl)
emits = []
emits.push(@emitAll('parsed'))
Promise.all(emits).then (emits) =>
for success in emits when not success
@html = null # XXX нужно ли это?
return @_deleteTask(task, Promise.resolve, null)
@_deleteTask(task, Promise.resolve, this)
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
  # Load then parse the layer
  # @param {Boolean} force
  # @return {Promise} layer
  _make: (force) -> # load parse
    if @download
      @load().then (layer) =>
        # load() was cancelled by a listener
        return null if not layer
        # return this if not @data?.tpl?
        @parse(force)
    else
      # Nothing to parse without a template
      return Promise.resolve(this) if not @data?.tpl?
      @parse(force)
# Загрузить, распарсить слой
# @param {Boolean} force
# @return {Promise} layer
make: (force = false) ->
task = @_task('make', force)
return task.run if task.run
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('make'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_make(force).then (layer) =>
return @_deleteTask(task, Promise.resolve, null) if not layer
emits = []
emits.push(@emitAll('made'))
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
, (err) -> throw task.err = err
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
  # Find an element list; with no arguments, looks up the layer's own elements
  # under its parent node or parent layer's elements
  # @param {Node|NodeList} node
  # @param {String} selectors
  # @return {NodeList|Array} elementList
  findElements: (node = @parentNode or @parentLayer?.elementList, selectors = @selectors) ->
    # @log.debug 'findElements' #, node, selectors
    throw new Error('findElements: node does not exist') if not node
    throw new Error('findElements: selectors does not exist') if not selectors
    # jQuery-like collections expose both find() and html(); plain arrays may have their own find
    return node.find(selectors) if node.find and node.html
    return _.toArray(node.querySelectorAll(selectors)) if node.querySelectorAll
    throw new Error(@log.error('findElements: bad node')) if not node[0]?.querySelectorAll
    # Plain list of nodes: concatenate the per-node query results
    elementList = []
    for element in node
      elementList = elementList.concat(_.toArray(element.querySelectorAll(selectors)))
    elementList
  # Write html into every element of the list
  # @param {NodeList} elementList
  # @param {String} html
  htmlElements: (elementList, html) ->
    throw new Error('htmlElements: elementList does not exist') if not elementList
    throw new Error('htmlElements: html does not exist') if not html?
    # jQuery-like collection: delegate to its html()
    return elementList.html(html) if elementList.html
    Array::forEach.call elementList, (element) ->
      pasteHTML(element, html) # element.innerHTML = @html
# Вставить слой, нет обработки если слой заместит какой-то другой слой
# @param {Boolean} force Вставлять слой даже если уже есть @elementList
# @return {Promise} layer
insert: (force = true) ->
task = @_task('insert', force)
return task.run if task.run
return Promise.reject(new Error(@log.error('layer.selectors does not exist'))) if not @selectors
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('insert'))
emits.push(@emitAll('insert.window')) if window?
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
unless not force and @elementList?.length
@elementList = null
elementList = @findElements()
return @_deleteTask(task, Promise.resolve, null) if not elementList?.length
@htmlElements(elementList, @html)
@elementList = elementList
emits = []
# emits.push(@emitAll('inserted'))
emits.push(@emitAll('domready'))
# emits.push(@emitAll('inserted.window')) if window?
emits.push(@emitAll('domready.window')) if window?
Promise.all(emits).then (emits) =>
for success in emits when not success
@elementList = null
return @_deleteTask(task, Promise.resolve, null)
@_deleteTask(task, Promise.resolve, this)
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
  # Prepare the layer (load + parse) and insert it if needed
  # @param {Boolean} force
  # @return {Promise} layer
  _show: (force) ->
    @make(force).then (layer) => # false - do not re-parse when html already exists
      # make() was cancelled
      return null if not layer
      @insert(force) # false - do not re-insert when elementList already exists
# Показать слой (загрузить, распарсить, вставить), если он не показан. Если слой показан, ничего не делать
# @param {Boolean} force Парсить если уже есть html, вставлять слой если уже есть elementList
# @return {Promise} layer
show: (force = false) ->
task = @_task('show', force)
return task.run if task.run
return Promise.resolve(this) if @isShown and @elementList?.length
return Promise.resolve(null) unless @parentNode or (@parentLayer and @parentLayer.isShown and @parentLayer.elementList?.length)
# return Promise.resolve(this) if @isShown
# return Promise.resolve(null) if @parentLayer and not @parentLayer.isShown
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('show'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_show(force).then (layer) =>
return @_deleteTask(task, Promise.resolve, null) if not layer
emits = []
# emits.push(@emitAll('showed'))
emits.push(@emitAll('shown'))
emits.push(@emitAll('shown.window')) if window?
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@isShown = true
@_deleteTask(task, Promise.resolve, this)
, (err) -> throw task.err = err
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Скрыть все дочерние слои начиная с последнего и затем скрыть сам слой
# @param {Boolean} force Пытаться скрыть даже если слой уже скрыт
# @return {Promise} layer
hideAll: (force = false) ->
@log.debug('hideAll', force)
task = @_task('hideAll', force)
return task.run if task.run
return Promise.resolve(this) if not @isShown and not @elementList?.length and not force
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('hide.all'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
Promise.all(@childLayers.map (layer) -> layer.hideAll(force)).catch().then =>
@hide(force).then =>
emits = []
emits.push(@emitAll('hidden.all'))
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
, (err) -> throw task.err = err
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Скрыть слой
# @param {Boolean} force Пытаться скрыть даже если слой уже скрыт, и заново найти layer.elementList если его нету
# @return {Promise} layer
hide: (force = false) ->
@log.debug('hide', force)
task = @_task('hide', force)
return task.run if task.run
return Promise.resolve(this) if not @isShown and not @elementList?.length and not force
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('hide'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
if force and not @elementList?.length
@htmlElements(@findElements(), '')
else
@htmlElements(@elementList, '')
@isShown = false
@elementList = null
emits = []
emits.push(@emitAll('hidden'))
emits.push(@emitAll('hidden.window')) if window?
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Привести слой к состоянию и рекурсивно привести все дочерние слои к состоянию
# @param {String} state Состояние для слоя
# @return {Promise} layer
stateAll: (state = '') ->
task = @_task('stateAll', state)
return task.run if task.run
task.run = new Promise (resolve, reject) =>
# @log.debug('stateAll run', state)
emits = []
emits.push(@emitAll('state.all', state))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@state(state).then =>
Promise.all(@childLayers.map (layer) -> layer.stateAll(state)).catch().then =>
emits = []
emits.push(@emitAll('stated.all', state))
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
, (err) -> throw task.err = err
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
  # Hide or show the layer depending on whether `state` matches layer.regState
  # @param {String} state  Target state for the layer
  # @return {Promise} layer
  _state: (state) -> # XXX errors are only returned from the tasks
    return Promise.resolve(this) if not @selectors # XXX relying on layer.selectors is not very obvious
    # No regState means always shown; otherwise the state must match the regexp
    return @hideAll() unless not @regState or (state.search(@regState) != -1)
    # delete @isShown # XXX needed or not?
    @show()
# Привести слой к состоянию
# @param {String} state Состояние для слоя
# @return {Promise} layer
state: (state = '') ->
# @log.debug('state', state)
@task.state = {queue: []} if not @task.state
task = @_afterConflictTask('state', state)
if task.run # если уже идет state
pushed = task.queue.push(state)
return task.run.then => # выполнить state(), если это последний в очереди
return null if task.queue.length isnt pushed
task.queue = [] # очищаем массив
task.run = @state(state)
task.run = new Promise (resolve, reject) =>
# @log.debug('state run')
@task.state.next = state
@task.state.equal = (if @task.state.current is @task.state.next then true else false)
@task.state.progress = (if @task.state.current? and not @task.state.equal then true else false)
@task.state.nofirst = @task.state.current? # не в первый раз
emits = []
emits.push(@emitAll('state', state))
emits.push(@emitAll('state.window', state)) if window?
emits.push(@emitAll('state.next', state)) if @task.state.nofirst # не в первый раз
if @task.state.equal # состояния одинаковые
emits.push(@emitAll('state.equal', state))
emits.push(@emitAll('state.equal.window', state)) if window?
else # состояния разные
emits.push(@emitAll('state.different', state))
emits.push(@emitAll('state.different.window', state)) if window?
if @task.state.progress # не в первый раз и состояния разные
emits.push(@emitAll('state.progress', state))
emits.push(@emitAll('state.progress.window', state)) if window?
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_state(state).then (layer) =>
return @_deleteTask(task, Promise.resolve, null) if not layer # слой не вставился или не скрылся
@task.state.last = @task.state.current
@task.state.current = state
delete @task.state.next
emits = []
emits.push(@emitAll('stated', state))
if @task.state.nofirst # не в первый раз
emits.push(@emitAll('stated.next', state))
emits.push(@emitAll('stated.next.window', state)) if window?
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
, (err) -> throw task.err = err
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
  # Strip the layer of transient data
  # @param {String|Boolean} cacheKey  download key to evict, or true to evict
  #   every download related to this layer; falsy evicts nothing from the cache
  # @return {Boolean} success
  _reset: (cacheKey) ->
    delete @html
    delete @elementList # the layer may remain isShown, but elementList is dropped
    delete @data
    return true if not cacheKey
    return false if not @_data or not @download
    if _.isString(cacheKey)
      # Evict the single cached download addressed by this key
      path = @render(@download[cacheKey])
      return false if not path
      path = @_originPath(path)
      delete @_data[path]
      delete @request.cache[path]
      return true
    if _.isBoolean(cacheKey) # drop every related download
      for own path, data of @_data
        delete @_data[path]
        delete @request.cache[path]
      return true
    false
  # Strip the layer of transient data, as a conflict-aware task
  # @param {String|Boolean} cacheKey
  # @return {Promise} success
  reset: (cacheKey) ->
    task = @_task('reset', cacheKey)
    return task.run if task.run
    task.run = new Promise (resolve, reject) =>
      # Perform the cleanup on the next tick
      process.nextTick =>
        @_deleteTask(task, resolve, @_reset(cacheKey))
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
# Получить полное имя слоя
# @return {String} name
getFullName: ->
return @name if not @parentLayer
@parentLayer.getFullName() + '.' + @name
constructor: (parentLayer) ->
super wildcard: true
@childLayers = []
if parentLayer instanceof LayerController # определение layer.main
@parentLayer = parentLayer
@parentLayer.childLayers.push(this)
@main = parentLayer.main
@request = @main.request
@layers = @main.layers
@layers.push(this)
@name = "#{@parentLayer.childLayers.length}(#{@layers.length})" if not @name
else # main слой без parentLayer
@main = this
@parentNode = document if document?
@main.request = {}
if window?
@main.request.origin = # на сервере origin определяется по своему
window.location.origin or
window.location.protocol + '//' + window.location.hostname +
(if window.location.port then ':' + window.location.port else '')
@main.request.agent = superagent
@main.request.loading = {} # загружаемые адреса и их Promise
@main.request.cache = {}
@main.layers = [this]
@main.name = parentLayer?.name or @main.name or 'main'
@log = new Log(this)
# @log.debug('new')
@task = {}
@config = {}
@rel = {}
LayerController.emit("init.#{@getFullName()}", this)
# Expose bundled dependencies on the class for consumers
LayerController._ = _
LayerController.Promise = Promise
LayerController.superagent = superagent
LayerController.pasteHTML = pasteHTML
LayerController.Log = Log
module.exports = LayerController
LayerController.Module = Module
LayerController.EventEmitter2 = Module.EventEmitter2
LayerController.extend(new Module.EventEmitter2({wildcard: true})) # make the class itself an emitter
| 181778 | _ = require('lodash')
Promise = require('bluebird')
superagent = require('superagent')
Module = require('./module')
pasteHTML = require('./pasteHTML')
Log = require('./log')
# Controller for a hierarchy of UI "layers": each layer can load data, render a
# template, insert itself into the DOM, hide, and react to state changes.
# Every operation runs as a named "task" so concurrent and conflicting calls
# are serialized (see _conflictTask / _task / _afterConflictTask).
class LayerController extends Module
  # Run all listeners of an event and wait for their done() callbacks.
  # Each listener receives done(abort) as the first argument:
  #   done(Error)        -> reject with that error
  #   done(falsy, non-null) -> resolve(null) (listener vetoed the operation)
  #   done()/done(truthy)   -> counts the listener as finished
  # @param {String} event
  # @param {*} args
  # @return {?Promise} layer
  emitAll: (event, args...) -> new Promise (resolve, reject) =>
    counter = @listeners(event).length
    return resolve(true) if not counter
    stop = false
    args.unshift event, (abort) -> # done(abort)
      return if stop
      if abort? # true and other truthy values have no effect
        if abort instanceof Error
          stop = true
          return reject(abort)
        if not abort
          stop = true
          return resolve(null)
      resolve(true) if --counter is 0
    @emit.apply(this, args)
  # Map of mutually conflicting tasks
  _conflictTask:
    'stateAll': ['state', 'hideAll', 'hide', 'show', 'insert']
    'state': ['hideAll', 'hide', 'show', 'insert', 'reset']
    'hideAll': ['hide', 'show', 'insert']
    'hide': ['show', 'insert', 'reset']
    'show': ['hideAll', 'hide', 'insert', 'reset']
    'insert': ['hideAll', 'hide', 'reset']
    'load': ['reset']
    'parse': ['reset']
    'reset': ['hide', 'show', 'insert', 'load', 'parse']
  # Resolve incompatible tasks: defer this task until conflicting runs finish
  # @param {String} name Function name, the task
  # @param {*} type Argument for the function, the task type
  # @return {Object} task
  _afterConflictTask: (name, type) ->
    task = @task[name]
    return task if task.run # already running: do nothing
    return task if not @_conflictTask[name]?.length
    conflictTasks =
      (_task.run for own _name, _task of @task when (@_conflictTask[name].indexOf(_name) isnt -1) and _task.run)
    return task if not conflictTasks.length
    task.run = Promise.all(conflictTasks).catch().then =>
      @_deleteTask(task)
      @[name](type)
    task
  # Define a task; only different tasks may run simultaneously.
  # Tasks of one name but different types run one after another.
  # A task of the same type is not started twice; callers share the previous task's result.
  # If there are conflicting tasks, the new task starts after they finish.
  # @param {String} name Function name, the task
  # @param {*} type Argument for the function, the task type
  # @return {Object} task If task[name].run exists, the task with this name is running; its type is task[name].type
  _task: (name, type) ->
    @task[name] = {} if not @task[name]
    task = @task[name]
    if task.run # already running
      return task if task.type is type
      task.run.then => @[name](type)
    task.type = type
    @_afterConflictTask(name, type)
  # Finish a task: drop its bookkeeping and optionally settle a promise
  # @param {Object} task
  # @param {?Function} fn resolve/reject
  # @param {*} arg
  # @return {?Promise} fn(arg)
  _deleteTask: (task, fn, arg) ->
    delete task.type
    delete task.run
    delete task.err
    fn(arg) if fn?
  # Test method of the layer; does nothing useful.
  # Exercises the event machinery; several simultaneous runs share one job.
  # NOTE(review): registers under the 'load' task name rather than 'test' — this
  # makes it share load's slot and conflict rules; confirm this is intentional.
  # @return {?Promise} layer
  test: (testValue) ->
    task = @_task('load', testValue)
    return task.run if task.run
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('test', testValue))
      emits.push(@emitAll('test.prop', testValue, 42)) if testValue is 24
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @testCount = 0 if not @testCount?
        @testCount++
        @log.debug('test action')
        emits = []
        emits.push(@emitAll('tested', testValue))
        emits.push(@emitAll('tested.prop', testValue, 42)) if testValue is 24
        Promise.all(emits).then (emits) =>
          return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Render a template with this layer as the data context.
  # NOTE(review): _.template(tpl, data) is the lodash <= 2.x call form; lodash 3+
  # returns a compiled function instead — verify the bundled lodash version.
  # @param {String} tpl Template
  # @return {String} text Rendered text
  render: (tpl) ->
    _.template(tpl, this)
  # Turn a partial path into the full path that will be downloaded and cached
  # @param {String} path
  # @return {String} path
  _originPath: (path) ->
    if @request.origin and path.search('//') isnt 0 and path.search('/') is 0 # relative paths are not supported
      path = @request.origin + path
    path
  # Load data for the layer (string, array, or object of paths, recursively)
  # @param {String|Array|Object} path data to load
  # @param {?String} key Key under which the data is stored
  # @param {Object} data Object to store results in
  # @return {?Promise} data
  _load: (path, key, data) ->
    # @log.debug('_load', path, key, data)
    @data = {} if not @data
    @_data = {} if not @_data
    if not path
      path = @download
      data = @data
      key = '<KEY>' # NOTE(review): '<KEY>' looks like a redacted placeholder — verify the original key name
    if key? and not data
      data = @data
    if _.isString(path)
      path = @render(path)
      path = @_originPath(path)
      if @request.cache[path]
        return Promise.resolve(@request.cache[path]) if not (key? and data)
        data[key] = @request.cache[path]
        return Promise.resolve(data)
      if not @request.loading[path]
        @request.loading[path] = @request.agent.get(path)
        @request.loading[path].set(@request.headers) if @request.headers
        @request.loading[path].set('x-layer-controller-proxy', 'true') # guards against recursion
        # NOTE(review): passing the receiver as the 2nd argument is bluebird 2.x
        # style; bluebird 3 expects {context: ...} — verify the bundled version.
        @request.loading[path] = Promise.promisify(@request.loading[path].end, @request.loading[path])()
      @request.loading[path].then (res) =>
        delete @request.loading[path]
        if res.error
          @log.error("load #{path}:", res.error?.message or res.error)
          return
        if res.body and Object.keys(res.body).length
          @request.cache[path] = res.body
        else
          @request.cache[path] = res.text
        @_data[path] = @request.cache[path]
        return @request.cache[path] if not (key? and data)
        data[key] = @request.cache[path]
        return data
    else if _.isArray(path)
      Promise.each path, (item, i, value) =>
        @_load(item, i, data)
      .then (results) ->
        data
    else if _.isObject(path)
      paths = []
      for own _key, _path of path
        if _.isObject(_path)
          data[_key] = {}
          paths.push(@_load(_path, _key, data[_key]))
        else
          paths.push(@_load(_path, _key, data))
      Promise.all(paths).then ->
        data
  # Load the layer's data (layer.data) from layer.download if not loaded yet.
  # Data is cached in layer.request.cache.
  # @return {?Promise} layer
  load: ->
    task = @_task('load')
    return task.run if task.run
    return Promise.resolve(this) if @data?
    return Promise.reject(new Error(@log.error('layer.download does not exist'))) if not @download
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('load'))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_load().then =>
          emits = []
          emits.push(@emitAll('loaded'))
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @_deleteTask(task, Promise.resolve, this)
        # , (err) -> throw task.err = err # _load REVIEW
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Re-render this layer and then all of its descendants
  # @param {Boolean} force
  # @return {Promise} layer
  reparseAll: (force) ->
    @reparse(force).then (layer) =>
      # return null if not layer # XXX is this needed?
      Promise.all(@childLayers.map (layer) -> layer.reparse(force)).catch().then =>
        this
  # Re-render the layer
  # @param {Boolean} force Hide and show the layer
  # @return {Promise} layer
  reparse: (force) ->
    if not @elementList?.length or not @isShown
      return Promise.resolve(null) if not force
      return @show(true).then (layer) =>
        return layer if layer
        @hideAll().then => layer
    @_show(true).then (layer) =>
      return layer if layer or not force
      @hideAll().then => layer
  # Parse the layer's template (layer.data.tpl) into html (layer.html)
  # @param {Boolean} force Parse even if layer.html already exists
  # @return {Promise} layer
  parse: (force = false) ->
    task = @_task('parse', force)
    return task.run if task.run
    return Promise.resolve(this) if @html? and not force
    return Promise.reject(new Error(@log.error('layer.data.tpl does not exist'))) if not @data?.tpl?
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('parse'))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @html = @render(@data.tpl)
        emits = []
        emits.push(@emitAll('parsed'))
        Promise.all(emits).then (emits) =>
          for success in emits when not success
            @html = null # XXX is this needed?
            return @_deleteTask(task, Promise.resolve, null)
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Load then parse the layer
  # @param {Boolean} force
  # @return {Promise} layer
  _make: (force) -> # load parse
    if @download
      @load().then (layer) =>
        return null if not layer
        # return this if not @data?.tpl?
        @parse(force)
    else
      return Promise.resolve(this) if not @data?.tpl?
      @parse(force)
  # Load and parse the layer, as a 'make' task with events
  # @param {Boolean} force
  # @return {Promise} layer
  make: (force = false) ->
    task = @_task('make', force)
    return task.run if task.run
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('make'))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_make(force).then (layer) =>
          return @_deleteTask(task, Promise.resolve, null) if not layer
          emits = []
          emits.push(@emitAll('made'))
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @_deleteTask(task, Promise.resolve, this)
        , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Find the element list; with no arguments, finds the layer's own element list
  # @param {Node|NodeList} node
  # @param {String} selectors
  # @return {NodeList|Array} elementList
  findElements: (node = @parentNode or @parentLayer?.elementList, selectors = @selectors) ->
    # @log.debug 'findElements' #, node, selectors
    throw new Error('findElements: node does not exist') if not node
    throw new Error('findElements: selectors does not exist') if not selectors
    return node.find(selectors) if node.find and node.html # arrays may have their own find
    return _.toArray(node.querySelectorAll(selectors)) if node.querySelectorAll
    throw new Error(@log.error('findElements: bad node')) if not node[0]?.querySelectorAll
    elementList = []
    for element in node
      elementList = elementList.concat(_.toArray(element.querySelectorAll(selectors)))
    elementList
  # Insert html into every element of the list
  # @param {NodeList} elementList
  # @param {String} html
  htmlElements: (elementList, html) ->
    throw new Error('htmlElements: elementList does not exist') if not elementList
    throw new Error('htmlElements: html does not exist') if not html?
    return elementList.html(html) if elementList.html
    Array::forEach.call elementList, (element) ->
      pasteHTML(element, html) # element.innerHTML = @html
  # Insert the layer; no special handling when this layer replaces another one
  # @param {Boolean} force Insert the layer even if @elementList already exists
  # @return {Promise} layer
  insert: (force = true) ->
    task = @_task('insert', force)
    return task.run if task.run
    return Promise.reject(new Error(@log.error('layer.selectors does not exist'))) if not @selectors
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('insert'))
      emits.push(@emitAll('insert.window')) if window?
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        unless not force and @elementList?.length
          @elementList = null
          elementList = @findElements()
          return @_deleteTask(task, Promise.resolve, null) if not elementList?.length
          @htmlElements(elementList, @html)
          @elementList = elementList
        emits = []
        # emits.push(@emitAll('inserted'))
        emits.push(@emitAll('domready'))
        # emits.push(@emitAll('inserted.window')) if window?
        emits.push(@emitAll('domready.window')) if window?
        Promise.all(emits).then (emits) =>
          for success in emits when not success
            @elementList = null
            return @_deleteTask(task, Promise.resolve, null)
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Prepare the layer and insert it if needed
  # @return {Promise} layer
  _show: (force) ->
    @make(force).then (layer) => # false - do not parse if html already exists
      return null if not layer
      @insert(force) # false - do not insert the layer if elementList already exists
  # Show the layer (load, parse, insert) if it is not shown; otherwise do nothing
  # @param {Boolean} force Parse even if html exists, insert even if elementList exists
  # @return {Promise} layer
  show: (force = false) ->
    task = @_task('show', force)
    return task.run if task.run
    return Promise.resolve(this) if @isShown and @elementList?.length
    return Promise.resolve(null) unless @parentNode or (@parentLayer and @parentLayer.isShown and @parentLayer.elementList?.length)
    # return Promise.resolve(this) if @isShown
    # return Promise.resolve(null) if @parentLayer and not @parentLayer.isShown
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('show'))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_show(force).then (layer) =>
          return @_deleteTask(task, Promise.resolve, null) if not layer
          emits = []
          # emits.push(@emitAll('showed'))
          emits.push(@emitAll('shown'))
          emits.push(@emitAll('shown.window')) if window?
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @isShown = true
            @_deleteTask(task, Promise.resolve, this)
        , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Hide all child layers starting from the last one, then hide this layer
  # @param {Boolean} force Try to hide even if the layer is already hidden
  # @return {Promise} layer
  hideAll: (force = false) ->
    @log.debug('hideAll', force)
    task = @_task('hideAll', force)
    return task.run if task.run
    return Promise.resolve(this) if not @isShown and not @elementList?.length and not force
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('hide.all'))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        Promise.all(@childLayers.map (layer) -> layer.hideAll(force)).catch().then =>
          @hide(force).then =>
            emits = []
            emits.push(@emitAll('hidden.all'))
            Promise.all(emits).then (emits) =>
              return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
              @_deleteTask(task, Promise.resolve, this)
          , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Hide the layer
  # @param {Boolean} force Try to hide even if already hidden, and re-find layer.elementList if it is missing
  # @return {Promise} layer
  hide: (force = false) ->
    @log.debug('hide', force)
    task = @_task('hide', force)
    return task.run if task.run
    return Promise.resolve(this) if not @isShown and not @elementList?.length and not force
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('hide'))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        if force and not @elementList?.length
          @htmlElements(@findElements(), '')
        else
          @htmlElements(@elementList, '')
        @isShown = false
        @elementList = null
        emits = []
        emits.push(@emitAll('hidden'))
        emits.push(@emitAll('hidden.window')) if window?
        Promise.all(emits).then (emits) =>
          return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Bring the layer to a state and recursively bring all child layers to it
  # @param {String} state State for the layer
  # @return {Promise} layer
  stateAll: (state = '') ->
    task = @_task('stateAll', state)
    return task.run if task.run
    task.run = new Promise (resolve, reject) =>
      # @log.debug('stateAll run', state)
      emits = []
      emits.push(@emitAll('state.all', state))
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @state(state).then =>
          Promise.all(@childLayers.map (layer) -> layer.stateAll(state)).catch().then =>
            emits = []
            emits.push(@emitAll('stated.all', state))
            Promise.all(emits).then (emits) =>
              return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
              @_deleteTask(task, Promise.resolve, this)
        , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Hide or show the layer depending on layer.regState
  # @param {String} state State for the layer
  # @return {Promise} layer
  _state: (state) -> # XXX errors are returned only by tasks
    return Promise.resolve(this) if not @selectors # XXX relying on layer.selectors is not very obvious
    return @hideAll() unless not @regState or (state.search(@regState) != -1)
    # delete @isShown # XXX needed or not?
    @show()
  # Bring the layer to a state; queued calls collapse so only the last one runs
  # @param {String} state State for the layer
  # @return {Promise} layer
  state: (state = '') ->
    # @log.debug('state', state)
    @task.state = {queue: []} if not @task.state
    task = @_afterConflictTask('state', state)
    if task.run # a state run is already in progress
      pushed = task.queue.push(state)
      return task.run.then => # run state() only if this is the last one in the queue
        return null if task.queue.length isnt pushed
        task.queue = [] # clear the queue
        task.run = @state(state)
    task.run = new Promise (resolve, reject) =>
      # @log.debug('state run')
      @task.state.next = state
      @task.state.equal = (if @task.state.current is @task.state.next then true else false)
      @task.state.progress = (if @task.state.current? and not @task.state.equal then true else false)
      @task.state.nofirst = @task.state.current? # not the first time
      emits = []
      emits.push(@emitAll('state', state))
      emits.push(@emitAll('state.window', state)) if window?
      emits.push(@emitAll('state.next', state)) if @task.state.nofirst # not the first time
      if @task.state.equal # the states are equal
        emits.push(@emitAll('state.equal', state))
        emits.push(@emitAll('state.equal.window', state)) if window?
      else # the states differ
        emits.push(@emitAll('state.different', state))
        emits.push(@emitAll('state.different.window', state)) if window?
      if @task.state.progress # not the first time and the states differ
        emits.push(@emitAll('state.progress', state))
        emits.push(@emitAll('state.progress.window', state)) if window?
      resolve Promise.all(emits).then (emits) =>
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_state(state).then (layer) =>
          return @_deleteTask(task, Promise.resolve, null) if not layer # the layer was not inserted or not hidden
          @task.state.last = @task.state.current
          @task.state.current = state
          delete @task.state.next
          emits = []
          emits.push(@emitAll('stated', state))
          if @task.state.nofirst # not the first time
            emits.push(@emitAll('stated.next', state))
            emits.push(@emitAll('stated.next.window', state)) if window?
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @_deleteTask(task, Promise.resolve, this)
        , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Clear the layer of transient data (synchronous part)
  # @param {String|Boolean} cacheKey Key of one cached download to drop, or true for all
  # @return {Boolean} success
  _reset: (cacheKey) ->
    delete @html
    delete @elementList # the layer may be isShown, but elementList is reset anyway
    delete @data
    return true if not cacheKey
    return false if not @_data or not @download
    if _.isString(cacheKey)
      path = @render(@download[cacheKey])
      return false if not path
      path = @_originPath(path)
      delete @_data[path]
      delete @request.cache[path]
      return true
    if _.isBoolean(cacheKey) # remove all related downloads
      for own path, data of @_data
        delete @_data[path]
        delete @request.cache[path]
      return true
    false
  # Clear the layer of transient data, as a 'reset' task
  # @param {String|Boolean} cacheKey
  # @return {Promise} success
  reset: (cacheKey) ->
    task = @_task('reset', cacheKey)
    return task.run if task.run
    task.run = new Promise (resolve, reject) =>
      process.nextTick =>
        @_deleteTask(task, resolve, @_reset(cacheKey))
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Get the layer's full dot-separated name
  # @return {String} name
  getFullName: ->
    return @name if not @parentLayer
    @parentLayer.getFullName() + '.' + @name
  # Create a layer; with a parent it joins the parent's tree, otherwise it
  # becomes the main layer and owns the shared request object.
  # @param {?LayerController} parentLayer
  constructor: (parentLayer) ->
    super wildcard: true
    @childLayers = []
    if parentLayer instanceof LayerController # determine layer.main
      @parentLayer = parentLayer
      @parentLayer.childLayers.push(this)
      @main = parentLayer.main
      @request = @main.request
      @layers = @main.layers
      @layers.push(this)
      @name = "#{@parentLayer.childLayers.length}(#{@layers.length})" if not @name
    else # the main layer, without a parentLayer
      @main = this
      @parentNode = document if document?
      @main.request = {}
      if window?
        @main.request.origin = # on the server, origin is determined differently
          window.location.origin or
          window.location.protocol + '//' + window.location.hostname +
          (if window.location.port then ':' + window.location.port else '')
      @main.request.agent = superagent
      @main.request.loading = {} # urls being loaded and their Promise
      @main.request.cache = {}
      @main.layers = [this]
    @main.name = parentLayer?.name or @main.name or 'main'
    @log = new Log(this)
    # @log.debug('new')
    @task = {}
    @config = {}
    @rel = {}
    LayerController.emit("init.#{@getFullName()}", this)
# Expose bundled dependencies as statics for consumers of the module
LayerController._ = _
LayerController.Promise = Promise
LayerController.superagent = superagent
LayerController.pasteHTML = pasteHTML
LayerController.Log = Log
module.exports = LayerController
LayerController.Module = Module
LayerController.EventEmitter2 = Module.EventEmitter2
LayerController.extend(new Module.EventEmitter2({wildcard: true})) # make the class itself an emitter
| true | _ = require('lodash')
Promise = require('bluebird')
superagent = require('superagent')
Module = require('./module')
pasteHTML = require('./pasteHTML')
Log = require('./log')
class LayerController extends Module
# Выполнить все события
# @param {String} event
# @param {*} args
# @return {?Promise} layer
emitAll: (event, args...) -> new Promise (resolve, reject) =>
counter = @listeners(event).length
return resolve(true) if not counter
stop = false
args.unshift event, (abort) -> # done(abort)
return if stop
if abort? # true и другие положительные не влияют
if abort instanceof Error
stop = true
return reject(abort)
if not abort
stop = true
return resolve(null)
resolve(true) if --counter is 0
@emit.apply(this, args)
# Список конфликтующих заданий
_conflictTask:
'stateAll': ['state', 'hideAll', 'hide', 'show', 'insert']
'state': ['hideAll', 'hide', 'show', 'insert', 'reset']
'hideAll': ['hide', 'show', 'insert']
'hide': ['show', 'insert', 'reset']
'show': ['hideAll', 'hide', 'insert', 'reset']
'insert': ['hideAll', 'hide', 'reset']
'load': ['reset']
'parse': ['reset']
'reset': ['hide', 'show', 'insert', 'load', 'parse']
# Разрешение несовместимых заданий
# @param {String} name Имя функции, задание
# @param {*} type Аргумент для функции, тип задания
# @return {Object} task
_afterConflictTask: (name, type) ->
task = @task[name]
return task if task.run # если есть выполнение то ничего не делать
return task if not @_conflictTask[name]?.length
conflictTasks =
(_task.run for own _name, _task of @task when (@_conflictTask[name].indexOf(_name) isnt -1) and _task.run)
return task if not conflictTasks.length
task.run = Promise.all(conflictTasks).catch().then =>
@_deleteTask(task)
@[name](type)
task
# Определить задание, только разные задания могут выполнятся одновременно
# Одни задания разного типа выполняются друг за другом
# Задания одного типа второй раз не выполняются, а возвращают результат предыдущего задания, когда оно завершится
# Если есть конфликтующие задания, то новое задание запустится после их выполнения
# @param {String} name Имя функции, задание
# @param {*} type Аргумент для функции, тип задания
# @return {Object} task Если есть task[name].run, то задание с этим именем выполняется, тип задания task[name].type
_task: (name, type) ->
@task[name] = {} if not @task[name]
task = @task[name]
if task.run # есть выполнение
return task if task.type is type
task.run.then => @[name](type)
task.type = type
@_afterConflictTask(name, type)
# Завершить задание
# @param {Object} task
# @param {?Function} fn resolve/reject
# @param {*} arg
# @return {?Promise} fn(arg)
_deleteTask: (task, fn, arg) ->
delete task.type
delete task.run
delete task.err
fn(arg) if fn?
# Тестовый метод слоя, ничего не делает
# События
# Несколько одновременных запусков запустят одну работу на всех
# @return {?Promise} layer
test: (testValue) ->
task = @_task('load', testValue)
return task.run if task.run
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('test', testValue))
emits.push(@emitAll('test.prop', testValue, 42)) if testValue is 24
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@testCount = 0 if not @testCount?
@testCount++
@log.debug('test action')
emits = []
emits.push(@emitAll('tested', testValue))
emits.push(@emitAll('tested.prop', testValue, 42)) if testValue is 24
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Рендер шаблона
# @param {String} tpl Шаблон
# @return {String} text Готовый текст
render: (tpl) ->
_.template(tpl, this)
# Получить на основе неполного пути полный путь, который будет скачиваться и запишется в кэш
# @param {String} path
# @return {String} path
_originPath: (path) ->
if @request.origin and path.search('//') isnt 0 and path.search('/') is 0 # относительные пути не поддерживаются
path = @request.origin + path
path
# Загрузить данные для слоя
# @param {String|Array|Object} path данные для загрузки
# @param {Object} data Объект для сохранения
# @param {?String} key Ключ по которому будут сохранены данные
# @return {?Promise} data
_load: (path, key, data) ->
# @log.debug('_load', path, key, data)
@data = {} if not @data
@_data = {} if not @_data
if not path
path = @download
data = @data
key = 'PI:KEY:<KEY>END_PI'
if key? and not data
data = @data
if _.isString(path)
path = @render(path)
path = @_originPath(path)
if @request.cache[path]
return Promise.resolve(@request.cache[path]) if not (key? and data)
data[key] = @request.cache[path]
return Promise.resolve(data)
if not @request.loading[path]
@request.loading[path] = @request.agent.get(path)
@request.loading[path].set(@request.headers) if @request.headers
@request.loading[path].set('x-layer-controller-proxy', 'true') # защита от рекурсии
@request.loading[path] = Promise.promisify(@request.loading[path].end, @request.loading[path])()
@request.loading[path].then (res) =>
delete @request.loading[path]
if res.error
@log.error("load #{path}:", res.error?.message or res.error)
return
if res.body and Object.keys(res.body).length
@request.cache[path] = res.body
else
@request.cache[path] = res.text
@_data[path] = @request.cache[path]
return @request.cache[path] if not (key? and data)
data[key] = @request.cache[path]
return data
else if _.isArray(path)
Promise.each path, (item, i, value) =>
@_load(item, i, data)
.then (results) ->
data
else if _.isObject(path)
paths = []
for own _key, _path of path
if _.isObject(_path)
data[_key] = {}
paths.push(@_load(_path, _key, data[_key]))
else
paths.push(@_load(_path, _key, data))
Promise.all(paths).then ->
data
# Загрузить (layer.download) данные (layer.data) слоя, если они еще не загружены или загрузить один файл
# Данные кэшируются в layer.request.cache
# @return {?Promise} layer
load: ->
task = @_task('load')
return task.run if task.run
return Promise.resolve(this) if @data?
return Promise.reject(new Error(@log.error('layer.download does not exist'))) if not @download
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('load'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_load().then =>
emits = []
emits.push(@emitAll('loaded'))
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
# , (err) -> throw task.err = err # _load REVIEW
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Перерисовать слой и затем всех потомков
# @param {Boolean} force
# @return {Promise} layer
reparseAll: (force) ->
@reparse(force).then (layer) =>
# return null if not layer # XXX нужно ли это?
Promise.all(@childLayers.map (layer) -> layer.reparse(force)).catch().then =>
this
# Перерисовать слой
# @param {Boolean} force Скрывать и показывать слой
# @return {Promise} layer
reparse: (force) ->
if not @elementList?.length or not @isShown
return Promise.resolve(null) if not force
return @show(true).then (layer) =>
return layer if layer
@hideAll().then => layer
@_show(true).then (layer) =>
return layer if layer or not force
@hideAll().then => layer
# Распарсить шаблон (layer.data.tpl) слоя в html (layer.html)
# @param {Boolean} force Парсить даже если есть layer.html
# @return {Promise} layer
parse: (force = false) ->
task = @_task('parse', force)
return task.run if task.run
return Promise.resolve(this) if @html? and not force
return Promise.reject(new Error(@log.error('layer.data.tpl does not exist'))) if not @data?.tpl?
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('parse'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@html = @render(@data.tpl)
emits = []
emits.push(@emitAll('parsed'))
Promise.all(emits).then (emits) =>
for success in emits when not success
@html = null # XXX нужно ли это?
return @_deleteTask(task, Promise.resolve, null)
@_deleteTask(task, Promise.resolve, this)
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Загрузить, распарсить слой
# @param {Boolean} force
# @return {Promise} layer
_make: (force) -> # load parse
if @download
@load().then (layer) =>
return null if not layer
# return this if not @data?.tpl?
@parse(force)
else
return Promise.resolve(this) if not @data?.tpl?
@parse(force)
# Загрузить, распарсить слой
# @param {Boolean} force
# @return {Promise} layer
make: (force = false) ->
task = @_task('make', force)
return task.run if task.run
task.run = new Promise (resolve, reject) =>
emits = []
emits.push(@emitAll('make'))
resolve Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_make(force).then (layer) =>
return @_deleteTask(task, Promise.resolve, null) if not layer
emits = []
emits.push(@emitAll('made'))
Promise.all(emits).then (emits) =>
return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
@_deleteTask(task, Promise.resolve, this)
, (err) -> throw task.err = err
.catch (err) =>
@log.error(err) if not task.err?
@_deleteTask(task)
throw err
# Найти список элементов, если аргументы не переданы ищет список элементов слоя
# @param {Node|NodeList} node
# @param {String} selectors
# @return {NodeList|Array} elementList
findElements: (node = @parentNode or @parentLayer?.elementList, selectors = @selectors) ->
# @log.debug 'findElements' #, node, selectors
throw new Error('findElements: node does not exist') if not node
throw new Error('findElements: selectors does not exist') if not selectors
return node.find(selectors) if node.find and node.html # у массивов может быть свой find
return _.toArray(node.querySelectorAll(selectors)) if node.querySelectorAll
throw new Error(@log.error('findElements: bad node')) if not node[0]?.querySelectorAll
elementList = []
for element in node
elementList = elementList.concat(_.toArray(element.querySelectorAll(selectors)))
elementList
# Вставить html в список элементов
# @param {NodeList} elementList
# @param {String} html
htmlElements: (elementList, html) ->
throw new Error('htmlElements: elementList does not exist') if not elementList
throw new Error('htmlElements: html does not exist') if not html?
return elementList.html(html) if elementList.html
Array::forEach.call elementList, (element) ->
pasteHTML(element, html) # element.innerHTML = @html
  # Insert the layer. There is no special handling for the case where this
  # layer replaces some other layer already occupying the same elements.
  # @param {Boolean} force Insert the layer even if @elementList already exists
  # @return {Promise} layer
  insert: (force = true) ->
    task = @_task('insert', force)
    return task.run if task.run # an identical insert is already running; share its promise
    return Promise.reject(new Error(@log.error('layer.selectors does not exist'))) if not @selectors
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('insert'))
      emits.push(@emitAll('insert.window')) if window?
      resolve Promise.all(emits).then (emits) =>
        # any 'insert' listener returning a falsy value cancels the insertion (resolves with null)
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        unless not force and @elementList?.length
          @elementList = null
          elementList = @findElements()
          return @_deleteTask(task, Promise.resolve, null) if not elementList?.length
          @htmlElements(elementList, @html)
          @elementList = elementList
        emits = []
        # emits.push(@emitAll('inserted'))
        emits.push(@emitAll('domready'))
        # emits.push(@emitAll('inserted.window')) if window?
        emits.push(@emitAll('domready.window')) if window?
        Promise.all(emits).then (emits) =>
          # a falsy 'domready' listener result rolls the insertion back
          for success in emits when not success
            @elementList = null
            return @_deleteTask(task, Promise.resolve, null)
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
# Приготовить, вставить если нужно
# @return {Promise} layer
_show: (force) ->
@make(force).then (layer) => # false - не парсить если уже есть html
return null if not layer
@insert(force) # false - не вставлять слой если уже есть elementList
  # Show the layer (load, parse, insert) if it is not shown yet.
  # If the layer is already shown, do nothing.
  # @param {Boolean} force Parse even if html already exists, insert even if elementList already exists
  # @return {Promise} layer
  show: (force = false) ->
    task = @_task('show', force)
    return task.run if task.run # an identical show is already running; share its promise
    return Promise.resolve(this) if @isShown and @elementList?.length
    # cannot show without a parent node or a shown parent layer with elements
    return Promise.resolve(null) unless @parentNode or (@parentLayer and @parentLayer.isShown and @parentLayer.elementList?.length)
    # return Promise.resolve(this) if @isShown
    # return Promise.resolve(null) if @parentLayer and not @parentLayer.isShown
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('show'))
      resolve Promise.all(emits).then (emits) =>
        # any 'show' listener returning a falsy value cancels showing (resolves with null)
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_show(force).then (layer) =>
          return @_deleteTask(task, Promise.resolve, null) if not layer
          emits = []
          # emits.push(@emitAll('showed'))
          emits.push(@emitAll('shown'))
          emits.push(@emitAll('shown.window')) if window?
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @isShown = true
            @_deleteTask(task, Promise.resolve, this)
      , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Hide all child layers and then hide this layer.
  # NOTE(review): the original comment said children are hidden "starting from
  # the last one", but Promise.all below runs them concurrently — verify intent.
  # @param {Boolean} force Try to hide even if the layer is already hidden
  # @return {Promise} layer
  hideAll: (force = false) ->
    @log.debug('hideAll', force)
    task = @_task('hideAll', force)
    return task.run if task.run # an identical hideAll is already running; share its promise
    return Promise.resolve(this) if not @isShown and not @elementList?.length and not force
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('hide.all'))
      resolve Promise.all(emits).then (emits) =>
        # any 'hide.all' listener returning a falsy value cancels hiding (resolves with null)
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        Promise.all(@childLayers.map (layer) -> layer.hideAll(force)).catch().then =>
          @hide(force).then =>
            emits = []
            emits.push(@emitAll('hidden.all'))
            Promise.all(emits).then (emits) =>
              return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
              @_deleteTask(task, Promise.resolve, this)
      , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Hide the layer
  # @param {Boolean} force Try to hide even if the layer is already hidden,
  #                        and re-find layer.elementList if it is missing
  # @return {Promise} layer
  hide: (force = false) ->
    @log.debug('hide', force)
    task = @_task('hide', force)
    return task.run if task.run # an identical hide is already running; share its promise
    return Promise.resolve(this) if not @isShown and not @elementList?.length and not force
    task.run = new Promise (resolve, reject) =>
      emits = []
      emits.push(@emitAll('hide'))
      resolve Promise.all(emits).then (emits) =>
        # any 'hide' listener returning a falsy value cancels hiding (resolves with null)
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        if force and not @elementList?.length
          @htmlElements(@findElements(), '') # re-find the elements, then clear them
        else
          @htmlElements(@elementList, '')
        @isShown = false
        @elementList = null
        emits = []
        emits.push(@emitAll('hidden'))
        emits.push(@emitAll('hidden.window')) if window?
        Promise.all(emits).then (emits) =>
          return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
          @_deleteTask(task, Promise.resolve, this)
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Apply the state to this layer and recursively to all child layers
  # @param {String} state State to apply to the layer
  # @return {Promise} layer
  stateAll: (state = '') ->
    task = @_task('stateAll', state)
    return task.run if task.run # an identical stateAll is already running; share its promise
    task.run = new Promise (resolve, reject) =>
      # @log.debug('stateAll run', state)
      emits = []
      emits.push(@emitAll('state.all', state))
      resolve Promise.all(emits).then (emits) =>
        # any 'state.all' listener returning a falsy value cancels the transition (resolves with null)
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @state(state).then =>
          Promise.all(@childLayers.map (layer) -> layer.stateAll(state)).catch().then =>
            emits = []
            emits.push(@emitAll('stated.all', state))
            Promise.all(emits).then (emits) =>
              return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
              @_deleteTask(task, Promise.resolve, this)
      , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
# Скрыть или показать слой в зависимости от состояния layer.regState
# @param {String} state Состояние для слоя
# @return {Promise} layer
_state: (state) -> # XXX возврат ошибки только от заданий
return Promise.resolve(this) if not @selectors # XXX layer.selectors не очень очевидно
return @hideAll() unless not @regState or (state.search(@regState) != -1)
# delete @isShown # XXX нужно или нет?
@show()
  # Apply the state to this layer
  # @param {String} state State to apply to the layer
  # @return {Promise} layer
  state: (state = '') ->
    # @log.debug('state', state)
    @task.state = {queue: []} if not @task.state # persistent transition-tracking slot (current/last/next)
    task = @_afterConflictTask('state', state)
    if task.run # a state() run is already in progress
      pushed = task.queue.push(state)
      return task.run.then => # re-run state() only if this is the last one queued
        return null if task.queue.length isnt pushed
        task.queue = [] # clear the queue
        task.run = @state(state)
    task.run = new Promise (resolve, reject) =>
      # @log.debug('state run')
      @task.state.next = state
      @task.state.equal = (if @task.state.current is @task.state.next then true else false)
      @task.state.progress = (if @task.state.current? and not @task.state.equal then true else false)
      @task.state.nofirst = @task.state.current? # not the first transition
      emits = []
      emits.push(@emitAll('state', state))
      emits.push(@emitAll('state.window', state)) if window?
      emits.push(@emitAll('state.next', state)) if @task.state.nofirst # not the first transition
      if @task.state.equal # the states are equal
        emits.push(@emitAll('state.equal', state))
        emits.push(@emitAll('state.equal.window', state)) if window?
      else # the states differ
        emits.push(@emitAll('state.different', state))
        emits.push(@emitAll('state.different.window', state)) if window?
      if @task.state.progress # not the first transition and the states differ
        emits.push(@emitAll('state.progress', state))
        emits.push(@emitAll('state.progress.window', state)) if window?
      resolve Promise.all(emits).then (emits) =>
        # any listener returning a falsy value cancels the transition (resolves with null)
        return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
        @_state(state).then (layer) =>
          return @_deleteTask(task, Promise.resolve, null) if not layer # the layer failed to insert or to hide
          @task.state.last = @task.state.current
          @task.state.current = state
          delete @task.state.next
          emits = []
          emits.push(@emitAll('stated', state))
          if @task.state.nofirst # not the first transition
            emits.push(@emitAll('stated.next', state))
            emits.push(@emitAll('stated.next.window', state)) if window?
          Promise.all(emits).then (emits) =>
            return @_deleteTask(task, Promise.resolve, null) for success in emits when not success
            @_deleteTask(task, Promise.resolve, this)
        , (err) -> throw task.err = err
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
  # Clear the layer's transient data
  # @param {String|Boolean} cacheKey A download key to also purge from the cache,
  #                                  or true to purge every related download
  # @return {Boolean} success
  _reset: (cacheKey) ->
    delete @html
    delete @elementList # the layer may still be isShown, but elementList is reset anyway
    delete @data
    return true if not cacheKey
    return false if not @_data or not @download
    if _.isString(cacheKey)
      path = @render(@download[cacheKey])
      return false if not path
      path = @_originPath(path)
      delete @_data[path]
      delete @request.cache[path]
      return true
    if _.isBoolean(cacheKey) # remove all related downloads
      for own path, data of @_data
        delete @_data[path]
        delete @request.cache[path]
      return true
    false
  # Clear the layer's transient data (deferred to the next tick)
  # @param {String|Boolean} cacheKey Passed through to _reset()
  # @return {Promise} success
  reset: (cacheKey) ->
    task = @_task('reset', cacheKey)
    return task.run if task.run # an identical reset is already running; share its promise
    task.run = new Promise (resolve, reject) =>
      process.nextTick =>
        @_deleteTask(task, resolve, @_reset(cacheKey))
    .catch (err) =>
      @log.error(err) if not task.err?
      @_deleteTask(task)
      throw err
# Получить полное имя слоя
# @return {String} name
getFullName: ->
return @name if not @parentLayer
@parentLayer.getFullName() + '.' + @name
  # @param {LayerController} parentLayer Parent layer; presumably may also be a
  #        plain object carrying a name for the main layer (its ?.name is read
  #        in the else branch) — TODO confirm against callers
  constructor: (parentLayer) ->
    super wildcard: true
    @childLayers = []
    if parentLayer instanceof LayerController # child layer: determines layer.main and shares its services
      @parentLayer = parentLayer
      @parentLayer.childLayers.push(this)
      @main = parentLayer.main
      @request = @main.request
      @layers = @main.layers
      @layers.push(this)
      @name = "#{@parentLayer.childLayers.length}(#{@layers.length})" if not @name
    else # main layer without a parentLayer
      @main = this
      @parentNode = document if document?
      @main.request = {}
      if window?
        @main.request.origin = # on the server, origin is determined differently
          window.location.origin or
          window.location.protocol + '//' + window.location.hostname +
          (if window.location.port then ':' + window.location.port else '')
      @main.request.agent = superagent
      @main.request.loading = {} # urls being loaded and their Promises
      @main.request.cache = {}
      @main.layers = [this]
      @main.name = parentLayer?.name or @main.name or 'main'
    @log = new Log(this)
    # @log.debug('new')
    @task = {}
    @config = {}
    @rel = {}
    LayerController.emit("init.#{@getFullName()}", this)
# Expose internal dependencies on the class for consumers and tests
LayerController._ = _
LayerController.Promise = Promise
LayerController.superagent = superagent
LayerController.pasteHTML = pasteHTML
LayerController.Log = Log
module.exports = LayerController
LayerController.Module = Module
LayerController.EventEmitter2 = Module.EventEmitter2
LayerController.extend(new Module.EventEmitter2({wildcard: true})) # make the class itself an event emitter
|
[
{
"context": "#\n# Tables main file\n#\n# Copyright (C) 2013 Nikolay Nemshilov\n#\n\ncore = require('core')\n$ = require('d",
"end": 61,
"score": 0.9998782873153687,
"start": 44,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/table/main.coffee | lovely-io/lovely.io-stl | 2 | #
# Tables main file
#
# Copyright (C) 2013 Nikolay Nemshilov
#
core = require('core')
$ = require('dom')
ext = core.ext
Class = core.Class
Element = $.Element
include 'src/table'
$.Wrapper.set 'table', Table
$(document).delegate 'table th[data-sort]', 'click', ->
@parent('table').sort(@index(), !@hasClass('asc'))
exports = ext Table,
version: '%{version}'
| 30018 | #
# Tables main file
#
# Copyright (C) 2013 <NAME>
#
core = require('core')
$ = require('dom')
ext = core.ext
Class = core.Class
Element = $.Element
include 'src/table'
$.Wrapper.set 'table', Table
$(document).delegate 'table th[data-sort]', 'click', ->
@parent('table').sort(@index(), !@hasClass('asc'))
exports = ext Table,
version: '%{version}'
| true | #
# Tables main file
#
# Copyright (C) 2013 PI:NAME:<NAME>END_PI
#
core = require('core')
$ = require('dom')
ext = core.ext
Class = core.Class
Element = $.Element
include 'src/table'
$.Wrapper.set 'table', Table
$(document).delegate 'table th[data-sort]', 'click', ->
@parent('table').sort(@index(), !@hasClass('asc'))
exports = ext Table,
version: '%{version}'
|
[
{
"context": ", ->\n messageValue = memo().is -> { from: \"stenver1010@gmail.com\", to: \"stenver1010@gmail.com\" }\n beforeEac",
"end": 805,
"score": 0.9999133348464966,
"start": 784,
"tag": "EMAIL",
"value": "stenver1010@gmail.com"
},
{
"context": "emo().is -> { from: \"stenver1010@gmail.com\", to: \"stenver1010@gmail.com\" }\n beforeEach ->\n sinon.spy(emai",
"end": 834,
"score": 0.9999114274978638,
"start": 813,
"tag": "EMAIL",
"value": "stenver1010@gmail.com"
}
] | test/email_consumer_test.coffee | stenver/node-mailing-service | 0 | ConsumerObject = require './test_helper'
EmailConsumer = require './../lib/email_consumer'
describe 'EmailConsumer', ->
messageConsumer = memo().is -> new ConsumerObject()
emailjsserver = memo().is -> {send: ->}
emailConsumer = memo().is -> new EmailConsumer(emailjsserver(), messageConsumer())
describe '#start', ->
beforeEach ->
sinon.spy(messageConsumer(), "on")
emailConsumer().start()
afterEach ->
messageConsumer().on.restore()
it 'starts consuming messages', ->
expect(messageConsumer().on).to.be.calledOnce
context 'when listening for messages', ->
beforeEach ->
emailConsumer().start()
context 'when receiving a message', ->
context 'and valid message', ->
messageValue = memo().is -> { from: "stenver1010@gmail.com", to: "stenver1010@gmail.com" }
beforeEach ->
sinon.spy(emailjsserver(), "send")
messageConsumer().emit('message', {value: JSON.stringify(messageValue()) } )
it "sends and email with the message", ->
expect(emailjsserver().send).to.be.calledOnce
context 'and invalid message', ->
messageValue = memo().is -> { }
it "sends and email with the message", ->
emailserverspy = sinon.spy(emailjsserver(), "send")
messageConsumer().emit('message', {value: JSON.stringify(messageValue()) } )
expect(emailserverspy.called).to.equal(false)
| 3823 | ConsumerObject = require './test_helper'
EmailConsumer = require './../lib/email_consumer'
describe 'EmailConsumer', ->
messageConsumer = memo().is -> new ConsumerObject()
emailjsserver = memo().is -> {send: ->}
emailConsumer = memo().is -> new EmailConsumer(emailjsserver(), messageConsumer())
describe '#start', ->
beforeEach ->
sinon.spy(messageConsumer(), "on")
emailConsumer().start()
afterEach ->
messageConsumer().on.restore()
it 'starts consuming messages', ->
expect(messageConsumer().on).to.be.calledOnce
context 'when listening for messages', ->
beforeEach ->
emailConsumer().start()
context 'when receiving a message', ->
context 'and valid message', ->
messageValue = memo().is -> { from: "<EMAIL>", to: "<EMAIL>" }
beforeEach ->
sinon.spy(emailjsserver(), "send")
messageConsumer().emit('message', {value: JSON.stringify(messageValue()) } )
it "sends and email with the message", ->
expect(emailjsserver().send).to.be.calledOnce
context 'and invalid message', ->
messageValue = memo().is -> { }
it "sends and email with the message", ->
emailserverspy = sinon.spy(emailjsserver(), "send")
messageConsumer().emit('message', {value: JSON.stringify(messageValue()) } )
expect(emailserverspy.called).to.equal(false)
| true | ConsumerObject = require './test_helper'
EmailConsumer = require './../lib/email_consumer'
describe 'EmailConsumer', ->
messageConsumer = memo().is -> new ConsumerObject()
emailjsserver = memo().is -> {send: ->}
emailConsumer = memo().is -> new EmailConsumer(emailjsserver(), messageConsumer())
describe '#start', ->
beforeEach ->
sinon.spy(messageConsumer(), "on")
emailConsumer().start()
afterEach ->
messageConsumer().on.restore()
it 'starts consuming messages', ->
expect(messageConsumer().on).to.be.calledOnce
context 'when listening for messages', ->
beforeEach ->
emailConsumer().start()
context 'when receiving a message', ->
context 'and valid message', ->
messageValue = memo().is -> { from: "PI:EMAIL:<EMAIL>END_PI", to: "PI:EMAIL:<EMAIL>END_PI" }
beforeEach ->
sinon.spy(emailjsserver(), "send")
messageConsumer().emit('message', {value: JSON.stringify(messageValue()) } )
it "sends and email with the message", ->
expect(emailjsserver().send).to.be.calledOnce
context 'and invalid message', ->
messageValue = memo().is -> { }
it "sends and email with the message", ->
emailserverspy = sinon.spy(emailjsserver(), "send")
messageConsumer().emit('message', {value: JSON.stringify(messageValue()) } )
expect(emailserverspy.called).to.equal(false)
|
[
{
"context": "ite description (for SEO)\n\t\t\tdescription: \"\"\"\n\t\t\t\tStephen Butterfill's research on philosophical issues in co",
"end": 1446,
"score": 0.6680141091346741,
"start": 1437,
"tag": "NAME",
"value": "Stephen B"
},
{
"context": "n (for SEO)\n\t\t\tdescription: \"\"\"\n\t\t\t\tStephen Butterfill's research on philosophical issues in cognitive d",
"end": 1455,
"score": 0.6679059863090515,
"start": 1451,
"tag": "NAME",
"value": "fill"
},
{
"context": "\t\t\"\"\"\n\n\t\t\t# The website author's name\n\t\t\tauthor: \"Stephen A. Butterfill\"\n\n\t\t\t# The website author's email\n\t\t\temail: \"s.bu",
"end": 1825,
"score": 0.9998543858528137,
"start": 1804,
"tag": "NAME",
"value": "Stephen A. Butterfill"
},
{
"context": "fill\"\n\n\t\t\t# The website author's email\n\t\t\temail: \"s.butterfill@warwick.ac.uk\"\n\n\n\n\t\t# -----------------------------\n\t\t# Helpe",
"end": 1894,
"score": 0.9999300241470337,
"start": 1871,
"tag": "EMAIL",
"value": "s.butterfill@warwick.ac"
}
] | docpad.coffee | butterfill/www-butterfill | 0 | # The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
#http://docpad.org/docs/troubleshoot#watching-doesn-t-work-works-only-some-of-the-time-or-i-get-eisdir-errors
watchOptions: preferredMethods: ['watchFile','watch']
plugins:
#this avoids problems with svg which require text elements!
text:
matchElementRegexString: 't'
# no need for .html extension in links
cleanurls:
static: true
# syntax for this plugin seems to change continuously
raw:
raw:
# rsync
# -r recursive
# -u skip file if the destination file is newer
# -l copy any links over as well
command: ['rsync', '-rul', 'src/raw/', 'out/' ]
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
get_url: (document) ->
document.url
# Specify some site properties
site:
# The production url of our website
url: "http://butterfill.com"
# Here are some old site urls that you would like to redirect from
oldUrls: [
#'www.website.com',
#'website.herokuapp.com'
]
# The default title of our website
title: "Stephen Butterfill"
# The website description (for SEO)
description: """
Stephen Butterfill's research on philosophical issues in cognitive development; esp. joint action and mindreading
"""
# The website keywords (for SEO) separated by commas
keywords: """
philosophy, mind, development, psychology, cognitive science, mindreading, joint action, collective intentionality
"""
# The website author's name
author: "Stephen A. Butterfill"
# The website author's email
email: "s.butterfill@warwick.ac.uk"
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@site.title} | #{@document.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.keywords or []).join(', ')
# =================================
# Collections
# These are special collections that our website makes available to us
collections:
# For instance, this one will fetch in all documents that have pageOrder set within their meta data
pages: (database) ->
database.findAllLive({pageOrder: $exists: true}, [pageOrder:1,title:1])
# This one, will fetch in all documents that have the tag "post" specified in their meta data
posts: (database) ->
database.findAllLive({relativeOutDirPath:'posts'},[date:-1])
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Server Extend
# Used to add our own custom routes to the server before the docpad routes are added
serverExtend: (opts) ->
# Extract the server from the options
{server} = opts
docpad = @docpad
# As we are now running in an event,
# ensure we are using the latest copy of the docpad configuraiton
# and fetch our urls from it
latestConfig = docpad.getConfig()
oldUrls = latestConfig.templateData.site.oldUrls or []
newUrl = latestConfig.templateData.site.url
# Redirect any requests accessing one of our sites oldUrls to the new site url
server.use (req,res,next) ->
if req.headers.host in oldUrls
res.redirect(newUrl+req.url, 301)
else
next()
}
# Export our DocPad Configuration
module.exports = docpadConfig | 177503 | # The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
#http://docpad.org/docs/troubleshoot#watching-doesn-t-work-works-only-some-of-the-time-or-i-get-eisdir-errors
watchOptions: preferredMethods: ['watchFile','watch']
plugins:
#this avoids problems with svg which require text elements!
text:
matchElementRegexString: 't'
# no need for .html extension in links
cleanurls:
static: true
# syntax for this plugin seems to change continuously
raw:
raw:
# rsync
# -r recursive
# -u skip file if the destination file is newer
# -l copy any links over as well
command: ['rsync', '-rul', 'src/raw/', 'out/' ]
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
get_url: (document) ->
document.url
# Specify some site properties
site:
# The production url of our website
url: "http://butterfill.com"
# Here are some old site urls that you would like to redirect from
oldUrls: [
#'www.website.com',
#'website.herokuapp.com'
]
# The default title of our website
title: "Stephen Butterfill"
# The website description (for SEO)
description: """
<NAME>utter<NAME>'s research on philosophical issues in cognitive development; esp. joint action and mindreading
"""
# The website keywords (for SEO) separated by commas
keywords: """
philosophy, mind, development, psychology, cognitive science, mindreading, joint action, collective intentionality
"""
# The website author's name
author: "<NAME>"
# The website author's email
email: "<EMAIL>.uk"
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@site.title} | #{@document.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.keywords or []).join(', ')
# =================================
# Collections
# These are special collections that our website makes available to us
collections:
# For instance, this one will fetch in all documents that have pageOrder set within their meta data
pages: (database) ->
database.findAllLive({pageOrder: $exists: true}, [pageOrder:1,title:1])
# This one, will fetch in all documents that have the tag "post" specified in their meta data
posts: (database) ->
database.findAllLive({relativeOutDirPath:'posts'},[date:-1])
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Server Extend
# Used to add our own custom routes to the server before the docpad routes are added
serverExtend: (opts) ->
# Extract the server from the options
{server} = opts
docpad = @docpad
# As we are now running in an event,
# ensure we are using the latest copy of the docpad configuraiton
# and fetch our urls from it
latestConfig = docpad.getConfig()
oldUrls = latestConfig.templateData.site.oldUrls or []
newUrl = latestConfig.templateData.site.url
# Redirect any requests accessing one of our sites oldUrls to the new site url
server.use (req,res,next) ->
if req.headers.host in oldUrls
res.redirect(newUrl+req.url, 301)
else
next()
}
# Export our DocPad Configuration
module.exports = docpadConfig | true | # The DocPad Configuration File
# It is simply a CoffeeScript Object which is parsed by CSON
docpadConfig = {
#http://docpad.org/docs/troubleshoot#watching-doesn-t-work-works-only-some-of-the-time-or-i-get-eisdir-errors
watchOptions: preferredMethods: ['watchFile','watch']
plugins:
#this avoids problems with svg which require text elements!
text:
matchElementRegexString: 't'
# no need for .html extension in links
cleanurls:
static: true
# syntax for this plugin seems to change continuously
raw:
raw:
# rsync
# -r recursive
# -u skip file if the destination file is newer
# -l copy any links over as well
command: ['rsync', '-rul', 'src/raw/', 'out/' ]
# =================================
# Template Data
# These are variables that will be accessible via our templates
# To access one of these within our templates, refer to the FAQ: https://github.com/bevry/docpad/wiki/FAQ
templateData:
get_url: (document) ->
document.url
# Specify some site properties
site:
# The production url of our website
url: "http://butterfill.com"
# Here are some old site urls that you would like to redirect from
oldUrls: [
#'www.website.com',
#'website.herokuapp.com'
]
# The default title of our website
title: "Stephen Butterfill"
# The website description (for SEO)
description: """
PI:NAME:<NAME>END_PIutterPI:NAME:<NAME>END_PI's research on philosophical issues in cognitive development; esp. joint action and mindreading
"""
# The website keywords (for SEO) separated by commas
keywords: """
philosophy, mind, development, psychology, cognitive science, mindreading, joint action, collective intentionality
"""
# The website author's name
author: "PI:NAME:<NAME>END_PI"
# The website author's email
email: "PI:EMAIL:<EMAIL>END_PI.uk"
# -----------------------------
# Helper Functions
# Get the prepared site/document title
# Often we would like to specify particular formatting to our page's title
# we can apply that formatting here
getPreparedTitle: ->
# if we have a document title, then we should use that and suffix the site's title onto it
if @document.title
"#{@site.title} | #{@document.title}"
# if our document does not have it's own title, then we should just use the site's title
else
@site.title
# Get the prepared site/document description
getPreparedDescription: ->
# if we have a document description, then we should use that, otherwise use the site's description
@document.description or @site.description
# Get the prepared site/document keywords
getPreparedKeywords: ->
# Merge the document keywords with the site keywords
@site.keywords.concat(@document.keywords or []).join(', ')
# =================================
# Collections
# These are special collections that our website makes available to us
collections:
# For instance, this one will fetch in all documents that have pageOrder set within their meta data
pages: (database) ->
database.findAllLive({pageOrder: $exists: true}, [pageOrder:1,title:1])
# This one, will fetch in all documents that have the tag "post" specified in their meta data
posts: (database) ->
database.findAllLive({relativeOutDirPath:'posts'},[date:-1])
# =================================
# DocPad Events
# Here we can define handlers for events that DocPad fires
# You can find a full listing of events on the DocPad Wiki
events:
# Server Extend
# Used to add our own custom routes to the server before the docpad routes are added
serverExtend: (opts) ->
# Extract the server from the options
{server} = opts
docpad = @docpad
# As we are now running in an event,
# ensure we are using the latest copy of the docpad configuraiton
# and fetch our urls from it
latestConfig = docpad.getConfig()
oldUrls = latestConfig.templateData.site.oldUrls or []
newUrl = latestConfig.templateData.site.url
# Redirect any requests accessing one of our sites oldUrls to the new site url
server.use (req,res,next) ->
if req.headers.host in oldUrls
res.redirect(newUrl+req.url, 301)
else
next()
}
# Export our DocPad Configuration
module.exports = docpadConfig |
[
{
"context": "an your opponent!\"\n }\n {\n name: 'Brawlwood'\n difficulty: 4\n id: 'brawlwood'\n ",
"end": 1751,
"score": 0.9969778060913086,
"start": 1742,
"tag": "NAME",
"value": "Brawlwood"
}
] | app/views/play/ladder_home.coffee | oaugereau/codecombat | 0 | View = require 'views/kinds/RootView'
template = require 'templates/play/ladder_home'
LevelSession = require 'models/LevelSession'
CocoCollection = require 'collections/CocoCollection'
class LevelSessionsCollection extends CocoCollection
url: ''
model: LevelSession
constructor: (model) ->
super()
@url = "/db/user/#{me.id}/level.sessions?project=state.complete,levelID"
module.exports = class LadderHomeView extends View
id: "ladder-home-view"
template: template
constructor: (options) ->
super options
@levelStatusMap = {}
@sessions = new LevelSessionsCollection()
@sessions.fetch()
@listenToOnce @sessions, 'sync', @onSessionsLoaded
onSessionsLoaded: (e) ->
for session in @sessions.models
@levelStatusMap[session.get('levelID')] = if session.get('state')?.complete then 'complete' else 'started'
@render()
getRenderData: (context={}) ->
context = super(context)
arenas = [
{
name: 'Greed'
difficulty: 4
id: 'greed'
image: '/file/db/level/53558b5a9914f5a90d7ccddb/greed_banner.jpg'
description: "Liked Dungeon Arena and Gold Rush? Put them together in this economic arena!"
}
{
name: 'Dungeon Arena'
difficulty: 3
id: 'dungeon-arena'
image: '/file/db/level/53173f76c269d400000543c2/Level%20Banner%20Dungeon%20Arena.jpg'
description: "Play head-to-head against fellow Wizards in a dungeon melee!"
}
{
name: 'Gold Rush'
difficulty: 3
id: 'gold-rush'
image: '/file/db/level/533353722a61b7ca6832840c/Gold-Rush.png'
description: "Prove you are better at collecting gold than your opponent!"
}
{
name: 'Brawlwood'
difficulty: 4
id: 'brawlwood'
image: '/file/db/level/52d97ecd32362bc86e004e87/Level%20Banner%20Brawlwood.jpg'
description: "Combat the armies of other Wizards in a strategic forest arena! (Fast computer required.)"
}
]
context.campaigns = [
{id: "multiplayer", name: "Multiplayer Arenas", description: "... in which you code head-to-head against other players.", levels: arenas}
]
context.levelStatusMap = @levelStatusMap
context
afterRender: ->
super()
@$el.find('.modal').on 'shown.bs.modal', ->
$('input:visible:first', @).focus()
| 3585 | View = require 'views/kinds/RootView'
template = require 'templates/play/ladder_home'
LevelSession = require 'models/LevelSession'
CocoCollection = require 'collections/CocoCollection'
class LevelSessionsCollection extends CocoCollection
url: ''
model: LevelSession
constructor: (model) ->
super()
@url = "/db/user/#{me.id}/level.sessions?project=state.complete,levelID"
module.exports = class LadderHomeView extends View
id: "ladder-home-view"
template: template
constructor: (options) ->
super options
@levelStatusMap = {}
@sessions = new LevelSessionsCollection()
@sessions.fetch()
@listenToOnce @sessions, 'sync', @onSessionsLoaded
onSessionsLoaded: (e) ->
for session in @sessions.models
@levelStatusMap[session.get('levelID')] = if session.get('state')?.complete then 'complete' else 'started'
@render()
getRenderData: (context={}) ->
context = super(context)
arenas = [
{
name: 'Greed'
difficulty: 4
id: 'greed'
image: '/file/db/level/53558b5a9914f5a90d7ccddb/greed_banner.jpg'
description: "Liked Dungeon Arena and Gold Rush? Put them together in this economic arena!"
}
{
name: 'Dungeon Arena'
difficulty: 3
id: 'dungeon-arena'
image: '/file/db/level/53173f76c269d400000543c2/Level%20Banner%20Dungeon%20Arena.jpg'
description: "Play head-to-head against fellow Wizards in a dungeon melee!"
}
{
name: 'Gold Rush'
difficulty: 3
id: 'gold-rush'
image: '/file/db/level/533353722a61b7ca6832840c/Gold-Rush.png'
description: "Prove you are better at collecting gold than your opponent!"
}
{
name: '<NAME>'
difficulty: 4
id: 'brawlwood'
image: '/file/db/level/52d97ecd32362bc86e004e87/Level%20Banner%20Brawlwood.jpg'
description: "Combat the armies of other Wizards in a strategic forest arena! (Fast computer required.)"
}
]
context.campaigns = [
{id: "multiplayer", name: "Multiplayer Arenas", description: "... in which you code head-to-head against other players.", levels: arenas}
]
context.levelStatusMap = @levelStatusMap
context
afterRender: ->
super()
@$el.find('.modal').on 'shown.bs.modal', ->
$('input:visible:first', @).focus()
| true | View = require 'views/kinds/RootView'
template = require 'templates/play/ladder_home'
LevelSession = require 'models/LevelSession'
CocoCollection = require 'collections/CocoCollection'
class LevelSessionsCollection extends CocoCollection
url: ''
model: LevelSession
constructor: (model) ->
super()
@url = "/db/user/#{me.id}/level.sessions?project=state.complete,levelID"
module.exports = class LadderHomeView extends View
id: "ladder-home-view"
template: template
constructor: (options) ->
super options
@levelStatusMap = {}
@sessions = new LevelSessionsCollection()
@sessions.fetch()
@listenToOnce @sessions, 'sync', @onSessionsLoaded
onSessionsLoaded: (e) ->
for session in @sessions.models
@levelStatusMap[session.get('levelID')] = if session.get('state')?.complete then 'complete' else 'started'
@render()
getRenderData: (context={}) ->
context = super(context)
arenas = [
{
name: 'Greed'
difficulty: 4
id: 'greed'
image: '/file/db/level/53558b5a9914f5a90d7ccddb/greed_banner.jpg'
description: "Liked Dungeon Arena and Gold Rush? Put them together in this economic arena!"
}
{
name: 'Dungeon Arena'
difficulty: 3
id: 'dungeon-arena'
image: '/file/db/level/53173f76c269d400000543c2/Level%20Banner%20Dungeon%20Arena.jpg'
description: "Play head-to-head against fellow Wizards in a dungeon melee!"
}
{
name: 'Gold Rush'
difficulty: 3
id: 'gold-rush'
image: '/file/db/level/533353722a61b7ca6832840c/Gold-Rush.png'
description: "Prove you are better at collecting gold than your opponent!"
}
{
name: 'PI:NAME:<NAME>END_PI'
difficulty: 4
id: 'brawlwood'
image: '/file/db/level/52d97ecd32362bc86e004e87/Level%20Banner%20Brawlwood.jpg'
description: "Combat the armies of other Wizards in a strategic forest arena! (Fast computer required.)"
}
]
context.campaigns = [
{id: "multiplayer", name: "Multiplayer Arenas", description: "... in which you code head-to-head against other players.", levels: arenas}
]
context.levelStatusMap = @levelStatusMap
context
afterRender: ->
super()
@$el.find('.modal').on 'shown.bs.modal', ->
$('input:visible:first', @).focus()
|
[
{
"context": "io.com\n\nCopyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>\n\nLicensed under the Apache License, Version 2.0 ",
"end": 194,
"score": 0.9999203085899353,
"start": 178,
"tag": "EMAIL",
"value": "info@chaibio.com"
}
] | frontend/javascripts/app/directives/header-status.js.coffee | MakerButt/chaipcr | 1 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.App.directive 'headerStatus', [
'Experiment'
'$state'
'Status'
'TestInProgressHelper'
'$rootScope'
'expName'
'ModalError'
'$location'
'$timeout'
'alerts'
(Experiment, $state, Status, TestInProgressHelper, $rootScope, expName, ModalError, $location, $timeout, alerts) ->
restrict: 'EA'
replace: true
transclude: true
controller: ['$scope', ($scope) ->
this.start_confirm_show = $scope.start_confirm_show
this.checkButtonStatus = () ->
if $scope.start_confirm_show
$scope.start_confirm_show = false
this.start_confirm_show = $scope.start_confirm_show
]
scope:
experimentId: '=?'
templateUrl: 'app/views/directives/header-status.html'
link: ($scope, elem, attrs, controller) ->
INIT_LOADING = 2
experiment_id = null
$scope.expLoading = true
$scope.statusLoading = INIT_LOADING
$scope.start_confirm_show = false
$scope.dataAnalysis = false
$scope.isStarted = false
$scope.isOpenedError = false
counter = 0
stringUrl = "run-experiment"
if ($location.path().indexOf(stringUrl) == -1)
$scope.dataAnalysis = true
$scope.isLoading = () ->
$scope.expLoading || $scope.statusLoading
$scope.show = ->
if attrs.experimentId then (experiment_id and $scope.status) else $scope.status
onResize = ->
$timeout ()->
elem.find('.left-content').css('width', '40%')
right_width = elem.find('.right-content').width() + 20
elem.find('.left-content').css('width', 'calc(100% - ' + right_width + 'px)')
elem.find('.right-content').css('opacity', '1')
, 10
getExperiment = (cb) ->
return if !experiment_id
Experiment.get(id: experiment_id).then (resp) ->
$scope.expLoading = false
cb resp.experiment if cb
$scope.is_holding = false
$scope.enterState = false
$scope.done = false
$scope.state = 'idle' #by default
checkStatus = () ->
getExperiment (exp) ->
$scope.experiment = exp
onResize()
# if !$scope.experiment.completed_at
# $timeout checkStatus, 1000
#checkStatus()
$scope.$on 'status:data:updated', (e, data, oldData) ->
return if !data
return if !data.experiment_controller
counter++
$scope.statusData = data
$scope.state = data.experiment_controller.machine.state
$scope.thermal_state = data.experiment_controller.machine.thermal_state
$scope.oldState = oldData?.experiment_controller?.machine?.state || 'NONE'
$scope.isCurrentExp = parseInt(data.experiment_controller.experiment?.id) is parseInt(experiment_id)
if $scope.isCurrentExp is true
if !$scope.isStarted and data.experiment_controller.machine.state != 'idle'
$scope.isStarted = true
else if $scope.isStarted and data.experiment_controller.machine.state == 'idle'
$scope.isStarted = false
$scope.enterState = $scope.isCurrentExp
#console.log $scope.enterState
if ((($scope.oldState isnt $scope.state or !$scope.experiment))) and experiment_id
getExperiment (exp) ->
$scope.experiment = exp
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, exp)
if $scope.state is 'idle' && $scope.experiment.completed_at
$scope.done = true
else if $scope.state is 'idle' && !$scope.experiment.completed_at
checkStatus()
else
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, $scope.experiment)
$scope.timeRemaining = TestInProgressHelper.timeRemaining(data)
$scope.timePercentage = TestInProgressHelper.timePercentage(data)
if $scope.statusLoading > 0
$scope.statusLoading--
#in progress
if $scope.state isnt 'idle' and $scope.state isnt 'complete' and $scope.isCurrentExp
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d #{$scope.timePercentage || 0}%,#5d8329 #{$scope.timePercentage || 0}%,#5d8329 100%)"
else if $scope.state is 'complete' and $scope.isCurrentExp
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d 100%,#5d8329 100%,#5d8329 100%)"
else if $scope.state is 'idle' and !$scope.dataAnalysis and $scope.enterState
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d 100%,#5d8329 100%,#5d8329 100%)"
else
$scope.backgroundStyle = {}
onResize()
$scope.getDuration = ->
return 0 if !$scope?.experiment?.completed_at
Experiment.getExperimentDuration($scope.experiment)
$scope.startExperiment = ->
$scope.isStarted = true
Experiment.startExperiment(experiment_id).then ->
$scope.experiment.started_at = true
$scope.expLoading = true
$scope.statusLoading = INIT_LOADING
getExperiment (exp) ->
$scope.experiment = exp
$rootScope.$broadcast 'experiment:started', experiment_id
if $state.is('edit-protocol')
max_cycle = Experiment.getMaxExperimentCycle($scope.experiment)
$state.go('run-experiment', {'id': experiment_id, 'chart': 'amplification', 'max_cycle': max_cycle})
.catch (resp) ->
console.log('error')
alerts.showMessage(resp.data.status.error, $scope);
$scope.startConfirm = ->
$scope.start_confirm_show = true
controller.start_confirm_show = $scope.start_confirm_show
$scope.stopExperiment = ->
Experiment.stopExperiment($scope.experiment.id)
$scope.resumeExperiment = ->
Experiment.resumeExperiment($scope.experiment.id)
$scope.expName = (truncate_length) ->
return Experiment.truncateName($scope.experiment.name, truncate_length)
$scope.viewError = ->
$scope.isOpenedError = true
$scope.closeError = ->
$scope.isOpenedError = false
$scope.$on 'expName:Updated', ->
$scope.experiment?.name = expName.name
$scope.$on 'complete', ->
getExperiment (exp) ->
$scope.dataAnalysis = true
$scope.experiment = exp
$scope.$watch 'experimentId', (id) ->
return if !id
experiment_id = id
getExperiment (exp) ->
$scope.experiment = exp
$scope.$on 'window:resize', ->
onResize()
]
| 128993 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.App.directive 'headerStatus', [
'Experiment'
'$state'
'Status'
'TestInProgressHelper'
'$rootScope'
'expName'
'ModalError'
'$location'
'$timeout'
'alerts'
(Experiment, $state, Status, TestInProgressHelper, $rootScope, expName, ModalError, $location, $timeout, alerts) ->
restrict: 'EA'
replace: true
transclude: true
controller: ['$scope', ($scope) ->
this.start_confirm_show = $scope.start_confirm_show
this.checkButtonStatus = () ->
if $scope.start_confirm_show
$scope.start_confirm_show = false
this.start_confirm_show = $scope.start_confirm_show
]
scope:
experimentId: '=?'
templateUrl: 'app/views/directives/header-status.html'
link: ($scope, elem, attrs, controller) ->
INIT_LOADING = 2
experiment_id = null
$scope.expLoading = true
$scope.statusLoading = INIT_LOADING
$scope.start_confirm_show = false
$scope.dataAnalysis = false
$scope.isStarted = false
$scope.isOpenedError = false
counter = 0
stringUrl = "run-experiment"
if ($location.path().indexOf(stringUrl) == -1)
$scope.dataAnalysis = true
$scope.isLoading = () ->
$scope.expLoading || $scope.statusLoading
$scope.show = ->
if attrs.experimentId then (experiment_id and $scope.status) else $scope.status
onResize = ->
$timeout ()->
elem.find('.left-content').css('width', '40%')
right_width = elem.find('.right-content').width() + 20
elem.find('.left-content').css('width', 'calc(100% - ' + right_width + 'px)')
elem.find('.right-content').css('opacity', '1')
, 10
getExperiment = (cb) ->
return if !experiment_id
Experiment.get(id: experiment_id).then (resp) ->
$scope.expLoading = false
cb resp.experiment if cb
$scope.is_holding = false
$scope.enterState = false
$scope.done = false
$scope.state = 'idle' #by default
checkStatus = () ->
getExperiment (exp) ->
$scope.experiment = exp
onResize()
# if !$scope.experiment.completed_at
# $timeout checkStatus, 1000
#checkStatus()
$scope.$on 'status:data:updated', (e, data, oldData) ->
return if !data
return if !data.experiment_controller
counter++
$scope.statusData = data
$scope.state = data.experiment_controller.machine.state
$scope.thermal_state = data.experiment_controller.machine.thermal_state
$scope.oldState = oldData?.experiment_controller?.machine?.state || 'NONE'
$scope.isCurrentExp = parseInt(data.experiment_controller.experiment?.id) is parseInt(experiment_id)
if $scope.isCurrentExp is true
if !$scope.isStarted and data.experiment_controller.machine.state != 'idle'
$scope.isStarted = true
else if $scope.isStarted and data.experiment_controller.machine.state == 'idle'
$scope.isStarted = false
$scope.enterState = $scope.isCurrentExp
#console.log $scope.enterState
if ((($scope.oldState isnt $scope.state or !$scope.experiment))) and experiment_id
getExperiment (exp) ->
$scope.experiment = exp
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, exp)
if $scope.state is 'idle' && $scope.experiment.completed_at
$scope.done = true
else if $scope.state is 'idle' && !$scope.experiment.completed_at
checkStatus()
else
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, $scope.experiment)
$scope.timeRemaining = TestInProgressHelper.timeRemaining(data)
$scope.timePercentage = TestInProgressHelper.timePercentage(data)
if $scope.statusLoading > 0
$scope.statusLoading--
#in progress
if $scope.state isnt 'idle' and $scope.state isnt 'complete' and $scope.isCurrentExp
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d #{$scope.timePercentage || 0}%,#5d8329 #{$scope.timePercentage || 0}%,#5d8329 100%)"
else if $scope.state is 'complete' and $scope.isCurrentExp
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d 100%,#5d8329 100%,#5d8329 100%)"
else if $scope.state is 'idle' and !$scope.dataAnalysis and $scope.enterState
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d 100%,#5d8329 100%,#5d8329 100%)"
else
$scope.backgroundStyle = {}
onResize()
$scope.getDuration = ->
return 0 if !$scope?.experiment?.completed_at
Experiment.getExperimentDuration($scope.experiment)
$scope.startExperiment = ->
$scope.isStarted = true
Experiment.startExperiment(experiment_id).then ->
$scope.experiment.started_at = true
$scope.expLoading = true
$scope.statusLoading = INIT_LOADING
getExperiment (exp) ->
$scope.experiment = exp
$rootScope.$broadcast 'experiment:started', experiment_id
if $state.is('edit-protocol')
max_cycle = Experiment.getMaxExperimentCycle($scope.experiment)
$state.go('run-experiment', {'id': experiment_id, 'chart': 'amplification', 'max_cycle': max_cycle})
.catch (resp) ->
console.log('error')
alerts.showMessage(resp.data.status.error, $scope);
$scope.startConfirm = ->
$scope.start_confirm_show = true
controller.start_confirm_show = $scope.start_confirm_show
$scope.stopExperiment = ->
Experiment.stopExperiment($scope.experiment.id)
$scope.resumeExperiment = ->
Experiment.resumeExperiment($scope.experiment.id)
$scope.expName = (truncate_length) ->
return Experiment.truncateName($scope.experiment.name, truncate_length)
$scope.viewError = ->
$scope.isOpenedError = true
$scope.closeError = ->
$scope.isOpenedError = false
$scope.$on 'expName:Updated', ->
$scope.experiment?.name = expName.name
$scope.$on 'complete', ->
getExperiment (exp) ->
$scope.dataAnalysis = true
$scope.experiment = exp
$scope.$watch 'experimentId', (id) ->
return if !id
experiment_id = id
getExperiment (exp) ->
$scope.experiment = exp
$scope.$on 'window:resize', ->
onResize()
]
| true | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.App.directive 'headerStatus', [
'Experiment'
'$state'
'Status'
'TestInProgressHelper'
'$rootScope'
'expName'
'ModalError'
'$location'
'$timeout'
'alerts'
(Experiment, $state, Status, TestInProgressHelper, $rootScope, expName, ModalError, $location, $timeout, alerts) ->
restrict: 'EA'
replace: true
transclude: true
controller: ['$scope', ($scope) ->
this.start_confirm_show = $scope.start_confirm_show
this.checkButtonStatus = () ->
if $scope.start_confirm_show
$scope.start_confirm_show = false
this.start_confirm_show = $scope.start_confirm_show
]
scope:
experimentId: '=?'
templateUrl: 'app/views/directives/header-status.html'
link: ($scope, elem, attrs, controller) ->
INIT_LOADING = 2
experiment_id = null
$scope.expLoading = true
$scope.statusLoading = INIT_LOADING
$scope.start_confirm_show = false
$scope.dataAnalysis = false
$scope.isStarted = false
$scope.isOpenedError = false
counter = 0
stringUrl = "run-experiment"
if ($location.path().indexOf(stringUrl) == -1)
$scope.dataAnalysis = true
$scope.isLoading = () ->
$scope.expLoading || $scope.statusLoading
$scope.show = ->
if attrs.experimentId then (experiment_id and $scope.status) else $scope.status
onResize = ->
$timeout ()->
elem.find('.left-content').css('width', '40%')
right_width = elem.find('.right-content').width() + 20
elem.find('.left-content').css('width', 'calc(100% - ' + right_width + 'px)')
elem.find('.right-content').css('opacity', '1')
, 10
getExperiment = (cb) ->
return if !experiment_id
Experiment.get(id: experiment_id).then (resp) ->
$scope.expLoading = false
cb resp.experiment if cb
$scope.is_holding = false
$scope.enterState = false
$scope.done = false
$scope.state = 'idle' #by default
checkStatus = () ->
getExperiment (exp) ->
$scope.experiment = exp
onResize()
# if !$scope.experiment.completed_at
# $timeout checkStatus, 1000
#checkStatus()
$scope.$on 'status:data:updated', (e, data, oldData) ->
return if !data
return if !data.experiment_controller
counter++
$scope.statusData = data
$scope.state = data.experiment_controller.machine.state
$scope.thermal_state = data.experiment_controller.machine.thermal_state
$scope.oldState = oldData?.experiment_controller?.machine?.state || 'NONE'
$scope.isCurrentExp = parseInt(data.experiment_controller.experiment?.id) is parseInt(experiment_id)
if $scope.isCurrentExp is true
if !$scope.isStarted and data.experiment_controller.machine.state != 'idle'
$scope.isStarted = true
else if $scope.isStarted and data.experiment_controller.machine.state == 'idle'
$scope.isStarted = false
$scope.enterState = $scope.isCurrentExp
#console.log $scope.enterState
if ((($scope.oldState isnt $scope.state or !$scope.experiment))) and experiment_id
getExperiment (exp) ->
$scope.experiment = exp
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, exp)
if $scope.state is 'idle' && $scope.experiment.completed_at
$scope.done = true
else if $scope.state is 'idle' && !$scope.experiment.completed_at
checkStatus()
else
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, $scope.experiment)
$scope.timeRemaining = TestInProgressHelper.timeRemaining(data)
$scope.timePercentage = TestInProgressHelper.timePercentage(data)
if $scope.statusLoading > 0
$scope.statusLoading--
#in progress
if $scope.state isnt 'idle' and $scope.state isnt 'complete' and $scope.isCurrentExp
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d #{$scope.timePercentage || 0}%,#5d8329 #{$scope.timePercentage || 0}%,#5d8329 100%)"
else if $scope.state is 'complete' and $scope.isCurrentExp
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d 100%,#5d8329 100%,#5d8329 100%)"
else if $scope.state is 'idle' and !$scope.dataAnalysis and $scope.enterState
$scope.backgroundStyle =
background: "-webkit-linear-gradient(left, #63b02d 0%,#63b02d 100%,#5d8329 100%,#5d8329 100%)"
else
$scope.backgroundStyle = {}
onResize()
$scope.getDuration = ->
return 0 if !$scope?.experiment?.completed_at
Experiment.getExperimentDuration($scope.experiment)
$scope.startExperiment = ->
$scope.isStarted = true
Experiment.startExperiment(experiment_id).then ->
$scope.experiment.started_at = true
$scope.expLoading = true
$scope.statusLoading = INIT_LOADING
getExperiment (exp) ->
$scope.experiment = exp
$rootScope.$broadcast 'experiment:started', experiment_id
if $state.is('edit-protocol')
max_cycle = Experiment.getMaxExperimentCycle($scope.experiment)
$state.go('run-experiment', {'id': experiment_id, 'chart': 'amplification', 'max_cycle': max_cycle})
.catch (resp) ->
console.log('error')
alerts.showMessage(resp.data.status.error, $scope);
$scope.startConfirm = ->
$scope.start_confirm_show = true
controller.start_confirm_show = $scope.start_confirm_show
$scope.stopExperiment = ->
Experiment.stopExperiment($scope.experiment.id)
$scope.resumeExperiment = ->
Experiment.resumeExperiment($scope.experiment.id)
$scope.expName = (truncate_length) ->
return Experiment.truncateName($scope.experiment.name, truncate_length)
$scope.viewError = ->
$scope.isOpenedError = true
$scope.closeError = ->
$scope.isOpenedError = false
$scope.$on 'expName:Updated', ->
$scope.experiment?.name = expName.name
$scope.$on 'complete', ->
getExperiment (exp) ->
$scope.dataAnalysis = true
$scope.experiment = exp
$scope.$watch 'experimentId', (id) ->
return if !id
experiment_id = id
getExperiment (exp) ->
$scope.experiment = exp
$scope.$on 'window:resize', ->
onResize()
]
|
[
{
"context": " ]\n series: [\n {\n name: \"Total\"\n type: \"pareto\"\n yAxis: 1\n ",
"end": 1326,
"score": 0.8853781223297119,
"start": 1321,
"tag": "NAME",
"value": "Total"
},
{
"context": "aseSeries: 1\n }\n {\n name: \"Respondents\"\n type: \"column\"\n zIndex: 2",
"end": 1461,
"score": 0.9904316067695618,
"start": 1454,
"tag": "NAME",
"value": "Respond"
},
{
"context": "es: 1\n }\n {\n name: \"Respondents\"\n type: \"column\"\n zIndex: 2\n ",
"end": 1465,
"score": 0.6341879963874817,
"start": 1461,
"tag": "NAME",
"value": "ents"
}
] | app/assets/javascripts/components/pareto.coffee | sleepepi/helpds.study | 0 | @paretoCharts = ->
$("[data-object~=pareto-chart]").each((index, element) ->
$(element).highcharts
credits: enabled: false
chart:
type: "column"
colors: [
"#2196f3" # blue
"#fff176" # yellow
]
title: text: "Participant not interested"
tooltip:
shared: true
formatter: ->
@points.reduce ((s, point) ->
if point.series.type == "pareto"
s + "<br/><span style=\"color:#{point.color}\">\u25CF</span> #{point.series.name}: #{Math.floor(point.y)}%"
else
s + "<br/><span style=\"color:#{point.color}\">\u25CF</span> #{point.series.name}: #{point.y}"
), '<b>' + @x + '</b>'
xAxis:
categories: [
"Time commitment too great"
"Too difficult traveling to appointments"
"Study compensation too low"
"Not comfortable with randomization"
"Unable to complete screening PSG"
"Other reasons"
]
crosshair: true
yAxis: [
{ title: text: "" }
{
title: text: ""
minPadding: 0
maxPadding: 0
max: 100
min: 0
opposite: true
labels: format: "{value}%"
}
]
series: [
{
name: "Total"
type: "pareto"
yAxis: 1
zIndex: 10
baseSeries: 1
}
{
name: "Respondents"
type: "column"
zIndex: 2
data: [
67
59
27
15
8
4
]
}
]
)
| 103962 | @paretoCharts = ->
$("[data-object~=pareto-chart]").each((index, element) ->
$(element).highcharts
credits: enabled: false
chart:
type: "column"
colors: [
"#2196f3" # blue
"#fff176" # yellow
]
title: text: "Participant not interested"
tooltip:
shared: true
formatter: ->
@points.reduce ((s, point) ->
if point.series.type == "pareto"
s + "<br/><span style=\"color:#{point.color}\">\u25CF</span> #{point.series.name}: #{Math.floor(point.y)}%"
else
s + "<br/><span style=\"color:#{point.color}\">\u25CF</span> #{point.series.name}: #{point.y}"
), '<b>' + @x + '</b>'
xAxis:
categories: [
"Time commitment too great"
"Too difficult traveling to appointments"
"Study compensation too low"
"Not comfortable with randomization"
"Unable to complete screening PSG"
"Other reasons"
]
crosshair: true
yAxis: [
{ title: text: "" }
{
title: text: ""
minPadding: 0
maxPadding: 0
max: 100
min: 0
opposite: true
labels: format: "{value}%"
}
]
series: [
{
name: "<NAME>"
type: "pareto"
yAxis: 1
zIndex: 10
baseSeries: 1
}
{
name: "<NAME> <NAME>"
type: "column"
zIndex: 2
data: [
67
59
27
15
8
4
]
}
]
)
| true | @paretoCharts = ->
$("[data-object~=pareto-chart]").each((index, element) ->
$(element).highcharts
credits: enabled: false
chart:
type: "column"
colors: [
"#2196f3" # blue
"#fff176" # yellow
]
title: text: "Participant not interested"
tooltip:
shared: true
formatter: ->
@points.reduce ((s, point) ->
if point.series.type == "pareto"
s + "<br/><span style=\"color:#{point.color}\">\u25CF</span> #{point.series.name}: #{Math.floor(point.y)}%"
else
s + "<br/><span style=\"color:#{point.color}\">\u25CF</span> #{point.series.name}: #{point.y}"
), '<b>' + @x + '</b>'
xAxis:
categories: [
"Time commitment too great"
"Too difficult traveling to appointments"
"Study compensation too low"
"Not comfortable with randomization"
"Unable to complete screening PSG"
"Other reasons"
]
crosshair: true
yAxis: [
{ title: text: "" }
{
title: text: ""
minPadding: 0
maxPadding: 0
max: 100
min: 0
opposite: true
labels: format: "{value}%"
}
]
series: [
{
name: "PI:NAME:<NAME>END_PI"
type: "pareto"
yAxis: 1
zIndex: 10
baseSeries: 1
}
{
name: "PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI"
type: "column"
zIndex: 2
data: [
67
59
27
15
8
4
]
}
]
)
|
[
{
"context": " accunt and sign in\", ->\n user_data = {\"email\": \"user_test@example.com\", \"password\": \"1234\"}\n\n it \"POST /api/user/signu",
"end": 342,
"score": 0.9999251365661621,
"start": 321,
"tag": "EMAIL",
"value": "user_test@example.com"
},
{
"context": "= {\"email\": \"user_test@example.com\", \"password\": \"1234\"}\n\n it \"POST /api/user/signup/ should return 202",
"end": 362,
"score": 0.9992292523384094,
"start": 358,
"tag": "PASSWORD",
"value": "1234"
},
{
"context": "otten password flow\", ->\n user_data = {\"email\": \"marcin.mincer+mean@gmail.com\", \"password\": \"1234\"}\n token = null\n it \"Should",
"end": 1346,
"score": 0.9999287128448486,
"start": 1318,
"tag": "EMAIL",
"value": "marcin.mincer+mean@gmail.com"
},
{
"context": "il\": \"marcin.mincer+mean@gmail.com\", \"password\": \"1234\"}\n token = null\n it \"Should not find the user f",
"end": 1366,
"score": 0.9992612600326538,
"start": 1362,
"tag": "PASSWORD",
"value": "1234"
},
{
"context": " .expect 404, done\n return\n\n it \"Add new user user_test_mail@example.com\", (done) ->\n agent.post(\"/api/user/signup/\")\n ",
"end": 1594,
"score": 0.9999173283576965,
"start": 1568,
"tag": "EMAIL",
"value": "user_test_mail@example.com"
},
{
"context": "reset/\")\n .send({token: token, password: \"new_password\"})\n .expect 200, done\n return\n\n it \"P",
"end": 2225,
"score": 0.9993898868560791,
"start": 2213,
"tag": "PASSWORD",
"value": "new_password"
},
{
"context": "reset/\")\n .send({token: token, password: \"new_password\"})\n .expect 404, done\n return\n\n it \"P",
"end": 2440,
"score": 0.9993767142295837,
"start": 2428,
"tag": "PASSWORD",
"value": "new_password"
},
{
"context": "st(\"/api/user/signin/\")\n .send({\"email\": \"marcin.mincer+mean@gmail.com\", \"password\": \"new_password\"})\n .expect 2",
"end": 2987,
"score": 0.9999029040336609,
"start": 2959,
"tag": "EMAIL",
"value": "marcin.mincer+mean@gmail.com"
},
{
"context": "il\": \"marcin.mincer+mean@gmail.com\", \"password\": \"new_password\"})\n .expect 200, done\n return\n \n ret",
"end": 3015,
"score": 0.9994087219238281,
"start": 3003,
"tag": "PASSWORD",
"value": "new_password"
},
{
"context": "Changing user data\", ->\n user_data1 = {\"email\": \"test.user+1@gmail.com\", \"password\": \"1234\"}\n user_data2 = {\"email\": \"t",
"end": 3151,
"score": 0.9998947978019714,
"start": 3130,
"tag": "EMAIL",
"value": "test.user+1@gmail.com"
},
{
"context": "= {\"email\": \"test.user+1@gmail.com\", \"password\": \"1234\"}\n user_data2 = {\"email\": \"test.user+2@gmail.com",
"end": 3171,
"score": 0.9994502067565918,
"start": 3167,
"tag": "PASSWORD",
"value": "1234"
},
{
"context": "m\", \"password\": \"1234\"}\n user_data2 = {\"email\": \"test.user+2@gmail.com\", \"password\": \"1234\"}\n\n it \"Add new user test.us",
"end": 3221,
"score": 0.9998927712440491,
"start": 3200,
"tag": "EMAIL",
"value": "test.user+2@gmail.com"
},
{
"context": "= {\"email\": \"test.user+2@gmail.com\", \"password\": \"1234\"}\n\n it \"Add new user test.user+1@example.com\", (",
"end": 3241,
"score": 0.9994390606880188,
"start": 3237,
"tag": "PASSWORD",
"value": "1234"
},
{
"context": "mail.com\", \"password\": \"1234\"}\n\n it \"Add new user test.user+1@example.com\", (done) ->\n agent.post(\"/api/user/signup/\")\n ",
"end": 3287,
"score": 0.9995540976524353,
"start": 3264,
"tag": "EMAIL",
"value": "test.user+1@example.com"
},
{
"context": "er_data1.email + \"/\")\n .send({\"password\": \"new_password\"})\n .expect 200\n .end (err, res) ->",
"end": 4373,
"score": 0.9993897080421448,
"start": 4361,
"tag": "PASSWORD",
"value": "new_password"
},
{
"context": " .send({\"email\": user_data1.email, \"password\": \"new_password\"})\n .expect 200, done\n ret",
"end": 4727,
"score": 0.9993476867675781,
"start": 4715,
"tag": "PASSWORD",
"value": "new_password"
},
{
"context": "ct 200, done\n return\n\n it \"Add second new user test.user+2@example.com and signout\", (done) ->\n agent.post(\"/api/user",
"end": 4831,
"score": 0.9998822808265686,
"start": 4808,
"tag": "EMAIL",
"value": "test.user+2@example.com"
},
{
"context": " .send({\"email\": user_data1.email, \"password\": \"new_password\"})\n .expect 200\n .end (err, res) ->",
"end": 5265,
"score": 0.9992969632148743,
"start": 5253,
"tag": "PASSWORD",
"value": "new_password"
}
] | development/server/test/server.coffee | SwingDev/MEAN-starter | 0 | process.env.NODE_ENV = "test"
request = require("supertest")
assert = require('chai').assert
app = require("../app")
expect = require('chai').expect
config = require("../config/config")
agent = request.agent(app) # agent should persist sessions
describe "Create accunt and sign in", ->
user_data = {"email": "user_test@example.com", "password": "1234"}
it "POST /api/user/signup/ should return 202 CREATED", (done) ->
agent.post("/api/user/signup/")
.send(user_data)
.expect 202, done
return
it "GET /api/user/current/ should return 200 OK", (done) ->
agent.get("/api/user/current/")
.expect 200, done
return
it "POST /api/user/signout/ should return 200 OK", (done) ->
agent.post("/api/user/signout/")
.expect 200, done
return
it "GET /api/user/current/ should return 403", (done) ->
agent.get("/api/user/current/")
.expect 403, done
return
it "POST /api/user/signin/ should return 200 OK", (done) ->
agent.post("/api/user/signin/")
.send(user_data)
.expect 200, done
return
it "GET /api/user/current/ should return 200 OK", (done) ->
agent.get("/api/user/current/")
.expect 200, done
return
return
describe "Frogotten password flow", ->
user_data = {"email": "marcin.mincer+mean@gmail.com", "password": "1234"}
token = null
it "Should not find the user first", (done) ->
agent.post("/api/user/forgot/")
.send({email: user_data.email})
.expect 404, done
return
it "Add new user user_test_mail@example.com", (done) ->
agent.post("/api/user/signup/")
.send(user_data)
.expect 202, done
return
it "POST /api/user/forgot/ should return 200 OK", (done) ->
agent.post("/api/user/forgot/")
.send({email: user_data.email})
.expect 200
.end (err, res) ->
return done(err) if err
expect(res.body).to.have.property('token')
token = res.body.token
done()
return
it "POST /api/user/reset/ should change password and return 200 OK", (done) ->
agent.post("/api/user/reset/")
.send({token: token, password: "new_password"})
.expect 200, done
return
it "POST /api/user/reset/ should not be able to change password again", (done) ->
agent.post("/api/user/reset/")
.send({token: token, password: "new_password"})
.expect 404, done
return
it "POST /api/user/signout/ should return 200 OK", (done) ->
agent.post("/api/user/signout/")
.expect 200, done
return
it "POST /api/user/signin/ should not be able to log in using old password", (done) ->
agent.post("/api/user/signin/")
.send(user_data)
.expect 403, done
return
it "POST /api/user/signin/ should be able to log in using new password", (done) ->
agent.post("/api/user/signin/")
.send({"email": "marcin.mincer+mean@gmail.com", "password": "new_password"})
.expect 200, done
return
return
describe "Changing user data", ->
user_data1 = {"email": "test.user+1@gmail.com", "password": "1234"}
user_data2 = {"email": "test.user+2@gmail.com", "password": "1234"}
it "Add new user test.user+1@example.com", (done) ->
agent.post("/api/user/signup/")
.send(user_data1)
.expect 202, done
return
it "New user should be able to change profile data", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"profile": {"gender": "male"}})
.expect 200
.end (err, res) ->
expect(res.body["user"]["profile"]["gender"]).to.equal("male")
done(err)
return
it "New user shouldn't be able to make himself admin", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"isAdmin": true})
.expect 403, done
return
it "New user shouldn't be able to add new keys to model", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"new_key": "new_key"})
.expect 200
.end (err, res) ->
expect(res.body["user"]).not.to.have.property('new_key')
done(err)
return
it "New user should be able to change password", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"password": "new_password"})
.expect 200
.end (err, res) ->
return done(err) if err
agent.post("/api/user/signout/")
.expect 200
.end (err, res) ->
return done(err) if err
agent.post("/api/user/signin/")
.send({"email": user_data1.email, "password": "new_password"})
.expect 200, done
return
it "Add second new user test.user+2@example.com and signout", (done) ->
agent.post("/api/user/signup/")
.send(user_data2)
.expect 202
.end (err, res) ->
done(err) if err
agent.post("/api/user/signout/")
.expect 200, done
return
it "Sign in as new user and you should not be able to modify second user", (done) ->
agent.post("/api/user/signin/")
.send({"email": user_data1.email, "password": "new_password"})
.expect 200
.end (err, res) ->
done(err) if err
agent.put("/api/user/" + user_data2.email + "/")
.send({"profile.gender": "female"})
.expect 403, done
return
return
| 141980 | process.env.NODE_ENV = "test"
request = require("supertest")
assert = require('chai').assert
app = require("../app")
expect = require('chai').expect
config = require("../config/config")
agent = request.agent(app) # agent should persist sessions
describe "Create accunt and sign in", ->
user_data = {"email": "<EMAIL>", "password": "<PASSWORD>"}
it "POST /api/user/signup/ should return 202 CREATED", (done) ->
agent.post("/api/user/signup/")
.send(user_data)
.expect 202, done
return
it "GET /api/user/current/ should return 200 OK", (done) ->
agent.get("/api/user/current/")
.expect 200, done
return
it "POST /api/user/signout/ should return 200 OK", (done) ->
agent.post("/api/user/signout/")
.expect 200, done
return
it "GET /api/user/current/ should return 403", (done) ->
agent.get("/api/user/current/")
.expect 403, done
return
it "POST /api/user/signin/ should return 200 OK", (done) ->
agent.post("/api/user/signin/")
.send(user_data)
.expect 200, done
return
it "GET /api/user/current/ should return 200 OK", (done) ->
agent.get("/api/user/current/")
.expect 200, done
return
return
describe "Frogotten password flow", ->
user_data = {"email": "<EMAIL>", "password": "<PASSWORD>"}
token = null
it "Should not find the user first", (done) ->
agent.post("/api/user/forgot/")
.send({email: user_data.email})
.expect 404, done
return
it "Add new user <EMAIL>", (done) ->
agent.post("/api/user/signup/")
.send(user_data)
.expect 202, done
return
it "POST /api/user/forgot/ should return 200 OK", (done) ->
agent.post("/api/user/forgot/")
.send({email: user_data.email})
.expect 200
.end (err, res) ->
return done(err) if err
expect(res.body).to.have.property('token')
token = res.body.token
done()
return
it "POST /api/user/reset/ should change password and return 200 OK", (done) ->
agent.post("/api/user/reset/")
.send({token: token, password: "<PASSWORD>"})
.expect 200, done
return
it "POST /api/user/reset/ should not be able to change password again", (done) ->
agent.post("/api/user/reset/")
.send({token: token, password: "<PASSWORD>"})
.expect 404, done
return
it "POST /api/user/signout/ should return 200 OK", (done) ->
agent.post("/api/user/signout/")
.expect 200, done
return
it "POST /api/user/signin/ should not be able to log in using old password", (done) ->
agent.post("/api/user/signin/")
.send(user_data)
.expect 403, done
return
it "POST /api/user/signin/ should be able to log in using new password", (done) ->
agent.post("/api/user/signin/")
.send({"email": "<EMAIL>", "password": "<PASSWORD>"})
.expect 200, done
return
return
describe "Changing user data", ->
user_data1 = {"email": "<EMAIL>", "password": "<PASSWORD>"}
user_data2 = {"email": "<EMAIL>", "password": "<PASSWORD>"}
it "Add new user <EMAIL>", (done) ->
agent.post("/api/user/signup/")
.send(user_data1)
.expect 202, done
return
it "New user should be able to change profile data", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"profile": {"gender": "male"}})
.expect 200
.end (err, res) ->
expect(res.body["user"]["profile"]["gender"]).to.equal("male")
done(err)
return
it "New user shouldn't be able to make himself admin", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"isAdmin": true})
.expect 403, done
return
it "New user shouldn't be able to add new keys to model", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"new_key": "new_key"})
.expect 200
.end (err, res) ->
expect(res.body["user"]).not.to.have.property('new_key')
done(err)
return
it "New user should be able to change password", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"password": "<PASSWORD>"})
.expect 200
.end (err, res) ->
return done(err) if err
agent.post("/api/user/signout/")
.expect 200
.end (err, res) ->
return done(err) if err
agent.post("/api/user/signin/")
.send({"email": user_data1.email, "password": "<PASSWORD>"})
.expect 200, done
return
it "Add second new user <EMAIL> and signout", (done) ->
agent.post("/api/user/signup/")
.send(user_data2)
.expect 202
.end (err, res) ->
done(err) if err
agent.post("/api/user/signout/")
.expect 200, done
return
it "Sign in as new user and you should not be able to modify second user", (done) ->
agent.post("/api/user/signin/")
.send({"email": user_data1.email, "password": "<PASSWORD>"})
.expect 200
.end (err, res) ->
done(err) if err
agent.put("/api/user/" + user_data2.email + "/")
.send({"profile.gender": "female"})
.expect 403, done
return
return
| true | process.env.NODE_ENV = "test"
request = require("supertest")
assert = require('chai').assert
app = require("../app")
expect = require('chai').expect
config = require("../config/config")
agent = request.agent(app) # agent should persist sessions
describe "Create accunt and sign in", ->
user_data = {"email": "PI:EMAIL:<EMAIL>END_PI", "password": "PI:PASSWORD:<PASSWORD>END_PI"}
it "POST /api/user/signup/ should return 202 CREATED", (done) ->
agent.post("/api/user/signup/")
.send(user_data)
.expect 202, done
return
it "GET /api/user/current/ should return 200 OK", (done) ->
agent.get("/api/user/current/")
.expect 200, done
return
it "POST /api/user/signout/ should return 200 OK", (done) ->
agent.post("/api/user/signout/")
.expect 200, done
return
it "GET /api/user/current/ should return 403", (done) ->
agent.get("/api/user/current/")
.expect 403, done
return
it "POST /api/user/signin/ should return 200 OK", (done) ->
agent.post("/api/user/signin/")
.send(user_data)
.expect 200, done
return
it "GET /api/user/current/ should return 200 OK", (done) ->
agent.get("/api/user/current/")
.expect 200, done
return
return
describe "Frogotten password flow", ->
user_data = {"email": "PI:EMAIL:<EMAIL>END_PI", "password": "PI:PASSWORD:<PASSWORD>END_PI"}
token = null
it "Should not find the user first", (done) ->
agent.post("/api/user/forgot/")
.send({email: user_data.email})
.expect 404, done
return
it "Add new user PI:EMAIL:<EMAIL>END_PI", (done) ->
agent.post("/api/user/signup/")
.send(user_data)
.expect 202, done
return
it "POST /api/user/forgot/ should return 200 OK", (done) ->
agent.post("/api/user/forgot/")
.send({email: user_data.email})
.expect 200
.end (err, res) ->
return done(err) if err
expect(res.body).to.have.property('token')
token = res.body.token
done()
return
it "POST /api/user/reset/ should change password and return 200 OK", (done) ->
agent.post("/api/user/reset/")
.send({token: token, password: "PI:PASSWORD:<PASSWORD>END_PI"})
.expect 200, done
return
it "POST /api/user/reset/ should not be able to change password again", (done) ->
agent.post("/api/user/reset/")
.send({token: token, password: "PI:PASSWORD:<PASSWORD>END_PI"})
.expect 404, done
return
it "POST /api/user/signout/ should return 200 OK", (done) ->
agent.post("/api/user/signout/")
.expect 200, done
return
it "POST /api/user/signin/ should not be able to log in using old password", (done) ->
agent.post("/api/user/signin/")
.send(user_data)
.expect 403, done
return
it "POST /api/user/signin/ should be able to log in using new password", (done) ->
agent.post("/api/user/signin/")
.send({"email": "PI:EMAIL:<EMAIL>END_PI", "password": "PI:PASSWORD:<PASSWORD>END_PI"})
.expect 200, done
return
return
describe "Changing user data", ->
user_data1 = {"email": "PI:EMAIL:<EMAIL>END_PI", "password": "PI:PASSWORD:<PASSWORD>END_PI"}
user_data2 = {"email": "PI:EMAIL:<EMAIL>END_PI", "password": "PI:PASSWORD:<PASSWORD>END_PI"}
it "Add new user PI:EMAIL:<EMAIL>END_PI", (done) ->
agent.post("/api/user/signup/")
.send(user_data1)
.expect 202, done
return
it "New user should be able to change profile data", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"profile": {"gender": "male"}})
.expect 200
.end (err, res) ->
expect(res.body["user"]["profile"]["gender"]).to.equal("male")
done(err)
return
it "New user shouldn't be able to make himself admin", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"isAdmin": true})
.expect 403, done
return
it "New user shouldn't be able to add new keys to model", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"new_key": "new_key"})
.expect 200
.end (err, res) ->
expect(res.body["user"]).not.to.have.property('new_key')
done(err)
return
it "New user should be able to change password", (done) ->
agent.put("/api/user/" + user_data1.email + "/")
.send({"password": "PI:PASSWORD:<PASSWORD>END_PI"})
.expect 200
.end (err, res) ->
return done(err) if err
agent.post("/api/user/signout/")
.expect 200
.end (err, res) ->
return done(err) if err
agent.post("/api/user/signin/")
.send({"email": user_data1.email, "password": "PI:PASSWORD:<PASSWORD>END_PI"})
.expect 200, done
return
it "Add second new user PI:EMAIL:<EMAIL>END_PI and signout", (done) ->
agent.post("/api/user/signup/")
.send(user_data2)
.expect 202
.end (err, res) ->
done(err) if err
agent.post("/api/user/signout/")
.expect 200, done
return
it "Sign in as new user and you should not be able to modify second user", (done) ->
agent.post("/api/user/signin/")
.send({"email": user_data1.email, "password": "PI:PASSWORD:<PASSWORD>END_PI"})
.expect 200
.end (err, res) ->
done(err) if err
agent.put("/api/user/" + user_data2.email + "/")
.send({"profile.gender": "female"})
.expect 403, done
return
return
|
[
{
"context": "\n noteId: noteId\n data: data\n name: name\n owner: @userId\n uploadedAt: new Date\n ",
"end": 1855,
"score": 0.9974361062049866,
"start": 1851,
"tag": "NAME",
"value": "name"
},
{
"context": "eId\n data: data\n name: name\n owner: @userId\n uploadedAt: new Date\n }\n Notes.update",
"end": 1876,
"score": 0.9989659786224365,
"start": 1869,
"tag": "USERNAME",
"value": "@userId"
}
] | imports/api/files/methods.coffee | jean/BulletNotes | 7 | import { Meteor } from 'meteor/meteor'
import { _ } from 'meteor/underscore'
import { ValidatedMethod } from 'meteor/mdg:validated-method'
import SimpleSchema from 'simpl-schema'
import { DDPRateLimiter } from 'meteor/ddp-rate-limiter'
import { Notes } from '../notes/notes.coffee'
import { Files } from './files.coffee'
export remove = new ValidatedMethod
name: 'files.remove'
validate: new SimpleSchema
id: Files.simpleSchema().schema('_id')
.validator
clean: yes
filter: no
run: ({ id }) ->
file = Files.findOne id
if @userId != file.owner
throw new (Meteor.Error)('not-authorized')
Files.remove { _id: id }
export setNote = new ValidatedMethod
name: 'files.setNote'
validate: new SimpleSchema
fileId: Files.simpleSchema().schema('_id')
noteId: Notes.simpleSchema().schema('_id')
.validator
clean: yes
filter: no
run: ({ fileId, noteId }) ->
file = Files.findOne fileId
if file.owner != Meteor.userId()
throw new (Meteor.Error)('not-authorized')
Files.update fileId, $set:
noteId: noteId
export fileSize = new ValidatedMethod
name: 'files.size'
validate: null
run: () ->
console.log Meteor.userId()
files = Files.find
owner: Meteor.userId()
console.log files.count()
size = 0
files.forEach (doc)->
size += BSON.calculateObjectSize doc
console.log "Got size: ",size
export upload = new ValidatedMethod
name: 'files.upload'
validate: new SimpleSchema
noteId: Notes.simpleSchema().schema('_id')
data: Files.simpleSchema().schema('data')
name: Files.simpleSchema().schema('name')
.validator
clean: yes
run: ({noteId, data, name}) ->
if !@userId || !Meteor.user().isAdmin
throw new (Meteor.Error)('not-authorized')
Files.insert {
noteId: noteId
data: data
name: name
owner: @userId
uploadedAt: new Date
}
Notes.update noteId, $set:
showContent: true
# Get note of all method names on Notes
NOTES_METHODS = _.pluck([
remove
upload
setNote
], 'name')
if Meteor.isServer
# Only allow 5 notes operations per connection per second
DDPRateLimiter.addRule {
name: (name) ->
_.contains NOTES_METHODS, name
# Rate limit per connection ID
connectionId: ->
yes
}, 5, 1000
| 34697 | import { Meteor } from 'meteor/meteor'
import { _ } from 'meteor/underscore'
import { ValidatedMethod } from 'meteor/mdg:validated-method'
import SimpleSchema from 'simpl-schema'
import { DDPRateLimiter } from 'meteor/ddp-rate-limiter'
import { Notes } from '../notes/notes.coffee'
import { Files } from './files.coffee'
export remove = new ValidatedMethod
name: 'files.remove'
validate: new SimpleSchema
id: Files.simpleSchema().schema('_id')
.validator
clean: yes
filter: no
run: ({ id }) ->
file = Files.findOne id
if @userId != file.owner
throw new (Meteor.Error)('not-authorized')
Files.remove { _id: id }
export setNote = new ValidatedMethod
name: 'files.setNote'
validate: new SimpleSchema
fileId: Files.simpleSchema().schema('_id')
noteId: Notes.simpleSchema().schema('_id')
.validator
clean: yes
filter: no
run: ({ fileId, noteId }) ->
file = Files.findOne fileId
if file.owner != Meteor.userId()
throw new (Meteor.Error)('not-authorized')
Files.update fileId, $set:
noteId: noteId
export fileSize = new ValidatedMethod
name: 'files.size'
validate: null
run: () ->
console.log Meteor.userId()
files = Files.find
owner: Meteor.userId()
console.log files.count()
size = 0
files.forEach (doc)->
size += BSON.calculateObjectSize doc
console.log "Got size: ",size
export upload = new ValidatedMethod
name: 'files.upload'
validate: new SimpleSchema
noteId: Notes.simpleSchema().schema('_id')
data: Files.simpleSchema().schema('data')
name: Files.simpleSchema().schema('name')
.validator
clean: yes
run: ({noteId, data, name}) ->
if !@userId || !Meteor.user().isAdmin
throw new (Meteor.Error)('not-authorized')
Files.insert {
noteId: noteId
data: data
name: <NAME>
owner: @userId
uploadedAt: new Date
}
Notes.update noteId, $set:
showContent: true
# Get note of all method names on Notes
NOTES_METHODS = _.pluck([
remove
upload
setNote
], 'name')
if Meteor.isServer
# Only allow 5 notes operations per connection per second
DDPRateLimiter.addRule {
name: (name) ->
_.contains NOTES_METHODS, name
# Rate limit per connection ID
connectionId: ->
yes
}, 5, 1000
| true | import { Meteor } from 'meteor/meteor'
import { _ } from 'meteor/underscore'
import { ValidatedMethod } from 'meteor/mdg:validated-method'
import SimpleSchema from 'simpl-schema'
import { DDPRateLimiter } from 'meteor/ddp-rate-limiter'
import { Notes } from '../notes/notes.coffee'
import { Files } from './files.coffee'
export remove = new ValidatedMethod
name: 'files.remove'
validate: new SimpleSchema
id: Files.simpleSchema().schema('_id')
.validator
clean: yes
filter: no
run: ({ id }) ->
file = Files.findOne id
if @userId != file.owner
throw new (Meteor.Error)('not-authorized')
Files.remove { _id: id }
export setNote = new ValidatedMethod
name: 'files.setNote'
validate: new SimpleSchema
fileId: Files.simpleSchema().schema('_id')
noteId: Notes.simpleSchema().schema('_id')
.validator
clean: yes
filter: no
run: ({ fileId, noteId }) ->
file = Files.findOne fileId
if file.owner != Meteor.userId()
throw new (Meteor.Error)('not-authorized')
Files.update fileId, $set:
noteId: noteId
export fileSize = new ValidatedMethod
name: 'files.size'
validate: null
run: () ->
console.log Meteor.userId()
files = Files.find
owner: Meteor.userId()
console.log files.count()
size = 0
files.forEach (doc)->
size += BSON.calculateObjectSize doc
console.log "Got size: ",size
export upload = new ValidatedMethod
name: 'files.upload'
validate: new SimpleSchema
noteId: Notes.simpleSchema().schema('_id')
data: Files.simpleSchema().schema('data')
name: Files.simpleSchema().schema('name')
.validator
clean: yes
run: ({noteId, data, name}) ->
if !@userId || !Meteor.user().isAdmin
throw new (Meteor.Error)('not-authorized')
Files.insert {
noteId: noteId
data: data
name: PI:NAME:<NAME>END_PI
owner: @userId
uploadedAt: new Date
}
Notes.update noteId, $set:
showContent: true
# Get note of all method names on Notes
NOTES_METHODS = _.pluck([
remove
upload
setNote
], 'name')
if Meteor.isServer
# Only allow 5 notes operations per connection per second
DDPRateLimiter.addRule {
name: (name) ->
_.contains NOTES_METHODS, name
# Rate limit per connection ID
connectionId: ->
yes
}, 5, 1000
|
[
{
"context": "y {String} attributes.user.name ユーザー名\n # @property {String} attributes.user.",
"end": 5402,
"score": 0.6213860511779785,
"start": 5398,
"tag": "NAME",
"value": "ユーザー"
}
] | src/video/NicoVideoInfo.coffee | taku-o/node-nicovideo-api | 28 | _ = require "lodash"
__ = require "lodash-deep"
Request = require "request-promise"
cheerio = require "cheerio"
{sprintf} = require("sprintf")
deepFreeze = require "deep-freeze"
Ent = require "ent"
Deferred = require "promise-native-deferred"
APIEndpoints = require "../APIEndpoints"
NicoException = require "../NicoException"
###*
# ニコニコ動画APIの動画情報モデルクラス
#
# Properties
# getメソッドで第1階層まで取得できます。
# Example: NicoVideoInfo.get("user").id
#
#
# @class NicoVideoInfo
# @extends EventEmitter2
###
module.exports =
class NicoVideoInfo
@fetch : (movieId, session) ->
defer = new Deferred
return defer.reject "Fetch failed. Movie id not specified." unless movieId?
# getThumbInfoの結果を取得
APIEndpoints.video.getMovieInfo(session, {movieId})
.then (res) ->
if res.statusCode is 503
defer.reject("Nicovideo has in maintenance.")
info = new NicoVideoInfo(movieId, session)
info._attr = deepFreeze(NicoVideoInfo.parseResponse(res.body, movieId))
defer.resolve(info)
defer.promise
###*
# @private
# @param {String} resBody getThumbInfoAPIから取得したXML
# @return {Object}
###
@parseResponse : (resBody, movieId) ->
$res = cheerio.load resBody
if $res(":root").attr("status") isnt "ok"
errorMessage = $res("error description").text()
throw new NicoException
message : "Failed to fetch movie info (#{errorMessage}) movie:#{movieId}"
code : $res "error code"
$resThumb = $res "thumb"
# 動画の秒単位の長さを出しておく
length = do (length) ->
length = $resThumb.find("length").text().split(":")
s = length.pop() | 0
m = length.pop() | 0
h = length.pop() | 0
return s + (m * 60) + (h * 3600)
{
id : $resThumb.find("video_id").text()
title : Ent.decode($resThumb.find("title").text())
description : $resThumb.find("description").text()
length : length # 秒数
movieType : $resThumb.find("movie_type").text()# "flv"とか
thumbnail : $resThumb.find("thumbnail_url").text()
isDeleted : false
count :
view : $resThumb.find("view_counter").text() | 0
comments : $resThumb.find("comment_num").text() | 0
mylist : $resThumb.find("mylist_counter").text() | 0
tags : do ->
tagList = []
for tags in $resThumb.find("tags")
$tags = cheerio tags
domain = $tags.attr("domain")
for tag in $tags.find("tag")
$tag = cheerio tag
tagList.push {
name : $tag.text()
isCategory : $tag.attr("category") is "1"
isLocked : $tag.attr("lock") is "1"
domain : domain
}
tagList
user :
id : $resThumb.find("user_id").text() | 0
name : $resThumb.find("user_nickname").text()
icon : $resThumb.find("user_icon_url").text()
}
@defaults :
title : null
description : null
length : null # 秒数
movieType : null # "flv", "mp4"
thumbnail : null
isDeleted : false
count :
view : -1
comments : -1
mylist : -1
tags : [] # {name:string, isCategory:boolean, isLocked:boolean}
user :
id : -1
name : null
icon : null # URL
###*
# @property id
# @type String
###
###*
# @property {Object} attributes
# @property {String} attributes.id 動画ID
# @property {String} attributes.title 動画タイトル
# @property {String} attributes.description 動画説明文
# @property {Number} attributes.length 動画の長さ(秒)
# @property {String} attributes.movieType 動画ファイルの形式(mp4, flv, swf)
# @property {String} attributes.thumbnail サムネイル画像のURL
# @property {Boolean} attributes.isDeleted 削除されているか(現在、常にfalse)
# @property {Object} attributes.stats 統計情報
# @property {Number} attributes.stats.view 再生数
# @property {Object} attributes.stats.comments コメント数
# @property {Object} attributes.stats.mylist マイリスト数
# @property {Array<Object>} attributes.tags タグ情報
# @property {String} attributes.tags[n].name タグ名
# @property {Boolean} attributes.tags[n].isCategory カテゴリタグか
# @property {String} attributes.tags[n].isLocked ロックされているか
# @property {String} attributes.tags[n].domain どの国のタグか(日本="jp")
# @property {Object} attributes.user 投稿者情報
# @property {Number} attributes.user.id ユーザーID
# @property {String} attributes.user.name ユーザー名
# @property {String} attributes.user.icon ユーザーアイコンのURL
###
_attr : {}
###*
# @class NicoVideoInfo
# @constructor
# @param {String} movieId 動画ID
# @param {NicoSession} _session セッション
###
constructor : (movieId, @_session) ->
# 指定された動画の動画情報インスタンスがキャッシュされていればそれを返す
# キャッシュに対応する動画情報インスタンスがなければ、新規作成してキャッシュ
# return VideoInfo._cache[movieId] if VideoInfo._cache[movieId]?
# @_attr = _.cloneDeep(NicoVideoInfo.defaults)
Object.defineProperties @,
id :
value : movieId
###*
# 動画が削除されているか調べます。
# @return {Boolean}
###
isDeleted : ->
return @get "isDeleted"
###*
# この動画のgetflv APIの結果を取得します。
# @return {Promise}
###
fetchGetFlv : ->
@_session.video.getFlv @id
###*
# 属性を取得します。
# @param {String} path 属性名(Ex. "id", "title", "user.id")
###
get : (path) ->
return __.deepGet @_attr, path
| 118590 | _ = require "lodash"
__ = require "lodash-deep"
Request = require "request-promise"
cheerio = require "cheerio"
{sprintf} = require("sprintf")
deepFreeze = require "deep-freeze"
Ent = require "ent"
Deferred = require "promise-native-deferred"
APIEndpoints = require "../APIEndpoints"
NicoException = require "../NicoException"
###*
# ニコニコ動画APIの動画情報モデルクラス
#
# Properties
# getメソッドで第1階層まで取得できます。
# Example: NicoVideoInfo.get("user").id
#
#
# @class NicoVideoInfo
# @extends EventEmitter2
###
module.exports =
class NicoVideoInfo
@fetch : (movieId, session) ->
defer = new Deferred
return defer.reject "Fetch failed. Movie id not specified." unless movieId?
# getThumbInfoの結果を取得
APIEndpoints.video.getMovieInfo(session, {movieId})
.then (res) ->
if res.statusCode is 503
defer.reject("Nicovideo has in maintenance.")
info = new NicoVideoInfo(movieId, session)
info._attr = deepFreeze(NicoVideoInfo.parseResponse(res.body, movieId))
defer.resolve(info)
defer.promise
###*
# @private
# @param {String} resBody getThumbInfoAPIから取得したXML
# @return {Object}
###
@parseResponse : (resBody, movieId) ->
$res = cheerio.load resBody
if $res(":root").attr("status") isnt "ok"
errorMessage = $res("error description").text()
throw new NicoException
message : "Failed to fetch movie info (#{errorMessage}) movie:#{movieId}"
code : $res "error code"
$resThumb = $res "thumb"
# 動画の秒単位の長さを出しておく
length = do (length) ->
length = $resThumb.find("length").text().split(":")
s = length.pop() | 0
m = length.pop() | 0
h = length.pop() | 0
return s + (m * 60) + (h * 3600)
{
id : $resThumb.find("video_id").text()
title : Ent.decode($resThumb.find("title").text())
description : $resThumb.find("description").text()
length : length # 秒数
movieType : $resThumb.find("movie_type").text()# "flv"とか
thumbnail : $resThumb.find("thumbnail_url").text()
isDeleted : false
count :
view : $resThumb.find("view_counter").text() | 0
comments : $resThumb.find("comment_num").text() | 0
mylist : $resThumb.find("mylist_counter").text() | 0
tags : do ->
tagList = []
for tags in $resThumb.find("tags")
$tags = cheerio tags
domain = $tags.attr("domain")
for tag in $tags.find("tag")
$tag = cheerio tag
tagList.push {
name : $tag.text()
isCategory : $tag.attr("category") is "1"
isLocked : $tag.attr("lock") is "1"
domain : domain
}
tagList
user :
id : $resThumb.find("user_id").text() | 0
name : $resThumb.find("user_nickname").text()
icon : $resThumb.find("user_icon_url").text()
}
@defaults :
title : null
description : null
length : null # 秒数
movieType : null # "flv", "mp4"
thumbnail : null
isDeleted : false
count :
view : -1
comments : -1
mylist : -1
tags : [] # {name:string, isCategory:boolean, isLocked:boolean}
user :
id : -1
name : null
icon : null # URL
###*
# @property id
# @type String
###
###*
# @property {Object} attributes
# @property {String} attributes.id 動画ID
# @property {String} attributes.title 動画タイトル
# @property {String} attributes.description 動画説明文
# @property {Number} attributes.length 動画の長さ(秒)
# @property {String} attributes.movieType 動画ファイルの形式(mp4, flv, swf)
# @property {String} attributes.thumbnail サムネイル画像のURL
# @property {Boolean} attributes.isDeleted 削除されているか(現在、常にfalse)
# @property {Object} attributes.stats 統計情報
# @property {Number} attributes.stats.view 再生数
# @property {Object} attributes.stats.comments コメント数
# @property {Object} attributes.stats.mylist マイリスト数
# @property {Array<Object>} attributes.tags タグ情報
# @property {String} attributes.tags[n].name タグ名
# @property {Boolean} attributes.tags[n].isCategory カテゴリタグか
# @property {String} attributes.tags[n].isLocked ロックされているか
# @property {String} attributes.tags[n].domain どの国のタグか(日本="jp")
# @property {Object} attributes.user 投稿者情報
# @property {Number} attributes.user.id ユーザーID
# @property {String} attributes.user.name <NAME>名
# @property {String} attributes.user.icon ユーザーアイコンのURL
###
_attr : {}
###*
# @class NicoVideoInfo
# @constructor
# @param {String} movieId 動画ID
# @param {NicoSession} _session セッション
###
constructor : (movieId, @_session) ->
# 指定された動画の動画情報インスタンスがキャッシュされていればそれを返す
# キャッシュに対応する動画情報インスタンスがなければ、新規作成してキャッシュ
# return VideoInfo._cache[movieId] if VideoInfo._cache[movieId]?
# @_attr = _.cloneDeep(NicoVideoInfo.defaults)
Object.defineProperties @,
id :
value : movieId
###*
# 動画が削除されているか調べます。
# @return {Boolean}
###
isDeleted : ->
return @get "isDeleted"
###*
# この動画のgetflv APIの結果を取得します。
# @return {Promise}
###
fetchGetFlv : ->
@_session.video.getFlv @id
###*
# 属性を取得します。
# @param {String} path 属性名(Ex. "id", "title", "user.id")
###
get : (path) ->
return __.deepGet @_attr, path
| true | _ = require "lodash"
__ = require "lodash-deep"
Request = require "request-promise"
cheerio = require "cheerio"
{sprintf} = require("sprintf")
deepFreeze = require "deep-freeze"
Ent = require "ent"
Deferred = require "promise-native-deferred"
APIEndpoints = require "../APIEndpoints"
NicoException = require "../NicoException"
###*
# ニコニコ動画APIの動画情報モデルクラス
#
# Properties
# getメソッドで第1階層まで取得できます。
# Example: NicoVideoInfo.get("user").id
#
#
# @class NicoVideoInfo
# @extends EventEmitter2
###
module.exports =
class NicoVideoInfo
@fetch : (movieId, session) ->
defer = new Deferred
return defer.reject "Fetch failed. Movie id not specified." unless movieId?
# getThumbInfoの結果を取得
APIEndpoints.video.getMovieInfo(session, {movieId})
.then (res) ->
if res.statusCode is 503
defer.reject("Nicovideo has in maintenance.")
info = new NicoVideoInfo(movieId, session)
info._attr = deepFreeze(NicoVideoInfo.parseResponse(res.body, movieId))
defer.resolve(info)
defer.promise
###*
# @private
# @param {String} resBody getThumbInfoAPIから取得したXML
# @return {Object}
###
@parseResponse : (resBody, movieId) ->
$res = cheerio.load resBody
if $res(":root").attr("status") isnt "ok"
errorMessage = $res("error description").text()
throw new NicoException
message : "Failed to fetch movie info (#{errorMessage}) movie:#{movieId}"
code : $res "error code"
$resThumb = $res "thumb"
# 動画の秒単位の長さを出しておく
length = do (length) ->
length = $resThumb.find("length").text().split(":")
s = length.pop() | 0
m = length.pop() | 0
h = length.pop() | 0
return s + (m * 60) + (h * 3600)
{
id : $resThumb.find("video_id").text()
title : Ent.decode($resThumb.find("title").text())
description : $resThumb.find("description").text()
length : length # 秒数
movieType : $resThumb.find("movie_type").text()# "flv"とか
thumbnail : $resThumb.find("thumbnail_url").text()
isDeleted : false
count :
view : $resThumb.find("view_counter").text() | 0
comments : $resThumb.find("comment_num").text() | 0
mylist : $resThumb.find("mylist_counter").text() | 0
tags : do ->
tagList = []
for tags in $resThumb.find("tags")
$tags = cheerio tags
domain = $tags.attr("domain")
for tag in $tags.find("tag")
$tag = cheerio tag
tagList.push {
name : $tag.text()
isCategory : $tag.attr("category") is "1"
isLocked : $tag.attr("lock") is "1"
domain : domain
}
tagList
user :
id : $resThumb.find("user_id").text() | 0
name : $resThumb.find("user_nickname").text()
icon : $resThumb.find("user_icon_url").text()
}
@defaults :
title : null
description : null
length : null # 秒数
movieType : null # "flv", "mp4"
thumbnail : null
isDeleted : false
count :
view : -1
comments : -1
mylist : -1
tags : [] # {name:string, isCategory:boolean, isLocked:boolean}
user :
id : -1
name : null
icon : null # URL
###*
# @property id
# @type String
###
###*
# @property {Object} attributes
# @property {String} attributes.id 動画ID
# @property {String} attributes.title 動画タイトル
# @property {String} attributes.description 動画説明文
# @property {Number} attributes.length 動画の長さ(秒)
# @property {String} attributes.movieType 動画ファイルの形式(mp4, flv, swf)
# @property {String} attributes.thumbnail サムネイル画像のURL
# @property {Boolean} attributes.isDeleted 削除されているか(現在、常にfalse)
# @property {Object} attributes.stats 統計情報
# @property {Number} attributes.stats.view 再生数
# @property {Object} attributes.stats.comments コメント数
# @property {Object} attributes.stats.mylist マイリスト数
# @property {Array<Object>} attributes.tags タグ情報
# @property {String} attributes.tags[n].name タグ名
# @property {Boolean} attributes.tags[n].isCategory カテゴリタグか
# @property {String} attributes.tags[n].isLocked ロックされているか
# @property {String} attributes.tags[n].domain どの国のタグか(日本="jp")
# @property {Object} attributes.user 投稿者情報
# @property {Number} attributes.user.id ユーザーID
# @property {String} attributes.user.name PI:NAME:<NAME>END_PI名
# @property {String} attributes.user.icon ユーザーアイコンのURL
###
_attr : {}
###*
# @class NicoVideoInfo
# @constructor
# @param {String} movieId 動画ID
# @param {NicoSession} _session セッション
###
constructor : (movieId, @_session) ->
# 指定された動画の動画情報インスタンスがキャッシュされていればそれを返す
# キャッシュに対応する動画情報インスタンスがなければ、新規作成してキャッシュ
# return VideoInfo._cache[movieId] if VideoInfo._cache[movieId]?
# @_attr = _.cloneDeep(NicoVideoInfo.defaults)
Object.defineProperties @,
id :
value : movieId
###*
# 動画が削除されているか調べます。
# @return {Boolean}
###
isDeleted : ->
return @get "isDeleted"
###*
# この動画のgetflv APIの結果を取得します。
# @return {Promise}
###
fetchGetFlv : ->
@_session.video.getFlv @id
###*
# 属性を取得します。
# @param {String} path 属性名(Ex. "id", "title", "user.id")
###
get : (path) ->
return __.deepGet @_attr, path
|
[
{
"context": " isInternal: true\n\n\n credentialsUser:\n id: \"13a88c31413019245de27da7\"\n username: 'Martin Wawrusch'\n _tenantId: '",
"end": 537,
"score": 0.6131163239479065,
"start": 513,
"tag": "KEY",
"value": "13a88c31413019245de27da7"
},
{
"context": " id: \"13a88c31413019245de27da7\"\n username: 'Martin Wawrusch'\n _tenantId: '13a88c31413019245de27da0'\n ro",
"end": 569,
"score": 0.991215705871582,
"start": 554,
"tag": "NAME",
"value": "Martin Wawrusch"
},
{
"context": " roles: []\n\n credentialsServerAdmin:\n id: \"13a88c31413019245de27da0\"\n username: 'John Smith'\n _tenantId: '13a88",
"end": 687,
"score": 0.6730706691741943,
"start": 663,
"tag": "KEY",
"value": "13a88c31413019245de27da0"
},
{
"context": " id: \"13a88c31413019245de27da0\"\n username: 'John Smith'\n _tenantId: '13a88c31413019245de27da0'\n ro",
"end": 714,
"score": 0.6592682600021362,
"start": 704,
"tag": "USERNAME",
"value": "John Smith"
}
] | test/support/fixtures.coffee | codedoctor/hapi-routes-roles | 1 |
module.exports =
clientId: '01234567890123456789000a'
_tenantId: '01234567890123456789000b'
invalidRoleId:'0123456789012345678900aa'
invalidRole:
description: "This is a role 1"
isInternal: false
role1:
name: "role1"
description: "This is a role 1"
isInternal: false
role2:
name: "role2"
description: "This is a role 2"
isInternal: false
roleInternal1:
name: "role3"
description: "This is a role 3"
isInternal: true
credentialsUser:
id: "13a88c31413019245de27da7"
username: 'Martin Wawrusch'
_tenantId: '13a88c31413019245de27da0'
roles: []
credentialsServerAdmin:
id: "13a88c31413019245de27da0"
username: 'John Smith'
_tenantId: '13a88c31413019245de27da0'
roles: []
scopes: ['server-admin']
| 6351 |
module.exports =
clientId: '01234567890123456789000a'
_tenantId: '01234567890123456789000b'
invalidRoleId:'0123456789012345678900aa'
invalidRole:
description: "This is a role 1"
isInternal: false
role1:
name: "role1"
description: "This is a role 1"
isInternal: false
role2:
name: "role2"
description: "This is a role 2"
isInternal: false
roleInternal1:
name: "role3"
description: "This is a role 3"
isInternal: true
credentialsUser:
id: "<KEY>"
username: '<NAME>'
_tenantId: '13a88c31413019245de27da0'
roles: []
credentialsServerAdmin:
id: "<KEY>"
username: 'John Smith'
_tenantId: '13a88c31413019245de27da0'
roles: []
scopes: ['server-admin']
| true |
module.exports =
clientId: '01234567890123456789000a'
_tenantId: '01234567890123456789000b'
invalidRoleId:'0123456789012345678900aa'
invalidRole:
description: "This is a role 1"
isInternal: false
role1:
name: "role1"
description: "This is a role 1"
isInternal: false
role2:
name: "role2"
description: "This is a role 2"
isInternal: false
roleInternal1:
name: "role3"
description: "This is a role 3"
isInternal: true
credentialsUser:
id: "PI:KEY:<KEY>END_PI"
username: 'PI:NAME:<NAME>END_PI'
_tenantId: '13a88c31413019245de27da0'
roles: []
credentialsServerAdmin:
id: "PI:KEY:<KEY>END_PI"
username: 'John Smith'
_tenantId: '13a88c31413019245de27da0'
roles: []
scopes: ['server-admin']
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.797730565071106,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"",
"end": 511,
"score": 0.6917129755020142,
"start": 508,
"tag": "NAME",
"value": "Hos"
},
{
"context": "ccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"",
"end": 518,
"score": 0.8703250885009766,
"start": 514,
"tag": "NAME",
"value": "Joel"
},
{
"context": "ong\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\"",
"end": 522,
"score": 0.6062000393867493,
"start": 521,
"tag": "NAME",
"value": "A"
},
{
"context": "sa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",",
"end": 530,
"score": 0.6056967973709106,
"start": 528,
"tag": "NAME",
"value": "Ob"
},
{
"context": "r\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Mat",
"end": 540,
"score": 0.8759093284606934,
"start": 535,
"tag": "NAME",
"value": "Jonah"
},
{
"context": ",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Ma",
"end": 546,
"score": 0.8404310345649719,
"start": 543,
"tag": "NAME",
"value": "Mic"
},
{
"context": "\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"L",
"end": 552,
"score": 0.5866717100143433,
"start": 549,
"tag": "NAME",
"value": "Nah"
},
{
"context": "d\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",",
"end": 584,
"score": 0.6285778880119324,
"start": 581,
"tag": "NAME",
"value": "Mal"
},
{
"context": "nah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",",
"end": 591,
"score": 0.9348254203796387,
"start": 587,
"tag": "NAME",
"value": "Matt"
},
{
"context": "ic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",\"Gal\",\"",
"end": 598,
"score": 0.9548324942588806,
"start": 594,
"tag": "NAME",
"value": "Mark"
},
{
"context": "h\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",\"Gal\",\"Eph\",\"P",
"end": 605,
"score": 0.8765692710876465,
"start": 601,
"tag": "NAME",
"value": "Luke"
},
{
"context": "\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",\"Gal\",\"Eph\",\"Phil\",\"C",
"end": 612,
"score": 0.7254642248153687,
"start": 608,
"tag": "NAME",
"value": "John"
},
{
"context": "ndle book: 2Sam (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"2. Samuelova 1:1\").osis()).toEqual(\"2Sam.1.1\")\n\t\texpect(p.pars",
"end": 13500,
"score": 0.817625880241394,
"start": 13491,
"tag": "NAME",
"value": "Samuelova"
},
{
"context": "\n\t\tp.include_apocrypha(false)\n\t\texpect(p.parse(\"2. SAMUELOVA 1:1\").osis()).toEqual(\"2Sam.1.1\")\n\t\texpect(p.pars",
"end": 13773,
"score": 0.8277532458305359,
"start": 13764,
"tag": "NAME",
"value": "SAMUELOVA"
},
{
"context": "handle book: 1Sam (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"1. Samuelova 1:1\").osis()).toEqual(\"1Sam.1.1\")\n\t\texpect(p.pars",
"end": 14331,
"score": 0.7540531158447266,
"start": 14319,
"tag": "NAME",
"value": "1. Samuelova"
},
{
"context": "sis()).toEqual(\"1Sam.1.1\")\n\t\texpect(p.parse(\"1 Samuelova 1:1\").osis()).toEqual(\"1Sam.1.1\")\n\t\texpect(p.pars",
"end": 14395,
"score": 0.651374101638794,
"start": 14389,
"tag": "NAME",
"value": "uelova"
},
{
"context": ".include_apocrypha(false)\n\t\texpect(p.parse(\"1. SAMUELOVA 1:1\").osis()).toEqual(\"1Sam.1.1\")\n\t\texpect(p.pars",
"end": 14604,
"score": 0.6361603736877441,
"start": 14598,
"tag": "NAME",
"value": "UELOVA"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Dan (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Daniel 1:1\").osis",
"end": 26238,
"score": 0.9929656386375427,
"start": 26235,
"tag": "NAME",
"value": "Dan"
},
{
"context": " handle book: Dan (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Daniel 1:1\").osis()).toEqual(\"Dan.1.1\")\n\t\texpect(p.parse",
"end": 26277,
"score": 0.9491336941719055,
"start": 26271,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Hos (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Ozej 1:1\").osis",
"end": 26803,
"score": 0.7538617849349976,
"start": 26802,
"tag": "NAME",
"value": "H"
},
{
"context": "ual(\"Hos.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Joel (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_",
"end": 27217,
"score": 0.8116936683654785,
"start": 27215,
"tag": "NAME",
"value": "Jo"
},
{
"context": "al(\"Obad.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Jonah (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 29041,
"score": 0.838577389717102,
"start": 29036,
"tag": "NAME",
"value": "Jonah"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Jonah (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Jonah 1:1\").osis(",
"end": 29301,
"score": 0.9030704498291016,
"start": 29296,
"tag": "NAME",
"value": "Jonah"
},
{
"context": "andle book: Jonah (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Jonah 1:1\").osis()).toEqual(\"Jonah.1.1\")\n\t\texpect(p.par",
"end": 29339,
"score": 0.7643733024597168,
"start": 29334,
"tag": "NAME",
"value": "Jonah"
},
{
"context": "l(\"Jonah.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Mic (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_",
"end": 29730,
"score": 0.8952515125274658,
"start": 29729,
"tag": "NAME",
"value": "M"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Mic (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Mihej 1:1\").osi",
"end": 29988,
"score": 0.7594144940376282,
"start": 29987,
"tag": "NAME",
"value": "M"
},
{
"context": "ual(\"Mal.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Matt (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv",
"end": 34163,
"score": 0.9272924661636353,
"start": 34162,
"tag": "NAME",
"value": "M"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Matt (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Matej 1:1\").os",
"end": 34422,
"score": 0.9386357069015503,
"start": 34421,
"tag": "NAME",
"value": "M"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Mark (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Marko 1:1\").osis(",
"end": 35108,
"score": 0.9556306004524231,
"start": 35104,
"tag": "NAME",
"value": "Mark"
},
{
"context": "al(\"Mark.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Luke (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 35532,
"score": 0.7005246877670288,
"start": 35528,
"tag": "NAME",
"value": "Luke"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Luke (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Luka 1:1\").osis()",
"end": 35791,
"score": 0.7498918771743774,
"start": 35787,
"tag": "NAME",
"value": "Luke"
},
{
"context": "p.include_apocrypha true\n\tit \"should handle book: 1John (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"1. Janezovo pismo",
"end": 36474,
"score": 0.667921781539917,
"start": 36469,
"tag": "NAME",
"value": "1John"
},
{
"context": "\").osis()).toEqual(\"1John.1.1\")\n\t\texpect(p.parse(\"1JOHN 1:1\").osis()).toEqual(\"1John.1.1\")\n\t\texpect(p.par",
"end": 36940,
"score": 0.7487976551055908,
"start": 36935,
"tag": "NAME",
"value": "1JOHN"
},
{
"context": "(\"1John.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book 2John (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 37075,
"score": 0.6609529852867126,
"start": 37070,
"tag": "NAME",
"value": "2John"
},
{
"context": "p.include_apocrypha true\n\tit \"should handle book: 2John (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"2. Janezovo pismo",
"end": 37335,
"score": 0.7017824649810791,
"start": 37330,
"tag": "NAME",
"value": "2John"
},
{
"context": "l(\"3John.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book John (sl)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 38796,
"score": 0.9558257460594177,
"start": 38792,
"tag": "NAME",
"value": "John"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: John (sl)\", ->\n\t\t`\n\t\texpect(p.parse(\"Janez 1:1\").osis(",
"end": 39055,
"score": 0.994149923324585,
"start": 39051,
"tag": "NAME",
"value": "John"
},
{
"context": "1\").osis()).toEqual(\"John.1.1\")\n\t\texpect(p.parse(\"John 1:1\").osis()).toEqual(\"John.1.1\")\n\t\texpect(p.pars",
"end": 39150,
"score": 0.9159250259399414,
"start": 39146,
"tag": "NAME",
"value": "John"
},
{
"context": "1\").osis()).toEqual(\"John.1.1\")\n\t\texpect(p.parse(\"JOHN 1:1\").osis()).toEqual(\"John.1.1\")\n\t\texpect(p.pars",
"end": 39349,
"score": 0.9726632833480835,
"start": 39345,
"tag": "NAME",
"value": "JOHN"
},
{
"context": ".include_apocrypha(false)\n\t\texpect(p.parse(\"1. TIMOTEJU 1:1\").osis()).toEqual(\"1Tim.1.1\")\n\t\texpect(p.pa",
"end": 49762,
"score": 0.5503741502761841,
"start": 49759,
"tag": "NAME",
"value": "OTE"
}
] | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/sl/spec.coffee | saiba-mais/bible-lessons | 0 | bcv_parser = require("../../js/sl_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (sl)", ->
`
expect(p.parse("1. Mojzesova 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mojzesova 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mz 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("1. MOJZESOVA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOJZESOVA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MZ 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (sl)", ->
`
expect(p.parse("2. Mojzesova 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mojzesova 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mz 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("2. MOJZESOVA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOJZESOVA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MZ 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (sl)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (sl)", ->
`
expect(p.parse("3. Mojzesova 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mojzesova 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mz 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("3. MOJZESOVA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOJZESOVA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MZ 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (sl)", ->
`
expect(p.parse("4. Mojzesova 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mojzesova 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mz 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("4. MOJZESOVA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOJZESOVA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MZ 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (sl)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (sl)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (sl)", ->
`
expect(p.parse("Zalostinke 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Žalostinke 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Zal 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Žal 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("ZALOSTINKE 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("ŽALOSTINKE 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("ZAL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("ŽAL 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (sl)", ->
`
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (sl)", ->
`
expect(p.parse("Razodetje 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Raz 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("RAZODETJE 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("RAZ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (sl)", ->
`
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (sl)", ->
`
expect(p.parse("5. Mojzesova 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mojzesova 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mz 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("5. MOJZESOVA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOJZESOVA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MZ 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (sl)", ->
`
expect(p.parse("Jozue 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOZUE 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (sl)", ->
`
expect(p.parse("Sodniki 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Sod 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("SODNIKI 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("SOD 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (sl)", ->
`
expect(p.parse("Ruta 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTA 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (sl)", ->
`
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (sl)", ->
`
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (sl)", ->
`
expect(p.parse("Izaija 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Iz 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("IZAIJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("IZ 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (sl)", ->
`
expect(p.parse("2. Samuelova 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuelova 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("2. SAMUELOVA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUELOVA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (sl)", ->
`
expect(p.parse("1. Samuelova 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Samuelova 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("1. SAMUELOVA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUELOVA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (sl)", ->
`
expect(p.parse("2. Kraljev 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kraljev 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kr 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("2. KRALJEV 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KRALJEV 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KR 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (sl)", ->
`
expect(p.parse("1. Kraljev 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kraljev 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kr 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("1. KRALJEV 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KRALJEV 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KR 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (sl)", ->
`
expect(p.parse("2. Kroniska 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Kroniška 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kroniska 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kroniška 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("1 Krn 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("2. KRONISKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. KRONIŠKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRONISKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRONIŠKA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("1 KRN 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (sl)", ->
`
expect(p.parse("1. Kroniska 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Kroniška 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kroniska 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kroniška 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Krn 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("1. KRONISKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. KRONIŠKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRONISKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRONIŠKA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRN 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
# Data-driven registration of the Slovenian (sl) book-name suites.
#
# Registers one Jasmine suite that checks every accepted Slovenian
# spelling of a single Bible book.
#   osis       - canonical OSIS book id; also used in the suite/spec names.
#   forms      - spellings that must each parse to "<osis>.1.1".
#   checkUpper - when true (the default) the spec additionally disables the
#                Apocrypha and re-checks the upper-cased spelling of every
#                form; deuterocanonical books pass `no` because they are
#                only exercised while the Apocrypha is enabled.
describe_sl_book = (osis, forms, checkUpper = yes) ->
  describe "Localized book #{osis} (sl)", ->
    p = {}
    beforeEach ->
      p = new bcv_parser
      p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
      p.include_apocrypha true
    it "should handle book: #{osis} (sl)", ->
      # Mixed-case forms are checked with the Apocrypha enabled...
      for form in forms
        expect(p.parse("#{form} 1:1").osis()).toEqual("#{osis}.1.1")
      if checkUpper
        # ...then the same forms again, upper-cased, with it disabled.
        # NOTE(review): the generated specs list exactly the upper-cased
        # spelling of each form (š→Š, č→Č, ž→Ž included), so
        # toUpperCase() reproduces them verbatim.
        p.include_apocrypha false
        for form in forms
          expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual("#{osis}.1.1")
      true

describe_sl_book "Ezra", ["Ezra", "Ezr"]
describe_sl_book "Neh", ["Nehemija", "Neh"]
describe_sl_book "GkEsth", ["GkEsth"], no
describe_sl_book "Esth", ["Estera", "Esth", "Est"]
describe_sl_book "Job", ["Job"]
describe_sl_book "Ps", ["Psalmi", "Ps"]
describe_sl_book "PrAzar", ["PrAzar"], no
describe_sl_book "Prov", ["Pregovori", "Prov", "Prg"]
describe_sl_book "Eccl", ["Pridigar", "Eccl", "Prd"]
describe_sl_book "SgThree", ["SgThree"], no
describe_sl_book "Song", ["Visoka pesem", "Song", "Vp"]
describe_sl_book "Jer", ["Jeremija", "Jer"]
describe_sl_book "Ezek", ["Ezekiel", "Ezek", "Ezk"]
describe_sl_book "Dan", ["Daniel", "Dan"]
describe_sl_book "Hos", ["Ozej", "Hos", "Oz"]
describe_sl_book "Joel", ["Joel", "Jl"]
describe_sl_book "Amos", ["Amos", "Am"]
describe_sl_book "Obad", ["Abdija", "Obad", "Abd"]
describe_sl_book "Jonah", ["Jonah", "Jona", "Jon"]
describe_sl_book "Mic", ["Mihej", "Mic", "Mih"]
describe_sl_book "Nah", ["Nahum", "Nah"]
describe_sl_book "Hab", ["Habakuk", "Hab"]
describe_sl_book "Zeph", ["Sofonija", "Zeph", "Sof"]
describe_sl_book "Hag", ["Agej", "Hag", "Ag"]
describe_sl_book "Zech", ["Zaharija", "Zech", "Zah"]
describe_sl_book "Mal", ["Malahija", "Mal"]
describe_sl_book "Matt", ["Matej", "Matt", "Mt"]
describe_sl_book "Mark", ["Marko", "Mark", "Mr"]
describe_sl_book "Luke", ["Luka", "Luke", "Lk"]
describe_sl_book "1John", ["1. Janezovo pismo", "1 Janezovo pismo", "1John", "1 Jn"]
describe_sl_book "2John", ["2. Janezovo pismo", "2 Janezovo pismo", "2John", "2 Jn"]
describe_sl_book "3John", ["3. Janezovo pismo", "3 Janezovo pismo", "3John", "3 Jn"]
describe_sl_book "John", ["Janez", "John", "Jn"]
describe_sl_book "Acts", ["Apostolska dela", "Acts", "Apd"]
describe_sl_book "Rom", ["Rimljanom", "Rim", "Rom"]
describe_sl_book "2Cor", ["2. Korincanom", "2. Korinčanom", "2 Korincanom", "2 Korinčanom", "2 Kor", "2Cor"]
describe_sl_book "1Cor", ["1. Korincanom", "1. Korinčanom", "1 Korincanom", "1 Korinčanom", "1 Kor", "1Cor"]
describe_sl_book "Gal", ["Galacanom", "Galačanom", "Gal"]
describe_sl_book "Eph", ["Efezanom", "Efežanom", "Eph", "Ef"]
describe_sl_book "Phil", ["Filipljanom", "Phil", "Flp"]
describe_sl_book "Col", ["Kolosanom", "Kološanom", "Col", "Kol"]
describe_sl_book "2Thess", ["2. Tesalonicanom", "2. Tesaloničanom", "2 Tesalonicanom", "2 Tesaloničanom", "2Thess", "2 Tes"]
describe_sl_book "1Thess", ["1. Tesalonicanom", "1. Tesaloničanom", "1 Tesalonicanom", "1 Tesaloničanom", "1Thess", "1 Tes"]
describe_sl_book "2Tim", ["2. Timoteju", "2 Timoteju", "2 Tim", "2Tim"]
describe_sl_book "1Tim", ["1. Timoteju", "1 Timoteju", "1 Tim", "1Tim"]
describe_sl_book "Titus", ["Titus", "Titu", "Tit"]
describe_sl_book "Phlm", ["Filemonu", "Phlm", "Flm"]
describe_sl_book "Heb", ["Hebrejcem", "Heb"]
describe_sl_book "Jas", ["Jakob", "Jak", "Jas"]
describe_sl_book "2Pet", ["2. Petrovo pismo", "2 Petrovo pismo", "2 Pt", "2Pet"]
describe_sl_book "1Pet", ["1. Petrovo pismo", "1 Petrovo pismo", "1 Pt", "1Pet"]
describe_sl_book "Jude", ["Juda", "Jude", "Jud"]
describe_sl_book "Tob", ["Tob"], no
describe "Localized book Jdt (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (sl)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (sl)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (sl)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (sl)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (sl)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (sl)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (sl)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["sl"]
it "should handle ranges (sl)", ->
expect(p.parse("Titus 1:1 do 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1do2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 DO 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (sl)", ->
expect(p.parse("Titus 1:1, poglavje 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 POGLAVJE 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (sl)", ->
expect(p.parse("Exod 1:1 stavek 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm STAVEK 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (sl)", ->
expect(p.parse("Exod 1:1 in 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 IN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (sl)", ->
expect(p.parse("Ps 3 Naslov, 4:2, 5:Naslov").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 NASLOV, 4:2, 5:NASLOV").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (sl)", ->
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (sl)", ->
expect(p.parse("Lev 1 (chr)").osis_and_translations()).toEqual [["Lev.1", "chr"]]
expect(p.parse("lev 1 chr").osis_and_translations()).toEqual [["Lev.1", "chr"]]
it "should handle book ranges (sl)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("1 do 3 Janezovo pismo").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (sl)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the generated Slovenian-locale parser under test.
bcv_parser = require("../../js/sl_bcv_parser.js").bcv_parser
# Round-trip sanity checks: every canonical OSIS book abbreviation must parse
# back to itself in "b.c", "b.c.v", and "b.c.v-b.c.v" forms; the Apocrypha
# books additionally exercise the Ps151 strategies and the apocrypha toggle.
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# Canonical 66-book Protestant OSIS order.
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With ps151_strategy "bc", Ps151 references normalize to Ps.151.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# With the Apocrypha disabled, none of these books should parse at all.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Auto-generated localized book-name specs ("sl" locale): each describe checks
# that every known spelling/abbreviation of one book parses to the right OSIS
# reference, both in mixed case and (with the Apocrypha disabled) in all caps.
describe "Localized book Gen (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (sl)", ->
		`
		expect(p.parse("1. Mojzesova 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mojzesova 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mz 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. MOJZESOVA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOJZESOVA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MZ 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
describe "Localized book Exod (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (sl)", ->
		`
		expect(p.parse("2. Mojzesova 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mojzesova 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mz 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. MOJZESOVA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOJZESOVA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MZ 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		`
		true
describe "Localized book Bel (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (sl)", ->
		`
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
describe "Localized book Lev (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (sl)", ->
		`
		expect(p.parse("3. Mojzesova 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mojzesova 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mz 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("3. MOJZESOVA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOJZESOVA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MZ 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
describe "Localized book Num (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (sl)", ->
		`
		expect(p.parse("4. Mojzesova 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mojzesova 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mz 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("4. MOJZESOVA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOJZESOVA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MZ 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
describe "Localized book Sir (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (sl)", ->
		`
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
describe "Localized book Wis (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (sl)", ->
		`
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
describe "Localized book Lam (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (sl)", ->
		`
		expect(p.parse("Zalostinke 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Žalostinke 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Zal 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Žal 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZALOSTINKE 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("ŽALOSTINKE 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("ZAL 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("ŽAL 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (sl)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
describe "Localized book Rev (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (sl)", ->
		`
		expect(p.parse("Razodetje 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Raz 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RAZODETJE 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("RAZ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (sl)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
describe "Localized book Deut (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (sl)", ->
		`
		expect(p.parse("5. Mojzesova 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mojzesova 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mz 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("5. MOJZESOVA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MOJZESOVA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MZ 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		`
		true
describe "Localized book Josh (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (sl)", ->
		`
		expect(p.parse("Jozue 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joz 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOZUE 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOZ 1:1").osis()).toEqual("Josh.1.1")
		`
		true
describe "Localized book Judg (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (sl)", ->
		`
		expect(p.parse("Sodniki 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Sod 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("SODNIKI 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("SOD 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (sl)", ->
		`
		expect(p.parse("Ruta 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTA 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (sl)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (sl)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (sl)", ->
		`
		expect(p.parse("Izaija 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Iz 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("IZAIJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("IZ 1:1").osis()).toEqual("Isa.1.1")
		`
		true
# Restored garbled placeholders: the lowercase "2 Samuelova" variants on the
# adjacent lines pin the corrupted strings to "2. Samuelova" / "2. SAMUELOVA".
describe "Localized book 2Sam (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (sl)", ->
		`
		expect(p.parse("2. Samuelova 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuelova 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. SAMUELOVA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELOVA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
# Restored garbled placeholders: mirrors the intact 2Sam block — the uppercase
# "1 SAMUELOVA" twin pins the corrupted strings to the "Samuelova" spellings.
describe "Localized book 1Sam (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (sl)", ->
		`
		expect(p.parse("1. Samuelova 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuelova 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. SAMUELOVA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUELOVA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
# Auto-generated localized book-name specs ("sl" locale) for 1-2 Kings.
describe "Localized book 2Kgs (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (sl)", ->
		`
		expect(p.parse("2. Kraljev 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Kraljev 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Kr 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. KRALJEV 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KRALJEV 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KR 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
describe "Localized book 1Kgs (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (sl)", ->
		`
		expect(p.parse("1. Kraljev 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Kraljev 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Kr 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. KRALJEV 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 KRALJEV 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 KR 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true
# Fixed: the abbreviation tests used "1 Krn"/"1 KRN" but expected 2Chr, which
# contradicts the 1Chr spec (where "1 Krn" maps to 1Chr) — the same input
# cannot parse to both books, so one spec had to fail. Corrected to "2 Krn".
describe "Localized book 2Chr (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (sl)", ->
		`
		expect(p.parse("2. Kroniska 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. Kroniška 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kroniska 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kroniška 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Krn 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. KRONISKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. KRONIŠKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRONISKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRONIŠKA 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRN 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true
# Auto-generated localized book-name specs ("sl" locale): each describe checks
# that every known spelling/abbreviation of one book parses to the right OSIS
# reference, both in mixed case and (with the Apocrypha disabled) in all caps.
describe "Localized book 1Chr (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (sl)", ->
		`
		expect(p.parse("1. Kroniska 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. Kroniška 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kroniska 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kroniška 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Krn 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. KRONISKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. KRONIŠKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRONISKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRONIŠKA 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRN 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true
describe "Localized book Ezra (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (sl)", ->
		`
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezr 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZR 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
describe "Localized book Neh (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (sl)", ->
		`
		expect(p.parse("Nehemija 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NEHEMIJA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true
describe "Localized book GkEsth (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (sl)", ->
		`
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		`
		true
describe "Localized book Esth (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (sl)", ->
		`
		expect(p.parse("Estera 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ESTERA 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
		`
		true
describe "Localized book Job (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (sl)", ->
		`
		expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
		`
		true
describe "Localized book Ps (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (sl)", ->
		`
		expect(p.parse("Psalmi 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PSALMI 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
		`
		true
describe "Localized book PrAzar (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (sl)", ->
		`
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		`
		true
describe "Localized book Prov (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (sl)", ->
		`
		expect(p.parse("Pregovori 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prg 1:1").osis()).toEqual("Prov.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PREGOVORI 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PRG 1:1").osis()).toEqual("Prov.1.1")
		`
		true
describe "Localized book Eccl (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (sl)", ->
		`
		expect(p.parse("Pridigar 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Prd 1:1").osis()).toEqual("Eccl.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PRIDIGAR 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("PRD 1:1").osis()).toEqual("Eccl.1.1")
		`
		true
describe "Localized book SgThree (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (sl)", ->
		`
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		`
		true
describe "Localized book Song (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (sl)", ->
		`
		expect(p.parse("Visoka pesem 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Vp 1:1").osis()).toEqual("Song.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VISOKA PESEM 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("VP 1:1").osis()).toEqual("Song.1.1")
		`
		true
describe "Localized book Jer (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (sl)", ->
		`
		expect(p.parse("Jeremija 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JEREMIJA 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
		`
		true
describe "Localized book Ezek (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (sl)", ->
		`
		expect(p.parse("Ezekiel 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezk 1:1").osis()).toEqual("Ezek.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EZEKIEL 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZK 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
# Restored garbled placeholders: the uppercase twin "DANIEL 1:1" pins the
# corrupted test string to "Daniel", and the it-title to the OSIS id "Dan".
describe "Localized book Dan (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (sl)", ->
		`
		expect(p.parse("Daniel 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
		p.include_apocrypha(false)
		expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
		`
		true
# Restored garbled placeholder in the it-title: every other block uses the
# plain OSIS id ("should handle book: Hos (sl)"), matching the describe title.
describe "Localized book Hos (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (sl)", ->
		`
		expect(p.parse("Ozej 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Oz 1:1").osis()).toEqual("Hos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("OZEJ 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("OZ 1:1").osis()).toEqual("Hos.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of Joel resolve to OSIS "Joel". The
# describe label was a garbled placeholder ("<NAME>el"); restored to "Joel"
# to match the it-label and expectations.
describe "Localized book Joel (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (sl)", ->
		`
		expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Jl 1:1").osis()).toEqual("Joel.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JL 1:1").osis()).toEqual("Joel.1.1")
		`
		true
# Slovenian (sl) parsing specs for Amos and Obadiah: local names, abbreviations,
# and upper-case forms (Apocrypha disabled) must yield the expected OSIS refs.
describe "Localized book Amos (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (sl)", ->
		`
		expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
		`
		true
describe "Localized book Obad (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (sl)", ->
		`
		expect(p.parse("Abdija 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Abd 1:1").osis()).toEqual("Obad.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ABDIJA 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("ABD 1:1").osis()).toEqual("Obad.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of Jonah ("Jona", "Jon") resolve to OSIS
# "Jonah". The describe/it labels and the first parse input were garbled
# placeholders ("<NAME>"); reconstructed from the upper-case parallel "JONAH".
describe "Localized book Jonah (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (sl)", ->
		`
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jon 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JON 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of Micah ("Mihej", "Mih") resolve to OSIS
# "Mic". The describe/it labels were garbled placeholders ("<NAME>ic");
# restored to "Mic" to match the expectations.
describe "Localized book Mic (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (sl)", ->
		`
		expect(p.parse("Mihej 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mih 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MIHEJ 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIH 1:1").osis()).toEqual("Mic.1.1")
		`
		true
# Slovenian (sl) parsing specs for the remaining Minor Prophets (Nahum through
# Malachi): local names, abbreviations, and upper-case forms (Apocrypha
# disabled) must yield the expected OSIS references.
describe "Localized book Nah (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (sl)", ->
		`
		expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
		`
		true
describe "Localized book Hab (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (sl)", ->
		`
		expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
		`
		true
describe "Localized book Zeph (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (sl)", ->
		`
		expect(p.parse("Sofonija 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Sof 1:1").osis()).toEqual("Zeph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("SOFONIJA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("SOF 1:1").osis()).toEqual("Zeph.1.1")
		`
		true
describe "Localized book Hag (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (sl)", ->
		`
		expect(p.parse("Agej 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Ag 1:1").osis()).toEqual("Hag.1.1")
		p.include_apocrypha(false)
		expect(p.parse("AGEJ 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("AG 1:1").osis()).toEqual("Hag.1.1")
		`
		true
describe "Localized book Zech (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (sl)", ->
		`
		expect(p.parse("Zaharija 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zah 1:1").osis()).toEqual("Zech.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ZAHARIJA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZAH 1:1").osis()).toEqual("Zech.1.1")
		`
		true
describe "Localized book Mal (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (sl)", ->
		`
		expect(p.parse("Malahija 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MALAHIJA 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of Matthew ("Matej", "Mt") resolve to OSIS
# "Matt". The describe/it labels were garbled placeholders ("<NAME>att");
# restored to "Matt" to match the expectations.
describe "Localized book Matt (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (sl)", ->
		`
		expect(p.parse("Matej 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MATEJ 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of Mark ("Marko", "Mr") resolve to OSIS
# "Mark". The it-label was a garbled placeholder ("<NAME>"); restored to
# "Mark" to match the describe label.
describe "Localized book Mark (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (sl)", ->
		`
		expect(p.parse("Marko 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mr 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MARKO 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MR 1:1").osis()).toEqual("Mark.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of Luke ("Luka", "Lk") resolve to OSIS
# "Luke". The describe/it labels were garbled placeholders ("<NAME>");
# restored to "Luke" to match the expectations.
describe "Localized book Luke (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (sl)", ->
		`
		expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lk 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LK 1:1").osis()).toEqual("Luke.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of 1 John ("1. Janezovo pismo") resolve to
# OSIS "1John". The it-label and the upper-case "1JOHN" input were garbled
# placeholders ("<NAME>"); reconstructed from the lower-case parallel "1John".
describe "Localized book 1John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (sl)", ->
		`
		expect(p.parse("1. Janezovo pismo 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Janezovo pismo 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Jn 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. JANEZOVO PISMO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JANEZOVO PISMO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JN 1:1").osis()).toEqual("1John.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of 2 John ("2. Janezovo pismo") resolve to
# OSIS "2John". The describe/it labels were garbled placeholders ("<NAME>");
# restored to "2John" to match the expectations.
describe "Localized book 2John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (sl)", ->
		`
		expect(p.parse("2. Janezovo pismo 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Janezovo pismo 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Jn 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. JANEZOVO PISMO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JANEZOVO PISMO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JN 1:1").osis()).toEqual("2John.1.1")
		`
		true
# Slovenian (sl) parsing spec for 3 John: ordinal and plain-number forms of
# "Janezovo pismo", plus abbreviations and upper-case variants, must yield
# OSIS "3John" references.
describe "Localized book 3John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (sl)", ->
		`
		expect(p.parse("3. Janezovo pismo 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Janezovo pismo 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Jn 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("3. JANEZOVO PISMO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JANEZOVO PISMO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JN 1:1").osis()).toEqual("3John.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of John ("Janez", "Jn") resolve to OSIS
# "John". The describe/it labels and two parse inputs were garbled
# placeholders ("<NAME>"); reconstructed as "John"/"JOHN" per the pattern used
# by every other gospel spec (local name, English OSIS name, abbreviation).
describe "Localized book John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (sl)", ->
		`
		expect(p.parse("Janez 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Jn 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JANEZ 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JN 1:1").osis()).toEqual("John.1.1")
		`
		true
# Slovenian (sl) parsing specs for Acts through 2 Timothy: local names (with
# and without diacritics, e.g. "Korincanom"/"Korinčanom"), abbreviations, and
# upper-case forms (Apocrypha disabled) must yield the expected OSIS refs.
describe "Localized book Acts (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (sl)", ->
		`
		expect(p.parse("Apostolska dela 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Apd 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		expect(p.parse("APOSTOLSKA DELA 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("APD 1:1").osis()).toEqual("Acts.1.1")
		`
		true
describe "Localized book Rom (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (sl)", ->
		`
		expect(p.parse("Rimljanom 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rim 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RIMLJANOM 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("RIM 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
describe "Localized book 2Cor (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (sl)", ->
		`
		expect(p.parse("2. Korincanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. Korinčanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korincanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korinčanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. KORINCANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINČANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINCANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINČANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
		`
		true
describe "Localized book 1Cor (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (sl)", ->
		`
		expect(p.parse("1. Korincanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinčanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korincanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinčanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. KORINCANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINČANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINCANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINČANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		true
describe "Localized book Gal (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (sl)", ->
		`
		expect(p.parse("Galacanom 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Galačanom 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GALACANOM 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GALAČANOM 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
		`
		true
describe "Localized book Eph (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (sl)", ->
		`
		expect(p.parse("Efezanom 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efežanom 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EFEZANOM 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFEŽANOM 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
		`
		true
describe "Localized book Phil (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (sl)", ->
		`
		expect(p.parse("Filipljanom 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Flp 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FILIPLJANOM 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FLP 1:1").osis()).toEqual("Phil.1.1")
		`
		true
describe "Localized book Col (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (sl)", ->
		`
		expect(p.parse("Kolosanom 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kološanom 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KOLOSANOM 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOŠANOM 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
		`
		true
describe "Localized book 2Thess (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (sl)", ->
		`
		expect(p.parse("2. Tesalonicanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tesaloničanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tesalonicanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tesaloničanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tes 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. TESALONICANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TESALONIČANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESALONICANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESALONIČANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TES 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (sl)", ->
		`
		expect(p.parse("1. Tesalonicanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tesaloničanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tesalonicanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tesaloničanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tes 1:1").osis()).toEqual("1Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. TESALONICANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TESALONIČANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESALONICANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESALONIČANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TES 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
describe "Localized book 2Tim (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (sl)", ->
		`
		expect(p.parse("2. Timoteju 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timoteju 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. TIMOTEJU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTEJU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
# Verifies Slovenian (sl) spellings of 1 Timothy ("1. Timoteju") resolve to
# OSIS "1Tim". The upper-case input "1. TIMOTEJU" was garbled by a placeholder
# ("1. TIM<NAME>JU"); reconstructed from the lower-case parallel "1. Timoteju".
describe "Localized book 1Tim (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (sl)", ->
		`
		expect(p.parse("1. Timoteju 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timoteju 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. TIMOTEJU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTEJU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
# Slovenian (sl) parsing specs for Titus through 1 Maccabees. The deuterocanon
# specs (Tob, Jdt, Bar, Sus, 1-4Macc) only test the OSIS abbreviation itself
# and run with the Apocrypha enabled.
describe "Localized book Titus (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (sl)", ->
		`
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titu 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITU 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
		`
		true
describe "Localized book Phlm (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (sl)", ->
		`
		expect(p.parse("Filemonu 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Flm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FILEMONU 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
describe "Localized book Heb (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (sl)", ->
		`
		expect(p.parse("Hebrejcem 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HEBREJCEM 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
describe "Localized book Jas (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (sl)", ->
		`
		expect(p.parse("Jakob 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JAKOB 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
describe "Localized book 2Pet (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (sl)", ->
		`
		expect(p.parse("2. Petrovo pismo 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petrovo pismo 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Pt 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. PETROVO PISMO 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETROVO PISMO 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PT 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
describe "Localized book 1Pet (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (sl)", ->
		`
		expect(p.parse("1. Petrovo pismo 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petrovo pismo 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Pt 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. PETROVO PISMO 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETROVO PISMO 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PT 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
describe "Localized book Jude (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (sl)", ->
		`
		expect(p.parse("Juda 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JUDA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
		`
		true
describe "Localized book Tob (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (sl)", ->
		`
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
describe "Localized book Jdt (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (sl)", ->
		`
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
describe "Localized book Bar (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (sl)", ->
		`
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
describe "Localized book Sus (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (sl)", ->
		`
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
describe "Localized book 2Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (sl)", ->
		`
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
describe "Localized book 3Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (sl)", ->
		`
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
describe "Localized book 4Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (sl)", ->
		`
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
describe "Localized book 1Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (sl)", ->
		`
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Cross-cutting Slovenian (sl) grammar specs: range ("do"), chapter
# ("poglavje"), verse ("stavek"), conjunction ("in"), title ("naslov"), "ff",
# translation tags, book-to-book ranges, and punctuation boundaries.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["sl"]
	it "should handle ranges (sl)", ->
		expect(p.parse("Titus 1:1 do 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1do2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 DO 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (sl)", ->
		expect(p.parse("Titus 1:1, poglavje 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 POGLAVJE 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (sl)", ->
		expect(p.parse("Exod 1:1 stavek 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm STAVEK 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (sl)", ->
		expect(p.parse("Exod 1:1 in 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 IN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (sl)", ->
		expect(p.parse("Ps 3 Naslov, 4:2, 5:Naslov").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 NASLOV, 4:2, 5:NASLOV").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (sl)", ->
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (sl)", ->
		expect(p.parse("Lev 1 (chr)").osis_and_translations()).toEqual [["Lev.1", "chr"]]
		expect(p.parse("lev 1 chr").osis_and_translations()).toEqual [["Lev.1", "chr"]]
	it "should handle book ranges (sl)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("1 do 3 Janezovo pismo").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (sl)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the Slovenian build of the parser under test.
# (Removed stray "| true |" concatenation junk that made this line invalid CoffeeScript.)
bcv_parser = require("../../js/sl_bcv_parser.js").bcv_parser
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# Canonical protestant book list, restored: several entries (Dan..Nah, Mal..John)
		# had been clobbered by redaction placeholders.
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		p.include_apocrypha false
		# With the Apocrypha disabled, the same references must not parse at all.
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("1. Mojzesova 1:1").osis()).toEqual "Gen.1.1"
		expect(p.parse("1 Mojzesova 1:1").osis()).toEqual "Gen.1.1"
		expect(p.parse("1 Mz 1:1").osis()).toEqual "Gen.1.1"
		expect(p.parse("Gen 1:1").osis()).toEqual "Gen.1.1"
		p.include_apocrypha false
		expect(p.parse("1. MOJZESOVA 1:1").osis()).toEqual "Gen.1.1"
		expect(p.parse("1 MOJZESOVA 1:1").osis()).toEqual "Gen.1.1"
		expect(p.parse("1 MZ 1:1").osis()).toEqual "Gen.1.1"
		expect(p.parse("GEN 1:1").osis()).toEqual "Gen.1.1"
		true
describe "Localized book Exod (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("2. Mojzesova 1:1").osis()).toEqual "Exod.1.1"
		expect(p.parse("2 Mojzesova 1:1").osis()).toEqual "Exod.1.1"
		expect(p.parse("2 Mz 1:1").osis()).toEqual "Exod.1.1"
		expect(p.parse("Exod 1:1").osis()).toEqual "Exod.1.1"
		p.include_apocrypha false
		expect(p.parse("2. MOJZESOVA 1:1").osis()).toEqual "Exod.1.1"
		expect(p.parse("2 MOJZESOVA 1:1").osis()).toEqual "Exod.1.1"
		expect(p.parse("2 MZ 1:1").osis()).toEqual "Exod.1.1"
		expect(p.parse("EXOD 1:1").osis()).toEqual "Exod.1.1"
		true
describe "Localized book Bel (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("Bel 1:1").osis()).toEqual "Bel.1.1"
		true
describe "Localized book Lev (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("3. Mojzesova 1:1").osis()).toEqual "Lev.1.1"
		expect(p.parse("3 Mojzesova 1:1").osis()).toEqual "Lev.1.1"
		expect(p.parse("3 Mz 1:1").osis()).toEqual "Lev.1.1"
		expect(p.parse("Lev 1:1").osis()).toEqual "Lev.1.1"
		p.include_apocrypha false
		expect(p.parse("3. MOJZESOVA 1:1").osis()).toEqual "Lev.1.1"
		expect(p.parse("3 MOJZESOVA 1:1").osis()).toEqual "Lev.1.1"
		expect(p.parse("3 MZ 1:1").osis()).toEqual "Lev.1.1"
		expect(p.parse("LEV 1:1").osis()).toEqual "Lev.1.1"
		true
describe "Localized book Num (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("4. Mojzesova 1:1").osis()).toEqual "Num.1.1"
		expect(p.parse("4 Mojzesova 1:1").osis()).toEqual "Num.1.1"
		expect(p.parse("4 Mz 1:1").osis()).toEqual "Num.1.1"
		expect(p.parse("Num 1:1").osis()).toEqual "Num.1.1"
		p.include_apocrypha false
		expect(p.parse("4. MOJZESOVA 1:1").osis()).toEqual "Num.1.1"
		expect(p.parse("4 MOJZESOVA 1:1").osis()).toEqual "Num.1.1"
		expect(p.parse("4 MZ 1:1").osis()).toEqual "Num.1.1"
		expect(p.parse("NUM 1:1").osis()).toEqual "Num.1.1"
		true
describe "Localized book Sir (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("Sir 1:1").osis()).toEqual "Sir.1.1"
		true
describe "Localized book Wis (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("Wis 1:1").osis()).toEqual "Wis.1.1"
		true
describe "Localized book Lam (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Zalostinke 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("Žalostinke 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("Lam 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("Zal 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("Žal 1:1").osis()).toEqual "Lam.1.1"
		p.include_apocrypha false
		expect(p.parse("ZALOSTINKE 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("ŽALOSTINKE 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("LAM 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("ZAL 1:1").osis()).toEqual "Lam.1.1"
		expect(p.parse("ŽAL 1:1").osis()).toEqual "Lam.1.1"
		true
describe "Localized book EpJer (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("EpJer 1:1").osis()).toEqual "EpJer.1.1"
		true
describe "Localized book Rev (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Razodetje 1:1").osis()).toEqual "Rev.1.1"
		expect(p.parse("Raz 1:1").osis()).toEqual "Rev.1.1"
		expect(p.parse("Rev 1:1").osis()).toEqual "Rev.1.1"
		p.include_apocrypha false
		expect(p.parse("RAZODETJE 1:1").osis()).toEqual "Rev.1.1"
		expect(p.parse("RAZ 1:1").osis()).toEqual "Rev.1.1"
		expect(p.parse("REV 1:1").osis()).toEqual "Rev.1.1"
		true
describe "Localized book PrMan (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("PrMan 1:1").osis()).toEqual "PrMan.1.1"
		true
describe "Localized book Deut (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("5. Mojzesova 1:1").osis()).toEqual "Deut.1.1"
		expect(p.parse("5 Mojzesova 1:1").osis()).toEqual "Deut.1.1"
		expect(p.parse("5 Mz 1:1").osis()).toEqual "Deut.1.1"
		expect(p.parse("Deut 1:1").osis()).toEqual "Deut.1.1"
		p.include_apocrypha false
		expect(p.parse("5. MOJZESOVA 1:1").osis()).toEqual "Deut.1.1"
		expect(p.parse("5 MOJZESOVA 1:1").osis()).toEqual "Deut.1.1"
		expect(p.parse("5 MZ 1:1").osis()).toEqual "Deut.1.1"
		expect(p.parse("DEUT 1:1").osis()).toEqual "Deut.1.1"
		true
describe "Localized book Josh (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Jozue 1:1").osis()).toEqual "Josh.1.1"
		expect(p.parse("Josh 1:1").osis()).toEqual "Josh.1.1"
		expect(p.parse("Joz 1:1").osis()).toEqual "Josh.1.1"
		p.include_apocrypha false
		expect(p.parse("JOZUE 1:1").osis()).toEqual "Josh.1.1"
		expect(p.parse("JOSH 1:1").osis()).toEqual "Josh.1.1"
		expect(p.parse("JOZ 1:1").osis()).toEqual "Josh.1.1"
		true
describe "Localized book Judg (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Sodniki 1:1").osis()).toEqual "Judg.1.1"
		expect(p.parse("Judg 1:1").osis()).toEqual "Judg.1.1"
		expect(p.parse("Sod 1:1").osis()).toEqual "Judg.1.1"
		p.include_apocrypha false
		expect(p.parse("SODNIKI 1:1").osis()).toEqual "Judg.1.1"
		expect(p.parse("JUDG 1:1").osis()).toEqual "Judg.1.1"
		expect(p.parse("SOD 1:1").osis()).toEqual "Judg.1.1"
		true
describe "Localized book Ruth (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Ruta 1:1").osis()).toEqual "Ruth.1.1"
		expect(p.parse("Ruth 1:1").osis()).toEqual "Ruth.1.1"
		expect(p.parse("Rut 1:1").osis()).toEqual "Ruth.1.1"
		p.include_apocrypha false
		expect(p.parse("RUTA 1:1").osis()).toEqual "Ruth.1.1"
		expect(p.parse("RUTH 1:1").osis()).toEqual "Ruth.1.1"
		expect(p.parse("RUT 1:1").osis()).toEqual "Ruth.1.1"
		true
describe "Localized book 1Esd (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("1Esd 1:1").osis()).toEqual "1Esd.1.1"
		true
describe "Localized book 2Esd (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("2Esd 1:1").osis()).toEqual "2Esd.1.1"
		true
describe "Localized book Isa (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Izaija 1:1").osis()).toEqual "Isa.1.1"
		expect(p.parse("Isa 1:1").osis()).toEqual "Isa.1.1"
		expect(p.parse("Iz 1:1").osis()).toEqual "Isa.1.1"
		p.include_apocrypha false
		expect(p.parse("IZAIJA 1:1").osis()).toEqual "Isa.1.1"
		expect(p.parse("ISA 1:1").osis()).toEqual "Isa.1.1"
		expect(p.parse("IZ 1:1").osis()).toEqual "Isa.1.1"
		true
describe "Localized book 2Sam (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (sl)", ->
		# Restored "Samuelova", which had been replaced by a redaction placeholder;
		# the surviving uppercase assertions confirm the intended string.
		expect(p.parse("2. Samuelova 1:1").osis()).toEqual "2Sam.1.1"
		expect(p.parse("2 Samuelova 1:1").osis()).toEqual "2Sam.1.1"
		expect(p.parse("2 Sam 1:1").osis()).toEqual "2Sam.1.1"
		expect(p.parse("2Sam 1:1").osis()).toEqual "2Sam.1.1"
		p.include_apocrypha false
		expect(p.parse("2. SAMUELOVA 1:1").osis()).toEqual "2Sam.1.1"
		expect(p.parse("2 SAMUELOVA 1:1").osis()).toEqual "2Sam.1.1"
		expect(p.parse("2 SAM 1:1").osis()).toEqual "2Sam.1.1"
		expect(p.parse("2SAM 1:1").osis()).toEqual "2Sam.1.1"
		true
describe "Localized book 1Sam (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (sl)", ->
		# Restored "Samuelova", which had been replaced by redaction placeholders;
		# the surviving uppercase assertions confirm the intended strings.
		expect(p.parse("1. Samuelova 1:1").osis()).toEqual "1Sam.1.1"
		expect(p.parse("1 Samuelova 1:1").osis()).toEqual "1Sam.1.1"
		expect(p.parse("1 Sam 1:1").osis()).toEqual "1Sam.1.1"
		expect(p.parse("1Sam 1:1").osis()).toEqual "1Sam.1.1"
		p.include_apocrypha false
		expect(p.parse("1. SAMUELOVA 1:1").osis()).toEqual "1Sam.1.1"
		expect(p.parse("1 SAMUELOVA 1:1").osis()).toEqual "1Sam.1.1"
		expect(p.parse("1 SAM 1:1").osis()).toEqual "1Sam.1.1"
		expect(p.parse("1SAM 1:1").osis()).toEqual "1Sam.1.1"
		true
describe "Localized book 2Kgs (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("2. Kraljev 1:1").osis()).toEqual "2Kgs.1.1"
		expect(p.parse("2 Kraljev 1:1").osis()).toEqual "2Kgs.1.1"
		expect(p.parse("2 Kr 1:1").osis()).toEqual "2Kgs.1.1"
		expect(p.parse("2Kgs 1:1").osis()).toEqual "2Kgs.1.1"
		p.include_apocrypha false
		expect(p.parse("2. KRALJEV 1:1").osis()).toEqual "2Kgs.1.1"
		expect(p.parse("2 KRALJEV 1:1").osis()).toEqual "2Kgs.1.1"
		expect(p.parse("2 KR 1:1").osis()).toEqual "2Kgs.1.1"
		expect(p.parse("2KGS 1:1").osis()).toEqual "2Kgs.1.1"
		true
describe "Localized book 1Kgs (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("1. Kraljev 1:1").osis()).toEqual "1Kgs.1.1"
		expect(p.parse("1 Kraljev 1:1").osis()).toEqual "1Kgs.1.1"
		expect(p.parse("1 Kr 1:1").osis()).toEqual "1Kgs.1.1"
		expect(p.parse("1Kgs 1:1").osis()).toEqual "1Kgs.1.1"
		p.include_apocrypha false
		expect(p.parse("1. KRALJEV 1:1").osis()).toEqual "1Kgs.1.1"
		expect(p.parse("1 KRALJEV 1:1").osis()).toEqual "1Kgs.1.1"
		expect(p.parse("1 KR 1:1").osis()).toEqual "1Kgs.1.1"
		expect(p.parse("1KGS 1:1").osis()).toEqual "1Kgs.1.1"
		true
describe "Localized book 2Chr (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (sl)", ->
		# Fixed: the abbreviation here was "1 Krn"/"1 KRN", which the 1Chr spec
		# maps to 1Chr — the same input cannot also parse as 2Chr. Use "2 Krn".
		expect(p.parse("2. Kroniska 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2. Kroniška 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2 Kroniska 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2 Kroniška 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2 Krn 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2Chr 1:1").osis()).toEqual "2Chr.1.1"
		p.include_apocrypha false
		expect(p.parse("2. KRONISKA 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2. KRONIŠKA 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2 KRONISKA 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2 KRONIŠKA 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2 KRN 1:1").osis()).toEqual "2Chr.1.1"
		expect(p.parse("2CHR 1:1").osis()).toEqual "2Chr.1.1"
		true
describe "Localized book 1Chr (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("1. Kroniska 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1. Kroniška 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1 Kroniska 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1 Kroniška 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1 Krn 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1Chr 1:1").osis()).toEqual "1Chr.1.1"
		p.include_apocrypha false
		expect(p.parse("1. KRONISKA 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1. KRONIŠKA 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1 KRONISKA 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1 KRONIŠKA 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1 KRN 1:1").osis()).toEqual "1Chr.1.1"
		expect(p.parse("1CHR 1:1").osis()).toEqual "1Chr.1.1"
		true
describe "Localized book Ezra (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Ezra 1:1").osis()).toEqual "Ezra.1.1"
		expect(p.parse("Ezr 1:1").osis()).toEqual "Ezra.1.1"
		p.include_apocrypha false
		expect(p.parse("EZRA 1:1").osis()).toEqual "Ezra.1.1"
		expect(p.parse("EZR 1:1").osis()).toEqual "Ezra.1.1"
		true
describe "Localized book Neh (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Nehemija 1:1").osis()).toEqual "Neh.1.1"
		expect(p.parse("Neh 1:1").osis()).toEqual "Neh.1.1"
		p.include_apocrypha false
		expect(p.parse("NEHEMIJA 1:1").osis()).toEqual "Neh.1.1"
		expect(p.parse("NEH 1:1").osis()).toEqual "Neh.1.1"
		true
describe "Localized book GkEsth (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("GkEsth 1:1").osis()).toEqual "GkEsth.1.1"
		true
describe "Localized book Esth (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Estera 1:1").osis()).toEqual "Esth.1.1"
		expect(p.parse("Esth 1:1").osis()).toEqual "Esth.1.1"
		expect(p.parse("Est 1:1").osis()).toEqual "Esth.1.1"
		p.include_apocrypha false
		expect(p.parse("ESTERA 1:1").osis()).toEqual "Esth.1.1"
		expect(p.parse("ESTH 1:1").osis()).toEqual "Esth.1.1"
		expect(p.parse("EST 1:1").osis()).toEqual "Esth.1.1"
		true
describe "Localized book Job (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Job 1:1").osis()).toEqual "Job.1.1"
		p.include_apocrypha false
		expect(p.parse("JOB 1:1").osis()).toEqual "Job.1.1"
		true
describe "Localized book Ps (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Psalmi 1:1").osis()).toEqual "Ps.1.1"
		expect(p.parse("Ps 1:1").osis()).toEqual "Ps.1.1"
		p.include_apocrypha false
		expect(p.parse("PSALMI 1:1").osis()).toEqual "Ps.1.1"
		expect(p.parse("PS 1:1").osis()).toEqual "Ps.1.1"
		true
describe "Localized book PrAzar (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("PrAzar 1:1").osis()).toEqual "PrAzar.1.1"
		true
describe "Localized book Prov (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Pregovori 1:1").osis()).toEqual "Prov.1.1"
		expect(p.parse("Prov 1:1").osis()).toEqual "Prov.1.1"
		expect(p.parse("Prg 1:1").osis()).toEqual "Prov.1.1"
		p.include_apocrypha false
		expect(p.parse("PREGOVORI 1:1").osis()).toEqual "Prov.1.1"
		expect(p.parse("PROV 1:1").osis()).toEqual "Prov.1.1"
		expect(p.parse("PRG 1:1").osis()).toEqual "Prov.1.1"
		true
describe "Localized book Eccl (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Pridigar 1:1").osis()).toEqual "Eccl.1.1"
		expect(p.parse("Eccl 1:1").osis()).toEqual "Eccl.1.1"
		expect(p.parse("Prd 1:1").osis()).toEqual "Eccl.1.1"
		p.include_apocrypha false
		expect(p.parse("PRIDIGAR 1:1").osis()).toEqual "Eccl.1.1"
		expect(p.parse("ECCL 1:1").osis()).toEqual "Eccl.1.1"
		expect(p.parse("PRD 1:1").osis()).toEqual "Eccl.1.1"
		true
describe "Localized book SgThree (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (sl)", ->
		# Apocryphal book: only the OSIS form is localized.
		expect(p.parse("SgThree 1:1").osis()).toEqual "SgThree.1.1"
		true
describe "Localized book Song (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Visoka pesem 1:1").osis()).toEqual "Song.1.1"
		expect(p.parse("Song 1:1").osis()).toEqual "Song.1.1"
		expect(p.parse("Vp 1:1").osis()).toEqual "Song.1.1"
		p.include_apocrypha false
		expect(p.parse("VISOKA PESEM 1:1").osis()).toEqual "Song.1.1"
		expect(p.parse("SONG 1:1").osis()).toEqual "Song.1.1"
		expect(p.parse("VP 1:1").osis()).toEqual "Song.1.1"
		true
describe "Localized book Jer (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Jeremija 1:1").osis()).toEqual "Jer.1.1"
		expect(p.parse("Jer 1:1").osis()).toEqual "Jer.1.1"
		p.include_apocrypha false
		expect(p.parse("JEREMIJA 1:1").osis()).toEqual "Jer.1.1"
		expect(p.parse("JER 1:1").osis()).toEqual "Jer.1.1"
		true
describe "Localized book Ezek (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Ezekiel 1:1").osis()).toEqual "Ezek.1.1"
		expect(p.parse("Ezek 1:1").osis()).toEqual "Ezek.1.1"
		expect(p.parse("Ezk 1:1").osis()).toEqual "Ezek.1.1"
		p.include_apocrypha false
		expect(p.parse("EZEKIEL 1:1").osis()).toEqual "Ezek.1.1"
		expect(p.parse("EZEK 1:1").osis()).toEqual "Ezek.1.1"
		expect(p.parse("EZK 1:1").osis()).toEqual "Ezek.1.1"
		true
describe "Localized book Dan (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (sl)", ->
		# Restored "Daniel" and the it-title, which had been replaced by redaction
		# placeholders; the surviving "DANIEL" assertion confirms the intended string.
		expect(p.parse("Daniel 1:1").osis()).toEqual "Dan.1.1"
		expect(p.parse("Dan 1:1").osis()).toEqual "Dan.1.1"
		p.include_apocrypha false
		expect(p.parse("DANIEL 1:1").osis()).toEqual "Dan.1.1"
		expect(p.parse("DAN 1:1").osis()).toEqual "Dan.1.1"
		true
describe "Localized book Hos (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (sl)", ->
		# Restored the it-title ("Hos"), which had been mangled by a redaction placeholder.
		expect(p.parse("Ozej 1:1").osis()).toEqual "Hos.1.1"
		expect(p.parse("Hos 1:1").osis()).toEqual "Hos.1.1"
		expect(p.parse("Oz 1:1").osis()).toEqual "Hos.1.1"
		p.include_apocrypha false
		expect(p.parse("OZEJ 1:1").osis()).toEqual "Hos.1.1"
		expect(p.parse("HOS 1:1").osis()).toEqual "Hos.1.1"
		expect(p.parse("OZ 1:1").osis()).toEqual "Hos.1.1"
		true
describe "Localized book Joel (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (sl)", ->
		# Restored the describe-title ("Joel"), which had been mangled by a redaction placeholder.
		expect(p.parse("Joel 1:1").osis()).toEqual "Joel.1.1"
		expect(p.parse("Jl 1:1").osis()).toEqual "Joel.1.1"
		p.include_apocrypha false
		expect(p.parse("JOEL 1:1").osis()).toEqual "Joel.1.1"
		expect(p.parse("JL 1:1").osis()).toEqual "Joel.1.1"
		true
describe "Localized book Amos (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Amos 1:1").osis()).toEqual "Amos.1.1"
		expect(p.parse("Am 1:1").osis()).toEqual "Amos.1.1"
		p.include_apocrypha false
		expect(p.parse("AMOS 1:1").osis()).toEqual "Amos.1.1"
		expect(p.parse("AM 1:1").osis()).toEqual "Amos.1.1"
		true
describe "Localized book Obad (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Abdija 1:1").osis()).toEqual "Obad.1.1"
		expect(p.parse("Obad 1:1").osis()).toEqual "Obad.1.1"
		expect(p.parse("Abd 1:1").osis()).toEqual "Obad.1.1"
		p.include_apocrypha false
		expect(p.parse("ABDIJA 1:1").osis()).toEqual "Obad.1.1"
		expect(p.parse("OBAD 1:1").osis()).toEqual "Obad.1.1"
		expect(p.parse("ABD 1:1").osis()).toEqual "Obad.1.1"
		true
describe "Localized book Jonah (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
	p.include_apocrypha true
	it "should handle book: Jonah (sl)", ->
		# Restored "Jonah" in the titles and the first assertion, which had been
		# replaced by redaction placeholders; "JONAH" below confirms the string.
		expect(p.parse("Jonah 1:1").osis()).toEqual "Jonah.1.1"
		expect(p.parse("Jona 1:1").osis()).toEqual "Jonah.1.1"
		expect(p.parse("Jon 1:1").osis()).toEqual "Jonah.1.1"
		p.include_apocrypha false
		expect(p.parse("JONAH 1:1").osis()).toEqual "Jonah.1.1"
		expect(p.parse("JONA 1:1").osis()).toEqual "Jonah.1.1"
		expect(p.parse("JON 1:1").osis()).toEqual "Jonah.1.1"
		true
describe "Localized book Mic (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (sl)", ->
		# Restored "Mic" in the titles, which had been mangled by redaction placeholders.
		expect(p.parse("Mihej 1:1").osis()).toEqual "Mic.1.1"
		expect(p.parse("Mic 1:1").osis()).toEqual "Mic.1.1"
		expect(p.parse("Mih 1:1").osis()).toEqual "Mic.1.1"
		p.include_apocrypha false
		expect(p.parse("MIHEJ 1:1").osis()).toEqual "Mic.1.1"
		expect(p.parse("MIC 1:1").osis()).toEqual "Mic.1.1"
		expect(p.parse("MIH 1:1").osis()).toEqual "Mic.1.1"
		true
describe "Localized book Nah (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Nahum 1:1").osis()).toEqual "Nah.1.1"
		expect(p.parse("Nah 1:1").osis()).toEqual "Nah.1.1"
		p.include_apocrypha false
		expect(p.parse("NAHUM 1:1").osis()).toEqual "Nah.1.1"
		expect(p.parse("NAH 1:1").osis()).toEqual "Nah.1.1"
		true
describe "Localized book Hab (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Habakuk 1:1").osis()).toEqual "Hab.1.1"
		expect(p.parse("Hab 1:1").osis()).toEqual "Hab.1.1"
		p.include_apocrypha false
		expect(p.parse("HABAKUK 1:1").osis()).toEqual "Hab.1.1"
		expect(p.parse("HAB 1:1").osis()).toEqual "Hab.1.1"
		true
describe "Localized book Zeph (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Sofonija 1:1").osis()).toEqual "Zeph.1.1"
		expect(p.parse("Zeph 1:1").osis()).toEqual "Zeph.1.1"
		expect(p.parse("Sof 1:1").osis()).toEqual "Zeph.1.1"
		p.include_apocrypha false
		expect(p.parse("SOFONIJA 1:1").osis()).toEqual "Zeph.1.1"
		expect(p.parse("ZEPH 1:1").osis()).toEqual "Zeph.1.1"
		expect(p.parse("SOF 1:1").osis()).toEqual "Zeph.1.1"
		true
describe "Localized book Hag (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Agej 1:1").osis()).toEqual "Hag.1.1"
		expect(p.parse("Hag 1:1").osis()).toEqual "Hag.1.1"
		expect(p.parse("Ag 1:1").osis()).toEqual "Hag.1.1"
		p.include_apocrypha false
		expect(p.parse("AGEJ 1:1").osis()).toEqual "Hag.1.1"
		expect(p.parse("HAG 1:1").osis()).toEqual "Hag.1.1"
		expect(p.parse("AG 1:1").osis()).toEqual "Hag.1.1"
		true
describe "Localized book Zech (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (sl)", ->
		# Same assertions, written as plain CoffeeScript instead of embedded JS.
		expect(p.parse("Zaharija 1:1").osis()).toEqual "Zech.1.1"
		expect(p.parse("Zech 1:1").osis()).toEqual "Zech.1.1"
		expect(p.parse("Zah 1:1").osis()).toEqual "Zech.1.1"
		p.include_apocrypha false
		expect(p.parse("ZAHARIJA 1:1").osis()).toEqual "Zech.1.1"
		expect(p.parse("ZECH 1:1").osis()).toEqual "Zech.1.1"
		expect(p.parse("ZAH 1:1").osis()).toEqual "Zech.1.1"
		true
describe "Localized book Mal (sl)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (sl)", ->
`
expect(p.parse("Malahija 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("MALAHIJA 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
# Spec for the localized names of Matthew (sl). The book name "Matt" in the
# describe/it titles was corrupted by a PII-scrubbing pass
# ("PI:NAME:<NAME>END_PIatt"); restored from the expected OSIS output
# "Matt.1.1" asserted below.
describe "Localized book Matt (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (sl)", ->
		`
		expect(p.parse("Matej 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MATEJ 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
# Spec for the localized names of Mark (sl). The book name "Mark" in the it
# title was corrupted by a PII-scrubbing pass ("PI:NAME:<NAME>END_PI");
# restored from the describe title and the "Mark.1.1" assertions below.
describe "Localized book Mark (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (sl)", ->
		`
		expect(p.parse("Marko 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mr 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MARKO 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MR 1:1").osis()).toEqual("Mark.1.1")
		`
		true
# Spec for the localized names of Luke (sl). The book name "Luke" in the
# describe/it titles was corrupted by a PII-scrubbing pass
# ("PI:NAME:<NAME>END_PI"); restored from the "Luke.1.1" assertions below.
describe "Localized book Luke (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (sl)", ->
		`
		expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Lk 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LK 1:1").osis()).toEqual("Luke.1.1")
		`
		true
# Spec for the localized names of 1 John (sl). A PII-scrubbing pass corrupted
# the it title and the uppercase "1JOHN" test literal
# ("PI:NAME:<NAME>END_PI"); restored from the lowercase "1John 1:1" input and
# the "1John.1.1" assertions, mirroring the 2John/3John specs.
describe "Localized book 1John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (sl)", ->
		`
		expect(p.parse("1. Janezovo pismo 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Janezovo pismo 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Jn 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. JANEZOVO PISMO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JANEZOVO PISMO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JN 1:1").osis()).toEqual("1John.1.1")
		`
		true
# Spec for the localized names of 2 John (sl). The book name "2John" in the
# describe/it titles was corrupted by a PII-scrubbing pass
# ("PI:NAME:<NAME>END_PI"); restored from the "2John.1.1" assertions below,
# mirroring the 3John spec.
describe "Localized book 2John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (sl)", ->
		`
		expect(p.parse("2. Janezovo pismo 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Janezovo pismo 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Jn 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. JANEZOVO PISMO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JANEZOVO PISMO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JN 1:1").osis()).toEqual("2John.1.1")
		`
		true
# Spec for the localized names of 3 John (sl): ordinal + "Janezovo pismo"
# forms, the English "3John" form, and the "3 Jn" abbreviation, each checked
# in mixed case and (after disabling the Apocrypha) uppercase.
describe "Localized book 3John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (sl)", ->
		`
		expect(p.parse("3. Janezovo pismo 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Janezovo pismo 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Jn 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("3. JANEZOVO PISMO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JANEZOVO PISMO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JN 1:1").osis()).toEqual("3John.1.1")
		`
		true
# Spec for the localized names of John (sl). A PII-scrubbing pass corrupted
# the describe/it titles and the "John"/"JOHN" test literals
# ("PI:NAME:<NAME>END_PI"); restored from the "John.1.1" assertions and the
# vernacular/OSIS/abbreviation pattern used by every other spec in this file.
describe "Localized book John (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (sl)", ->
		`
		expect(p.parse("Janez 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Jn 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JANEZ 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JN 1:1").osis()).toEqual("John.1.1")
		`
		true
# Auto-generated specs for Acts through 2 Timothy (sl). Each block follows
# the same pattern: vernacular name(s) (with and without diacritics where the
# Slovenian form has them, e.g. "Korincanom"/"Korinčanom"), the English OSIS
# form, and the local abbreviation — all in mixed case, then uppercase after
# p.include_apocrypha(false).
describe "Localized book Acts (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (sl)", ->
		`
		expect(p.parse("Apostolska dela 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Apd 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		expect(p.parse("APOSTOLSKA DELA 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("APD 1:1").osis()).toEqual("Acts.1.1")
		`
		true
describe "Localized book Rom (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (sl)", ->
		`
		expect(p.parse("Rimljanom 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rim 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RIMLJANOM 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("RIM 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
describe "Localized book 2Cor (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (sl)", ->
		`
		expect(p.parse("2. Korincanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. Korinčanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korincanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korinčanom 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. KORINCANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINČANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINCANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINČANOM 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
		`
		true
describe "Localized book 1Cor (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (sl)", ->
		`
		expect(p.parse("1. Korincanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinčanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korincanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinčanom 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. KORINCANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINČANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINCANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINČANOM 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		true
describe "Localized book Gal (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (sl)", ->
		`
		expect(p.parse("Galacanom 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Galačanom 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GALACANOM 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GALAČANOM 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
		`
		true
describe "Localized book Eph (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (sl)", ->
		`
		expect(p.parse("Efezanom 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efežanom 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EFEZANOM 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFEŽANOM 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
		`
		true
describe "Localized book Phil (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (sl)", ->
		`
		expect(p.parse("Filipljanom 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Flp 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FILIPLJANOM 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FLP 1:1").osis()).toEqual("Phil.1.1")
		`
		true
describe "Localized book Col (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (sl)", ->
		`
		expect(p.parse("Kolosanom 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kološanom 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KOLOSANOM 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOŠANOM 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
		`
		true
describe "Localized book 2Thess (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (sl)", ->
		`
		expect(p.parse("2. Tesalonicanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tesaloničanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tesalonicanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tesaloničanom 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tes 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. TESALONICANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TESALONIČANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESALONICANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESALONIČANOM 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TES 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (sl)", ->
		`
		expect(p.parse("1. Tesalonicanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tesaloničanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tesalonicanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tesaloničanom 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tes 1:1").osis()).toEqual("1Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. TESALONICANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TESALONIČANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESALONICANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESALONIČANOM 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TES 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
describe "Localized book 2Tim (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (sl)", ->
		`
		expect(p.parse("2. Timoteju 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timoteju 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. TIMOTEJU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTEJU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
# Spec for the localized names of 1 Timothy (sl). The uppercase literal
# "1. TIMOTEJU" was corrupted by a PII-scrubbing pass
# ("TIMPI:NAME:<NAME>END_PIJU"); restored from the mixed-case "1. Timoteju"
# input above it and the parallel 2Tim spec.
describe "Localized book 1Tim (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (sl)", ->
		`
		expect(p.parse("1. Timoteju 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timoteju 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. TIMOTEJU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTEJU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
# Auto-generated specs for Titus through 1 Maccabees (sl). The deuterocanonical
# books (Tob, Jdt, Bar, Sus, the Maccabees) only check the OSIS form itself and
# skip the uppercase pass, since they have no distinct Slovenian alias here.
describe "Localized book Titus (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (sl)", ->
		`
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titu 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITU 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
		`
		true
describe "Localized book Phlm (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (sl)", ->
		`
		expect(p.parse("Filemonu 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Flm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("FILEMONU 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
describe "Localized book Heb (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (sl)", ->
		`
		expect(p.parse("Hebrejcem 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HEBREJCEM 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
describe "Localized book Jas (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (sl)", ->
		`
		expect(p.parse("Jakob 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JAKOB 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
describe "Localized book 2Pet (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (sl)", ->
		`
		expect(p.parse("2. Petrovo pismo 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petrovo pismo 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Pt 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2. PETROVO PISMO 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETROVO PISMO 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PT 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
describe "Localized book 1Pet (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (sl)", ->
		`
		expect(p.parse("1. Petrovo pismo 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petrovo pismo 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Pt 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. PETROVO PISMO 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETROVO PISMO 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PT 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
describe "Localized book Jude (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (sl)", ->
		`
		expect(p.parse("Juda 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JUDA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
		`
		true
describe "Localized book Tob (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (sl)", ->
		`
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
describe "Localized book Jdt (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (sl)", ->
		`
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
describe "Localized book Bar (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (sl)", ->
		`
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
describe "Localized book Sus (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (sl)", ->
		`
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
describe "Localized book 2Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (sl)", ->
		`
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
describe "Localized book 3Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (sl)", ->
		`
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
describe "Localized book 4Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (sl)", ->
		`
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
describe "Localized book 1Macc (sl)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (sl)", ->
		`
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Cross-cutting Slovenian grammar tests: range ("do"), chapter ("poglavje"),
# verse ("stavek"), conjunction ("in"), title ("naslov"), and "ff" words, plus
# translation tags, whole-book ranges, and Unicode boundary punctuation.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["sl"]
	it "should handle ranges (sl)", ->
		expect(p.parse("Titus 1:1 do 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1do2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 DO 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (sl)", ->
		expect(p.parse("Titus 1:1, poglavje 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 POGLAVJE 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (sl)", ->
		expect(p.parse("Exod 1:1 stavek 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm STAVEK 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (sl)", ->
		expect(p.parse("Exod 1:1 in 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 IN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (sl)", ->
		expect(p.parse("Ps 3 Naslov, 4:2, 5:Naslov").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 NASLOV, 4:2, 5:NASLOV").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (sl)", ->
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (sl)", ->
		expect(p.parse("Lev 1 (chr)").osis_and_translations()).toEqual [["Lev.1", "chr"]]
		expect(p.parse("lev 1 chr").osis_and_translations()).toEqual [["Lev.1", "chr"]]
	it "should handle book ranges (sl)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("1 do 3 Janezovo pismo").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (sl)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": "ne) ->\n customerParams =\n firstName: 'Adam'\n lastName: 'Jones'\n creditCard:\n ",
"end": 1458,
"score": 0.9998409748077393,
"start": 1454,
"tag": "NAME",
"value": "Adam"
},
{
"context": "ms =\n firstName: 'Adam'\n lastName: 'Jones'\n creditCard:\n cardholderName: 'A",
"end": 1484,
"score": 0.9996095299720764,
"start": 1479,
"tag": "NAME",
"value": "Jones"
},
{
"context": "s'\n creditCard:\n cardholderName: 'Adam Jones'\n number: '5105105105105100'\n e",
"end": 1543,
"score": 0.9997946619987488,
"start": 1533,
"tag": "NAME",
"value": "Adam Jones"
},
{
"context": "t.equal(response.transaction.customer.firstName, 'Adam')\n assert.equal(response.transaction.cus",
"end": 2089,
"score": 0.9998694658279419,
"start": 2085,
"tag": "NAME",
"value": "Adam"
},
{
"context": "rt.equal(response.transaction.customer.lastName, 'Jones')\n assert.equal(response.transaction.cre",
"end": 2161,
"score": 0.9998033046722412,
"start": 2156,
"tag": "NAME",
"value": "Jones"
},
{
"context": "(response.transaction.creditCard.cardholderName, 'Adam Jones')\n assert.equal(response.transaction.cre",
"end": 2246,
"score": 0.9998190999031067,
"start": 2236,
"tag": "NAME",
"value": "Adam Jones"
},
{
"context": "ne) ->\n customerParams =\n firstName: 'Adam'\n lastName: 'Jones'\n creditCard:\n ",
"end": 2543,
"score": 0.9998699426651001,
"start": 2539,
"tag": "NAME",
"value": "Adam"
},
{
"context": "ms =\n firstName: 'Adam'\n lastName: 'Jones'\n creditCard:\n cardholderName: 'A",
"end": 2569,
"score": 0.999799370765686,
"start": 2564,
"tag": "NAME",
"value": "Jones"
},
{
"context": "s'\n creditCard:\n cardholderName: 'Adam Jones'\n number: '5105105105105100'\n e",
"end": 2628,
"score": 0.9998448491096497,
"start": 2618,
"tag": "NAME",
"value": "Adam Jones"
},
{
"context": "t.equal(response.transaction.customer.firstName, 'Adam')\n assert.equal(response.transaction.cus",
"end": 3201,
"score": 0.999873161315918,
"start": 3197,
"tag": "NAME",
"value": "Adam"
},
{
"context": "rt.equal(response.transaction.customer.lastName, 'Jones')\n assert.equal(response.transaction.cre",
"end": 3273,
"score": 0.99980628490448,
"start": 3268,
"tag": "NAME",
"value": "Jones"
},
{
"context": "(response.transaction.creditCard.cardholderName, 'Adam Jones')\n assert.equal(response.transaction.cre",
"end": 3358,
"score": 0.9998156428337097,
"start": 3348,
"tag": "NAME",
"value": "Adam Jones"
},
{
"context": "de: 'PAYPAL_CONSENT_CODE'\n token: \"PAYPAL_ACCOUNT_#{specHelper.randomId()}\"\n\n specHelper.ge",
"end": 7524,
"score": 0.8056727051734924,
"start": 7507,
"tag": "KEY",
"value": "PAYPAL_ACCOUNT_#{"
},
{
"context": " token: \"PAYPAL_ACCOUNT_#{specHelper.randomId()}\"\n\n specHelper.generateNonceForNew",
"end": 7541,
"score": 0.5437084436416626,
"start": 7535,
"tag": "KEY",
"value": "random"
},
{
"context": " token: \"PAYPAL_ACCOUNT_#{specHelper.randomId()}\"\n\n specHelper.generateNonceForNewPaymen",
"end": 7545,
"score": 0.5120767951011658,
"start": 7545,
"tag": "KEY",
"value": ""
},
{
"context": "count\", (done) ->\n paymentMethodToken = \"PAYPAL_ACCOUNT_#{specHelper.randomId()}\"\n\n myHttp = new ",
"end": 8875,
"score": 0.6244749426841736,
"start": 8860,
"tag": "KEY",
"value": "PAYPAL_ACCOUNT_"
},
{
"context": "asked\", (done) ->\n paymentMethodToken = \"PAYPAL_ACCOUNT_#{specHelper.randomId()}\"\n\n m",
"end": 10705,
"score": 0.783671498298645,
"start": 10702,
"tag": "KEY",
"value": "PAY"
},
{
"context": "ed\", (done) ->\n paymentMethodToken = \"PAYPAL_ACCOUNT_#{specHelper.randomId()}\"\n\n myHttp = new sp",
"end": 10717,
"score": 0.5706030130386353,
"start": 10705,
"tag": "PASSWORD",
"value": "PAL_ACCOUNT_"
},
{
"context": " paypalAccount:\n payeeEmail: 'payee@example.com'\n\n specHelper.defaultGateway.transacti",
"end": 13801,
"score": 0.9999063014984131,
"start": 13784,
"tag": "EMAIL",
"value": "payee@example.com"
},
{
"context": "l(response.transaction.paypalAccount.payeeEmail, 'payee@example.com')\n\n done()\n\n it \"successfully",
"end": 14433,
"score": 0.9999158382415771,
"start": 14416,
"tag": "EMAIL",
"value": "payee@example.com"
},
{
"context": " options:\n payeeEmail: 'payee@example.com'\n\n specHelper.defaultGateway.transacti",
"end": 14885,
"score": 0.9999127388000488,
"start": 14868,
"tag": "EMAIL",
"value": "payee@example.com"
},
{
"context": "l(response.transaction.paypalAccount.payeeEmail, 'payee@example.com')\n\n done()\n\n it \"successfully",
"end": 15517,
"score": 0.9999114871025085,
"start": 15500,
"tag": "EMAIL",
"value": "payee@example.com"
},
{
"context": " paypal:\n payeeEmail: 'payee@example.com'\n\n specHelper.defaultGateway.transacti",
"end": 16003,
"score": 0.9999160766601562,
"start": 15986,
"tag": "EMAIL",
"value": "payee@example.com"
},
{
"context": "l(response.transaction.paypalAccount.payeeEmail, 'payee@example.com')\n\n done()\n\n it \"successfully",
"end": 16635,
"score": 0.9999105334281921,
"start": 16618,
"tag": "EMAIL",
"value": "payee@example.com"
},
{
"context": "ate: '05/16'\n descriptor:\n name: 'abc'\n phone: '1234567'\n url: '12345",
"end": 25426,
"score": 0.9983007311820984,
"start": 25423,
"tag": "NAME",
"value": "abc"
},
{
"context": "ckOutDate: '2014-08-08'\n lodgingName: 'Disney'\n\n specHelper.defaultGateway.transaction.sal",
"end": 28019,
"score": 0.999125599861145,
"start": 28013,
"tag": "NAME",
"value": "Disney"
},
{
"context": "ckOutDate: '2014-08-08'\n lodgingName: 'Disney'\n\n specHelper.defaultGateway.transaction.sal",
"end": 28666,
"score": 0.9987326264381409,
"start": 28660,
"tag": "NAME",
"value": "Disney"
},
{
"context": "ne) ->\n customerParams =\n firstName: 'Adam'\n lastName: 'Jones'\n\n specHelper.defa",
"end": 37618,
"score": 0.9998548626899719,
"start": 37614,
"tag": "NAME",
"value": "Adam"
},
{
"context": "ms =\n firstName: 'Adam'\n lastName: 'Jones'\n\n specHelper.defaultGateway.customer.create",
"end": 37644,
"score": 0.9995205402374268,
"start": 37639,
"tag": "NAME",
"value": "Jones"
},
{
"context": "ne) ->\n customerParams =\n firstName: 'Adam'\n lastName: 'Jones'\n\n specHelper.defa",
"end": 38425,
"score": 0.9998691082000732,
"start": 38421,
"tag": "NAME",
"value": "Adam"
},
{
"context": "ms =\n firstName: 'Adam'\n lastName: 'Jones'\n\n specHelper.defaultGateway.customer.create",
"end": 38451,
"score": 0.9995865821838379,
"start": 38446,
"tag": "NAME",
"value": "Jones"
},
{
"context": "tionDate: '05/2009'\n threeDSecureToken: threeDSecureToken\n\n specHelper.defaultGateway.t",
"end": 43637,
"score": 0.6262183785438538,
"start": 43631,
"tag": "PASSWORD",
"value": "threeD"
},
{
"context": " ->\n transactionParams =\n amount: '5.00'\n creditCard:\n number: '510510510",
"end": 53142,
"score": 0.4327108561992645,
"start": 53141,
"tag": "KEY",
"value": "0"
}
] | spec/integration/braintree/transaction_gateway_spec.coffee | StreamCo/braintree_node | 0 | require('../../spec_helper')
{_} = require('underscore')
braintree = specHelper.braintree
{CreditCardNumbers} = require('../../../lib/braintree/test/credit_card_numbers')
{Nonces} = require('../../../lib/braintree/test/nonces')
{VenmoSdk} = require('../../../lib/braintree/test/venmo_sdk')
{CreditCard} = require('../../../lib/braintree/credit_card')
{ValidationErrorCodes} = require('../../../lib/braintree/validation_error_codes')
{PaymentInstrumentTypes} = require('../../../lib/braintree/payment_instrument_types')
{Transaction} = require('../../../lib/braintree/transaction')
{Dispute} = require('../../../lib/braintree/dispute')
{Config} = require('../../../lib/braintree/config')
describe "TransactionGateway", ->
describe "sale", ->
it "charges a card", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.amount, '5.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.isNull(response.transaction.voiceReferralNumber)
done()
it "can use a customer from the vault", (done) ->
customerParams =
firstName: 'Adam'
lastName: 'Jones'
creditCard:
cardholderName: 'Adam Jones'
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
transactionParams =
customer_id: response.customer.id
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.customer.firstName, 'Adam')
assert.equal(response.transaction.customer.lastName, 'Jones')
assert.equal(response.transaction.creditCard.cardholderName, 'Adam Jones')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.equal(response.transaction.creditCard.expirationDate, '05/2014')
done()
it "can use a credit card from the vault", (done) ->
customerParams =
firstName: 'Adam'
lastName: 'Jones'
creditCard:
cardholderName: 'Adam Jones'
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
transactionParams =
payment_method_token: response.customer.creditCards[0].token
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.customer.firstName, 'Adam')
assert.equal(response.transaction.customer.lastName, 'Jones')
assert.equal(response.transaction.creditCard.cardholderName, 'Adam Jones')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.equal(response.transaction.creditCard.expirationDate, '05/2014')
done()
it "returns payment_instrument_type for credit_card", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.CreditCard)
done()
context "with apple pay", ->
it "returns ApplePayCard for payment_instrument", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: Nonces.ApplePayAmEx
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.ApplePayCard)
assert.isNotNull(response.transaction.applePayCard.card_type)
done()
context "with a paypal acount", ->
it "returns PayPalAccount for payment_instrument", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.PayPalAccount)
done()
context "in-line capture", ->
it "includes processorSettlementResponse_code and processorSettlementResponseText for settlement declined transactions", (done) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '10.00'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
transactionId = response.transaction.id
specHelper.declineSettlingTransaction transactionId, (err, response) ->
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.processorSettlementResponseCode, "4001")
assert.equal(transaction.processorSettlementResponseText, "Settlement Declined")
done()
it "includes processorSettlementResponseCode and processorSettlementResponseText for settlement pending transactions", (done) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '10.00'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
transactionId = response.transaction.id
specHelper.pendSettlingTransaction transactionId, (err, response) ->
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.processorSettlementResponseCode, "4002")
assert.equal(transaction.processorSettlementResponseText, "Settlement Pending")
done()
context "as a vaulted payment method", ->
it "successfully creates a transaction", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
customerId = response.customer.id
nonceParams =
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: "PAYPAL_ACCOUNT_#{specHelper.randomId()}"
specHelper.generateNonceForNewPaymentMethod nonceParams, customerId, (nonce) ->
paymentMethodParams =
paymentMethodNonce: nonce
customerId: customerId
specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
paymentMethodToken = response.paymentMethod.token
transactionParams =
paymentMethodToken: paymentMethodToken
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.imageUrl)
assert.isString(response.transaction.paypalAccount.debugId)
done()
context "as a payment method nonce authorized for future payments", ->
it "successfully creates a transaction but doesn't vault a paypal account", (done) ->
paymentMethodToken = "PAYPAL_ACCOUNT_#{specHelper.randomId()}"
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params =
authorizationFingerprint: authorizationFingerprint
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
myHttp.post("/client_api/v1/payment_methods/paypal_accounts.json", params, (statusCode, body) ->
nonce = JSON.parse(body).paypalAccounts[0].nonce
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
specHelper.defaultGateway.paypalAccount.find paymentMethodToken, (err, paypalAccount) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
)
)
it "vaults when explicitly asked", (done) ->
paymentMethodToken = "PAYPAL_ACCOUNT_#{specHelper.randomId()}"
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params =
authorizationFingerprint: authorizationFingerprint
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
myHttp.post("/client_api/v1/payment_methods/paypal_accounts.json", params, (statusCode, body) ->
nonce = JSON.parse(body).paypalAccounts[0].nonce
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
options:
storeInVault: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.paypalAccount.token, paymentMethodToken)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
specHelper.defaultGateway.paypalAccount.find paymentMethodToken, (err, paypalAccount) ->
assert.isNull(err)
done()
)
)
context "as a payment method nonce authorized for one-time use", ->
it "successfully creates a transaction", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
done()
it "successfully creates a transaction with a payee email", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount:
payeeEmail: 'payee@example.com'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.payeeEmail, 'payee@example.com')
done()
it "successfully creates a transaction with a payee email in the options params", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount: {}
options:
payeeEmail: 'payee@example.com'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.payeeEmail, 'payee@example.com')
done()
it "successfully creates a transaction with a payee email in transaction.options.paypal", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount: {}
options:
paypal:
payeeEmail: 'payee@example.com'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.payeeEmail, 'payee@example.com')
done()
it "successfully creates a transaction with a PayPal custom field", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount: {}
options:
paypal:
customField: 'custom field junk'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.customField, 'custom field junk')
done()
it "does not vault even when explicitly asked", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
options:
storeInVault: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
done()
it "allows submitting for settlement", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
done()
it "allows storing in the vault", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
storeInVault: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.match(response.transaction.customer.id, /^\d+$/)
assert.match(response.transaction.creditCard.token, /^\w+$/)
done()
it "can create transactions with custom fields", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
customFields:
storeMe: 'custom value'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.customFields.storeMe, 'custom value')
done()
it "allows specifying transactions as 'recurring'", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
recurring: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.recurring, true)
done()
it "sets card type indicators on the transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: CreditCardNumbers.CardTypeIndicators.Unknown
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.equal(response.transaction.creditCard.prepaid, CreditCard.Prepaid.Unknown)
assert.equal(response.transaction.creditCard.durbinRegulated, CreditCard.DurbinRegulated.Unknown)
assert.equal(response.transaction.creditCard.commercial, CreditCard.Commercial.Unknown)
assert.equal(response.transaction.creditCard.healthcare, CreditCard.Healthcare.Unknown)
assert.equal(response.transaction.creditCard.debit, CreditCard.Debit.Unknown)
assert.equal(response.transaction.creditCard.payroll, CreditCard.Payroll.Unknown)
done()
it "handles processor declines", (done) ->
transactionParams =
amount: '2000.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.transaction.amount, '2000.00')
assert.equal(response.transaction.status, 'processor_declined')
assert.equal(response.transaction.additionalProcessorResponse, '2000 : Do Not Honor')
done()
it "handles risk data returned by the gateway", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: "4111111111111111"
expirationDate: '05/16'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
assert.equal(response.transaction.riskData.decision, "Not Evaluated")
assert.equal(response.transaction.riskData.id, null)
done()
it "handles fraud rejection", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: CreditCardNumbers.CardTypeIndicators.Fraud
expirationDate: '05/16'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.transaction.status, Transaction.Status.GatewayRejected)
assert.equal(response.transaction.gatewayRejectionReason, Transaction.GatewayRejectionReason.Fraud)
done()
it "allows fraud params", (done) ->
transactionParams =
amount: '10.0'
deviceSessionId: "123456789"
fraudMerchantId: "0000000031"
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "handles validation errors", (done) ->
transactionParams =
creditCard:
number: '5105105105105100'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Amount is required.\nExpiration date is required.')
assert.equal(
response.errors.for('transaction').on('amount')[0].code,
'81502'
)
assert.equal(
response.errors.for('transaction').on('amount')[0].attribute,
'amount'
)
assert.equal(
response.errors.for('transaction').for('creditCard').on('expirationDate')[0].code,
'81709'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 2)
assert.include(errorCodes, '81502')
assert.include(errorCodes, '81709')
done()
it "handles descriptors", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
descriptor:
name: 'abc*def'
phone: '1234567890'
url: 'ebay.com'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
assert.equal(response.transaction.descriptor.name, 'abc*def')
assert.equal(response.transaction.descriptor.phone, '1234567890')
assert.equal(response.transaction.descriptor.url, 'ebay.com')
done()
it "handles descriptor validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
descriptor:
name: 'abc'
phone: '1234567'
url: '12345678901234'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('descriptor').on('name')[0].code,
ValidationErrorCodes.Descriptor.NameFormatIsInvalid
)
assert.equal(
response.errors.for('transaction').for('descriptor').on('phone')[0].code,
ValidationErrorCodes.Descriptor.PhoneFormatIsInvalid
)
assert.equal(
response.errors.for('transaction').for('descriptor').on('url')[0].code,
ValidationErrorCodes.Descriptor.UrlFormatIsInvalid
)
done()
it "handles lodging industry data", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.Lodging
data:
folioNumber: 'aaa'
checkInDate: '2014-07-07'
checkOutDate: '2014-08-08'
roomRate: '239.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
done()
it "handles lodging industry data validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.Lodging
data:
folioNumber: 'aaa'
checkInDate: '2014-07-07'
checkOutDate: '2014-06-06'
roomRate: '239.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('industry').on('checkOutDate')[0].code,
ValidationErrorCodes.Transaction.IndustryData.Lodging.CheckOutDateMustFollowCheckInDate
)
done()
it "handles travel cruise industry data", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.TravelAndCruise
data:
travelPackage: 'flight'
departureDate: '2014-07-07'
lodgingCheckInDate: '2014-07-07'
lodgingCheckOutDate: '2014-08-08'
lodgingName: 'Disney'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
done()
it "handles lodging industry data validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.TravelAndCruise
data:
travelPackage: 'onfoot'
departureDate: '2014-07-07'
lodgingCheckInDate: '2014-07-07'
lodgingCheckOutDate: '2014-08-08'
lodgingName: 'Disney'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('industry').on('travelPackage')[0].code,
ValidationErrorCodes.Transaction.IndustryData.TravelCruise.TravelPackageIsInvalid
)
done()
context "with a service fee", ->
it "persists the service fee", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
serviceFeeAmount: '1.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.serviceFeeAmount, '1.00')
done()
it "handles validation errors on service fees", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
amount: '1.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
serviceFeeAmount: '5.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('serviceFeeAmount')[0].code,
ValidationErrorCodes.Transaction.ServiceFeeAmountIsTooLarge
)
done()
it "sub merchant accounts must provide a service fee", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
amount: '1.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('merchantAccountId')[0].code,
ValidationErrorCodes.Transaction.SubMerchantAccountRequiresServiceFeeAmount
)
done()
context "with escrow status", ->
it "can specify transactions to be held for escrow", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
holdInEscrow: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(
response.transaction.escrowStatus,
Transaction.EscrowStatus.HoldPending
)
done()
it "can not be held for escrow if not a submerchant", (done) ->
transactionParams =
merchantAccountId: specHelper.defaultMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
holdInEscrow: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotHoldInEscrow
)
done()
context "releaseFromEscrow", ->
it "can release an escrowed transaction", (done) ->
specHelper.createEscrowedTransaction (transaction) ->
specHelper.defaultGateway.transaction.releaseFromEscrow transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.escrowStatus, Transaction.EscrowStatus.ReleasePending)
done()
it "cannot submit a non-escrowed transaction for release", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
holdInEscrow: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.releaseFromEscrow response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotReleaseFromEscrow
)
done()
context "cancelRelease", ->
it "can cancel release for a transaction that has been submitted for release", (done) ->
specHelper.createEscrowedTransaction (transaction) ->
specHelper.defaultGateway.transaction.releaseFromEscrow transaction.id, (err, response) ->
specHelper.defaultGateway.transaction.cancelRelease transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(
response.transaction.escrowStatus,
Transaction.EscrowStatus.Held
)
done()
it "cannot cancel release a transaction that has not been submitted for release", (done) ->
specHelper.createEscrowedTransaction (transaction) ->
specHelper.defaultGateway.transaction.cancelRelease transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotCancelRelease
)
done()
context "holdInEscrow", ->
it "can hold authorized or submitted for settlement transactions for escrow", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.holdInEscrow response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(
response.transaction.escrowStatus,
Transaction.EscrowStatus.HoldPending
)
done()
it "cannot hold settled transactions for escrow", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.settleTransaction response.transaction.id, (err, response) ->
specHelper.defaultGateway.transaction.holdInEscrow response.transaction.id, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotHoldInEscrow
)
done()
it "can use venmo sdk payment method codes", (done) ->
transactionParams =
amount: '1.00'
venmoSdkPaymentMethodCode: VenmoSdk.VisaPaymentMethodCode
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.creditCard.bin, "411111")
done()
it "can use venmo sdk session", (done) ->
transactionParams =
amount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
venmoSdkSession: VenmoSdk.Session
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isTrue(response.transaction.creditCard.venmoSdk)
done()
it "can use vaulted credit card nonce", (done) ->
customerParams =
firstName: 'Adam'
lastName: 'Jones'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, customerId, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "can use vaulted PayPal account nonce", (done) ->
customerParams =
firstName: 'Adam'
lastName: 'Jones'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
paymentMethodParams =
paypalAccount:
consent_code: "PAYPAL_CONSENT_CODE"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, customerId, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
    # A nonce generated without a customer (customerId null) is still chargeable.
    it "can use params nonce", (done) ->
      paymentMethodParams =
        creditCard:
          number: "4111111111111111"
          expirationMonth: "12"
          expirationYear: "2099"
      specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, null, (nonce) ->
        transactionParams =
          amount: '1.00'
          paymentMethodNonce: nonce
        specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          done()
      )
    # The gateway accepts sales on payment instruments this SDK has no concrete
    # model for, via the AbstractTransactable sandbox nonce.
    it "works with an unknown payment instrument", (done) ->
      transactionParams =
        amount: '1.00'
        paymentMethodNonce: Nonces.AbstractTransactable
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        done()
  describe "credit", ->
    # A stand-alone credit moves money to the card; type comes back 'credit'.
    it "creates a credit", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        assert.equal(response.transaction.type, 'credit')
        assert.equal(response.transaction.amount, '5.00')
        assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
        done()
    # Missing amount/expiration produce structured validation errors, not an err.
    it "handles validation errors", (done) ->
      transactionParams =
        creditCard:
          number: '5105105105105100'
      specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
        assert.isFalse(response.success)
        assert.equal(response.message, 'Amount is required.\nExpiration date is required.')
        assert.equal(
          response.errors.for('transaction').on('amount')[0].code,
          '81502'
        )
        assert.equal(
          response.errors.for('transaction').on('amount')[0].attribute,
          'amount'
        )
        assert.equal(
          response.errors.for('transaction').for('creditCard').on('expirationDate')[0].code,
          '81709'
        )
        # deepErrors flattens the nested error tree; exactly the two codes above.
        errorCodes = (error.code for error in response.errors.deepErrors())
        assert.equal(errorCodes.length, 2)
        assert.include(errorCodes, '81502')
        assert.include(errorCodes, '81709')
        done()
context "three d secure", (done) ->
it "creates a transaction with threeDSecureToken", (done) ->
threeDVerificationParams =
number: '4111111111111111'
expirationMonth: '05'
expirationYear: '2009'
specHelper.create3DSVerification specHelper.threeDSecureMerchantAccountId, threeDVerificationParams, (threeDSecureToken) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '4111111111111111'
expirationDate: '05/2009'
threeDSecureToken: threeDSecureToken
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "returns an error if sent null threeDSecureToken", (done) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '4111111111111111'
expirationDate: '05/2009'
threeDSecureToken: null
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('threeDSecureToken')[0].code,
ValidationErrorCodes.Transaction.ThreeDSecureTokenIsInvalid
)
done()
it "returns an error if 3ds lookup data doesn't match txn data", (done) ->
threeDVerificationParams =
number: '4111111111111111'
expirationMonth: '05'
expirationYear: '2009'
specHelper.create3DSVerification specHelper.threeDSecureMerchantAccountId, threeDVerificationParams, (threeDSecureToken) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/2009'
threeDSecureToken: threeDSecureToken
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('threeDSecureToken')[0].code,
ValidationErrorCodes.Transaction.ThreeDSecureTransactionDataDoesntMatchVerify
)
done()
  describe "find", ->
    # Round-trip: create a sale, then fetch it by id.
    it "finds a transaction", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.find response.transaction.id, (err, transaction) ->
          assert.equal(transaction.amount, '5.00')
          done()
    # "deposittransaction" is a fixed sandbox transaction with known
    # disbursement details.
    it "exposes disbursementDetails", (done) ->
      transactionId = "deposittransaction"
      specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
        assert.equal(transaction.isDisbursed(), true)
        disbursementDetails = transaction.disbursementDetails
        assert.equal(disbursementDetails.settlementAmount, '100.00')
        assert.equal(disbursementDetails.settlementCurrencyIsoCode, 'USD')
        assert.equal(disbursementDetails.settlementCurrencyExchangeRate, '1')
        assert.equal(disbursementDetails.disbursementDate, '2013-04-10')
        assert.equal(disbursementDetails.success, true)
        assert.equal(disbursementDetails.fundsHeld, false)
        done()
    # "disputedtransaction" is a fixed sandbox transaction carrying a won
    # fraud dispute.
    it "exposes disputes", (done) ->
      transactionId = "disputedtransaction"
      specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
        dispute = transaction.disputes[0]
        assert.equal(dispute.amount, '250.00')
        assert.equal(dispute.currencyIsoCode, 'USD')
        assert.equal(dispute.status, Dispute.Status.Won)
        assert.equal(dispute.receivedDate, '2014-03-01')
        assert.equal(dispute.replyByDate, '2014-03-21')
        assert.equal(dispute.reason, Dispute.Reason.Fraud)
        assert.equal(dispute.transactionDetails.id, transactionId)
        assert.equal(dispute.transactionDetails.amount, '1000.00')
        done()
    # Retrieval requests surface through the same disputes collection.
    it "exposes retrievals", (done) ->
      transactionId = "retrievaltransaction"
      specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
        dispute = transaction.disputes[0]
        assert.equal(dispute.amount, '1000.00')
        assert.equal(dispute.currencyIsoCode, 'USD')
        assert.equal(dispute.status, Dispute.Status.Open)
        assert.equal(dispute.reason, Dispute.Reason.Retrieval)
        assert.equal(dispute.transactionDetails.id, transactionId)
        assert.equal(dispute.transactionDetails.amount, '1000.00')
        done()
    # Unknown ids surface as a notFoundError on err, not a response.
    it "returns a not found error if given a bad id", (done) ->
      specHelper.defaultGateway.transaction.find 'nonexistent_transaction', (err, response) ->
        assert.equal(err.type, braintree.errorTypes.notFoundError)
        done()
    # A whitespace-only id must not be sent as an empty path segment.
    it "handles whitespace ids", (done) ->
      specHelper.defaultGateway.transaction.find ' ', (err, response) ->
        assert.equal(err.type, braintree.errorTypes.notFoundError)
        done()
  describe "refund", ->
    # Full refund of a settled transaction produces a new 'credit' transaction.
    # NOTE(review): `refund_id` is snake_case while every other attribute read in
    # this file is camelCase (maskedNumber, paymentInstrumentType, ...) — confirm
    # the SDK actually exposes refund_id on the refund response at this version.
    it "refunds a transaction", (done) ->
      specHelper.createTransactionToRefund (transaction) ->
        specHelper.defaultGateway.transaction.refund transaction.id, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.type, 'credit')
          assert.match(response.transaction.refund_id, /^\w+$/)
          done()
    it "refunds a paypal transaction", (done) ->
      specHelper.createPayPalTransactionToRefund (transaction) ->
        specHelper.defaultGateway.transaction.refund transaction.id, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.type, 'credit')
          assert.match(response.transaction.refund_id, /^\w+$/)
          done()
    # Passing an amount as the second argument refunds only that amount.
    it "allows refunding partial amounts", (done) ->
      specHelper.createTransactionToRefund (transaction) ->
        specHelper.defaultGateway.transaction.refund transaction.id, '1.00', (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.type, 'credit')
          assert.match(response.transaction.refund_id, /^\w+$/)
          assert.equal(response.transaction.amount, '1.00')
          done()
    # Refunding a transaction that is only submitted for settlement (not yet
    # settled) fails with error 91506.
    it "handles validation errors", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
        options:
          submitForSettlement: true
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.refund response.transaction.id, '5.00', (err, response) ->
          assert.isNull(err)
          assert.isFalse(response.success)
          assert.equal(response.errors.for('transaction').on('base')[0].code, '91506')
          done()
  describe "submitForSettlement", ->
    # An authorized card sale moves to 'submitted_for_settlement'.
    it "submits a transaction for settlement", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.status, 'submitted_for_settlement')
          assert.equal(response.transaction.amount, '5.00')
          done()
    # PayPal transactions skip straight to 'settling' rather than
    # 'submitted_for_settlement'.
    it "submits a paypal transaction for settlement", (done) ->
      specHelper.defaultGateway.customer.create {}, (err, response) ->
        paymentMethodParams =
          customerId: response.customer.id
          paymentMethodNonce: Nonces.PayPalFuturePayment
        specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
          transactionParams =
            amount: '5.00'
            paymentMethodToken: response.paymentMethod.token
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
              assert.isNull(err)
              assert.isTrue(response.success)
              assert.equal(response.transaction.status, 'settling')
              assert.equal(response.transaction.amount, '5.00')
              done()
    # Optional second argument settles less than the authorized amount.
    it "allows submitting for a partial amount", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, '3.00', (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.status, 'submitted_for_settlement')
          assert.equal(response.transaction.amount, '3.00')
          done()
    # Submitting an already-submitted transaction fails with error 91507.
    it "handles validation errors", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
        options:
          submitForSettlement: true
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
          assert.isNull(err)
          assert.isFalse(response.success)
          assert.equal(response.errors.for('transaction').on('base')[0].code, '91507')
          done()
  describe "void", ->
    # Voiding an authorized sale transitions it to 'voided'.
    it "voids a transaction", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.status, 'voided')
          done()
    it "voids a paypal transaction", (done) ->
      specHelper.defaultGateway.customer.create {}, (err, response) ->
        paymentMethodParams =
          customerId: response.customer.id
          paymentMethodNonce: Nonces.PayPalFuturePayment
        specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
          transactionParams =
            amount: '5.00'
            paymentMethodToken: response.paymentMethod.token
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
              assert.isNull(err)
              assert.isTrue(response.success)
              assert.equal(response.transaction.status, 'voided')
              done()
    # Voids the same transaction twice on purpose: the inner (second) void must
    # fail with error 91504 because the transaction is already voided. The inner
    # callback's err/response intentionally shadow the outer ones.
    it "handles validation errors", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
          specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
            assert.isNull(err)
            assert.isFalse(response.success)
            assert.equal(response.errors.for('transaction').on('base')[0].code, '91504')
            done()
  describe "cloneTransaction", ->
    # Cloning copies the payment details but takes a new amount/channel;
    # submitForSettlement here is the string 'false', not a boolean.
    it "clones a transaction", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        cloneParams =
          amount: '123.45'
          channel: 'MyShoppingCartProvider'
          options:
            submitForSettlement: 'false'
        specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, cloneParams, (err, response) ->
          assert.isTrue(response.success)
          transaction = response.transaction
          assert.equal(transaction.amount, '123.45')
          assert.equal(transaction.channel, 'MyShoppingCartProvider')
          assert.equal(transaction.creditCard.maskedNumber, '510510******5100')
          assert.equal(transaction.creditCard.expirationDate, '05/2012')
          done()
    # Credits cannot be cloned: error 91543.
    it "handles validation errors", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, amount: '123.45', (err, response) ->
          assert.isFalse(response.success)
          assert.equal(
            response.errors.for('transaction').on('base')[0].code,
            '91543'
          )
          done()
    # submitForSettlement: 'true' clones straight into submitted_for_settlement.
    it "can submit for settlement", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        cloneParams =
          amount: '123.45'
          channel: 'MyShoppingCartProvider'
          options:
            submitForSettlement: 'true'
        specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, cloneParams, (err, response) ->
          assert.isTrue(response.success)
          assert.equal(response.transaction.status, 'submitted_for_settlement')
          done()
require('../../spec_helper')
{_} = require('underscore')
braintree = specHelper.braintree
{CreditCardNumbers} = require('../../../lib/braintree/test/credit_card_numbers')
{Nonces} = require('../../../lib/braintree/test/nonces')
{VenmoSdk} = require('../../../lib/braintree/test/venmo_sdk')
{CreditCard} = require('../../../lib/braintree/credit_card')
{ValidationErrorCodes} = require('../../../lib/braintree/validation_error_codes')
{PaymentInstrumentTypes} = require('../../../lib/braintree/payment_instrument_types')
{Transaction} = require('../../../lib/braintree/transaction')
{Dispute} = require('../../../lib/braintree/dispute')
{Config} = require('../../../lib/braintree/config')
describe "TransactionGateway", ->
describe "sale", ->
it "charges a card", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.amount, '5.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.isNull(response.transaction.voiceReferralNumber)
done()
    # Charge by vaulted customer id; the sale picks up the customer's default
    # card. NOTE(review): '<NAME>' literals look like anonymization
    # placeholders from an export — restore real fixture names when known.
    it "can use a customer from the vault", (done) ->
      customerParams =
        firstName: '<NAME>'
        lastName: '<NAME>'
        creditCard:
          cardholderName: '<NAME>'
          number: '5105105105105100'
          expirationDate: '05/2014'
      specHelper.defaultGateway.customer.create customerParams, (err, response) ->
        transactionParams =
          customer_id: response.customer.id
          amount: '100.00'
        specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.type, 'sale')
          assert.equal(response.transaction.customer.firstName, '<NAME>')
          assert.equal(response.transaction.customer.lastName, '<NAME>')
          assert.equal(response.transaction.creditCard.cardholderName, '<NAME>')
          assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
          assert.equal(response.transaction.creditCard.expirationDate, '05/2014')
          done()
    # Charge by vaulted payment method token directly.
    it "can use a credit card from the vault", (done) ->
      customerParams =
        firstName: '<NAME>'
        lastName: '<NAME>'
        creditCard:
          cardholderName: '<NAME>'
          number: '5105105105105100'
          expirationDate: '05/2014'
      specHelper.defaultGateway.customer.create customerParams, (err, response) ->
        transactionParams =
          payment_method_token: response.customer.creditCards[0].token
          amount: '100.00'
        specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
          assert.isNull(err)
          assert.isTrue(response.success)
          assert.equal(response.transaction.type, 'sale')
          assert.equal(response.transaction.customer.firstName, '<NAME>')
          assert.equal(response.transaction.customer.lastName, '<NAME>')
          assert.equal(response.transaction.creditCard.cardholderName, '<NAME>')
          assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
          assert.equal(response.transaction.creditCard.expirationDate, '05/2014')
          done()
it "returns payment_instrument_type for credit_card", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.CreditCard)
done()
context "with apple pay", ->
it "returns ApplePayCard for payment_instrument", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: Nonces.ApplePayAmEx
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.ApplePayCard)
assert.isNotNull(response.transaction.applePayCard.card_type)
done()
context "with a paypal acount", ->
it "returns PayPalAccount for payment_instrument", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.PayPalAccount)
done()
context "in-line capture", ->
it "includes processorSettlementResponse_code and processorSettlementResponseText for settlement declined transactions", (done) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '10.00'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
transactionId = response.transaction.id
specHelper.declineSettlingTransaction transactionId, (err, response) ->
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.processorSettlementResponseCode, "4001")
assert.equal(transaction.processorSettlementResponseText, "Settlement Declined")
done()
it "includes processorSettlementResponseCode and processorSettlementResponseText for settlement pending transactions", (done) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '10.00'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
transactionId = response.transaction.id
specHelper.pendSettlingTransaction transactionId, (err, response) ->
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.processorSettlementResponseCode, "4002")
assert.equal(transaction.processorSettlementResponseText, "Settlement Pending")
done()
    context "as a vaulted payment method", ->
      # Vault a PayPal account via nonce, then charge by its token.
      it "successfully creates a transaction", (done) ->
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          customerId = response.customer.id
          nonceParams =
            paypalAccount:
              consentCode: 'PAYPAL_CONSENT_CODE'
              # NOTE(review): this literal is mangled ('<KEY>' markers from an
              # export); it was presumably a token built with CoffeeScript
              # interpolation around specHelper.randomId() — restore it.
              token: "<KEY>specHelper.<KEY>Id()<KEY>}"
          specHelper.generateNonceForNewPaymentMethod nonceParams, customerId, (nonce) ->
            paymentMethodParams =
              paymentMethodNonce: nonce
              customerId: customerId
            specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
              paymentMethodToken = response.paymentMethod.token
              transactionParams =
                paymentMethodToken: paymentMethodToken
                amount: '100.00'
              specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
                assert.isNull(err)
                assert.isTrue(response.success)
                assert.equal(response.transaction.type, 'sale')
                assert.isString(response.transaction.paypalAccount.payerEmail)
                assert.isString(response.transaction.paypalAccount.authorizationId)
                assert.isString(response.transaction.paypalAccount.imageUrl)
                assert.isString(response.transaction.paypalAccount.debugId)
                done()
    context "as a payment method nonce authorized for future payments", ->
      # Without storeInVault the sale succeeds but the PayPal account is NOT
      # vaulted: token is null and a later find by token 404s.
      # NOTE(review): the '<KEY>' fragments below are anonymization placeholders
      # in the token literals — restore the original fixture strings.
      it "successfully creates a transaction but doesn't vault a paypal account", (done) ->
        paymentMethodToken = "<KEY>#{specHelper.randomId()}"
        myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
        specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
          clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
          authorizationFingerprint = clientToken.authorizationFingerprint
          params =
            authorizationFingerprint: authorizationFingerprint
            paypalAccount:
              consentCode: 'PAYPAL_CONSENT_CODE'
              token: paymentMethodToken
          # Tokenize through the client API directly to get a raw nonce.
          myHttp.post("/client_api/v1/payment_methods/paypal_accounts.json", params, (statusCode, body) ->
            nonce = JSON.parse(body).paypalAccounts[0].nonce
            specHelper.defaultGateway.customer.create {}, (err, response) ->
              transactionParams =
                paymentMethodNonce: nonce
                amount: '100.00'
              specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
                assert.isNull(err)
                assert.isTrue(response.success)
                assert.equal(response.transaction.type, 'sale')
                assert.isNull(response.transaction.paypalAccount.token)
                assert.isString(response.transaction.paypalAccount.payerEmail)
                assert.isString(response.transaction.paypalAccount.authorizationId)
                assert.isString(response.transaction.paypalAccount.debugId)
                specHelper.defaultGateway.paypalAccount.find paymentMethodToken, (err, paypalAccount) ->
                  assert.equal(err.type, braintree.errorTypes.notFoundError)
                  done()
          )
        )
      # With options.storeInVault the account IS vaulted under the requested
      # token and is findable afterwards.
      it "vaults when explicitly asked", (done) ->
        paymentMethodToken = "<KEY> <PASSWORD>#{specHelper.randomId()}"
        myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
        specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
          clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
          authorizationFingerprint = clientToken.authorizationFingerprint
          params =
            authorizationFingerprint: authorizationFingerprint
            paypalAccount:
              consentCode: 'PAYPAL_CONSENT_CODE'
              token: paymentMethodToken
          myHttp.post("/client_api/v1/payment_methods/paypal_accounts.json", params, (statusCode, body) ->
            nonce = JSON.parse(body).paypalAccounts[0].nonce
            specHelper.defaultGateway.customer.create {}, (err, response) ->
              transactionParams =
                paymentMethodNonce: nonce
                amount: '100.00'
                options:
                  storeInVault: true
              specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
                assert.isNull(err)
                assert.isTrue(response.success)
                assert.equal(response.transaction.type, 'sale')
                assert.equal(response.transaction.paypalAccount.token, paymentMethodToken)
                assert.isString(response.transaction.paypalAccount.payerEmail)
                assert.isString(response.transaction.paypalAccount.authorizationId)
                assert.isString(response.transaction.paypalAccount.debugId)
                specHelper.defaultGateway.paypalAccount.find paymentMethodToken, (err, paypalAccount) ->
                  assert.isNull(err)
                  done()
          )
        )
    context "as a payment method nonce authorized for one-time use", ->
      # One-time nonces charge successfully but never produce a vault token.
      it "successfully creates a transaction", (done) ->
        nonce = Nonces.PayPalOneTimePayment
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          transactionParams =
            paymentMethodNonce: nonce
            amount: '100.00'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.type, 'sale')
            assert.isNull(response.transaction.paypalAccount.token)
            assert.isString(response.transaction.paypalAccount.payerEmail)
            assert.isString(response.transaction.paypalAccount.authorizationId)
            assert.isString(response.transaction.paypalAccount.debugId)
            done()
      # payeeEmail can be set under transaction.paypalAccount...
      # NOTE(review): '<EMAIL>' literals are anonymization placeholders.
      it "successfully creates a transaction with a payee email", (done) ->
        nonce = Nonces.PayPalOneTimePayment
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          transactionParams =
            paymentMethodNonce: nonce
            amount: '100.00'
            paypalAccount:
              payeeEmail: '<EMAIL>'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.type, 'sale')
            assert.isNull(response.transaction.paypalAccount.token)
            assert.isString(response.transaction.paypalAccount.payerEmail)
            assert.isString(response.transaction.paypalAccount.authorizationId)
            assert.isString(response.transaction.paypalAccount.debugId)
            assert.equal(response.transaction.paypalAccount.payeeEmail, '<EMAIL>')
            done()
      # ...or under transaction.options...
      it "successfully creates a transaction with a payee email in the options params", (done) ->
        nonce = Nonces.PayPalOneTimePayment
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          transactionParams =
            paymentMethodNonce: nonce
            amount: '100.00'
            paypalAccount: {}
            options:
              payeeEmail: '<EMAIL>'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.type, 'sale')
            assert.isNull(response.transaction.paypalAccount.token)
            assert.isString(response.transaction.paypalAccount.payerEmail)
            assert.isString(response.transaction.paypalAccount.authorizationId)
            assert.isString(response.transaction.paypalAccount.debugId)
            assert.equal(response.transaction.paypalAccount.payeeEmail, '<EMAIL>')
            done()
      # ...or under transaction.options.paypal.
      it "successfully creates a transaction with a payee email in transaction.options.paypal", (done) ->
        nonce = Nonces.PayPalOneTimePayment
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          transactionParams =
            paymentMethodNonce: nonce
            amount: '100.00'
            paypalAccount: {}
            options:
              paypal:
                payeeEmail: '<EMAIL>'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.type, 'sale')
            assert.isNull(response.transaction.paypalAccount.token)
            assert.isString(response.transaction.paypalAccount.payerEmail)
            assert.isString(response.transaction.paypalAccount.authorizationId)
            assert.isString(response.transaction.paypalAccount.debugId)
            assert.equal(response.transaction.paypalAccount.payeeEmail, '<EMAIL>')
            done()
      # options.paypal.customField round-trips onto the transaction.
      it "successfully creates a transaction with a PayPal custom field", (done) ->
        nonce = Nonces.PayPalOneTimePayment
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          transactionParams =
            paymentMethodNonce: nonce
            amount: '100.00'
            paypalAccount: {}
            options:
              paypal:
                customField: 'custom field junk'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.type, 'sale')
            assert.isNull(response.transaction.paypalAccount.token)
            assert.isString(response.transaction.paypalAccount.payerEmail)
            assert.isString(response.transaction.paypalAccount.authorizationId)
            assert.isString(response.transaction.paypalAccount.debugId)
            assert.equal(response.transaction.paypalAccount.customField, 'custom field junk')
            done()
      # storeInVault is ignored for one-time nonces: token stays null.
      it "does not vault even when explicitly asked", (done) ->
        nonce = Nonces.PayPalOneTimePayment
        specHelper.defaultGateway.customer.create {}, (err, response) ->
          transactionParams =
            paymentMethodNonce: nonce
            amount: '100.00'
            options:
              storeInVault: true
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.type, 'sale')
            assert.isNull(response.transaction.paypalAccount.token)
            assert.isString(response.transaction.paypalAccount.payerEmail)
            assert.isString(response.transaction.paypalAccount.authorizationId)
            assert.isString(response.transaction.paypalAccount.debugId)
            done()
    # options.submitForSettlement on the sale itself skips the separate call.
    it "allows submitting for settlement", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
        options:
          submitForSettlement: true
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        assert.equal(response.transaction.status, 'submitted_for_settlement')
        done()
    # options.storeInVault creates a customer and card token as a side effect.
    it "allows storing in the vault", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
        options:
          storeInVault: true
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        assert.match(response.transaction.customer.id, /^\d+$/)
        assert.match(response.transaction.creditCard.token, /^\w+$/)
        done()
    # Custom fields round-trip on the transaction response.
    it "can create transactions with custom fields", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
        customFields:
          storeMe: 'custom value'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        assert.equal(response.transaction.customFields.storeMe, 'custom value')
        done()
    # recurring: true is echoed back on the transaction.
    it "allows specifying transactions as 'recurring'", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
        recurring: true
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        assert.equal(response.transaction.recurring, true)
        done()
    # The Unknown test card yields Unknown for every card-type indicator.
    it "sets card type indicators on the transaction", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: CreditCardNumbers.CardTypeIndicators.Unknown
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.equal(response.transaction.creditCard.prepaid, CreditCard.Prepaid.Unknown)
        assert.equal(response.transaction.creditCard.durbinRegulated, CreditCard.DurbinRegulated.Unknown)
        assert.equal(response.transaction.creditCard.commercial, CreditCard.Commercial.Unknown)
        assert.equal(response.transaction.creditCard.healthcare, CreditCard.Healthcare.Unknown)
        assert.equal(response.transaction.creditCard.debit, CreditCard.Debit.Unknown)
        assert.equal(response.transaction.creditCard.payroll, CreditCard.Payroll.Unknown)
        done()
    # In the sandbox, amount 2000.00 forces a processor decline (2000 code).
    it "handles processor declines", (done) ->
      transactionParams =
        amount: '2000.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isFalse(response.success)
        assert.equal(response.transaction.amount, '2000.00')
        assert.equal(response.transaction.status, 'processor_declined')
        assert.equal(response.transaction.additionalProcessorResponse, '2000 : Do Not Honor')
        done()
    # Without advanced fraud tools enabled, riskData is "Not Evaluated".
    it "handles risk data returned by the gateway", (done) ->
      transactionParams =
        amount: '10.0'
        creditCard:
          number: "4111111111111111"
          expirationDate: '05/16'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isTrue(response.success)
        assert.equal(response.transaction.riskData.decision, "Not Evaluated")
        assert.equal(response.transaction.riskData.id, null)
        done()
    # The Fraud test card is gateway-rejected with reason Fraud.
    it "handles fraud rejection", (done) ->
      transactionParams =
        amount: '10.0'
        creditCard:
          number: CreditCardNumbers.CardTypeIndicators.Fraud
          expirationDate: '05/16'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isFalse(response.success)
        assert.equal(response.transaction.status, Transaction.Status.GatewayRejected)
        assert.equal(response.transaction.gatewayRejectionReason, Transaction.GatewayRejectionReason.Fraud)
        done()
    # deviceSessionId / fraudMerchantId are accepted without error.
    it "allows fraud params", (done) ->
      transactionParams =
        amount: '10.0'
        deviceSessionId: "123456789"
        fraudMerchantId: "0000000031"
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/16'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        done()
    # Missing amount/expiration produce structured validation errors, not an err.
    it "handles validation errors", (done) ->
      transactionParams =
        creditCard:
          number: '5105105105105100'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isFalse(response.success)
        assert.equal(response.message, 'Amount is required.\nExpiration date is required.')
        assert.equal(
          response.errors.for('transaction').on('amount')[0].code,
          '81502'
        )
        assert.equal(
          response.errors.for('transaction').on('amount')[0].attribute,
          'amount'
        )
        assert.equal(
          response.errors.for('transaction').for('creditCard').on('expirationDate')[0].code,
          '81709'
        )
        # deepErrors flattens the nested error tree; exactly the two codes above.
        errorCodes = (error.code for error in response.errors.deepErrors())
        assert.equal(errorCodes.length, 2)
        assert.include(errorCodes, '81502')
        assert.include(errorCodes, '81709')
        done()
    # Descriptor name/phone/url round-trip onto the transaction.
    it "handles descriptors", (done) ->
      transactionParams =
        amount: '10.0'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/16'
        descriptor:
          name: 'abc*def'
          phone: '1234567890'
          url: 'ebay.com'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isTrue(response.success)
        assert.equal(response.transaction.descriptor.name, 'abc*def')
        assert.equal(response.transaction.descriptor.phone, '1234567890')
        assert.equal(response.transaction.descriptor.url, 'ebay.com')
        done()
    # Badly formatted descriptor fields each produce their own error code.
    # NOTE(review): '<NAME>' is an anonymization placeholder — the original
    # fixture was some string that violates the name format (company*product).
    it "handles descriptor validations", (done) ->
      transactionParams =
        amount: '10.0'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/16'
        descriptor:
          name: '<NAME>'
          phone: '1234567'
          url: '12345678901234'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        assert.isFalse(response.success)
        assert.equal(
          response.errors.for('transaction').for('descriptor').on('name')[0].code,
          ValidationErrorCodes.Descriptor.NameFormatIsInvalid
        )
        assert.equal(
          response.errors.for('transaction').for('descriptor').on('phone')[0].code,
          ValidationErrorCodes.Descriptor.PhoneFormatIsInvalid
        )
        assert.equal(
          response.errors.for('transaction').for('descriptor').on('url')[0].code,
          ValidationErrorCodes.Descriptor.UrlFormatIsInvalid
        )
        done()
it "handles lodging industry data", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.Lodging
data:
folioNumber: 'aaa'
checkInDate: '2014-07-07'
checkOutDate: '2014-08-08'
roomRate: '239.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
done()
it "handles lodging industry data validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.Lodging
data:
folioNumber: 'aaa'
checkInDate: '2014-07-07'
checkOutDate: '2014-06-06'
roomRate: '239.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('industry').on('checkOutDate')[0].code,
ValidationErrorCodes.Transaction.IndustryData.Lodging.CheckOutDateMustFollowCheckInDate
)
done()
it "handles travel cruise industry data", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.TravelAndCruise
data:
travelPackage: 'flight'
departureDate: '2014-07-07'
lodgingCheckInDate: '2014-07-07'
lodgingCheckOutDate: '2014-08-08'
lodgingName: '<NAME>'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
done()
it "handles lodging industry data validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.TravelAndCruise
data:
travelPackage: 'onfoot'
departureDate: '2014-07-07'
lodgingCheckInDate: '2014-07-07'
lodgingCheckOutDate: '2014-08-08'
lodgingName: '<NAME>'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('industry').on('travelPackage')[0].code,
ValidationErrorCodes.Transaction.IndustryData.TravelCruise.TravelPackageIsInvalid
)
done()
context "with a service fee", ->
it "persists the service fee", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
serviceFeeAmount: '1.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.serviceFeeAmount, '1.00')
done()
it "handles validation errors on service fees", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
amount: '1.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
serviceFeeAmount: '5.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('serviceFeeAmount')[0].code,
ValidationErrorCodes.Transaction.ServiceFeeAmountIsTooLarge
)
done()
it "sub merchant accounts must provide a service fee", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
amount: '1.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('merchantAccountId')[0].code,
ValidationErrorCodes.Transaction.SubMerchantAccountRequiresServiceFeeAmount
)
done()
context "with escrow status", ->
it "can specify transactions to be held for escrow", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
holdInEscrow: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(
response.transaction.escrowStatus,
Transaction.EscrowStatus.HoldPending
)
done()
it "can not be held for escrow if not a submerchant", (done) ->
transactionParams =
merchantAccountId: specHelper.defaultMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
holdInEscrow: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotHoldInEscrow
)
done()
context "releaseFromEscrow", ->
it "can release an escrowed transaction", (done) ->
specHelper.createEscrowedTransaction (transaction) ->
specHelper.defaultGateway.transaction.releaseFromEscrow transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.escrowStatus, Transaction.EscrowStatus.ReleasePending)
done()
it "cannot submit a non-escrowed transaction for release", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
holdInEscrow: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.releaseFromEscrow response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotReleaseFromEscrow
)
done()
context "cancelRelease", ->
it "can cancel release for a transaction that has been submitted for release", (done) ->
specHelper.createEscrowedTransaction (transaction) ->
specHelper.defaultGateway.transaction.releaseFromEscrow transaction.id, (err, response) ->
specHelper.defaultGateway.transaction.cancelRelease transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(
response.transaction.escrowStatus,
Transaction.EscrowStatus.Held
)
done()
it "cannot cancel release a transaction that has not been submitted for release", (done) ->
specHelper.createEscrowedTransaction (transaction) ->
specHelper.defaultGateway.transaction.cancelRelease transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotCancelRelease
)
done()
context "holdInEscrow", ->
it "can hold authorized or submitted for settlement transactions for escrow", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.holdInEscrow response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(
response.transaction.escrowStatus,
Transaction.EscrowStatus.HoldPending
)
done()
it "cannot hold settled transactions for escrow", (done) ->
transactionParams =
merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
amount: '10.00'
serviceFeeAmount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.settleTransaction response.transaction.id, (err, response) ->
specHelper.defaultGateway.transaction.holdInEscrow response.transaction.id, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
ValidationErrorCodes.Transaction.CannotHoldInEscrow
)
done()
it "can use venmo sdk payment method codes", (done) ->
transactionParams =
amount: '1.00'
venmoSdkPaymentMethodCode: VenmoSdk.VisaPaymentMethodCode
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.creditCard.bin, "411111")
done()
it "can use venmo sdk session", (done) ->
transactionParams =
amount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
venmoSdkSession: VenmoSdk.Session
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isTrue(response.transaction.creditCard.venmoSdk)
done()
it "can use vaulted credit card nonce", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, customerId, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "can use vaulted PayPal account nonce", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
paymentMethodParams =
paypalAccount:
consent_code: "PAYPAL_CONSENT_CODE"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, customerId, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "can use params nonce", (done) ->
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, null, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "works with an unknown payment instrument", (done) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: Nonces.AbstractTransactable
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
describe "credit", ->
it "creates a credit", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.equal(response.transaction.amount, '5.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
done()
it "handles validation errors", (done) ->
transactionParams =
creditCard:
number: '5105105105105100'
specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Amount is required.\nExpiration date is required.')
assert.equal(
response.errors.for('transaction').on('amount')[0].code,
'81502'
)
assert.equal(
response.errors.for('transaction').on('amount')[0].attribute,
'amount'
)
assert.equal(
response.errors.for('transaction').for('creditCard').on('expirationDate')[0].code,
'81709'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 2)
assert.include(errorCodes, '81502')
assert.include(errorCodes, '81709')
done()
context "three d secure", (done) ->
it "creates a transaction with threeDSecureToken", (done) ->
threeDVerificationParams =
number: '4111111111111111'
expirationMonth: '05'
expirationYear: '2009'
specHelper.create3DSVerification specHelper.threeDSecureMerchantAccountId, threeDVerificationParams, (threeDSecureToken) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '4111111111111111'
expirationDate: '05/2009'
threeDSecureToken: threeDSecureToken
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "returns an error if sent null threeDSecureToken", (done) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '4111111111111111'
expirationDate: '05/2009'
threeDSecureToken: null
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('threeDSecureToken')[0].code,
ValidationErrorCodes.Transaction.ThreeDSecureTokenIsInvalid
)
done()
it "returns an error if 3ds lookup data doesn't match txn data", (done) ->
threeDVerificationParams =
number: '4111111111111111'
expirationMonth: '05'
expirationYear: '2009'
specHelper.create3DSVerification specHelper.threeDSecureMerchantAccountId, threeDVerificationParams, (threeDSecureToken) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/2009'
threeDSecureToken: <PASSWORD>SecureToken
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('threeDSecureToken')[0].code,
ValidationErrorCodes.Transaction.ThreeDSecureTransactionDataDoesntMatchVerify
)
done()
describe "find", ->
it "finds a transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.find response.transaction.id, (err, transaction) ->
assert.equal(transaction.amount, '5.00')
done()
it "exposes disbursementDetails", (done) ->
transactionId = "deposittransaction"
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.isDisbursed(), true)
disbursementDetails = transaction.disbursementDetails
assert.equal(disbursementDetails.settlementAmount, '100.00')
assert.equal(disbursementDetails.settlementCurrencyIsoCode, 'USD')
assert.equal(disbursementDetails.settlementCurrencyExchangeRate, '1')
assert.equal(disbursementDetails.disbursementDate, '2013-04-10')
assert.equal(disbursementDetails.success, true)
assert.equal(disbursementDetails.fundsHeld, false)
done()
it "exposes disputes", (done) ->
transactionId = "disputedtransaction"
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
dispute = transaction.disputes[0]
assert.equal(dispute.amount, '250.00')
assert.equal(dispute.currencyIsoCode, 'USD')
assert.equal(dispute.status, Dispute.Status.Won)
assert.equal(dispute.receivedDate, '2014-03-01')
assert.equal(dispute.replyByDate, '2014-03-21')
assert.equal(dispute.reason, Dispute.Reason.Fraud)
assert.equal(dispute.transactionDetails.id, transactionId)
assert.equal(dispute.transactionDetails.amount, '1000.00')
done()
it "exposes retrievals", (done) ->
transactionId = "retrievaltransaction"
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
dispute = transaction.disputes[0]
assert.equal(dispute.amount, '1000.00')
assert.equal(dispute.currencyIsoCode, 'USD')
assert.equal(dispute.status, Dispute.Status.Open)
assert.equal(dispute.reason, Dispute.Reason.Retrieval)
assert.equal(dispute.transactionDetails.id, transactionId)
assert.equal(dispute.transactionDetails.amount, '1000.00')
done()
it "returns a not found error if given a bad id", (done) ->
specHelper.defaultGateway.transaction.find 'nonexistent_transaction', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "handles whitespace ids", (done) ->
specHelper.defaultGateway.transaction.find ' ', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
describe "refund", ->
it "refunds a transaction", (done) ->
specHelper.createTransactionToRefund (transaction) ->
specHelper.defaultGateway.transaction.refund transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.match(response.transaction.refund_id, /^\w+$/)
done()
it "refunds a paypal transaction", (done) ->
specHelper.createPayPalTransactionToRefund (transaction) ->
specHelper.defaultGateway.transaction.refund transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.match(response.transaction.refund_id, /^\w+$/)
done()
it "allows refunding partial amounts", (done) ->
specHelper.createTransactionToRefund (transaction) ->
specHelper.defaultGateway.transaction.refund transaction.id, '1.00', (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.match(response.transaction.refund_id, /^\w+$/)
assert.equal(response.transaction.amount, '1.00')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.refund response.transaction.id, '5.00', (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.errors.for('transaction').on('base')[0].code, '91506')
done()
describe "submitForSettlement", ->
it "submits a transaction for settlement", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
assert.equal(response.transaction.amount, '5.00')
done()
it "submits a paypal transaction for settlement", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
paymentMethodParams =
customerId: response.customer.id
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
transactionParams =
amount: '5.00'
paymentMethodToken: response.paymentMethod.token
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'settling')
assert.equal(response.transaction.amount, '5.00')
done()
it "allows submitting for a partial amount", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, '3.00', (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
assert.equal(response.transaction.amount, '3.00')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.errors.for('transaction').on('base')[0].code, '91507')
done()
describe "void", ->
it "voids a transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'voided')
done()
it "voids a paypal transaction", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
paymentMethodParams =
customerId: response.customer.id
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
transactionParams =
amount: '5.00'
paymentMethodToken: response.paymentMethod.token
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'voided')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.0<KEY>'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.errors.for('transaction').on('base')[0].code, '91504')
done()
describe "cloneTransaction", ->
it "clones a transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
cloneParams =
amount: '123.45'
channel: 'MyShoppingCartProvider'
options:
submitForSettlement: 'false'
specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, cloneParams, (err, response) ->
assert.isTrue(response.success)
transaction = response.transaction
assert.equal(transaction.amount, '123.45')
assert.equal(transaction.channel, 'MyShoppingCartProvider')
assert.equal(transaction.creditCard.maskedNumber, '510510******5100')
assert.equal(transaction.creditCard.expirationDate, '05/2012')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, amount: '123.45', (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
'91543'
)
done()
it "can submit for settlement", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
cloneParams =
amount: '123.45'
channel: 'MyShoppingCartProvider'
options:
submitForSettlement: 'true'
specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, cloneParams, (err, response) ->
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
done()
require('../../spec_helper')
{_} = require('underscore')
braintree = specHelper.braintree
{CreditCardNumbers} = require('../../../lib/braintree/test/credit_card_numbers')
{Nonces} = require('../../../lib/braintree/test/nonces')
{VenmoSdk} = require('../../../lib/braintree/test/venmo_sdk')
{CreditCard} = require('../../../lib/braintree/credit_card')
{ValidationErrorCodes} = require('../../../lib/braintree/validation_error_codes')
{PaymentInstrumentTypes} = require('../../../lib/braintree/payment_instrument_types')
{Transaction} = require('../../../lib/braintree/transaction')
{Dispute} = require('../../../lib/braintree/dispute')
{Config} = require('../../../lib/braintree/config')
describe "TransactionGateway", ->
describe "sale", ->
it "charges a card", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.amount, '5.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.isNull(response.transaction.voiceReferralNumber)
done()
it "can use a customer from the vault", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
transactionParams =
customer_id: response.customer.id
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.transaction.customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.transaction.creditCard.cardholderName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.equal(response.transaction.creditCard.expirationDate, '05/2014')
done()
it "can use a credit card from the vault", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
number: '5105105105105100'
expirationDate: '05/2014'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
transactionParams =
payment_method_token: response.customer.creditCards[0].token
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.transaction.customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.transaction.creditCard.cardholderName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
assert.equal(response.transaction.creditCard.expirationDate, '05/2014')
done()
it "returns payment_instrument_type for credit_card", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.CreditCard)
done()
context "with apple pay", ->
it "returns ApplePayCard for payment_instrument", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: Nonces.ApplePayAmEx
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.ApplePayCard)
assert.isNotNull(response.transaction.applePayCard.card_type)
done()
context "with a paypal acount", ->
it "returns PayPalAccount for payment_instrument", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.paymentInstrumentType, PaymentInstrumentTypes.PayPalAccount)
done()
context "in-line capture", ->
it "includes processorSettlementResponse_code and processorSettlementResponseText for settlement declined transactions", (done) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '10.00'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
transactionId = response.transaction.id
specHelper.declineSettlingTransaction transactionId, (err, response) ->
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.processorSettlementResponseCode, "4001")
assert.equal(transaction.processorSettlementResponseText, "Settlement Declined")
done()
it "includes processorSettlementResponseCode and processorSettlementResponseText for settlement pending transactions", (done) ->
transactionParams =
paymentMethodNonce: Nonces.PayPalOneTimePayment
amount: '10.00'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
transactionId = response.transaction.id
specHelper.pendSettlingTransaction transactionId, (err, response) ->
specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
assert.equal(transaction.processorSettlementResponseCode, "4002")
assert.equal(transaction.processorSettlementResponseText, "Settlement Pending")
done()
context "as a vaulted payment method", ->
it "successfully creates a transaction", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
customerId = response.customer.id
nonceParams =
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: "PI:KEY:<KEY>END_PIspecHelper.PI:KEY:<KEY>END_PIId()PI:KEY:<KEY>END_PI}"
specHelper.generateNonceForNewPaymentMethod nonceParams, customerId, (nonce) ->
paymentMethodParams =
paymentMethodNonce: nonce
customerId: customerId
specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
paymentMethodToken = response.paymentMethod.token
transactionParams =
paymentMethodToken: paymentMethodToken
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.imageUrl)
assert.isString(response.transaction.paypalAccount.debugId)
done()
context "as a payment method nonce authorized for future payments", ->
it "successfully creates a transaction but doesn't vault a paypal account", (done) ->
paymentMethodToken = "PI:KEY:<KEY>END_PI#{specHelper.randomId()}"
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params =
authorizationFingerprint: authorizationFingerprint
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
myHttp.post("/client_api/v1/payment_methods/paypal_accounts.json", params, (statusCode, body) ->
nonce = JSON.parse(body).paypalAccounts[0].nonce
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
specHelper.defaultGateway.paypalAccount.find paymentMethodToken, (err, paypalAccount) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
)
)
it "vaults when explicitly asked", (done) ->
paymentMethodToken = "PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI#{specHelper.randomId()}"
myHttp = new specHelper.clientApiHttp(new Config(specHelper.defaultConfig))
specHelper.defaultGateway.clientToken.generate({}, (err, result) ->
clientToken = JSON.parse(specHelper.decodeClientToken(result.clientToken))
authorizationFingerprint = clientToken.authorizationFingerprint
params =
authorizationFingerprint: authorizationFingerprint
paypalAccount:
consentCode: 'PAYPAL_CONSENT_CODE'
token: paymentMethodToken
myHttp.post("/client_api/v1/payment_methods/paypal_accounts.json", params, (statusCode, body) ->
nonce = JSON.parse(body).paypalAccounts[0].nonce
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
options:
storeInVault: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.equal(response.transaction.paypalAccount.token, paymentMethodToken)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
specHelper.defaultGateway.paypalAccount.find paymentMethodToken, (err, paypalAccount) ->
assert.isNull(err)
done()
)
)
context "as a payment method nonce authorized for one-time use", ->
it "successfully creates a transaction", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
done()
it "successfully creates a transaction with a payee email", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount:
payeeEmail: 'PI:EMAIL:<EMAIL>END_PI'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.payeeEmail, 'PI:EMAIL:<EMAIL>END_PI')
done()
it "successfully creates a transaction with a payee email in the options params", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount: {}
options:
payeeEmail: 'PI:EMAIL:<EMAIL>END_PI'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.payeeEmail, 'PI:EMAIL:<EMAIL>END_PI')
done()
it "successfully creates a transaction with a payee email in transaction.options.paypal", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount: {}
options:
paypal:
payeeEmail: 'PI:EMAIL:<EMAIL>END_PI'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.payeeEmail, 'PI:EMAIL:<EMAIL>END_PI')
done()
it "successfully creates a transaction with a PayPal custom field", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
paypalAccount: {}
options:
paypal:
customField: 'custom field junk'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
assert.equal(response.transaction.paypalAccount.customField, 'custom field junk')
done()
it "does not vault even when explicitly asked", (done) ->
nonce = Nonces.PayPalOneTimePayment
specHelper.defaultGateway.customer.create {}, (err, response) ->
transactionParams =
paymentMethodNonce: nonce
amount: '100.00'
options:
storeInVault: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'sale')
assert.isNull(response.transaction.paypalAccount.token)
assert.isString(response.transaction.paypalAccount.payerEmail)
assert.isString(response.transaction.paypalAccount.authorizationId)
assert.isString(response.transaction.paypalAccount.debugId)
done()
it "allows submitting for settlement", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
done()
it "allows storing in the vault", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
storeInVault: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.match(response.transaction.customer.id, /^\d+$/)
assert.match(response.transaction.creditCard.token, /^\w+$/)
done()
it "can create transactions with custom fields", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
customFields:
storeMe: 'custom value'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.customFields.storeMe, 'custom value')
done()
it "allows specifying transactions as 'recurring'", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
recurring: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.recurring, true)
done()
it "sets card type indicators on the transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: CreditCardNumbers.CardTypeIndicators.Unknown
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.equal(response.transaction.creditCard.prepaid, CreditCard.Prepaid.Unknown)
assert.equal(response.transaction.creditCard.durbinRegulated, CreditCard.DurbinRegulated.Unknown)
assert.equal(response.transaction.creditCard.commercial, CreditCard.Commercial.Unknown)
assert.equal(response.transaction.creditCard.healthcare, CreditCard.Healthcare.Unknown)
assert.equal(response.transaction.creditCard.debit, CreditCard.Debit.Unknown)
assert.equal(response.transaction.creditCard.payroll, CreditCard.Payroll.Unknown)
done()
it "handles processor declines", (done) ->
transactionParams =
amount: '2000.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.transaction.amount, '2000.00')
assert.equal(response.transaction.status, 'processor_declined')
assert.equal(response.transaction.additionalProcessorResponse, '2000 : Do Not Honor')
done()
it "handles risk data returned by the gateway", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: "4111111111111111"
expirationDate: '05/16'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
assert.equal(response.transaction.riskData.decision, "Not Evaluated")
assert.equal(response.transaction.riskData.id, null)
done()
it "handles fraud rejection", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: CreditCardNumbers.CardTypeIndicators.Fraud
expirationDate: '05/16'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.transaction.status, Transaction.Status.GatewayRejected)
assert.equal(response.transaction.gatewayRejectionReason, Transaction.GatewayRejectionReason.Fraud)
done()
it "allows fraud params", (done) ->
transactionParams =
amount: '10.0'
deviceSessionId: "123456789"
fraudMerchantId: "0000000031"
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "handles validation errors", (done) ->
transactionParams =
creditCard:
number: '5105105105105100'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(response.message, 'Amount is required.\nExpiration date is required.')
assert.equal(
response.errors.for('transaction').on('amount')[0].code,
'81502'
)
assert.equal(
response.errors.for('transaction').on('amount')[0].attribute,
'amount'
)
assert.equal(
response.errors.for('transaction').for('creditCard').on('expirationDate')[0].code,
'81709'
)
errorCodes = (error.code for error in response.errors.deepErrors())
assert.equal(errorCodes.length, 2)
assert.include(errorCodes, '81502')
assert.include(errorCodes, '81709')
done()
it "handles descriptors", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
descriptor:
name: 'abc*def'
phone: '1234567890'
url: 'ebay.com'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
assert.equal(response.transaction.descriptor.name, 'abc*def')
assert.equal(response.transaction.descriptor.phone, '1234567890')
assert.equal(response.transaction.descriptor.url, 'ebay.com')
done()
it "handles descriptor validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
descriptor:
name: 'PI:NAME:<NAME>END_PI'
phone: '1234567'
url: '12345678901234'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('descriptor').on('name')[0].code,
ValidationErrorCodes.Descriptor.NameFormatIsInvalid
)
assert.equal(
response.errors.for('transaction').for('descriptor').on('phone')[0].code,
ValidationErrorCodes.Descriptor.PhoneFormatIsInvalid
)
assert.equal(
response.errors.for('transaction').for('descriptor').on('url')[0].code,
ValidationErrorCodes.Descriptor.UrlFormatIsInvalid
)
done()
it "handles lodging industry data", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.Lodging
data:
folioNumber: 'aaa'
checkInDate: '2014-07-07'
checkOutDate: '2014-08-08'
roomRate: '239.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
done()
it "handles lodging industry data validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.Lodging
data:
folioNumber: 'aaa'
checkInDate: '2014-07-07'
checkOutDate: '2014-06-06'
roomRate: '239.00'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('industry').on('checkOutDate')[0].code,
ValidationErrorCodes.Transaction.IndustryData.Lodging.CheckOutDateMustFollowCheckInDate
)
done()
it "handles travel cruise industry data", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.TravelAndCruise
data:
travelPackage: 'flight'
departureDate: '2014-07-07'
lodgingCheckInDate: '2014-07-07'
lodgingCheckOutDate: '2014-08-08'
lodgingName: 'PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isTrue(response.success)
done()
it "handles lodging industry data validations", (done) ->
transactionParams =
amount: '10.0'
creditCard:
number: '5105105105105100'
expirationDate: '05/16'
industry:
industryType: Transaction.IndustryData.TravelAndCruise
data:
travelPackage: 'onfoot'
departureDate: '2014-07-07'
lodgingCheckInDate: '2014-07-07'
lodgingCheckOutDate: '2014-08-08'
lodgingName: 'PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').for('industry').on('travelPackage')[0].code,
ValidationErrorCodes.Transaction.IndustryData.TravelCruise.TravelPackageIsInvalid
)
done()
      # Service fees apply to sub-merchant accounts: persisted on success, and
      # validated both for size and for presence.
      context "with a service fee", ->
        it "persists the service fee", (done) ->
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
            amount: '5.00'
            creditCard:
              number: '5105105105105100'
              expirationDate: '05/12'
            serviceFeeAmount: '1.00'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(response.transaction.serviceFeeAmount, '1.00')
            done()
        it "handles validation errors on service fees", (done) ->
          # Fee (5.00) exceeds the transaction amount (1.00).
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
            amount: '1.00'
            creditCard:
              number: '5105105105105100'
              expirationDate: '05/12'
            serviceFeeAmount: '5.00'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isFalse(response.success)
            assert.equal(
              response.errors.for('transaction').on('serviceFeeAmount')[0].code,
              ValidationErrorCodes.Transaction.ServiceFeeAmountIsTooLarge
            )
            done()
        it "sub merchant accounts must provide a service fee", (done) ->
          # No serviceFeeAmount at all on a sub-merchant account.
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId
            amount: '1.00'
            creditCard:
              number: '5105105105105100'
              expirationDate: '05/12'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isFalse(response.success)
            assert.equal(
              response.errors.for('transaction').on('merchantAccountId')[0].code,
              ValidationErrorCodes.Transaction.SubMerchantAccountRequiresServiceFeeAmount
            )
            done()
      # Escrow holds are only available to sub-merchant accounts.
      context "with escrow status", ->
        it "can specify transactions to be held for escrow", (done) ->
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
            amount: '10.00'
            serviceFeeAmount: '1.00'
            creditCard:
              number: "4111111111111111"
              expirationDate: '05/12'
            options:
              holdInEscrow: true
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isTrue(response.success)
            assert.equal(
              response.transaction.escrowStatus,
              Transaction.EscrowStatus.HoldPending
            )
            done()
        it "can not be held for escrow if not a submerchant", (done) ->
          transactionParams =
            merchantAccountId: specHelper.defaultMerchantAccountId,
            amount: '10.00'
            serviceFeeAmount: '1.00'
            creditCard:
              number: "4111111111111111"
              expirationDate: '05/12'
            options:
              holdInEscrow: true
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            assert.isNull(err)
            assert.isFalse(response.success)
            assert.equal(
              response.errors.for('transaction').on('base')[0].code,
              ValidationErrorCodes.Transaction.CannotHoldInEscrow
            )
            done()
      # releaseFromEscrow only applies to transactions already held in escrow.
      context "releaseFromEscrow", ->
        it "can release an escrowed transaction", (done) ->
          specHelper.createEscrowedTransaction (transaction) ->
            specHelper.defaultGateway.transaction.releaseFromEscrow transaction.id, (err, response) ->
              assert.isNull(err)
              assert.isTrue(response.success)
              assert.equal(response.transaction.escrowStatus, Transaction.EscrowStatus.ReleasePending)
              done()
        it "cannot submit a non-escrowed transaction for release", (done) ->
          # hold_pending, not yet held — release must be rejected.
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
            amount: '10.00'
            serviceFeeAmount: '1.00'
            creditCard:
              number: "4111111111111111"
              expirationDate: '05/12'
            options:
              holdInEscrow: true
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            specHelper.defaultGateway.transaction.releaseFromEscrow response.transaction.id, (err, response) ->
              assert.isNull(err)
              assert.isFalse(response.success)
              assert.equal(
                response.errors.for('transaction').on('base')[0].code,
                ValidationErrorCodes.Transaction.CannotReleaseFromEscrow
              )
              done()
      # cancelRelease undoes a pending escrow release, returning status to Held.
      context "cancelRelease", ->
        it "can cancel release for a transaction that has been submitted for release", (done) ->
          specHelper.createEscrowedTransaction (transaction) ->
            specHelper.defaultGateway.transaction.releaseFromEscrow transaction.id, (err, response) ->
              specHelper.defaultGateway.transaction.cancelRelease transaction.id, (err, response) ->
                assert.isNull(err)
                assert.isTrue(response.success)
                assert.equal(
                  response.transaction.escrowStatus,
                  Transaction.EscrowStatus.Held
                )
                done()
        it "cannot cancel release a transaction that has not been submitted for release", (done) ->
          specHelper.createEscrowedTransaction (transaction) ->
            specHelper.defaultGateway.transaction.cancelRelease transaction.id, (err, response) ->
              assert.isNull(err)
              assert.isFalse(response.success)
              assert.equal(
                response.errors.for('transaction').on('base')[0].code,
                ValidationErrorCodes.Transaction.CannotCancelRelease
              )
              done()
      # holdInEscrow works on authorized/submitted transactions but not settled ones.
      context "holdInEscrow", ->
        it "can hold authorized or submitted for settlement transactions for escrow", (done) ->
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
            amount: '10.00'
            serviceFeeAmount: '1.00'
            creditCard:
              number: "4111111111111111"
              expirationDate: '05/12'
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            specHelper.defaultGateway.transaction.holdInEscrow response.transaction.id, (err, response) ->
              assert.isNull(err)
              assert.isTrue(response.success)
              assert.equal(
                response.transaction.escrowStatus,
                Transaction.EscrowStatus.HoldPending
              )
              done()
        it "cannot hold settled transactions for escrow", (done) ->
          transactionParams =
            merchantAccountId: specHelper.nonDefaultSubMerchantAccountId,
            amount: '10.00'
            serviceFeeAmount: '1.00'
            creditCard:
              number: "4111111111111111"
              expirationDate: '05/12'
            options:
              submitForSettlement: true
          specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
            # Force settlement first, then attempt (and fail) the escrow hold.
            specHelper.settleTransaction response.transaction.id, (err, response) ->
              specHelper.defaultGateway.transaction.holdInEscrow response.transaction.id, (err, response) ->
                assert.isFalse(response.success)
                assert.equal(
                  response.errors.for('transaction').on('base')[0].code,
                  ValidationErrorCodes.Transaction.CannotHoldInEscrow
                )
                done()
it "can use venmo sdk payment method codes", (done) ->
transactionParams =
amount: '1.00'
venmoSdkPaymentMethodCode: VenmoSdk.VisaPaymentMethodCode
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.creditCard.bin, "411111")
done()
it "can use venmo sdk session", (done) ->
transactionParams =
amount: '1.00'
creditCard:
number: "4111111111111111"
expirationDate: '05/12'
options:
venmoSdkSession: VenmoSdk.Session
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.isTrue(response.transaction.creditCard.venmoSdk)
done()
it "can use vaulted credit card nonce", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, customerId, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "can use vaulted PayPal account nonce", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
customerId = response.customer.id
paymentMethodParams =
paypalAccount:
consent_code: "PAYPAL_CONSENT_CODE"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, customerId, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "can use params nonce", (done) ->
paymentMethodParams =
creditCard:
number: "4111111111111111"
expirationMonth: "12"
expirationYear: "2099"
specHelper.generateNonceForNewPaymentMethod(paymentMethodParams, null, (nonce) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: nonce
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
)
it "works with an unknown payment instrument", (done) ->
transactionParams =
amount: '1.00'
paymentMethodNonce: Nonces.AbstractTransactable
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
  # transaction.credit: same request shape as sale, but creates a 'credit'
  # (money flowing to the card) and shares sale's validation rules.
  describe "credit", ->
    it "creates a credit", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
        assert.isNull(err)
        assert.isTrue(response.success)
        assert.equal(response.transaction.type, 'credit')
        assert.equal(response.transaction.amount, '5.00')
        assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
        done()
    it "handles validation errors", (done) ->
      # Missing amount and expiration date -> the same two errors as sale.
      transactionParams =
        creditCard:
          number: '5105105105105100'
      specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
        assert.isFalse(response.success)
        assert.equal(response.message, 'Amount is required.\nExpiration date is required.')
        assert.equal(
          response.errors.for('transaction').on('amount')[0].code,
          '81502'
        )
        assert.equal(
          response.errors.for('transaction').on('amount')[0].attribute,
          'amount'
        )
        assert.equal(
          response.errors.for('transaction').for('creditCard').on('expirationDate')[0].code,
          '81709'
        )
        errorCodes = (error.code for error in response.errors.deepErrors())
        assert.equal(errorCodes.length, 2)
        assert.include(errorCodes, '81502')
        assert.include(errorCodes, '81709')
        done()
context "three d secure", (done) ->
it "creates a transaction with threeDSecureToken", (done) ->
threeDVerificationParams =
number: '4111111111111111'
expirationMonth: '05'
expirationYear: '2009'
specHelper.create3DSVerification specHelper.threeDSecureMerchantAccountId, threeDVerificationParams, (threeDSecureToken) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '4111111111111111'
expirationDate: '05/2009'
threeDSecureToken: threeDSecureToken
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
done()
it "returns an error if sent null threeDSecureToken", (done) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '4111111111111111'
expirationDate: '05/2009'
threeDSecureToken: null
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('threeDSecureToken')[0].code,
ValidationErrorCodes.Transaction.ThreeDSecureTokenIsInvalid
)
done()
it "returns an error if 3ds lookup data doesn't match txn data", (done) ->
threeDVerificationParams =
number: '4111111111111111'
expirationMonth: '05'
expirationYear: '2009'
specHelper.create3DSVerification specHelper.threeDSecureMerchantAccountId, threeDVerificationParams, (threeDSecureToken) ->
transactionParams =
merchantAccountId: specHelper.threeDSecureMerchantAccountId
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/2009'
threeDSecureToken: PI:PASSWORD:<PASSWORD>END_PISecureToken
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('threeDSecureToken')[0].code,
ValidationErrorCodes.Transaction.ThreeDSecureTransactionDataDoesntMatchVerify
)
done()
  # transaction.find: lookup by id, including fixture transactions that expose
  # disbursement, dispute, and retrieval detail; not-found and blank-id errors.
  describe "find", ->
    it "finds a transaction", (done) ->
      transactionParams =
        amount: '5.00'
        creditCard:
          number: '5105105105105100'
          expirationDate: '05/12'
      specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
        specHelper.defaultGateway.transaction.find response.transaction.id, (err, transaction) ->
          assert.equal(transaction.amount, '5.00')
          done()
    it "exposes disbursementDetails", (done) ->
      # "deposittransaction" is a sandbox fixture with known disbursement data.
      transactionId = "deposittransaction"
      specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
        assert.equal(transaction.isDisbursed(), true)
        disbursementDetails = transaction.disbursementDetails
        assert.equal(disbursementDetails.settlementAmount, '100.00')
        assert.equal(disbursementDetails.settlementCurrencyIsoCode, 'USD')
        assert.equal(disbursementDetails.settlementCurrencyExchangeRate, '1')
        assert.equal(disbursementDetails.disbursementDate, '2013-04-10')
        assert.equal(disbursementDetails.success, true)
        assert.equal(disbursementDetails.fundsHeld, false)
        done()
    it "exposes disputes", (done) ->
      # "disputedtransaction" is a sandbox fixture with a won fraud dispute.
      transactionId = "disputedtransaction"
      specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
        dispute = transaction.disputes[0]
        assert.equal(dispute.amount, '250.00')
        assert.equal(dispute.currencyIsoCode, 'USD')
        assert.equal(dispute.status, Dispute.Status.Won)
        assert.equal(dispute.receivedDate, '2014-03-01')
        assert.equal(dispute.replyByDate, '2014-03-21')
        assert.equal(dispute.reason, Dispute.Reason.Fraud)
        assert.equal(dispute.transactionDetails.id, transactionId)
        assert.equal(dispute.transactionDetails.amount, '1000.00')
        done()
    it "exposes retrievals", (done) ->
      # "retrievaltransaction" is a sandbox fixture with an open retrieval dispute.
      transactionId = "retrievaltransaction"
      specHelper.defaultGateway.transaction.find transactionId, (err, transaction) ->
        dispute = transaction.disputes[0]
        assert.equal(dispute.amount, '1000.00')
        assert.equal(dispute.currencyIsoCode, 'USD')
        assert.equal(dispute.status, Dispute.Status.Open)
        assert.equal(dispute.reason, Dispute.Reason.Retrieval)
        assert.equal(dispute.transactionDetails.id, transactionId)
        assert.equal(dispute.transactionDetails.amount, '1000.00')
        done()
    it "returns a not found error if given a bad id", (done) ->
      specHelper.defaultGateway.transaction.find 'nonexistent_transaction', (err, response) ->
        assert.equal(err.type, braintree.errorTypes.notFoundError)
        done()
    it "handles whitespace ids", (done) ->
      specHelper.defaultGateway.transaction.find ' ', (err, response) ->
        assert.equal(err.type, braintree.errorTypes.notFoundError)
        done()
describe "refund", ->
it "refunds a transaction", (done) ->
specHelper.createTransactionToRefund (transaction) ->
specHelper.defaultGateway.transaction.refund transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.match(response.transaction.refund_id, /^\w+$/)
done()
it "refunds a paypal transaction", (done) ->
specHelper.createPayPalTransactionToRefund (transaction) ->
specHelper.defaultGateway.transaction.refund transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.match(response.transaction.refund_id, /^\w+$/)
done()
it "allows refunding partial amounts", (done) ->
specHelper.createTransactionToRefund (transaction) ->
specHelper.defaultGateway.transaction.refund transaction.id, '1.00', (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.type, 'credit')
assert.match(response.transaction.refund_id, /^\w+$/)
assert.equal(response.transaction.amount, '1.00')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.refund response.transaction.id, '5.00', (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.errors.for('transaction').on('base')[0].code, '91506')
done()
describe "submitForSettlement", ->
it "submits a transaction for settlement", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
assert.equal(response.transaction.amount, '5.00')
done()
it "submits a paypal transaction for settlement", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
paymentMethodParams =
customerId: response.customer.id
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
transactionParams =
amount: '5.00'
paymentMethodToken: response.paymentMethod.token
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'settling')
assert.equal(response.transaction.amount, '5.00')
done()
it "allows submitting for a partial amount", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, '3.00', (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
assert.equal(response.transaction.amount, '3.00')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
options:
submitForSettlement: true
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.submitForSettlement response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.errors.for('transaction').on('base')[0].code, '91507')
done()
describe "void", ->
it "voids a transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'voided')
done()
it "voids a paypal transaction", (done) ->
specHelper.defaultGateway.customer.create {}, (err, response) ->
paymentMethodParams =
customerId: response.customer.id
paymentMethodNonce: Nonces.PayPalFuturePayment
specHelper.defaultGateway.paymentMethod.create paymentMethodParams, (err, response) ->
transactionParams =
amount: '5.00'
paymentMethodToken: response.paymentMethod.token
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'voided')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.0PI:KEY:<KEY>END_PI'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
specHelper.defaultGateway.transaction.void response.transaction.id, (err, response) ->
assert.isNull(err)
assert.isFalse(response.success)
assert.equal(response.errors.for('transaction').on('base')[0].code, '91504')
done()
describe "cloneTransaction", ->
it "clones a transaction", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
cloneParams =
amount: '123.45'
channel: 'MyShoppingCartProvider'
options:
submitForSettlement: 'false'
specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, cloneParams, (err, response) ->
assert.isTrue(response.success)
transaction = response.transaction
assert.equal(transaction.amount, '123.45')
assert.equal(transaction.channel, 'MyShoppingCartProvider')
assert.equal(transaction.creditCard.maskedNumber, '510510******5100')
assert.equal(transaction.creditCard.expirationDate, '05/2012')
done()
it "handles validation errors", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.credit transactionParams, (err, response) ->
specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, amount: '123.45', (err, response) ->
assert.isFalse(response.success)
assert.equal(
response.errors.for('transaction').on('base')[0].code,
'91543'
)
done()
it "can submit for settlement", (done) ->
transactionParams =
amount: '5.00'
creditCard:
number: '5105105105105100'
expirationDate: '05/12'
specHelper.defaultGateway.transaction.sale transactionParams, (err, response) ->
cloneParams =
amount: '123.45'
channel: 'MyShoppingCartProvider'
options:
submitForSettlement: 'true'
specHelper.defaultGateway.transaction.cloneTransaction response.transaction.id, cloneParams, (err, response) ->
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'submitted_for_settlement')
done()
|
[
{
"context": "mailaddress': system.env.EMAIL,\n '#password': system.env.PASSWORD\n },\n true\n )\n\ncasper.then ->\n console.log",
"end": 386,
"score": 0.9951335191726685,
"start": 367,
"tag": "PASSWORD",
"value": "system.env.PASSWORD"
}
] | vote.coffee | kenchan/yuru-char-voter | 0 | system = require('system')
casper = require('casper').create()
casper.start 'http://www.yurugp.jp/vote/detail.php?id=' + system.env.CHARACTER_ID, ->
console.log @evaluate ->
document.querySelector("h3").innerHTML
@click('input[type=image]')
casper.then ->
@fillSelectors(
'form.inputBox', {
'#mailaddress': system.env.EMAIL,
'#password': system.env.PASSWORD
},
true
)
casper.then ->
console.log @evaluate ->
document.querySelector("title").innerHTML
casper.run()
| 140619 | system = require('system')
casper = require('casper').create()
casper.start 'http://www.yurugp.jp/vote/detail.php?id=' + system.env.CHARACTER_ID, ->
console.log @evaluate ->
document.querySelector("h3").innerHTML
@click('input[type=image]')
casper.then ->
@fillSelectors(
'form.inputBox', {
'#mailaddress': system.env.EMAIL,
'#password': <PASSWORD>
},
true
)
casper.then ->
console.log @evaluate ->
document.querySelector("title").innerHTML
casper.run()
| true | system = require('system')
casper = require('casper').create()
casper.start 'http://www.yurugp.jp/vote/detail.php?id=' + system.env.CHARACTER_ID, ->
console.log @evaluate ->
document.querySelector("h3").innerHTML
@click('input[type=image]')
casper.then ->
@fillSelectors(
'form.inputBox', {
'#mailaddress': system.env.EMAIL,
'#password': PI:PASSWORD:<PASSWORD>END_PI
},
true
)
casper.then ->
console.log @evaluate ->
document.querySelector("title").innerHTML
casper.run()
|
[
{
"context": ", cname : null ,cdn_subdomain : false, api_key : \"1234\", api_secret: \"b\" })\n @orig = _.clone(@cfg)\n\n ",
"end": 624,
"score": 0.9958975315093994,
"start": 620,
"tag": "KEY",
"value": "1234"
},
{
"context": "subdomain : false, api_key : \"1234\", api_secret: \"b\" })\n @orig = _.clone(@cfg)\n\n find_by_attr = (",
"end": 641,
"score": 0.992364227771759,
"start": 640,
"tag": "KEY",
"value": "b"
},
{
"context": "dinary_url(\"test\", {}, \"http://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should allow ov",
"end": 1127,
"score": 0.5680785179138184,
"start": 1123,
"tag": "USERNAME",
"value": "test"
},
{
"context": "test\", {secure:true}, \"https://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should allow ove",
"end": 1476,
"score": 0.7542917728424072,
"start": 1471,
"tag": "USERNAME",
"value": "test1"
},
{
"context": " {secure:true}, \"https://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should allow overr",
"end": 1478,
"score": 0.5536409616470337,
"start": 1477,
"tag": "USERNAME",
"value": "3"
},
{
"context": " {private_cdn: false}, \"http://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should allow overr",
"end": 2874,
"score": 0.7992030382156372,
"start": 2867,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "test\", {cname: false}, \"http://res.cloudinary.com/test123/image/upload/test\", {})\n cloudinary.config(\"",
"end": 3254,
"score": 0.9184882640838623,
"start": 3249,
"tag": "USERNAME",
"value": "test1"
},
{
"context": " {cname: false}, \"http://res.cloudinary.com/test123/image/upload/test\", {})\n cloudinary.config(\"cn",
"end": 3256,
"score": 0.6213032603263855,
"start": 3255,
"tag": "USERNAME",
"value": "3"
},
{
"context": "test\", {format:'jpg'}, \"http://res.cloudinary.com/test123/image/upload/test.jpg\", {})\n\n it \"should disallo",
"end": 3443,
"score": 0.9268359541893005,
"start": 3436,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "100, crop:'crop'}, \"http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test\", {width:1",
"end": 7019,
"score": 0.7695940136909485,
"start": 7018,
"tag": "USERNAME",
"value": "1"
},
{
"context": "ransformation:\"blip\"}, \"http://res.cloudinary.com/test123/image/upload/t_blip/test\", {})\n\n it \"should supp",
"end": 7741,
"score": 0.6747581958770752,
"start": 7734,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "ion:[\"blip\", \"blop\"]}, \"http://res.cloudinary.com/test123/image/upload/t_blip.blop/test\", {})\n\n it \"shou",
"end": 7931,
"score": 0.7700327038764954,
"start": 7926,
"tag": "USERNAME",
"value": "test1"
},
{
"context": "blip\", \"blop\"]}, \"http://res.cloudinary.com/test123/image/upload/t_blip.blop/test\", {})\n\n it \"should",
"end": 7933,
"score": 0.6357238292694092,
"start": 7932,
"tag": "USERNAME",
"value": "3"
},
{
"context": "op:'crop', width:100}, \"http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop",
"end": 8440,
"score": 0.9887251257896423,
"start": 8433,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "0, crop:'fill'}, {}]}, \"http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test\", {})\n\n it ",
"end": 8910,
"score": 0.9890081882476807,
"start": 8903,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "\"10x10\", crop:'crop'}, \"http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test\", {width:\"10\",",
"end": 9082,
"score": 0.9760809540748596,
"start": 9075,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "t\", {type:'facebook'}, \"http://res.cloudinary.com/test123/image/facebook/test\", {})\n\n it \"should use resou",
"end": 9274,
"score": 0.982724666595459,
"start": 9267,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "{resource_type:'raw'}, \"http://res.cloudinary.com/test123/raw/upload/test\", {})\n\n it \"should ignore http l",
"end": 9441,
"score": 0.9792543649673462,
"start": 9434,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "background:\"red\"}, \"http://res.cloudinary.com/test123/image/upload/b_red/test\", {})\n test_cloudinary",
"end": 10269,
"score": 0.6093909740447998,
"start": 10266,
"tag": "USERNAME",
"value": "123"
},
{
"context": "_image:\"default\"}, \"http://res.cloudinary.com/test123/image/upload/d_default/test\", {})\n\n it \"should",
"end": 10561,
"score": 0.5837993025779724,
"start": 10560,
"tag": "USERNAME",
"value": "1"
},
{
"context": "(\"test\", {angle:\"55\"}, \"http://res.cloudinary.com/test123/image/upload/a_55/test\", {})\n test_cloudinar",
"end": 10710,
"score": 0.6803785562515259,
"start": 10705,
"tag": "USERNAME",
"value": "test1"
},
{
"context": "e:[\"auto\", \"55\"]}, \"http://res.cloudinary.com/test123/image/upload/a_auto.55/test\", {})\n\n it \"should",
"end": 10830,
"score": 0.6571458578109741,
"start": 10829,
"tag": "USERNAME",
"value": "1"
},
{
"context": "st\", {effect:\"sepia\"}, \"http://res.cloudinary.com/test123/image/upload/e_sepia/test\", {})\n\n it \"should sup",
"end": 11228,
"score": 0.9777868390083313,
"start": 11221,
"tag": "USERNAME",
"value": "test123"
},
{
"context": ", {effect:{sepia:10}}, \"http://res.cloudinary.com/test123/image/upload/e_sepia:10/test\", {})\n\n it \"should ",
"end": 11399,
"score": 0.9831684231758118,
"start": 11392,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "effect:[\"sepia\", 10]}, \"http://res.cloudinary.com/test123/image/upload/e_sepia:10/test\", {}) \n\n for param,",
"end": 11577,
"score": 0.9744018316268921,
"start": 11570,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "y_url(\"test\", options, \"http://res.cloudinary.com/test123/image/upload/#{letter}_text:hello/test\", {})\n\n i",
"end": 11833,
"score": 0.8859630227088928,
"start": 11826,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "y_url(\"test\", options, \"http://res.cloudinary.com/test123/image/upload/h_100,#{letter}_text:hello,w_100/tes",
"end": 12090,
"score": 0.888701856136322,
"start": 12083,
"tag": "USERNAME",
"value": "test123"
},
{
"context": " {ssl_detected:true}, \"https://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should use secure ",
"end": 12346,
"score": 0.9561944603919983,
"start": 12339,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "d:true, secure:false}, \"http://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should use secure:",
"end": 12585,
"score": 0.77492356300354,
"start": 12578,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "{ssl_detected:false}, \"https://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should support ext",
"end": 12810,
"score": 0.8516894578933716,
"start": 12803,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "cdn_subdomain:false}, \"https://res.cloudinary.com/test123/image/upload/test\", {})\n\n it \"should support sec",
"end": 13587,
"score": 0.746122419834137,
"start": 13580,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "ffect:{sepia:10}}, \"http://res.cloudinary.com/test123/image/upload/e_sepia:10/test\", {})\n\n it \"shoul",
"end": 13987,
"score": 0.6281587481498718,
"start": 13986,
"tag": "USERNAME",
"value": "1"
},
{
"context": "lor:\"#ffaabbdd\"}}, \"http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test\", {",
"end": 14293,
"score": 0.5687890648841858,
"start": 14292,
"tag": "USERNAME",
"value": "1"
},
{
"context": "der:\"1px_solid_blue\"}, \"http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test\", {})\n tes",
"end": 14439,
"score": 0.8913834691047668,
"start": 14432,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "(\"test\", {border:\"2\"}, \"http://res.cloudinary.com/test123/image/upload/test\", {border:\"2\"})\n\n it \"should s",
"end": 14562,
"score": 0.7379258871078491,
"start": 14555,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "\"test\", {flags:\"abc\"}, \"http://res.cloudinary.com/test123/image/upload/fl_abc/test\", {})\n test_cloudinar",
"end": 14712,
"score": 0.8946600556373596,
"start": 14705,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "flags:[\"abc\", \"def\"]}, \"http://res.cloudinary.com/test123/image/upload/fl_abc.def/test\", {})\n\n it \"build_u",
"end": 14834,
"score": 0.8615493774414062,
"start": 14827,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "url(\"folder/test\", {}, \"http://res.cloudinary.com/test123/image/upload/v1/folder/test\", {})\n test_cloudi",
"end": 15851,
"score": 0.6679382920265198,
"start": 15844,
"tag": "USERNAME",
"value": "test123"
},
{
"context": "t\", {version:123}, \"http://res.cloudinary.com/test123/image/upload/v123/folder/test\", {})\n\n it \"shou",
"end": 15972,
"score": 0.6911196112632751,
"start": 15971,
"tag": "USERNAME",
"value": "1"
},
{
"context": ", {version:123}, \"http://res.cloudinary.com/test123/image/upload/v123/folder/test\", {})\n\n it \"should",
"end": 15974,
"score": 0.5457658171653748,
"start": 15973,
"tag": "USERNAME",
"value": "3"
},
{
"context": "(\"v1234/test\", {}, \"http://res.cloudinary.com/test123/image/upload/v1234/test\", {})\n\n it \"should all",
"end": 16159,
"score": 0.6641876697540283,
"start": 16158,
"tag": "USERNAME",
"value": "1"
},
{
"context": "\", {shorten:true}, \"http://res.cloudinary.com/test123/iu/test\", {})\n\n it \"should escape public_ids\" , ",
"end": 16329,
"score": 0.7021148800849915,
"start": 16326,
"tag": "USERNAME",
"value": "123"
},
{
"context": "rl(source)).to.eql(\"http://res.cloudinary.com/test123/image/upload/#{target}\")\n\n it \"should correctl",
"end": 16618,
"score": 0.6213851571083069,
"start": 16617,
"tag": "USERNAME",
"value": "1"
},
{
"context": " 20}, sign_url: true}, \"http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v12",
"end": 16842,
"score": 0.7470229864120483,
"start": 16837,
"tag": "USERNAME",
"value": "test1"
},
{
"context": ", sign_url: true}, \"http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg\", {}",
"end": 17013,
"score": 0.711005687713623,
"start": 17012,
"tag": "USERNAME",
"value": "1"
},
{
"context": "ted', sign_url: true}, \"http://res.cloudinary.com/test123/image/authenticated/s--Ai4Znfl3--/c_crop,h_20,w",
"end": 17435,
"score": 0.669866681098938,
"start": 17430,
"tag": "USERNAME",
"value": "test1"
},
{
"context": ", sign_url: true}, \"http://res.cloudinary.com/test123/image/fetch/s--hH_YcbiS--/v1234/http://google.c",
"end": 17648,
"score": 0.5145381093025208,
"start": 17647,
"tag": "USERNAME",
"value": "1"
},
{
"context": "3349cbb373e4812118d625047ede50b90e7b67\", api_key:\"1234\")\n\n it \"should support responsive width\" , ->\n ",
"end": 17977,
"score": 0.9924148321151733,
"start": 17973,
"tag": "KEY",
"value": "1234"
},
{
"context": "l(\"test\", {zoom: 1.2}, \"http://res.cloudinary.com/test123/image/upload/z_1.2/test\", {})\n\n describe \"encode",
"end": 18661,
"score": 0.9985681176185608,
"start": 18654,
"tag": "USERNAME",
"value": "test123"
},
{
"context": " cloudinary.config({api_key:'key',api_secret:'shhh'})\n sig = cloudinary.utils.webhook_signature(d",
"end": 19209,
"score": 0.7336663603782654,
"start": 19207,
"tag": "KEY",
"value": "hh"
}
] | keystone/node_modules/cloudinary/test/utilsspec.coffee | kinfen/sitecore-product | 0 | dotenv = require('dotenv')
dotenv.load()
expect = require("expect.js")
cloudinary = require("../cloudinary")
utils = require("../lib/utils")
api = require("../lib/api")
_ = require("lodash")
Q = require('q')
fs = require('fs')
describe "utils", ->
return console.warn("**** Please setup environment for api test to run!") if !cloudinary.config().api_secret?
afterEach () ->
cloudinary.config(_.defaults({secure:null},@orig))
beforeEach () ->
@cfg= cloudinary.config( {cloud_name:"test123", secure_distribution : null, private_cdn : false, secure : false, cname : null ,cdn_subdomain : false, api_key : "1234", api_secret: "b" })
@orig = _.clone(@cfg)
find_by_attr = (elements, attr, value) ->
for element in elements
return element if element[attr] == value
undefined
test_cloudinary_url = (public_id,options,expected_url,expected_options) ->
url = utils.url(public_id,options)
expect(url).to.eql(expected_url)
expect(options).to.eql(expected_options)
url
it "should use cloud_name from config" , ->
test_cloudinary_url("test", {}, "http://res.cloudinary.com/test123/image/upload/test", {})
it "should allow overriding cloud_name in options" , ->
test_cloudinary_url("test", {cloud_name:"test321"}, "http://res.cloudinary.com/test321/image/upload/test", {})
it "should use default secure distribution if secure=true" , ->
test_cloudinary_url("test", {secure:true}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should allow overriding secure distribution if secure=true" , ->
test_cloudinary_url("test", {secure:true, secure_distribution:"something.else.com"}, "https://something.else.com/test123/image/upload/test", {})
it "should take secure distribution from config if secure=true" , ->
cloudinary.config("secure_distribution","config.secure.distribution.com")
test_cloudinary_url("test", {secure:true}, "https://config.secure.distribution.com/test123/image/upload/test", {})
it "should default to akamai if secure is given with private_cdn and no secure_distribution" , ->
test_cloudinary_url("test", {secure:true, private_cdn:true}, "https://test123-res.cloudinary.com/image/upload/test", {})
it "should not add cloud_name if secure private_cdn and secure non akamai secure_distribution" , ->
test_cloudinary_url("test", {secure:true, private_cdn:true, secure_distribution:"something.cloudfront.net"}, "https://something.cloudfront.net/image/upload/test", {})
it "should allow overriding private_cdn if private_cdn=true" , ->
test_cloudinary_url("test", {private_cdn: true}, "http://test123-res.cloudinary.com/image/upload/test", {})
it "should allow overriding private_cdn if private_cdn=false" , ->
cloudinary.config("private_cdn",true)
test_cloudinary_url("test", {private_cdn: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
it "should allow overriding cname if cname=example.com" , ->
test_cloudinary_url("test", {cname: "example.com"}, "http://example.com/test123/image/upload/test", {})
it "should allow overriding cname if cname=false" , ->
cloudinary.config("cname","example.com")
test_cloudinary_url("test", {cname: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
cloudinary.config("cname",null)
it "should use format from options" , ->
test_cloudinary_url("test", {format:'jpg'}, "http://res.cloudinary.com/test123/image/upload/test.jpg", {})
it "should disallow url_suffix in shared distribution" , ->
expect(()-> utils.url("test", {url_suffix:"hello"})).to.be.throwError(/URL Suffix only supported in private CDN/)
it "should disallow url_suffix in non upload types" , ->
expect(()-> utils.url("test", {url_suffix:"hello", private_cdn:true, type:'facebook'})).to.be.throwError(/URL Suffix only supported for image\/upload and raw\/upload/)
it "should disallow url_suffix with / or ." , ->
expect(()-> utils.url("test", {url_suffix:"hello/world", private_cdn:true})).to.be.throwError(/url_suffix should not include . or \//)
expect(()-> utils.url("test", {url_suffix:"hello.world", private_cdn:true})).to.be.throwError(/url_suffix should not include . or \//)
it "should support url_suffix for private_cdn" , ->
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true}, "http://test123-res.cloudinary.com/images/test/hello", {})
test_cloudinary_url("test", {url_suffix:"hello", angle:0, private_cdn:true}, "http://test123-res.cloudinary.com/images/a_0/test/hello", {})
it "should put format after url_suffix" , ->
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, format:"jpg"}, "http://test123-res.cloudinary.com/images/test/hello.jpg", {})
it "should not sign the url_suffix" , ->
expected_signature = utils.url("test", format:"jpg", sign_url:true).match(/s--[0-9A-Za-z_-]{8}--/).toString()
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, format:"jpg", sign_url:true}, "http://test123-res.cloudinary.com/images/#{expected_signature}/test/hello.jpg", {})
expected_signature = utils.url("test", format:"jpg", angle:0, sign_url:true).match(/s--[0-9A-Za-z_-]{8}--/).toString()
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, format:"jpg", angle:0, sign_url:true}, "http://test123-res.cloudinary.com/images/#{expected_signature}/a_0/test/hello.jpg", {})
it "should support url_suffix for raw uploads" , ->
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, resource_type:'raw'}, "http://test123-res.cloudinary.com/files/test/hello", {})
it "should support use_root_path in shared distribution" , ->
test_cloudinary_url("test", {use_root_path:true, private_cdn:false}, "http://res.cloudinary.com/test123/test", {})
test_cloudinary_url("test", {use_root_path:true, private_cdn:false, angle:0}, "http://res.cloudinary.com/test123/a_0/test", {})
it "should support use_root_path for private_cdn" , ->
test_cloudinary_url("test", {use_root_path:true, private_cdn:true}, "http://test123-res.cloudinary.com/test", {})
test_cloudinary_url("test", {use_root_path:true, private_cdn:true, angle:0}, "http://test123-res.cloudinary.com/a_0/test", {})
it "should support use_root_path together with url_suffix for private_cdn" , ->
test_cloudinary_url("test", {use_root_path:true, url_suffix:"hello", private_cdn:true}, "http://test123-res.cloudinary.com/test/hello", {})
it "should disllow use_root_path if not image/upload" , ->
expect(()-> utils.url("test", {use_root_path:true, private_cdn:true, type:'facebook'})).to.be.throwError(/Root path only supported for image\/upload/)
expect(()-> utils.url("test", {use_root_path:true, private_cdn:true, resource_type:'raw'})).to.be.throwError(/Root path only supported for image\/upload/)
it "should use width and height from options only if crop is given" , ->
test_cloudinary_url("test", {width:100, height:100, crop:'crop'}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test", {width:100, height:100})
it "should not pass width and height to html in case angle was used" , ->
test_cloudinary_url("test", {width:100, height:100, crop:'scale', angle:'auto'}, "http://res.cloudinary.com/test123/image/upload/a_auto,c_scale,h_100,w_100/test", {})
it "should use x, y, radius, prefix, gravity and quality from options" , ->
test_cloudinary_url("test", {x:1, y:2, radius:3, gravity:'center', quality:0.4, prefix:"a"}, "http://res.cloudinary.com/test123/image/upload/g_center,p_a,q_0.4,r_3,x_1,y_2/test", {})
it "should support named transformation" , ->
test_cloudinary_url("test", {transformation:"blip"}, "http://res.cloudinary.com/test123/image/upload/t_blip/test", {})
it "should support array of named transformations" , ->
test_cloudinary_url("test", {transformation:["blip", "blop"]}, "http://res.cloudinary.com/test123/image/upload/t_blip.blop/test", {})
it "should support base transformation" , ->
test_cloudinary_url("test", {transformation:{x:100, y:100, crop:'fill'}, crop:'crop', width:100}, "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/c_crop,w_100/test", {width:100})
it "should support array of base transformations" , ->
test_cloudinary_url("test", {transformation:[{x:100, y:100, width:200, crop:'fill'}, {radius:10}], crop:'crop', width:100}, "http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop,w_100/test", {width:100})
it "should support array of transformations" , ->
result = utils.generate_transformation_string([{x:100, y:100, width:200, crop:'fill'}, {radius:10}])
expect(result).to.eql("c_fill,w_200,x_100,y_100/r_10")
it "should not include empty transformations" , ->
test_cloudinary_url("test", {transformation:[{}, {x:100, y:100, crop:'fill'}, {}]}, "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test", {})
it "should support size" , ->
test_cloudinary_url("test", {size:"10x10", crop:'crop'}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test", {width:"10", height:"10"})
it "should use type from options" , ->
test_cloudinary_url("test", {type:'facebook'}, "http://res.cloudinary.com/test123/image/facebook/test", {})
it "should use resource_type from options" , ->
test_cloudinary_url("test", {resource_type:'raw'}, "http://res.cloudinary.com/test123/raw/upload/test", {})
it "should ignore http links only if type is not given" , ->
test_cloudinary_url("http://test", {type:null}, "http://test", {})
test_cloudinary_url("http://test", {type:"fetch"}, "http://res.cloudinary.com/test123/image/fetch/http://test" , {})
it "should escape fetch urls" , ->
test_cloudinary_url("http://blah.com/hello?a=b", {type:"fetch"}, "http://res.cloudinary.com/test123/image/fetch/http://blah.com/hello%3Fa%3Db", {})
it "should should escape http urls" , ->
test_cloudinary_url("http://www.youtube.com/watch?v=d9NF2edxy-M", {type:"youtube"}, "http://res.cloudinary.com/test123/image/youtube/http://www.youtube.com/watch%3Fv%3Dd9NF2edxy-M", {})
it "should support background" , ->
test_cloudinary_url("test", {background:"red"}, "http://res.cloudinary.com/test123/image/upload/b_red/test", {})
test_cloudinary_url("test", {background:"#112233"}, "http://res.cloudinary.com/test123/image/upload/b_rgb:112233/test", {})
it "should support default_image" , ->
test_cloudinary_url("test", {default_image:"default"}, "http://res.cloudinary.com/test123/image/upload/d_default/test", {})
it "should support angle" , ->
test_cloudinary_url("test", {angle:"55"}, "http://res.cloudinary.com/test123/image/upload/a_55/test", {})
test_cloudinary_url("test", {angle:["auto", "55"]}, "http://res.cloudinary.com/test123/image/upload/a_auto.55/test", {})
it "should support format for fetch urls" , ->
test_cloudinary_url("http://cloudinary.com/images/logo.png", {format:"jpg", type:"fetch"}, "http://res.cloudinary.com/test123/image/fetch/f_jpg/http://cloudinary.com/images/logo.png", {})
it "should support effect" , ->
test_cloudinary_url("test", {effect:"sepia"}, "http://res.cloudinary.com/test123/image/upload/e_sepia/test", {})
it "should support effect with hash param" , ->
test_cloudinary_url("test", {effect:{sepia:10}}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
it "should support effect with array param" , ->
test_cloudinary_url("test", {effect:["sepia", 10]}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
for param,letter of {overlay:"l", underlay:"u"}
it "should support #{param}" , ->
options={}
options[param] = "text:hello"
test_cloudinary_url("test", options, "http://res.cloudinary.com/test123/image/upload/#{letter}_text:hello/test", {})
it "should not pass width/height to html for #{param}" , ->
options = {height:100 , width:100}
options[param] = 'text:hello'
test_cloudinary_url("test", options, "http://res.cloudinary.com/test123/image/upload/h_100,#{letter}_text:hello,w_100/test", {})
it "should use ssl_detected if secure is not given as parameter and not set to true in configuration" , ->
test_cloudinary_url("test", {ssl_detected:true}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should use secure if given over ssl_detected and configuration" , ->
cloudinary.config("secure",true)
test_cloudinary_url("test", {ssl_detected:true, secure:false}, "http://res.cloudinary.com/test123/image/upload/test", {})
it "should use secure: true from configuration over ssl_detected" , ->
cloudinary.config("secure",true)
test_cloudinary_url("test", {ssl_detected:false}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should support external cname" , ->
test_cloudinary_url("test", {cname:"hello.com"}, "http://hello.com/test123/image/upload/test", {})
it "should support external cname with cdn_subdomain on" , ->
test_cloudinary_url("test", {cname:"hello.com", cdn_subdomain:true}, "http://a2.hello.com/test123/image/upload/test", {})
it "should support cdn_subdomain with secure on if using shared_domain" , ->
test_cloudinary_url("test", {secure:true, cdn_subdomain:true}, "https://res-2.cloudinary.com/test123/image/upload/test", {})
it "should support secure_cdn_subdomain false override with secure" , ->
test_cloudinary_url("test", {secure:true, cdn_subdomain:true, secure_cdn_subdomain:false}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should support secure_cdn_subdomain true override with secure" , ->
test_cloudinary_url("test", {secure:true, cdn_subdomain:true, secure_cdn_subdomain:true, private_cdn:true}, "https://test123-res-2.cloudinary.com/image/upload/test", {})
it "should support string param" , ->
test_cloudinary_url("test", {effect:{sepia:10}}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
it "should support border" , ->
test_cloudinary_url("test", {border:{width:5}}, "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_black/test", {})
test_cloudinary_url("test", {border:{width:5, color:"#ffaabbdd"}}, "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test", {})
test_cloudinary_url("test", {border:"1px_solid_blue"}, "http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test", {})
test_cloudinary_url("test", {border:"2"}, "http://res.cloudinary.com/test123/image/upload/test", {border:"2"})
it "should support flags" , ->
test_cloudinary_url("test", {flags:"abc"}, "http://res.cloudinary.com/test123/image/upload/fl_abc/test", {})
test_cloudinary_url("test", {flags:["abc", "def"]}, "http://res.cloudinary.com/test123/image/upload/fl_abc.def/test", {})
it "build_upload_params should not destroy options" , ->
options = {width:100, crop:"scale"}
expect(utils.build_upload_params(options)['transformation']).to.eql("c_scale,w_100")
expect(Object.keys(options).length).to.eql(2)
it "build_upload_params canonize booleans" , ->
options = {backup:true, use_filename:false, colors:"true", exif:"false", colors:"true", image_metadata:"false", invalidate:1, eager_async:"1"}
params = utils.build_upload_params(options)
expected = api.only(params, Object.keys(options)...)
actual = { backup:1, use_filename:0, colors:1, exif:0, colors:1, image_metadata:0, invalidate:1, eager_async:1}
expect( expected ).to.eql( actual )
expect(utils.build_upload_params(backup:null)['backup']).to.eql(undefined)
expect(utils.build_upload_params({})['backup']).to.eql(undefined)
it "should add version if public_id contains /" , ->
test_cloudinary_url("folder/test", {}, "http://res.cloudinary.com/test123/image/upload/v1/folder/test", {})
test_cloudinary_url("folder/test", {version:123}, "http://res.cloudinary.com/test123/image/upload/v123/folder/test", {})
it "should not add version if public_id contains version already" , ->
test_cloudinary_url("v1234/test", {}, "http://res.cloudinary.com/test123/image/upload/v1234/test", {})
it "should allow to shorted image/upload urls" , ->
test_cloudinary_url("test", {shorten:true}, "http://res.cloudinary.com/test123/iu/test", {})
it "should escape public_ids" , ->
for source, target of { "a b": "a%20b", "a+b": "a%2Bb", "a%20b": "a%20b", "a-b": "a-b", "a??b": "a%3F%3Fb", "parentheses(interject)": "parentheses(interject)" }
expect(utils.url(source)).to.eql("http://res.cloudinary.com/test123/image/upload/#{target}")
it "should correctly sign URLs", ->
test_cloudinary_url("image.jpg", {version: 1234, transformation: {crop: "crop", width: 10, height: 20}, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v1234/image.jpg", {})
test_cloudinary_url("image.jpg", {version: 1234, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg", {})
test_cloudinary_url("image.jpg", {transformation: {crop: "crop", width: 10, height: 20}, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg", {})
test_cloudinary_url("image.jpg", {transformation: {crop: "crop", width: 10, height: 20}, type: 'authenticated', sign_url: true}, "http://res.cloudinary.com/test123/image/authenticated/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg", {})
test_cloudinary_url("http://google.com/path/to/image.png", {type: "fetch", version: 1234, sign_url: true}, "http://res.cloudinary.com/test123/image/fetch/s--hH_YcbiS--/v1234/http://google.com/path/to/image.png", {})
it "should correctly sign_request" , ->
params = utils.sign_request({public_id:"folder/file", version:"1234"})
expect(params).to.eql(public_id:"folder/file", version:"1234", signature:"7a3349cbb373e4812118d625047ede50b90e7b67", api_key:"1234")
it "should support responsive width" , ->
test_cloudinary_url("test", {width:100, height:100, crop:"crop", responsive_width:true}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_limit,w_auto/test", {responsive: true})
cloudinary.config("responsive_width_transformation",{width: 'auto', crop: 'pad'})
test_cloudinary_url("test", {width:100, height:100, crop:"crop", responsive_width:true}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_pad,w_auto/test", {responsive: true})
describe "zoom", ->
it "should support a decimal value", ->
test_cloudinary_url("test", {zoom: 1.2}, "http://res.cloudinary.com/test123/image/upload/z_1.2/test", {})
describe "encode_double_array", ->
it "should correctly encode double arrays" , ->
expect(utils.encode_double_array([1,2,3,4])).to.eql("1,2,3,4")
expect(utils.encode_double_array([[1,2,3,4],[5,6,7,8]])).to.eql("1,2,3,4|5,6,7,8")
it "should call validate_webhook_signature", ->
@timeout 1000
data = '{"public_id":"117e5550-7bfa-11e4-80d7-f962166bd3be","version":1417727468}'
timestamp = 1417727468
orig = cloudinary.config()
cloudinary.config({api_key:'key',api_secret:'shhh'})
sig = cloudinary.utils.webhook_signature(data, timestamp)
expect(sig).to.eql('bac927006d3ce039ef7632e2c03189348d02924a')
cloudinary.config(orig)
dotenv = require('dotenv')
dotenv.load()
expect = require("expect.js")
cloudinary = require("../cloudinary")
utils = require("../lib/utils")
api = require("../lib/api")
_ = require("lodash")
Q = require('q')
fs = require('fs')
# Specs for the Cloudinary URL/param helpers in ../lib/utils.
describe "utils", ->
  # Without credentials the suite cannot run meaningfully; bail out early.
  return console.warn("**** Please setup environment for api test to run!") if !cloudinary.config().api_secret?

  afterEach () ->
    # Restore the pristine config; `secure` is force-reset because several specs toggle it.
    cloudinary.config(_.defaults({secure: null}, @orig))

  beforeEach () ->
    # Fixed test credentials: api_key "1234" / api_secret "b" are the values the
    # signature expectations below (e.g. sign_request) were computed against.
    @cfg = cloudinary.config({cloud_name: "test123", secure_distribution: null, private_cdn: false, secure: false, cname: null, cdn_subdomain: false, api_key: "1234", api_secret: "b"})
    @orig = _.clone(@cfg)

  # Return the first element whose `attr` equals `value`, or undefined.
  find_by_attr = (elements, attr, value) ->
    for element in elements
      return element if element[attr] == value
    undefined

  # Assert that utils.url(public_id, options) yields expected_url and that
  # `options` is left holding only the html-attribute leftovers (utils.url
  # consumes transformation keys in place).
  test_cloudinary_url = (public_id, options, expected_url, expected_options) ->
    url = utils.url(public_id, options)
    expect(url).to.eql(expected_url)
    expect(options).to.eql(expected_options)
    url

  # --- cloud / distribution selection -----------------------------------------
  it "should use cloud_name from config" , ->
    test_cloudinary_url("test", {}, "http://res.cloudinary.com/test123/image/upload/test", {})
  it "should allow overriding cloud_name in options" , ->
    test_cloudinary_url("test", {cloud_name: "test321"}, "http://res.cloudinary.com/test321/image/upload/test", {})
  it "should use default secure distribution if secure=true" , ->
    test_cloudinary_url("test", {secure: true}, "https://res.cloudinary.com/test123/image/upload/test", {})
  it "should allow overriding secure distribution if secure=true" , ->
    test_cloudinary_url("test", {secure: true, secure_distribution: "something.else.com"}, "https://something.else.com/test123/image/upload/test", {})
  it "should take secure distribution from config if secure=true" , ->
    cloudinary.config("secure_distribution", "config.secure.distribution.com")
    test_cloudinary_url("test", {secure: true}, "https://config.secure.distribution.com/test123/image/upload/test", {})
  it "should default to akamai if secure is given with private_cdn and no secure_distribution" , ->
    test_cloudinary_url("test", {secure: true, private_cdn: true}, "https://test123-res.cloudinary.com/image/upload/test", {})
  it "should not add cloud_name if secure private_cdn and secure non akamai secure_distribution" , ->
    test_cloudinary_url("test", {secure: true, private_cdn: true, secure_distribution: "something.cloudfront.net"}, "https://something.cloudfront.net/image/upload/test", {})
  it "should allow overriding private_cdn if private_cdn=true" , ->
    test_cloudinary_url("test", {private_cdn: true}, "http://test123-res.cloudinary.com/image/upload/test", {})
  it "should allow overriding private_cdn if private_cdn=false" , ->
    cloudinary.config("private_cdn", true)
    test_cloudinary_url("test", {private_cdn: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
  it "should allow overriding cname if cname=example.com" , ->
    test_cloudinary_url("test", {cname: "example.com"}, "http://example.com/test123/image/upload/test", {})
  it "should allow overriding cname if cname=false" , ->
    cloudinary.config("cname", "example.com")
    test_cloudinary_url("test", {cname: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
    cloudinary.config("cname", null)

  # --- format / url_suffix / use_root_path ------------------------------------
  it "should use format from options" , ->
    test_cloudinary_url("test", {format: 'jpg'}, "http://res.cloudinary.com/test123/image/upload/test.jpg", {})
  it "should disallow url_suffix in shared distribution" , ->
    expect(-> utils.url("test", {url_suffix: "hello"})).to.be.throwError(/URL Suffix only supported in private CDN/)
  it "should disallow url_suffix in non upload types" , ->
    expect(-> utils.url("test", {url_suffix: "hello", private_cdn: true, type: 'facebook'})).to.be.throwError(/URL Suffix only supported for image\/upload and raw\/upload/)
  it "should disallow url_suffix with / or ." , ->
    expect(-> utils.url("test", {url_suffix: "hello/world", private_cdn: true})).to.be.throwError(/url_suffix should not include . or \//)
    expect(-> utils.url("test", {url_suffix: "hello.world", private_cdn: true})).to.be.throwError(/url_suffix should not include . or \//)
  it "should support url_suffix for private_cdn" , ->
    test_cloudinary_url("test", {url_suffix: "hello", private_cdn: true}, "http://test123-res.cloudinary.com/images/test/hello", {})
    test_cloudinary_url("test", {url_suffix: "hello", angle: 0, private_cdn: true}, "http://test123-res.cloudinary.com/images/a_0/test/hello", {})
  it "should put format after url_suffix" , ->
    test_cloudinary_url("test", {url_suffix: "hello", private_cdn: true, format: "jpg"}, "http://test123-res.cloudinary.com/images/test/hello.jpg", {})
  it "should not sign the url_suffix" , ->
    # The signature must match the URL generated *without* the suffix.
    expected_signature = utils.url("test", format: "jpg", sign_url: true).match(/s--[0-9A-Za-z_-]{8}--/).toString()
    test_cloudinary_url("test", {url_suffix: "hello", private_cdn: true, format: "jpg", sign_url: true}, "http://test123-res.cloudinary.com/images/#{expected_signature}/test/hello.jpg", {})
    expected_signature = utils.url("test", format: "jpg", angle: 0, sign_url: true).match(/s--[0-9A-Za-z_-]{8}--/).toString()
    test_cloudinary_url("test", {url_suffix: "hello", private_cdn: true, format: "jpg", angle: 0, sign_url: true}, "http://test123-res.cloudinary.com/images/#{expected_signature}/a_0/test/hello.jpg", {})
  it "should support url_suffix for raw uploads" , ->
    test_cloudinary_url("test", {url_suffix: "hello", private_cdn: true, resource_type: 'raw'}, "http://test123-res.cloudinary.com/files/test/hello", {})
  it "should support use_root_path in shared distribution" , ->
    test_cloudinary_url("test", {use_root_path: true, private_cdn: false}, "http://res.cloudinary.com/test123/test", {})
    test_cloudinary_url("test", {use_root_path: true, private_cdn: false, angle: 0}, "http://res.cloudinary.com/test123/a_0/test", {})
  it "should support use_root_path for private_cdn" , ->
    test_cloudinary_url("test", {use_root_path: true, private_cdn: true}, "http://test123-res.cloudinary.com/test", {})
    test_cloudinary_url("test", {use_root_path: true, private_cdn: true, angle: 0}, "http://test123-res.cloudinary.com/a_0/test", {})
  it "should support use_root_path together with url_suffix for private_cdn" , ->
    test_cloudinary_url("test", {use_root_path: true, url_suffix: "hello", private_cdn: true}, "http://test123-res.cloudinary.com/test/hello", {})
  it "should disllow use_root_path if not image/upload" , ->
    expect(-> utils.url("test", {use_root_path: true, private_cdn: true, type: 'facebook'})).to.be.throwError(/Root path only supported for image\/upload/)
    expect(-> utils.url("test", {use_root_path: true, private_cdn: true, resource_type: 'raw'})).to.be.throwError(/Root path only supported for image\/upload/)

  # --- transformation options ---------------------------------------------------
  it "should use width and height from options only if crop is given" , ->
    test_cloudinary_url("test", {width: 100, height: 100, crop: 'crop'}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test", {width: 100, height: 100})
  it "should not pass width and height to html in case angle was used" , ->
    test_cloudinary_url("test", {width: 100, height: 100, crop: 'scale', angle: 'auto'}, "http://res.cloudinary.com/test123/image/upload/a_auto,c_scale,h_100,w_100/test", {})
  it "should use x, y, radius, prefix, gravity and quality from options" , ->
    test_cloudinary_url("test", {x: 1, y: 2, radius: 3, gravity: 'center', quality: 0.4, prefix: "a"}, "http://res.cloudinary.com/test123/image/upload/g_center,p_a,q_0.4,r_3,x_1,y_2/test", {})
  it "should support named transformation" , ->
    test_cloudinary_url("test", {transformation: "blip"}, "http://res.cloudinary.com/test123/image/upload/t_blip/test", {})
  it "should support array of named transformations" , ->
    test_cloudinary_url("test", {transformation: ["blip", "blop"]}, "http://res.cloudinary.com/test123/image/upload/t_blip.blop/test", {})
  it "should support base transformation" , ->
    test_cloudinary_url("test", {transformation: {x: 100, y: 100, crop: 'fill'}, crop: 'crop', width: 100}, "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/c_crop,w_100/test", {width: 100})
  it "should support array of base transformations" , ->
    test_cloudinary_url("test", {transformation: [{x: 100, y: 100, width: 200, crop: 'fill'}, {radius: 10}], crop: 'crop', width: 100}, "http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop,w_100/test", {width: 100})
  it "should support array of transformations" , ->
    result = utils.generate_transformation_string([{x: 100, y: 100, width: 200, crop: 'fill'}, {radius: 10}])
    expect(result).to.eql("c_fill,w_200,x_100,y_100/r_10")
  it "should not include empty transformations" , ->
    test_cloudinary_url("test", {transformation: [{}, {x: 100, y: 100, crop: 'fill'}, {}]}, "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test", {})
  it "should support size" , ->
    test_cloudinary_url("test", {size: "10x10", crop: 'crop'}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test", {width: "10", height: "10"})
  it "should use type from options" , ->
    test_cloudinary_url("test", {type: 'facebook'}, "http://res.cloudinary.com/test123/image/facebook/test", {})
  it "should use resource_type from options" , ->
    test_cloudinary_url("test", {resource_type: 'raw'}, "http://res.cloudinary.com/test123/raw/upload/test", {})
  it "should ignore http links only if type is not given" , ->
    test_cloudinary_url("http://test", {type: null}, "http://test", {})
    test_cloudinary_url("http://test", {type: "fetch"}, "http://res.cloudinary.com/test123/image/fetch/http://test", {})
  it "should escape fetch urls" , ->
    test_cloudinary_url("http://blah.com/hello?a=b", {type: "fetch"}, "http://res.cloudinary.com/test123/image/fetch/http://blah.com/hello%3Fa%3Db", {})
  it "should should escape http urls" , ->
    test_cloudinary_url("http://www.youtube.com/watch?v=d9NF2edxy-M", {type: "youtube"}, "http://res.cloudinary.com/test123/image/youtube/http://www.youtube.com/watch%3Fv%3Dd9NF2edxy-M", {})
  it "should support background" , ->
    test_cloudinary_url("test", {background: "red"}, "http://res.cloudinary.com/test123/image/upload/b_red/test", {})
    test_cloudinary_url("test", {background: "#112233"}, "http://res.cloudinary.com/test123/image/upload/b_rgb:112233/test", {})
  it "should support default_image" , ->
    test_cloudinary_url("test", {default_image: "default"}, "http://res.cloudinary.com/test123/image/upload/d_default/test", {})
  it "should support angle" , ->
    test_cloudinary_url("test", {angle: "55"}, "http://res.cloudinary.com/test123/image/upload/a_55/test", {})
    test_cloudinary_url("test", {angle: ["auto", "55"]}, "http://res.cloudinary.com/test123/image/upload/a_auto.55/test", {})
  it "should support format for fetch urls" , ->
    test_cloudinary_url("http://cloudinary.com/images/logo.png", {format: "jpg", type: "fetch"}, "http://res.cloudinary.com/test123/image/fetch/f_jpg/http://cloudinary.com/images/logo.png", {})
  it "should support effect" , ->
    test_cloudinary_url("test", {effect: "sepia"}, "http://res.cloudinary.com/test123/image/upload/e_sepia/test", {})
  it "should support effect with hash param" , ->
    test_cloudinary_url("test", {effect: {sepia: 10}}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
  it "should support effect with array param" , ->
    test_cloudinary_url("test", {effect: ["sepia", 10]}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})

  # Overlay and underlay behave identically except for their URL prefix
  # (l_ vs u_), so the two specs are generated per parameter.
  # NOTE: `do (param, letter)` captures the loop variables per iteration.
  # Without it, every `it` callback would observe the *last* iteration's
  # values when Mocha later runs the specs.
  for param, letter of {overlay: "l", underlay: "u"}
    do (param, letter) ->
      it "should support #{param}" , ->
        options = {}
        options[param] = "text:hello"
        test_cloudinary_url("test", options, "http://res.cloudinary.com/test123/image/upload/#{letter}_text:hello/test", {})
      it "should not pass width/height to html for #{param}" , ->
        options = {height: 100, width: 100}
        options[param] = 'text:hello'
        test_cloudinary_url("test", options, "http://res.cloudinary.com/test123/image/upload/h_100,#{letter}_text:hello,w_100/test", {})

  # --- secure / CDN selection ------------------------------------------------
  it "should use ssl_detected if secure is not given as parameter and not set to true in configuration" , ->
    test_cloudinary_url("test", {ssl_detected: true}, "https://res.cloudinary.com/test123/image/upload/test", {})
  it "should use secure if given over ssl_detected and configuration" , ->
    cloudinary.config("secure", true)
    test_cloudinary_url("test", {ssl_detected: true, secure: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
  it "should use secure: true from configuration over ssl_detected" , ->
    cloudinary.config("secure", true)
    test_cloudinary_url("test", {ssl_detected: false}, "https://res.cloudinary.com/test123/image/upload/test", {})
  it "should support external cname" , ->
    test_cloudinary_url("test", {cname: "hello.com"}, "http://hello.com/test123/image/upload/test", {})
  it "should support external cname with cdn_subdomain on" , ->
    test_cloudinary_url("test", {cname: "hello.com", cdn_subdomain: true}, "http://a2.hello.com/test123/image/upload/test", {})
  it "should support cdn_subdomain with secure on if using shared_domain" , ->
    test_cloudinary_url("test", {secure: true, cdn_subdomain: true}, "https://res-2.cloudinary.com/test123/image/upload/test", {})
  it "should support secure_cdn_subdomain false override with secure" , ->
    test_cloudinary_url("test", {secure: true, cdn_subdomain: true, secure_cdn_subdomain: false}, "https://res.cloudinary.com/test123/image/upload/test", {})
  it "should support secure_cdn_subdomain true override with secure" , ->
    test_cloudinary_url("test", {secure: true, cdn_subdomain: true, secure_cdn_subdomain: true, private_cdn: true}, "https://test123-res-2.cloudinary.com/image/upload/test", {})

  # --- misc transformation parameters ----------------------------------------
  it "should support string param" , ->
    test_cloudinary_url("test", {effect: {sepia: 10}}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
  it "should support border" , ->
    test_cloudinary_url("test", {border: {width: 5}}, "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_black/test", {})
    test_cloudinary_url("test", {border: {width: 5, color: "#ffaabbdd"}}, "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test", {})
    test_cloudinary_url("test", {border: "1px_solid_blue"}, "http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test", {})
    # A bare numeric border stays an html attribute rather than a transformation.
    test_cloudinary_url("test", {border: "2"}, "http://res.cloudinary.com/test123/image/upload/test", {border: "2"})
  it "should support flags" , ->
    test_cloudinary_url("test", {flags: "abc"}, "http://res.cloudinary.com/test123/image/upload/fl_abc/test", {})
    test_cloudinary_url("test", {flags: ["abc", "def"]}, "http://res.cloudinary.com/test123/image/upload/fl_abc.def/test", {})

  # --- build_upload_params ----------------------------------------------------
  it "build_upload_params should not destroy options" , ->
    options = {width: 100, crop: "scale"}
    expect(utils.build_upload_params(options)['transformation']).to.eql("c_scale,w_100")
    expect(Object.keys(options).length).to.eql(2)
  it "build_upload_params canonize booleans" , ->
    options = {backup: true, use_filename: false, colors: "true", exif: "false", image_metadata: "false", invalidate: 1, eager_async: "1"}
    params = utils.build_upload_params(options)
    expected = api.only(params, Object.keys(options)...)
    actual = {backup: 1, use_filename: 0, colors: 1, exif: 0, image_metadata: 0, invalidate: 1, eager_async: 1}
    expect(expected).to.eql(actual)
    expect(utils.build_upload_params(backup: null)['backup']).to.eql(undefined)
    expect(utils.build_upload_params({})['backup']).to.eql(undefined)

  # --- versioning / shortening / escaping -------------------------------------
  it "should add version if public_id contains /" , ->
    test_cloudinary_url("folder/test", {}, "http://res.cloudinary.com/test123/image/upload/v1/folder/test", {})
    test_cloudinary_url("folder/test", {version: 123}, "http://res.cloudinary.com/test123/image/upload/v123/folder/test", {})
  it "should not add version if public_id contains version already" , ->
    test_cloudinary_url("v1234/test", {}, "http://res.cloudinary.com/test123/image/upload/v1234/test", {})
  it "should allow to shorted image/upload urls" , ->
    test_cloudinary_url("test", {shorten: true}, "http://res.cloudinary.com/test123/iu/test", {})
  it "should escape public_ids" , ->
    for source, target of { "a b": "a%20b", "a+b": "a%2Bb", "a%20b": "a%20b", "a-b": "a-b", "a??b": "a%3F%3Fb", "parentheses(interject)": "parentheses(interject)" }
      expect(utils.url(source)).to.eql("http://res.cloudinary.com/test123/image/upload/#{target}")

  # --- signing ----------------------------------------------------------------
  it "should correctly sign URLs", ->
    test_cloudinary_url("image.jpg", {version: 1234, transformation: {crop: "crop", width: 10, height: 20}, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v1234/image.jpg", {})
    test_cloudinary_url("image.jpg", {version: 1234, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg", {})
    test_cloudinary_url("image.jpg", {transformation: {crop: "crop", width: 10, height: 20}, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg", {})
    test_cloudinary_url("image.jpg", {transformation: {crop: "crop", width: 10, height: 20}, type: 'authenticated', sign_url: true}, "http://res.cloudinary.com/test123/image/authenticated/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg", {})
    test_cloudinary_url("http://google.com/path/to/image.png", {type: "fetch", version: 1234, sign_url: true}, "http://res.cloudinary.com/test123/image/fetch/s--hH_YcbiS--/v1234/http://google.com/path/to/image.png", {})
  it "should correctly sign_request" , ->
    # api_key "1234" comes from the fixed credentials set in beforeEach.
    params = utils.sign_request({public_id: "folder/file", version: "1234"})
    expect(params).to.eql(public_id: "folder/file", version: "1234", signature: "7a3349cbb373e4812118d625047ede50b90e7b67", api_key: "1234")

  it "should support responsive width" , ->
    test_cloudinary_url("test", {width: 100, height: 100, crop: "crop", responsive_width: true}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_limit,w_auto/test", {responsive: true})
    # Overriding the configured responsive transformation changes the appended step.
    cloudinary.config("responsive_width_transformation", {width: 'auto', crop: 'pad'})
    test_cloudinary_url("test", {width: 100, height: 100, crop: "crop", responsive_width: true}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_pad,w_auto/test", {responsive: true})

  describe "zoom", ->
    it "should support a decimal value", ->
      test_cloudinary_url("test", {zoom: 1.2}, "http://res.cloudinary.com/test123/image/upload/z_1.2/test", {})

  describe "encode_double_array", ->
    it "should correctly encode double arrays" , ->
      expect(utils.encode_double_array([1, 2, 3, 4])).to.eql("1,2,3,4")
      expect(utils.encode_double_array([[1, 2, 3, 4], [5, 6, 7, 8]])).to.eql("1,2,3,4|5,6,7,8")

  it "should call validate_webhook_signature", ->
    @timeout 1000
    data = '{"public_id":"117e5550-7bfa-11e4-80d7-f962166bd3be","version":1417727468}'
    timestamp = 1417727468
    # Swap in fixed credentials so the expected digest is reproducible; restore afterwards.
    orig = cloudinary.config()
    cloudinary.config({api_key: 'key', api_secret: 'shhh'})
    sig = cloudinary.utils.webhook_signature(data, timestamp)
    expect(sig).to.eql('bac927006d3ce039ef7632e2c03189348d02924a')
    cloudinary.config(orig)
dotenv = require('dotenv')
dotenv.load()
expect = require("expect.js")
cloudinary = require("../cloudinary")
utils = require("../lib/utils")
api = require("../lib/api")
_ = require("lodash")
Q = require('q')
fs = require('fs')
describe "utils", ->
return console.warn("**** Please setup environment for api test to run!") if !cloudinary.config().api_secret?
afterEach () ->
cloudinary.config(_.defaults({secure:null},@orig))
beforeEach () ->
    @cfg= cloudinary.config( {cloud_name:"test123", secure_distribution : null, private_cdn : false, secure : false, cname : null ,cdn_subdomain : false, api_key : "1234", api_secret: "b" })
@orig = _.clone(@cfg)
find_by_attr = (elements, attr, value) ->
for element in elements
return element if element[attr] == value
undefined
test_cloudinary_url = (public_id,options,expected_url,expected_options) ->
url = utils.url(public_id,options)
expect(url).to.eql(expected_url)
expect(options).to.eql(expected_options)
url
it "should use cloud_name from config" , ->
test_cloudinary_url("test", {}, "http://res.cloudinary.com/test123/image/upload/test", {})
it "should allow overriding cloud_name in options" , ->
test_cloudinary_url("test", {cloud_name:"test321"}, "http://res.cloudinary.com/test321/image/upload/test", {})
it "should use default secure distribution if secure=true" , ->
test_cloudinary_url("test", {secure:true}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should allow overriding secure distribution if secure=true" , ->
test_cloudinary_url("test", {secure:true, secure_distribution:"something.else.com"}, "https://something.else.com/test123/image/upload/test", {})
it "should take secure distribution from config if secure=true" , ->
cloudinary.config("secure_distribution","config.secure.distribution.com")
test_cloudinary_url("test", {secure:true}, "https://config.secure.distribution.com/test123/image/upload/test", {})
it "should default to akamai if secure is given with private_cdn and no secure_distribution" , ->
test_cloudinary_url("test", {secure:true, private_cdn:true}, "https://test123-res.cloudinary.com/image/upload/test", {})
it "should not add cloud_name if secure private_cdn and secure non akamai secure_distribution" , ->
test_cloudinary_url("test", {secure:true, private_cdn:true, secure_distribution:"something.cloudfront.net"}, "https://something.cloudfront.net/image/upload/test", {})
it "should allow overriding private_cdn if private_cdn=true" , ->
test_cloudinary_url("test", {private_cdn: true}, "http://test123-res.cloudinary.com/image/upload/test", {})
it "should allow overriding private_cdn if private_cdn=false" , ->
cloudinary.config("private_cdn",true)
test_cloudinary_url("test", {private_cdn: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
it "should allow overriding cname if cname=example.com" , ->
test_cloudinary_url("test", {cname: "example.com"}, "http://example.com/test123/image/upload/test", {})
it "should allow overriding cname if cname=false" , ->
cloudinary.config("cname","example.com")
test_cloudinary_url("test", {cname: false}, "http://res.cloudinary.com/test123/image/upload/test", {})
cloudinary.config("cname",null)
it "should use format from options" , ->
test_cloudinary_url("test", {format:'jpg'}, "http://res.cloudinary.com/test123/image/upload/test.jpg", {})
it "should disallow url_suffix in shared distribution" , ->
expect(()-> utils.url("test", {url_suffix:"hello"})).to.be.throwError(/URL Suffix only supported in private CDN/)
it "should disallow url_suffix in non upload types" , ->
expect(()-> utils.url("test", {url_suffix:"hello", private_cdn:true, type:'facebook'})).to.be.throwError(/URL Suffix only supported for image\/upload and raw\/upload/)
it "should disallow url_suffix with / or ." , ->
expect(()-> utils.url("test", {url_suffix:"hello/world", private_cdn:true})).to.be.throwError(/url_suffix should not include . or \//)
expect(()-> utils.url("test", {url_suffix:"hello.world", private_cdn:true})).to.be.throwError(/url_suffix should not include . or \//)
it "should support url_suffix for private_cdn" , ->
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true}, "http://test123-res.cloudinary.com/images/test/hello", {})
test_cloudinary_url("test", {url_suffix:"hello", angle:0, private_cdn:true}, "http://test123-res.cloudinary.com/images/a_0/test/hello", {})
it "should put format after url_suffix" , ->
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, format:"jpg"}, "http://test123-res.cloudinary.com/images/test/hello.jpg", {})
it "should not sign the url_suffix" , ->
expected_signature = utils.url("test", format:"jpg", sign_url:true).match(/s--[0-9A-Za-z_-]{8}--/).toString()
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, format:"jpg", sign_url:true}, "http://test123-res.cloudinary.com/images/#{expected_signature}/test/hello.jpg", {})
expected_signature = utils.url("test", format:"jpg", angle:0, sign_url:true).match(/s--[0-9A-Za-z_-]{8}--/).toString()
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, format:"jpg", angle:0, sign_url:true}, "http://test123-res.cloudinary.com/images/#{expected_signature}/a_0/test/hello.jpg", {})
it "should support url_suffix for raw uploads" , ->
test_cloudinary_url("test", {url_suffix:"hello", private_cdn:true, resource_type:'raw'}, "http://test123-res.cloudinary.com/files/test/hello", {})
it "should support use_root_path in shared distribution" , ->
test_cloudinary_url("test", {use_root_path:true, private_cdn:false}, "http://res.cloudinary.com/test123/test", {})
test_cloudinary_url("test", {use_root_path:true, private_cdn:false, angle:0}, "http://res.cloudinary.com/test123/a_0/test", {})
it "should support use_root_path for private_cdn" , ->
test_cloudinary_url("test", {use_root_path:true, private_cdn:true}, "http://test123-res.cloudinary.com/test", {})
test_cloudinary_url("test", {use_root_path:true, private_cdn:true, angle:0}, "http://test123-res.cloudinary.com/a_0/test", {})
it "should support use_root_path together with url_suffix for private_cdn" , ->
test_cloudinary_url("test", {use_root_path:true, url_suffix:"hello", private_cdn:true}, "http://test123-res.cloudinary.com/test/hello", {})
it "should disllow use_root_path if not image/upload" , ->
expect(()-> utils.url("test", {use_root_path:true, private_cdn:true, type:'facebook'})).to.be.throwError(/Root path only supported for image\/upload/)
expect(()-> utils.url("test", {use_root_path:true, private_cdn:true, resource_type:'raw'})).to.be.throwError(/Root path only supported for image\/upload/)
it "should use width and height from options only if crop is given" , ->
test_cloudinary_url("test", {width:100, height:100, crop:'crop'}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/test", {width:100, height:100})
it "should not pass width and height to html in case angle was used" , ->
test_cloudinary_url("test", {width:100, height:100, crop:'scale', angle:'auto'}, "http://res.cloudinary.com/test123/image/upload/a_auto,c_scale,h_100,w_100/test", {})
it "should use x, y, radius, prefix, gravity and quality from options" , ->
test_cloudinary_url("test", {x:1, y:2, radius:3, gravity:'center', quality:0.4, prefix:"a"}, "http://res.cloudinary.com/test123/image/upload/g_center,p_a,q_0.4,r_3,x_1,y_2/test", {})
it "should support named transformation" , ->
test_cloudinary_url("test", {transformation:"blip"}, "http://res.cloudinary.com/test123/image/upload/t_blip/test", {})
it "should support array of named transformations" , ->
test_cloudinary_url("test", {transformation:["blip", "blop"]}, "http://res.cloudinary.com/test123/image/upload/t_blip.blop/test", {})
it "should support base transformation" , ->
test_cloudinary_url("test", {transformation:{x:100, y:100, crop:'fill'}, crop:'crop', width:100}, "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/c_crop,w_100/test", {width:100})
it "should support array of base transformations" , ->
test_cloudinary_url("test", {transformation:[{x:100, y:100, width:200, crop:'fill'}, {radius:10}], crop:'crop', width:100}, "http://res.cloudinary.com/test123/image/upload/c_fill,w_200,x_100,y_100/r_10/c_crop,w_100/test", {width:100})
it "should support array of transformations" , ->
result = utils.generate_transformation_string([{x:100, y:100, width:200, crop:'fill'}, {radius:10}])
expect(result).to.eql("c_fill,w_200,x_100,y_100/r_10")
it "should not include empty transformations" , ->
test_cloudinary_url("test", {transformation:[{}, {x:100, y:100, crop:'fill'}, {}]}, "http://res.cloudinary.com/test123/image/upload/c_fill,x_100,y_100/test", {})
it "should support size" , ->
test_cloudinary_url("test", {size:"10x10", crop:'crop'}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_10,w_10/test", {width:"10", height:"10"})
it "should use type from options" , ->
test_cloudinary_url("test", {type:'facebook'}, "http://res.cloudinary.com/test123/image/facebook/test", {})
it "should use resource_type from options" , ->
test_cloudinary_url("test", {resource_type:'raw'}, "http://res.cloudinary.com/test123/raw/upload/test", {})
it "should ignore http links only if type is not given" , ->
test_cloudinary_url("http://test", {type:null}, "http://test", {})
test_cloudinary_url("http://test", {type:"fetch"}, "http://res.cloudinary.com/test123/image/fetch/http://test" , {})
it "should escape fetch urls" , ->
test_cloudinary_url("http://blah.com/hello?a=b", {type:"fetch"}, "http://res.cloudinary.com/test123/image/fetch/http://blah.com/hello%3Fa%3Db", {})
it "should should escape http urls" , ->
test_cloudinary_url("http://www.youtube.com/watch?v=d9NF2edxy-M", {type:"youtube"}, "http://res.cloudinary.com/test123/image/youtube/http://www.youtube.com/watch%3Fv%3Dd9NF2edxy-M", {})
it "should support background" , ->
test_cloudinary_url("test", {background:"red"}, "http://res.cloudinary.com/test123/image/upload/b_red/test", {})
test_cloudinary_url("test", {background:"#112233"}, "http://res.cloudinary.com/test123/image/upload/b_rgb:112233/test", {})
it "should support default_image" , ->
test_cloudinary_url("test", {default_image:"default"}, "http://res.cloudinary.com/test123/image/upload/d_default/test", {})
it "should support angle" , ->
test_cloudinary_url("test", {angle:"55"}, "http://res.cloudinary.com/test123/image/upload/a_55/test", {})
test_cloudinary_url("test", {angle:["auto", "55"]}, "http://res.cloudinary.com/test123/image/upload/a_auto.55/test", {})
it "should support format for fetch urls" , ->
test_cloudinary_url("http://cloudinary.com/images/logo.png", {format:"jpg", type:"fetch"}, "http://res.cloudinary.com/test123/image/fetch/f_jpg/http://cloudinary.com/images/logo.png", {})
it "should support effect" , ->
test_cloudinary_url("test", {effect:"sepia"}, "http://res.cloudinary.com/test123/image/upload/e_sepia/test", {})
it "should support effect with hash param" , ->
test_cloudinary_url("test", {effect:{sepia:10}}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
it "should support effect with array param" , ->
test_cloudinary_url("test", {effect:["sepia", 10]}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
for param,letter of {overlay:"l", underlay:"u"}
it "should support #{param}" , ->
options={}
options[param] = "text:hello"
test_cloudinary_url("test", options, "http://res.cloudinary.com/test123/image/upload/#{letter}_text:hello/test", {})
it "should not pass width/height to html for #{param}" , ->
options = {height:100 , width:100}
options[param] = 'text:hello'
test_cloudinary_url("test", options, "http://res.cloudinary.com/test123/image/upload/h_100,#{letter}_text:hello,w_100/test", {})
it "should use ssl_detected if secure is not given as parameter and not set to true in configuration" , ->
test_cloudinary_url("test", {ssl_detected:true}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should use secure if given over ssl_detected and configuration" , ->
cloudinary.config("secure",true)
test_cloudinary_url("test", {ssl_detected:true, secure:false}, "http://res.cloudinary.com/test123/image/upload/test", {})
it "should use secure: true from configuration over ssl_detected" , ->
cloudinary.config("secure",true)
test_cloudinary_url("test", {ssl_detected:false}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should support external cname" , ->
test_cloudinary_url("test", {cname:"hello.com"}, "http://hello.com/test123/image/upload/test", {})
it "should support external cname with cdn_subdomain on" , ->
test_cloudinary_url("test", {cname:"hello.com", cdn_subdomain:true}, "http://a2.hello.com/test123/image/upload/test", {})
it "should support cdn_subdomain with secure on if using shared_domain" , ->
test_cloudinary_url("test", {secure:true, cdn_subdomain:true}, "https://res-2.cloudinary.com/test123/image/upload/test", {})
it "should support secure_cdn_subdomain false override with secure" , ->
test_cloudinary_url("test", {secure:true, cdn_subdomain:true, secure_cdn_subdomain:false}, "https://res.cloudinary.com/test123/image/upload/test", {})
it "should support secure_cdn_subdomain true override with secure" , ->
test_cloudinary_url("test", {secure:true, cdn_subdomain:true, secure_cdn_subdomain:true, private_cdn:true}, "https://test123-res-2.cloudinary.com/image/upload/test", {})
it "should support string param" , ->
test_cloudinary_url("test", {effect:{sepia:10}}, "http://res.cloudinary.com/test123/image/upload/e_sepia:10/test", {})
it "should support border" , ->
test_cloudinary_url("test", {border:{width:5}}, "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_black/test", {})
test_cloudinary_url("test", {border:{width:5, color:"#ffaabbdd"}}, "http://res.cloudinary.com/test123/image/upload/bo_5px_solid_rgb:ffaabbdd/test", {})
test_cloudinary_url("test", {border:"1px_solid_blue"}, "http://res.cloudinary.com/test123/image/upload/bo_1px_solid_blue/test", {})
test_cloudinary_url("test", {border:"2"}, "http://res.cloudinary.com/test123/image/upload/test", {border:"2"})
it "should support flags" , ->
test_cloudinary_url("test", {flags:"abc"}, "http://res.cloudinary.com/test123/image/upload/fl_abc/test", {})
test_cloudinary_url("test", {flags:["abc", "def"]}, "http://res.cloudinary.com/test123/image/upload/fl_abc.def/test", {})
it "build_upload_params should not destroy options" , ->
options = {width:100, crop:"scale"}
expect(utils.build_upload_params(options)['transformation']).to.eql("c_scale,w_100")
expect(Object.keys(options).length).to.eql(2)
it "build_upload_params canonize booleans" , ->
options = {backup:true, use_filename:false, colors:"true", exif:"false", colors:"true", image_metadata:"false", invalidate:1, eager_async:"1"}
params = utils.build_upload_params(options)
expected = api.only(params, Object.keys(options)...)
actual = { backup:1, use_filename:0, colors:1, exif:0, colors:1, image_metadata:0, invalidate:1, eager_async:1}
expect( expected ).to.eql( actual )
expect(utils.build_upload_params(backup:null)['backup']).to.eql(undefined)
expect(utils.build_upload_params({})['backup']).to.eql(undefined)
it "should add version if public_id contains /" , ->
test_cloudinary_url("folder/test", {}, "http://res.cloudinary.com/test123/image/upload/v1/folder/test", {})
test_cloudinary_url("folder/test", {version:123}, "http://res.cloudinary.com/test123/image/upload/v123/folder/test", {})
it "should not add version if public_id contains version already" , ->
test_cloudinary_url("v1234/test", {}, "http://res.cloudinary.com/test123/image/upload/v1234/test", {})
it "should allow to shorted image/upload urls" , ->
test_cloudinary_url("test", {shorten:true}, "http://res.cloudinary.com/test123/iu/test", {})
it "should escape public_ids" , ->
for source, target of { "a b": "a%20b", "a+b": "a%2Bb", "a%20b": "a%20b", "a-b": "a-b", "a??b": "a%3F%3Fb", "parentheses(interject)": "parentheses(interject)" }
expect(utils.url(source)).to.eql("http://res.cloudinary.com/test123/image/upload/#{target}")
it "should correctly sign URLs", ->
test_cloudinary_url("image.jpg", {version: 1234, transformation: {crop: "crop", width: 10, height: 20}, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/v1234/image.jpg", {})
test_cloudinary_url("image.jpg", {version: 1234, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s----SjmNDA--/v1234/image.jpg", {})
test_cloudinary_url("image.jpg", {transformation: {crop: "crop", width: 10, height: 20}, sign_url: true}, "http://res.cloudinary.com/test123/image/upload/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg", {})
test_cloudinary_url("image.jpg", {transformation: {crop: "crop", width: 10, height: 20}, type: 'authenticated', sign_url: true}, "http://res.cloudinary.com/test123/image/authenticated/s--Ai4Znfl3--/c_crop,h_20,w_10/image.jpg", {})
test_cloudinary_url("http://google.com/path/to/image.png", {type: "fetch", version: 1234, sign_url: true}, "http://res.cloudinary.com/test123/image/fetch/s--hH_YcbiS--/v1234/http://google.com/path/to/image.png", {})
it "should correctly sign_request" , ->
params = utils.sign_request({public_id:"folder/file", version:"1234"})
expect(params).to.eql(public_id:"folder/file", version:"1234", signature:"7a3349cbb373e4812118d625047ede50b90e7b67", api_key:"PI:KEY:<KEY>END_PI")
it "should support responsive width" , ->
test_cloudinary_url("test", {width:100, height:100, crop:"crop", responsive_width:true}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_limit,w_auto/test", {responsive: true})
cloudinary.config("responsive_width_transformation",{width: 'auto', crop: 'pad'})
test_cloudinary_url("test", {width:100, height:100, crop:"crop", responsive_width:true}, "http://res.cloudinary.com/test123/image/upload/c_crop,h_100,w_100/c_pad,w_auto/test", {responsive: true})
describe "zoom", ->
it "should support a decimal value", ->
test_cloudinary_url("test", {zoom: 1.2}, "http://res.cloudinary.com/test123/image/upload/z_1.2/test", {})
describe "encode_double_array", ->
it "should correctly encode double arrays" , ->
expect(utils.encode_double_array([1,2,3,4])).to.eql("1,2,3,4")
expect(utils.encode_double_array([[1,2,3,4],[5,6,7,8]])).to.eql("1,2,3,4|5,6,7,8")
it "should call validate_webhook_signature", ->
@timeout 1000
data = '{"public_id":"117e5550-7bfa-11e4-80d7-f962166bd3be","version":1417727468}'
timestamp = 1417727468
orig = cloudinary.config()
cloudinary.config({api_key:'key',api_secret:'shPI:KEY:<KEY>END_PI'})
sig = cloudinary.utils.webhook_signature(data, timestamp)
expect(sig).to.eql('bac927006d3ce039ef7632e2c03189348d02924a')
cloudinary.config(orig)
|
[
{
"context": "d: req.method\n route_key: [req.method,req.url].join ' '\n query: req.query ? null\n body: req.bod",
"end": 414,
"score": 0.5608383417129517,
"start": 410,
"tag": "KEY",
"value": "join"
}
] | core/request_logging.iced | tosadvisor/link-shortener | 7 | # vim: set expandtab tabstop=2 shiftwidth=2 softtabstop=2
_ = require('wegweg')({
globals: off
})
cache = require 'memory-cache'
request_logging = {}
stats_template =
count: 0
latency: 0
stats =
routes: {}
total: _.clone stats_template
request_logging.middleware = (req,res,next) ->
request_obj = {
_id: _.uuid()
url: req.url
method: req.method
route_key: [req.method,req.url].join ' '
query: req.query ? null
body: req.body ? null
cdate: new Date
metadata: req.metadata
}
log.info 'Request', req.method, req.url
req.request_id = request_obj._id
req.request_start = new Date
req.breakpoint = (str) ->
if conf.developer.show_breakpoints
elapsed = new Date - req.request_start
log.info "Breakpoint \"#{str}\": #{elapsed}ms"
cache.put "request_log:#{request_obj._id}", request_obj, (30 * 1000)
next()
eve.on 'request_log_response', (request_id) ->
if hit = cache.get "request_log:#{request_id}"
cache.del "request_log:#{request_id}"
stats[hit.route_key] ?= _.clone stats_template
++ stats[hit.route_key].count
++ stats.total.count
stats[hit.route_key].latency += (new Date - hit.cdate)
stats.total.latency += (new Date - hit.cdate)
# pull stats
request_logging.stats = ->
tmp = _.clone stats
for k,v of tmp
v.average_ms = 0
if v.count and v.latency
v.average_ms = (v.latency/v.count).toFixed 2
v.route_key = k
tmp[k] = v
arr = _.reverse _.sortBy (_.vals(tmp)), (x) ->
x.average_ms * -1
arr
module.exports = request_logging
| 54847 | # vim: set expandtab tabstop=2 shiftwidth=2 softtabstop=2
_ = require('wegweg')({
globals: off
})
cache = require 'memory-cache'
request_logging = {}
stats_template =
count: 0
latency: 0
stats =
routes: {}
total: _.clone stats_template
request_logging.middleware = (req,res,next) ->
request_obj = {
_id: _.uuid()
url: req.url
method: req.method
route_key: [req.method,req.url].<KEY> ' '
query: req.query ? null
body: req.body ? null
cdate: new Date
metadata: req.metadata
}
log.info 'Request', req.method, req.url
req.request_id = request_obj._id
req.request_start = new Date
req.breakpoint = (str) ->
if conf.developer.show_breakpoints
elapsed = new Date - req.request_start
log.info "Breakpoint \"#{str}\": #{elapsed}ms"
cache.put "request_log:#{request_obj._id}", request_obj, (30 * 1000)
next()
eve.on 'request_log_response', (request_id) ->
if hit = cache.get "request_log:#{request_id}"
cache.del "request_log:#{request_id}"
stats[hit.route_key] ?= _.clone stats_template
++ stats[hit.route_key].count
++ stats.total.count
stats[hit.route_key].latency += (new Date - hit.cdate)
stats.total.latency += (new Date - hit.cdate)
# pull stats
request_logging.stats = ->
tmp = _.clone stats
for k,v of tmp
v.average_ms = 0
if v.count and v.latency
v.average_ms = (v.latency/v.count).toFixed 2
v.route_key = k
tmp[k] = v
arr = _.reverse _.sortBy (_.vals(tmp)), (x) ->
x.average_ms * -1
arr
module.exports = request_logging
| true | # vim: set expandtab tabstop=2 shiftwidth=2 softtabstop=2
_ = require('wegweg')({
globals: off
})
cache = require 'memory-cache'
request_logging = {}
stats_template =
count: 0
latency: 0
stats =
routes: {}
total: _.clone stats_template
request_logging.middleware = (req,res,next) ->
request_obj = {
_id: _.uuid()
url: req.url
method: req.method
route_key: [req.method,req.url].PI:KEY:<KEY>END_PI ' '
query: req.query ? null
body: req.body ? null
cdate: new Date
metadata: req.metadata
}
log.info 'Request', req.method, req.url
req.request_id = request_obj._id
req.request_start = new Date
req.breakpoint = (str) ->
if conf.developer.show_breakpoints
elapsed = new Date - req.request_start
log.info "Breakpoint \"#{str}\": #{elapsed}ms"
cache.put "request_log:#{request_obj._id}", request_obj, (30 * 1000)
next()
eve.on 'request_log_response', (request_id) ->
if hit = cache.get "request_log:#{request_id}"
cache.del "request_log:#{request_id}"
stats[hit.route_key] ?= _.clone stats_template
++ stats[hit.route_key].count
++ stats.total.count
stats[hit.route_key].latency += (new Date - hit.cdate)
stats.total.latency += (new Date - hit.cdate)
# pull stats
request_logging.stats = ->
tmp = _.clone stats
for k,v of tmp
v.average_ms = 0
if v.count and v.latency
v.average_ms = (v.latency/v.count).toFixed 2
v.route_key = k
tmp[k] = v
arr = _.reverse _.sortBy (_.vals(tmp)), (x) ->
x.average_ms * -1
arr
module.exports = request_logging
|
[
{
"context": "dtown\"}\n {id: \"56c33d0f68dee9060073adaa\", name: \"Harlem\"}\n {id: \"56c33c8768dee9060073ada8\", name: \"East ",
"end": 421,
"score": 0.8436933755874634,
"start": 415,
"tag": "NAME",
"value": "Harlem"
},
{
"context": "oklyn\"}\n {id: \"56c33ad85e12af0600509eaf\", name: \"Bronx\"}\n {id: \"56c33ab95e12af0600509ead\", name: \"Hudso",
"end": 651,
"score": 0.8544403910636902,
"start": 646,
"tag": "NAME",
"value": "Bronx"
}
] | src/mobile/apps/fair_info/maps/armory_arts_week_neighborhoods.coffee | kanaabe/force | 1 | module.exports = [
{id: "56c33f4268dee9060073adb2", name: "Upper West Side"}
{id: "56c33ef55e12af0600509ec1", name: "Upper East Side"}
{id: "56c33ee25e12af0600509ebf", name: "Tribeca & Financial District"}
{id: "56c33e5368dee9060073adac", name: "Queens"}
{id: "56c33c5968dee9060073ada6", name: "Soho & Nolita"}
{id: "56c33d715e12af0600509eb9", name: "Midtown"}
{id: "56c33d0f68dee9060073adaa", name: "Harlem"}
{id: "56c33c8768dee9060073ada8", name: "East Village & Lower East Side"}
{id: "56c33baf68dee9060073ada2", name: "Chelsea"}
{id: "56c33b1a68dee9060073ad9e", name: "Brooklyn"}
{id: "56c33ad85e12af0600509eaf", name: "Bronx"}
{id: "56c33ab95e12af0600509ead", name: "Hudson Valley"}
]
| 120683 | module.exports = [
{id: "56c33f4268dee9060073adb2", name: "Upper West Side"}
{id: "56c33ef55e12af0600509ec1", name: "Upper East Side"}
{id: "56c33ee25e12af0600509ebf", name: "Tribeca & Financial District"}
{id: "56c33e5368dee9060073adac", name: "Queens"}
{id: "56c33c5968dee9060073ada6", name: "Soho & Nolita"}
{id: "56c33d715e12af0600509eb9", name: "Midtown"}
{id: "56c33d0f68dee9060073adaa", name: "<NAME>"}
{id: "56c33c8768dee9060073ada8", name: "East Village & Lower East Side"}
{id: "56c33baf68dee9060073ada2", name: "Chelsea"}
{id: "56c33b1a68dee9060073ad9e", name: "Brooklyn"}
{id: "56c33ad85e12af0600509eaf", name: "<NAME>"}
{id: "56c33ab95e12af0600509ead", name: "Hudson Valley"}
]
| true | module.exports = [
{id: "56c33f4268dee9060073adb2", name: "Upper West Side"}
{id: "56c33ef55e12af0600509ec1", name: "Upper East Side"}
{id: "56c33ee25e12af0600509ebf", name: "Tribeca & Financial District"}
{id: "56c33e5368dee9060073adac", name: "Queens"}
{id: "56c33c5968dee9060073ada6", name: "Soho & Nolita"}
{id: "56c33d715e12af0600509eb9", name: "Midtown"}
{id: "56c33d0f68dee9060073adaa", name: "PI:NAME:<NAME>END_PI"}
{id: "56c33c8768dee9060073ada8", name: "East Village & Lower East Side"}
{id: "56c33baf68dee9060073ada2", name: "Chelsea"}
{id: "56c33b1a68dee9060073ad9e", name: "Brooklyn"}
{id: "56c33ad85e12af0600509eaf", name: "PI:NAME:<NAME>END_PI"}
{id: "56c33ab95e12af0600509ead", name: "Hudson Valley"}
]
|
[
{
"context": "overview Tests for no-useless-call rule.\n# @author Toru Nagashima\n###\n\n'use strict'\n\n#-----------------------------",
"end": 77,
"score": 0.9998607635498047,
"start": 63,
"tag": "NAME",
"value": "Toru Nagashima"
}
] | src/tests/rules/no-useless-call.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-useless-call rule.
# @author Toru Nagashima
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-useless-call'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-useless-call', rule,
valid: [
# `this` binding is different.
'foo.apply(obj, 1, 2)'
'obj.foo.apply(null, 1, 2)'
'obj.foo.apply(otherObj, 1, 2)'
'a.b(x, y).c.foo.apply(a.b(x, z).c, 1, 2)'
'foo.apply(obj, [1, 2])'
'obj.foo.apply(null, [1, 2])'
'obj.foo.apply(otherObj, [1, 2])'
'a.b(x, y).c.foo.apply(a.b(x, z).c, [1, 2])'
'a.b.foo.apply(a.b.c, [1, 2])'
# ignores variadic.
'foo.apply(null, args)'
'obj.foo.apply(obj, args)'
# ignores computed property.
'foo[call](null, 1, 2)'
'foo[apply](null, [1, 2])'
# ignores incomplete things.
'foo.call()'
'obj.foo.call()'
'foo.apply()'
'obj.foo.apply()'
]
invalid: [
# call.
code: 'foo.call(undefined, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'foo.call(null, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'obj.foo.call(obj, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'a.b.c.foo.call(a.b.c, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'a.b(x, y).c.foo.call(a.b(x, y).c, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
# apply.
code: 'foo.apply(undefined, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'foo.apply(null, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'obj.foo.apply(obj, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'a.b.c.foo.apply(a.b.c, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'a.b(x, y).c.foo.apply(a.b(x, y).c, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: '[].concat.apply([ ], [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: '''
[].concat.apply([
###empty###
], [1, 2])
'''
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'abc.get("foo", 0).concat.apply(abc . get("foo", 0 ), [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
]
| 143809 | ###*
# @fileoverview Tests for no-useless-call rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-useless-call'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-useless-call', rule,
valid: [
# `this` binding is different.
'foo.apply(obj, 1, 2)'
'obj.foo.apply(null, 1, 2)'
'obj.foo.apply(otherObj, 1, 2)'
'a.b(x, y).c.foo.apply(a.b(x, z).c, 1, 2)'
'foo.apply(obj, [1, 2])'
'obj.foo.apply(null, [1, 2])'
'obj.foo.apply(otherObj, [1, 2])'
'a.b(x, y).c.foo.apply(a.b(x, z).c, [1, 2])'
'a.b.foo.apply(a.b.c, [1, 2])'
# ignores variadic.
'foo.apply(null, args)'
'obj.foo.apply(obj, args)'
# ignores computed property.
'foo[call](null, 1, 2)'
'foo[apply](null, [1, 2])'
# ignores incomplete things.
'foo.call()'
'obj.foo.call()'
'foo.apply()'
'obj.foo.apply()'
]
invalid: [
# call.
code: 'foo.call(undefined, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'foo.call(null, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'obj.foo.call(obj, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'a.b.c.foo.call(a.b.c, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'a.b(x, y).c.foo.call(a.b(x, y).c, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
# apply.
code: 'foo.apply(undefined, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'foo.apply(null, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'obj.foo.apply(obj, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'a.b.c.foo.apply(a.b.c, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'a.b(x, y).c.foo.apply(a.b(x, y).c, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: '[].concat.apply([ ], [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: '''
[].concat.apply([
###empty###
], [1, 2])
'''
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'abc.get("foo", 0).concat.apply(abc . get("foo", 0 ), [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
]
| true | ###*
# @fileoverview Tests for no-useless-call rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-useless-call'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-useless-call', rule,
valid: [
# `this` binding is different.
'foo.apply(obj, 1, 2)'
'obj.foo.apply(null, 1, 2)'
'obj.foo.apply(otherObj, 1, 2)'
'a.b(x, y).c.foo.apply(a.b(x, z).c, 1, 2)'
'foo.apply(obj, [1, 2])'
'obj.foo.apply(null, [1, 2])'
'obj.foo.apply(otherObj, [1, 2])'
'a.b(x, y).c.foo.apply(a.b(x, z).c, [1, 2])'
'a.b.foo.apply(a.b.c, [1, 2])'
# ignores variadic.
'foo.apply(null, args)'
'obj.foo.apply(obj, args)'
# ignores computed property.
'foo[call](null, 1, 2)'
'foo[apply](null, [1, 2])'
# ignores incomplete things.
'foo.call()'
'obj.foo.call()'
'foo.apply()'
'obj.foo.apply()'
]
invalid: [
# call.
code: 'foo.call(undefined, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'foo.call(null, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'obj.foo.call(obj, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'a.b.c.foo.call(a.b.c, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
code: 'a.b(x, y).c.foo.call(a.b(x, y).c, 1, 2)'
errors: [message: "unnecessary '.call()'.", type: 'CallExpression']
,
# apply.
code: 'foo.apply(undefined, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'foo.apply(null, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'obj.foo.apply(obj, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'a.b.c.foo.apply(a.b.c, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'a.b(x, y).c.foo.apply(a.b(x, y).c, [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: '[].concat.apply([ ], [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: '''
[].concat.apply([
###empty###
], [1, 2])
'''
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
,
code: 'abc.get("foo", 0).concat.apply(abc . get("foo", 0 ), [1, 2])'
errors: [message: "unnecessary '.apply()'.", type: 'CallExpression']
]
|
[
{
"context": "ourceRepo.setConfigValue('remote.origin.url', 'git@github.com:/another/repo')\n targetRepo.setConfigValue",
"end": 7838,
"score": 0.8259358406066895,
"start": 7827,
"tag": "EMAIL",
"value": "@github.com"
},
{
"context": "argetRepo.setConfigValue('remote.origin.url', 'git@github.com:/another/repo')\n patrick.snapsho",
"end": 7914,
"score": 0.6546378135681152,
"start": 7914,
"tag": "EMAIL",
"value": ""
},
{
"context": "po.setConfigValue('remote.origin.url', 'git@github.com:/another/repo')\n patrick.snapshot(sourcePa",
"end": 7925,
"score": 0.6446192264556885,
"start": 7922,
"tag": "EMAIL",
"value": "com"
}
] | spec/patrick-spec.coffee | atom/patrick | 5 | child_process = require 'child_process'
fs = require 'fs'
path = require 'path'
_ = require 'underscore'
git = require 'git-utils'
rm = require('rimraf').sync
tmp = require 'tmp'
cp = require('wrench').copyDirSyncRecursive
patrick = require '../lib/patrick'
describe 'patrick', ->
[snapshotHandler, mirrorHandler, sourceRepo, targetRepo, sourcePath, targetPath] = []
waitsForCommand = (command, options) ->
finished = false
error = null
child_process.exec command, options, (err, stdout, stderr) ->
error = err
console.error 'Command failed', command, arguments if err?
finished = true
waitsFor command, ->
finished
runs ->
expect(error).toBeFalsy()
waitsForSnapshot = (mirrorOptions={})->
runs ->
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorOptions, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeFalsy()
waitsForSourceRepo = (name) ->
runs ->
cp(path.join(__dirname, 'fixtures', name), path.join(sourcePath, '.git'))
sourceRepo = git.open(sourcePath)
sourceRepo.setConfigValue('remote.origin.url', "file://#{sourcePath}")
waitsForCommand 'git reset --hard HEAD', {cwd: sourcePath}
waitsForTargetRepo = (name) ->
runs ->
cp(path.join(__dirname, 'fixtures', name), path.join(targetPath, '.git'))
targetRepo = git.open(targetPath)
targetRepo.setConfigValue('remote.origin.url', "file://#{sourcePath}")
waitsForCommand 'git reset --hard HEAD', {cwd: targetPath}
beforeEach ->
sourcePath = null
targetPath = null
snapshotHandler = jasmine.createSpy('snapshot handler')
mirrorHandler = jasmine.createSpy('mirror handler')
tmp.dir (error, tempPath) -> sourcePath = tempPath
tmp.dir (error, tempPath) -> targetPath = tempPath
waitsFor 'tmp files', -> sourcePath and targetPath
waitsForSourceRepo 'ahead.git'
describe 'when the source has unpushed changes', ->
describe 'when the target has no unpushed changes', ->
it 'applies the unpushed changes to the target repo and updates the target HEAD', ->
waitsForTargetRepo 'master.git'
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the source repo has changes in the working directory', ->
it "applies the changes to the target repo's working directory", ->
waitsForTargetRepo 'master.git'
runs ->
fs.writeFileSync(path.join(sourcePath, 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(sourcePath, 'a1.txt'), 'NEW BEANS')
fs.unlinkSync(path.join(sourcePath, 'b.txt'))
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual sourceRepo.getStatus()
expect(fs.readFileSync(path.join(targetPath, 'a.txt'), 'utf8')).toBe 'COOL BEANS'
expect(fs.existsSync(path.join(targetPath, 'b.txt'))).toBe false
expect(fs.readFileSync(path.join(targetPath, 'a1.txt'), 'utf8')).toBe 'NEW BEANS'
describe "when the target repository does not exist", ->
it "clones the repository to the target path and updates the target HEAD", ->
waitsForSnapshot()
runs ->
targetRepo = git.open(targetPath)
expect(targetRepo).toBeTruthy()
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target has unpushed changes', ->
it 'creates and checks out a new branch at the source HEAD', ->
fs.writeFileSync(path.join(targetPath, 'new.txt'), '')
waitsForTargetRepo 'ahead.git'
runs ->
waitsForCommand 'git add new.txt && git commit -am"new"', {cwd: targetPath}
waitsForSnapshot()
runs ->
expect(targetRepo.getShortHead()).toBe 'master-1'
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target has working directory changes', ->
describe 'when the changes are the same as in the source working directory', ->
it 'mirrors the snapshot successfully', ->
waitsForTargetRepo 'ahead.git'
runs ->
fs.writeFileSync(path.join(sourcePath, 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(sourcePath, 'a1.txt'), 'NEW BEANS')
fs.writeFileSync(path.join(sourcePath, 'a2.txt'), 'NEWER BEANS')
fs.unlinkSync(path.join(sourcePath, 'b.txt'))
fs.writeFileSync(path.join(targetPath , 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(targetPath, 'a1.txt'), 'NEW BEANS')
fs.unlinkSync(path.join(targetPath, 'b.txt'))
waitsForSnapshot()
runs ->
expect(fs.readFileSync(path.join(targetPath, 'a.txt'), 'utf8')).toBe 'COOL BEANS'
expect(fs.readFileSync(path.join(targetPath, 'a1.txt'), 'utf8')).toBe 'NEW BEANS'
expect(fs.readFileSync(path.join(targetPath, 'a2.txt'), 'utf8')).toBe 'NEWER BEANS'
expect(fs.existsSync(path.join(targetPath, 'b.txt'))).toBe false
describe 'when the changes differ from the source repository', ->
it 'fails to mirror the snapshot', ->
fs.writeFileSync(path.join(targetPath, 'dirty.txt'), '')
waitsForTargetRepo 'ahead.git'
runs ->
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeTruthy()
describe 'when the source and target have the same HEAD', ->
it 'does not change the target HEAD', ->
waitsForTargetRepo 'ahead.git'
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the source branch does not exist in the target repository', ->
it 'creates and checks out a new branch at the source HEAD', ->
waitsForTargetRepo 'master.git'
waitsForCommand 'git checkout -b blaster', {cwd: sourcePath}
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target location and the source use the same ssh URL', ->
it 'mirrors the snapshot', ->
waitsForTargetRepo 'master.git'
execSpy = null
runs ->
sourceRepo.setConfigValue('remote.origin.url', 'git@github.com:/another/repo')
targetRepo.setConfigValue('remote.origin.url', 'git@github.com:/another/repo')
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
execSpy = spyOn(child_process, 'exec')
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
execSpy.callCount > 0 or mirrorHandler.callCount > 0
runs ->
expect(execSpy.callCount).toBeGreaterThan 0
[command] = execSpy.argsForCall[0] if execSpy.argsForCall[0]
expect(command).toBe 'git fetch git@github.com:/another/repo'
describe 'when the target location has a different URL than the source', ->
it 'fails to mirror the snapshot', ->
waitsForTargetRepo 'master.git'
runs ->
targetRepo.setConfigValue('remote.origin.url', 'http://github.com/another/repo')
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeTruthy()
describe 'when a progress callback is given', ->
it 'calls back for each operation with a description, command, and total operation count', ->
progressCallback = jasmine.createSpy('progress callback')
waitsForTargetRepo 'master.git'
waitsForSnapshot({progressCallback})
runs ->
expect(progressCallback.callCount).toBe 2
expect(progressCallback.argsForCall[0][0]).toBeTruthy()
expect(progressCallback.argsForCall[0][1]).toBeTruthy()
expect(progressCallback.argsForCall[0][2]).toBe 2
expect(progressCallback.argsForCall[1][0]).toBeTruthy()
expect(progressCallback.argsForCall[1][1]).toBeTruthy()
expect(progressCallback.argsForCall[1][2]).toBe 2
describe "when the path to snapshot does not contain a repository", ->
it "calls back with an error", ->
callback = jasmine.createSpy('callback')
patrick.snapshot(targetPath, callback)
waitsFor -> callback.callCount is 1
runs ->
expect(callback.argsForCall[0][0]).toBeTruthy()
expect(callback.argsForCall[0][1]).toBeFalsy()
| 179863 | child_process = require 'child_process'
fs = require 'fs'
path = require 'path'
_ = require 'underscore'
git = require 'git-utils'
rm = require('rimraf').sync
tmp = require 'tmp'
cp = require('wrench').copyDirSyncRecursive
patrick = require '../lib/patrick'
describe 'patrick', ->
[snapshotHandler, mirrorHandler, sourceRepo, targetRepo, sourcePath, targetPath] = []
waitsForCommand = (command, options) ->
finished = false
error = null
child_process.exec command, options, (err, stdout, stderr) ->
error = err
console.error 'Command failed', command, arguments if err?
finished = true
waitsFor command, ->
finished
runs ->
expect(error).toBeFalsy()
waitsForSnapshot = (mirrorOptions={})->
runs ->
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorOptions, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeFalsy()
waitsForSourceRepo = (name) ->
runs ->
cp(path.join(__dirname, 'fixtures', name), path.join(sourcePath, '.git'))
sourceRepo = git.open(sourcePath)
sourceRepo.setConfigValue('remote.origin.url', "file://#{sourcePath}")
waitsForCommand 'git reset --hard HEAD', {cwd: sourcePath}
waitsForTargetRepo = (name) ->
runs ->
cp(path.join(__dirname, 'fixtures', name), path.join(targetPath, '.git'))
targetRepo = git.open(targetPath)
targetRepo.setConfigValue('remote.origin.url', "file://#{sourcePath}")
waitsForCommand 'git reset --hard HEAD', {cwd: targetPath}
beforeEach ->
sourcePath = null
targetPath = null
snapshotHandler = jasmine.createSpy('snapshot handler')
mirrorHandler = jasmine.createSpy('mirror handler')
tmp.dir (error, tempPath) -> sourcePath = tempPath
tmp.dir (error, tempPath) -> targetPath = tempPath
waitsFor 'tmp files', -> sourcePath and targetPath
waitsForSourceRepo 'ahead.git'
describe 'when the source has unpushed changes', ->
describe 'when the target has no unpushed changes', ->
it 'applies the unpushed changes to the target repo and updates the target HEAD', ->
waitsForTargetRepo 'master.git'
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the source repo has changes in the working directory', ->
it "applies the changes to the target repo's working directory", ->
waitsForTargetRepo 'master.git'
runs ->
fs.writeFileSync(path.join(sourcePath, 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(sourcePath, 'a1.txt'), 'NEW BEANS')
fs.unlinkSync(path.join(sourcePath, 'b.txt'))
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual sourceRepo.getStatus()
expect(fs.readFileSync(path.join(targetPath, 'a.txt'), 'utf8')).toBe 'COOL BEANS'
expect(fs.existsSync(path.join(targetPath, 'b.txt'))).toBe false
expect(fs.readFileSync(path.join(targetPath, 'a1.txt'), 'utf8')).toBe 'NEW BEANS'
describe "when the target repository does not exist", ->
it "clones the repository to the target path and updates the target HEAD", ->
waitsForSnapshot()
runs ->
targetRepo = git.open(targetPath)
expect(targetRepo).toBeTruthy()
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target has unpushed changes', ->
it 'creates and checks out a new branch at the source HEAD', ->
fs.writeFileSync(path.join(targetPath, 'new.txt'), '')
waitsForTargetRepo 'ahead.git'
runs ->
waitsForCommand 'git add new.txt && git commit -am"new"', {cwd: targetPath}
waitsForSnapshot()
runs ->
expect(targetRepo.getShortHead()).toBe 'master-1'
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target has working directory changes', ->
describe 'when the changes are the same as in the source working directory', ->
it 'mirrors the snapshot successfully', ->
waitsForTargetRepo 'ahead.git'
runs ->
fs.writeFileSync(path.join(sourcePath, 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(sourcePath, 'a1.txt'), 'NEW BEANS')
fs.writeFileSync(path.join(sourcePath, 'a2.txt'), 'NEWER BEANS')
fs.unlinkSync(path.join(sourcePath, 'b.txt'))
fs.writeFileSync(path.join(targetPath , 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(targetPath, 'a1.txt'), 'NEW BEANS')
fs.unlinkSync(path.join(targetPath, 'b.txt'))
waitsForSnapshot()
runs ->
expect(fs.readFileSync(path.join(targetPath, 'a.txt'), 'utf8')).toBe 'COOL BEANS'
expect(fs.readFileSync(path.join(targetPath, 'a1.txt'), 'utf8')).toBe 'NEW BEANS'
expect(fs.readFileSync(path.join(targetPath, 'a2.txt'), 'utf8')).toBe 'NEWER BEANS'
expect(fs.existsSync(path.join(targetPath, 'b.txt'))).toBe false
describe 'when the changes differ from the source repository', ->
it 'fails to mirror the snapshot', ->
fs.writeFileSync(path.join(targetPath, 'dirty.txt'), '')
waitsForTargetRepo 'ahead.git'
runs ->
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeTruthy()
describe 'when the source and target have the same HEAD', ->
it 'does not change the target HEAD', ->
waitsForTargetRepo 'ahead.git'
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the source branch does not exist in the target repository', ->
it 'creates and checks out a new branch at the source HEAD', ->
waitsForTargetRepo 'master.git'
waitsForCommand 'git checkout -b blaster', {cwd: sourcePath}
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target location and the source use the same ssh URL', ->
it 'mirrors the snapshot', ->
waitsForTargetRepo 'master.git'
execSpy = null
runs ->
sourceRepo.setConfigValue('remote.origin.url', 'git<EMAIL>:/another/repo')
targetRepo.setConfigValue('remote.origin.url', 'git<EMAIL>@github.<EMAIL>:/another/repo')
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
execSpy = spyOn(child_process, 'exec')
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
execSpy.callCount > 0 or mirrorHandler.callCount > 0
runs ->
expect(execSpy.callCount).toBeGreaterThan 0
[command] = execSpy.argsForCall[0] if execSpy.argsForCall[0]
expect(command).toBe 'git fetch git@github.com:/another/repo'
describe 'when the target location has a different URL than the source', ->
it 'fails to mirror the snapshot', ->
waitsForTargetRepo 'master.git'
runs ->
targetRepo.setConfigValue('remote.origin.url', 'http://github.com/another/repo')
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeTruthy()
describe 'when a progress callback is given', ->
it 'calls back for each operation with a description, command, and total operation count', ->
progressCallback = jasmine.createSpy('progress callback')
waitsForTargetRepo 'master.git'
waitsForSnapshot({progressCallback})
runs ->
expect(progressCallback.callCount).toBe 2
expect(progressCallback.argsForCall[0][0]).toBeTruthy()
expect(progressCallback.argsForCall[0][1]).toBeTruthy()
expect(progressCallback.argsForCall[0][2]).toBe 2
expect(progressCallback.argsForCall[1][0]).toBeTruthy()
expect(progressCallback.argsForCall[1][1]).toBeTruthy()
expect(progressCallback.argsForCall[1][2]).toBe 2
describe "when the path to snapshot does not contain a repository", ->
it "calls back with an error", ->
callback = jasmine.createSpy('callback')
patrick.snapshot(targetPath, callback)
waitsFor -> callback.callCount is 1
runs ->
expect(callback.argsForCall[0][0]).toBeTruthy()
expect(callback.argsForCall[0][1]).toBeFalsy()
| true | child_process = require 'child_process'
fs = require 'fs'
path = require 'path'
_ = require 'underscore'
git = require 'git-utils'
rm = require('rimraf').sync
tmp = require 'tmp'
cp = require('wrench').copyDirSyncRecursive
patrick = require '../lib/patrick'
describe 'patrick', ->
[snapshotHandler, mirrorHandler, sourceRepo, targetRepo, sourcePath, targetPath] = []
waitsForCommand = (command, options) ->
finished = false
error = null
child_process.exec command, options, (err, stdout, stderr) ->
error = err
console.error 'Command failed', command, arguments if err?
finished = true
waitsFor command, ->
finished
runs ->
expect(error).toBeFalsy()
waitsForSnapshot = (mirrorOptions={})->
runs ->
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorOptions, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeFalsy()
waitsForSourceRepo = (name) ->
runs ->
cp(path.join(__dirname, 'fixtures', name), path.join(sourcePath, '.git'))
sourceRepo = git.open(sourcePath)
sourceRepo.setConfigValue('remote.origin.url', "file://#{sourcePath}")
waitsForCommand 'git reset --hard HEAD', {cwd: sourcePath}
waitsForTargetRepo = (name) ->
runs ->
cp(path.join(__dirname, 'fixtures', name), path.join(targetPath, '.git'))
targetRepo = git.open(targetPath)
targetRepo.setConfigValue('remote.origin.url', "file://#{sourcePath}")
waitsForCommand 'git reset --hard HEAD', {cwd: targetPath}
beforeEach ->
sourcePath = null
targetPath = null
snapshotHandler = jasmine.createSpy('snapshot handler')
mirrorHandler = jasmine.createSpy('mirror handler')
tmp.dir (error, tempPath) -> sourcePath = tempPath
tmp.dir (error, tempPath) -> targetPath = tempPath
waitsFor 'tmp files', -> sourcePath and targetPath
waitsForSourceRepo 'ahead.git'
describe 'when the source has unpushed changes', ->
describe 'when the target has no unpushed changes', ->
it 'applies the unpushed changes to the target repo and updates the target HEAD', ->
waitsForTargetRepo 'master.git'
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the source repo has changes in the working directory', ->
it "applies the changes to the target repo's working directory", ->
waitsForTargetRepo 'master.git'
runs ->
fs.writeFileSync(path.join(sourcePath, 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(sourcePath, 'a1.txt'), 'NEW BEANS')
fs.unlinkSync(path.join(sourcePath, 'b.txt'))
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual sourceRepo.getStatus()
expect(fs.readFileSync(path.join(targetPath, 'a.txt'), 'utf8')).toBe 'COOL BEANS'
expect(fs.existsSync(path.join(targetPath, 'b.txt'))).toBe false
expect(fs.readFileSync(path.join(targetPath, 'a1.txt'), 'utf8')).toBe 'NEW BEANS'
describe "when the target repository does not exist", ->
it "clones the repository to the target path and updates the target HEAD", ->
waitsForSnapshot()
runs ->
targetRepo = git.open(targetPath)
expect(targetRepo).toBeTruthy()
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target has unpushed changes', ->
it 'creates and checks out a new branch at the source HEAD', ->
fs.writeFileSync(path.join(targetPath, 'new.txt'), '')
waitsForTargetRepo 'ahead.git'
runs ->
waitsForCommand 'git add new.txt && git commit -am"new"', {cwd: targetPath}
waitsForSnapshot()
runs ->
expect(targetRepo.getShortHead()).toBe 'master-1'
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target has working directory changes', ->
describe 'when the changes are the same as in the source working directory', ->
it 'mirrors the snapshot successfully', ->
waitsForTargetRepo 'ahead.git'
runs ->
fs.writeFileSync(path.join(sourcePath, 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(sourcePath, 'a1.txt'), 'NEW BEANS')
fs.writeFileSync(path.join(sourcePath, 'a2.txt'), 'NEWER BEANS')
fs.unlinkSync(path.join(sourcePath, 'b.txt'))
fs.writeFileSync(path.join(targetPath , 'a.txt'), 'COOL BEANS')
fs.writeFileSync(path.join(targetPath, 'a1.txt'), 'NEW BEANS')
fs.unlinkSync(path.join(targetPath, 'b.txt'))
waitsForSnapshot()
runs ->
expect(fs.readFileSync(path.join(targetPath, 'a.txt'), 'utf8')).toBe 'COOL BEANS'
expect(fs.readFileSync(path.join(targetPath, 'a1.txt'), 'utf8')).toBe 'NEW BEANS'
expect(fs.readFileSync(path.join(targetPath, 'a2.txt'), 'utf8')).toBe 'NEWER BEANS'
expect(fs.existsSync(path.join(targetPath, 'b.txt'))).toBe false
describe 'when the changes differ from the source repository', ->
it 'fails to mirror the snapshot', ->
fs.writeFileSync(path.join(targetPath, 'dirty.txt'), '')
waitsForTargetRepo 'ahead.git'
runs ->
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeTruthy()
describe 'when the source and target have the same HEAD', ->
it 'does not change the target HEAD', ->
waitsForTargetRepo 'ahead.git'
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the source branch does not exist in the target repository', ->
it 'creates and checks out a new branch at the source HEAD', ->
waitsForTargetRepo 'master.git'
waitsForCommand 'git checkout -b blaster', {cwd: sourcePath}
waitsForSnapshot()
runs ->
expect(targetRepo.getHead()).toBe sourceRepo.getHead()
expect(targetRepo.getReferenceTarget('HEAD')).toBe sourceRepo.getReferenceTarget('HEAD')
expect(targetRepo.getStatus()).toEqual {}
describe 'when the target location and the source use the same ssh URL', ->
it 'mirrors the snapshot', ->
waitsForTargetRepo 'master.git'
execSpy = null
runs ->
sourceRepo.setConfigValue('remote.origin.url', 'gitPI:EMAIL:<EMAIL>END_PI:/another/repo')
targetRepo.setConfigValue('remote.origin.url', 'gitPI:EMAIL:<EMAIL>END_PI@github.PI:EMAIL:<EMAIL>END_PI:/another/repo')
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
execSpy = spyOn(child_process, 'exec')
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
execSpy.callCount > 0 or mirrorHandler.callCount > 0
runs ->
expect(execSpy.callCount).toBeGreaterThan 0
[command] = execSpy.argsForCall[0] if execSpy.argsForCall[0]
expect(command).toBe 'git fetch git@github.com:/another/repo'
describe 'when the target location has a different URL than the source', ->
it 'fails to mirror the snapshot', ->
waitsForTargetRepo 'master.git'
runs ->
targetRepo.setConfigValue('remote.origin.url', 'http://github.com/another/repo')
patrick.snapshot(sourcePath, snapshotHandler)
waitsFor 'snapshot handler', ->
snapshotHandler.callCount > 0
runs ->
[snapshotError, snapshot] = snapshotHandler.argsForCall[0]
expect(snapshotError).toBeFalsy()
expect(snapshot).not.toBeNull()
patrick.mirror(targetPath, snapshot, mirrorHandler)
waitsFor 'mirror handler', ->
mirrorHandler.callCount > 0
runs ->
[mirrorError] = mirrorHandler.argsForCall[0]
expect(mirrorError).toBeTruthy()
describe 'when a progress callback is given', ->
it 'calls back for each operation with a description, command, and total operation count', ->
progressCallback = jasmine.createSpy('progress callback')
waitsForTargetRepo 'master.git'
waitsForSnapshot({progressCallback})
runs ->
expect(progressCallback.callCount).toBe 2
expect(progressCallback.argsForCall[0][0]).toBeTruthy()
expect(progressCallback.argsForCall[0][1]).toBeTruthy()
expect(progressCallback.argsForCall[0][2]).toBe 2
expect(progressCallback.argsForCall[1][0]).toBeTruthy()
expect(progressCallback.argsForCall[1][1]).toBeTruthy()
expect(progressCallback.argsForCall[1][2]).toBe 2
describe "when the path to snapshot does not contain a repository", ->
it "calls back with an error", ->
callback = jasmine.createSpy('callback')
patrick.snapshot(targetPath, callback)
waitsFor -> callback.callCount is 1
runs ->
expect(callback.argsForCall[0][0]).toBeTruthy()
expect(callback.argsForCall[0][1]).toBeFalsy()
|
[
{
"context": " email: smartnoraConfig.email\n password: smartnoraConfig.password\n group: smartnoraConfig.group\n vali",
"end": 1191,
"score": 0.9987141489982605,
"start": 1167,
"tag": "PASSWORD",
"value": "smartnoraConfig.password"
},
{
"context": "status) =>\n env.logger.debug(\"Light device '#{@id}' status: \" + JSON.stringify(status.text,null,2))",
"end": 4595,
"score": 0.9448893070220947,
"start": 4592,
"tag": "USERNAME",
"value": "@id"
},
{
"context": ".next()\n @close$.complete()\n resolve(@id)\n )\n",
"end": 6190,
"score": 0.8053605556488037,
"start": 6187,
"tag": "USERNAME",
"value": "@id"
}
] | adapters/light.coffee | bertreb/pimatic-smartnora | 0 | module.exports = (env) ->
Promise = env.require 'bluebird'
assert = env.require 'cassert'
events = require 'events'
rxjs = require("rxjs")
operators = require("rxjs/operators")
connection = require("../node_modules/node-red-contrib-smartnora/build/firebase/connection.js")
device_context = require("../node_modules/node-red-contrib-smartnora/build/firebase/device-context.js")
util = require("../node_modules/node-red-contrib-smartnora/build/nodes/util.js")
_ = require "lodash"
class LightAdapter extends events.EventEmitter
constructor: (config, pimaticDevice, smartnoraConfig) ->
@id = config.pimatic_device_id #pimaticDevice.config.id
@type = "noraf-light"
@pimaticDevice = pimaticDevice
#@subDeviceId = adapterConfig.pimaticSubDeviceId
#@UpdateState = adapterConfig.updateState
@state =
online: true
on: false
brightness: 100
@stateAvailable = @pimaticDevice.hasAction("changeStateTo")
@turnOnOffAvailable = @pimaticDevice.hasAction("turnOn") and @pimaticDevice.hasAction("turnOff")
noraConfig =
email: smartnoraConfig.email
password: smartnoraConfig.password
group: smartnoraConfig.group
valid: true
localExecution: false
env.logger.debug("noraCONFIG:" + JSON.stringify(noraConfig,null,2))
if !(noraConfig? and noraConfig.valid)
return
@close$ = new rxjs.Subject()
ctx = new device_context.DeviceContext(@)
ctx.update(@close$)
deviceConfig =
id: config.pimatic_device_id
type: "action.devices.types.LIGHT"
traits: [
"action.devices.traits.OnOff",
"action.devices.traits.Brightness"
]
name: {
name: config.name
}
roomHint: config.roomHint
willReportState: true
state:
on: false
online: true
brightness: 100
attributes: {}
noraSpecific:
turnOnWhenBrightnessChanges: true
notifyState = (state) =>
stateString = state.on ? "on" : "off"
stateString += " #{state.brightness}"
ctx.state$.next(stateString)
# setup device stream from smartnora cloud
#
@device$ = connection.FirebaseConnection
.withLogger(env.logger)
.fromConfig(noraConfig, ctx)
.pipe(operators.switchMap((connection) => connection.withDevice(deviceConfig, ctx)), util.withLocalExecution(noraConfig), operators.publishReplay(1), operators.refCount(), operators.takeUntil(@close$))
@device$.pipe(operators.switchMap((d) => d.state$), operators.tap((state) => notifyState(state)), operators.takeUntil(@close$)).subscribe()
@device$.pipe(operators.switchMap((d) => d.stateUpdates$), operators.takeUntil(@close$)).subscribe((state) =>
env.logger.debug("received state: " + JSON.stringify(state,null,2))
if state.brightness isnt @state.brightness
@pimaticDevice.changeDimlevelTo(state.brightness)
else if state.on isnt @state.on
# switch dimmer
if @stateAvavailable
@pimaticDevice.changeStateTo(state.on)
.then ()=>
@pimaticDevice.changeDimlevelTo(state.brightness)
else if @turnOnOffAvailable
if state.on
@pimaticDevice.turnOn()
.then ()=>
@pimaticDevice.changeDimlevelTo(state.brightness)
else
@pimaticDevice.turnOff()
else
if state.on
@pimaticDevice.changeDimlevelTo(state.brightness)
else
@pimaticDevice.changeDimlevelTo(0)
@state.on = state.on if state.on?
@state.brightness = state.brightness if state.brightness?
)
# setup device handling in Pimatic
#
@pimaticDevice.on "state", @pimaticDeviceStateHandler if @stateAvailable
@pimaticDevice.on "dimlevel", @pimaticDeviceDimlevelHandler
@pimaticDevice.system = @
initState: ()=>
if @stateAvavailable
@pimaticDevice.getState()
.then((state)=>
@state.on = state
return @pimaticDevice.getDimlevel()
)
.then((dimlevel)=>
@state.brightness = dimlevel
@lastBrightness = dimlevel
@setState(@state)
)
else
@pimaticDevice.getDimlevel()
.then((dimlevel)=>
@state.brightness = dimlevel
@setState(@state)
)
status: (status) =>
env.logger.debug("Light device '#{@id}' status: " + JSON.stringify(status.text,null,2))
if status.text is "connected"
@initState()
pimaticDeviceStateHandler: (state) ->
# device status changed, updating device status in Nora
@system.updateState(state)
pimaticDeviceDimlevelHandler: (dimlevel) ->
# device status changed, updating device status in Nora
@system.updateDimlevel(dimlevel)
updateState: (newState) =>
unless newState is @state.on
env.logger.debug "Update state to " + newState
@state.on = newState
@setState(@state)
updateDimlevel: (newDimlevel) =>
unless newDimlevel is @state.brightness
env.logger.debug "Update dimlevel to " + newDimlevel
@state.brightness = newDimlevel
@setState(@state)
setState: (newState)=>
for key,val of newState
@state[key] = val
env.logger.debug "Set smartnora state to: " + JSON.stringify(@state)
try
@device$.pipe(operators.first()).toPromise()
.then (device)=>
device.updateState(@state)
catch err
env.logger.debug("while updating state #{err.message}: #{err.stack}")
getType: () ->
return "light"
getState: () ->
return @state
destroy: ->
return new Promise((resolve,reject) =>
@pimaticDevice.removeListener "state", @pimaticDeviceStateHandler if @pimaticDeviceStateHandler?
@pimaticDevice.removeListener "dimlevel", @pimaticDeviceDimlevelHandler if @pimaticDeviceDimlevelHandler?
@close$.next()
@close$.complete()
resolve(@id)
)
| 87175 | module.exports = (env) ->
Promise = env.require 'bluebird'
assert = env.require 'cassert'
events = require 'events'
rxjs = require("rxjs")
operators = require("rxjs/operators")
connection = require("../node_modules/node-red-contrib-smartnora/build/firebase/connection.js")
device_context = require("../node_modules/node-red-contrib-smartnora/build/firebase/device-context.js")
util = require("../node_modules/node-red-contrib-smartnora/build/nodes/util.js")
_ = require "lodash"
class LightAdapter extends events.EventEmitter
constructor: (config, pimaticDevice, smartnoraConfig) ->
@id = config.pimatic_device_id #pimaticDevice.config.id
@type = "noraf-light"
@pimaticDevice = pimaticDevice
#@subDeviceId = adapterConfig.pimaticSubDeviceId
#@UpdateState = adapterConfig.updateState
@state =
online: true
on: false
brightness: 100
@stateAvailable = @pimaticDevice.hasAction("changeStateTo")
@turnOnOffAvailable = @pimaticDevice.hasAction("turnOn") and @pimaticDevice.hasAction("turnOff")
noraConfig =
email: smartnoraConfig.email
password: <PASSWORD>
group: smartnoraConfig.group
valid: true
localExecution: false
env.logger.debug("noraCONFIG:" + JSON.stringify(noraConfig,null,2))
if !(noraConfig? and noraConfig.valid)
return
@close$ = new rxjs.Subject()
ctx = new device_context.DeviceContext(@)
ctx.update(@close$)
deviceConfig =
id: config.pimatic_device_id
type: "action.devices.types.LIGHT"
traits: [
"action.devices.traits.OnOff",
"action.devices.traits.Brightness"
]
name: {
name: config.name
}
roomHint: config.roomHint
willReportState: true
state:
on: false
online: true
brightness: 100
attributes: {}
noraSpecific:
turnOnWhenBrightnessChanges: true
notifyState = (state) =>
stateString = state.on ? "on" : "off"
stateString += " #{state.brightness}"
ctx.state$.next(stateString)
# setup device stream from smartnora cloud
#
@device$ = connection.FirebaseConnection
.withLogger(env.logger)
.fromConfig(noraConfig, ctx)
.pipe(operators.switchMap((connection) => connection.withDevice(deviceConfig, ctx)), util.withLocalExecution(noraConfig), operators.publishReplay(1), operators.refCount(), operators.takeUntil(@close$))
@device$.pipe(operators.switchMap((d) => d.state$), operators.tap((state) => notifyState(state)), operators.takeUntil(@close$)).subscribe()
@device$.pipe(operators.switchMap((d) => d.stateUpdates$), operators.takeUntil(@close$)).subscribe((state) =>
env.logger.debug("received state: " + JSON.stringify(state,null,2))
if state.brightness isnt @state.brightness
@pimaticDevice.changeDimlevelTo(state.brightness)
else if state.on isnt @state.on
# switch dimmer
if @stateAvavailable
@pimaticDevice.changeStateTo(state.on)
.then ()=>
@pimaticDevice.changeDimlevelTo(state.brightness)
else if @turnOnOffAvailable
if state.on
@pimaticDevice.turnOn()
.then ()=>
@pimaticDevice.changeDimlevelTo(state.brightness)
else
@pimaticDevice.turnOff()
else
if state.on
@pimaticDevice.changeDimlevelTo(state.brightness)
else
@pimaticDevice.changeDimlevelTo(0)
@state.on = state.on if state.on?
@state.brightness = state.brightness if state.brightness?
)
# setup device handling in Pimatic
#
@pimaticDevice.on "state", @pimaticDeviceStateHandler if @stateAvailable
@pimaticDevice.on "dimlevel", @pimaticDeviceDimlevelHandler
@pimaticDevice.system = @
initState: ()=>
if @stateAvavailable
@pimaticDevice.getState()
.then((state)=>
@state.on = state
return @pimaticDevice.getDimlevel()
)
.then((dimlevel)=>
@state.brightness = dimlevel
@lastBrightness = dimlevel
@setState(@state)
)
else
@pimaticDevice.getDimlevel()
.then((dimlevel)=>
@state.brightness = dimlevel
@setState(@state)
)
status: (status) =>
env.logger.debug("Light device '#{@id}' status: " + JSON.stringify(status.text,null,2))
if status.text is "connected"
@initState()
pimaticDeviceStateHandler: (state) ->
# device status changed, updating device status in Nora
@system.updateState(state)
pimaticDeviceDimlevelHandler: (dimlevel) ->
# device status changed, updating device status in Nora
@system.updateDimlevel(dimlevel)
updateState: (newState) =>
unless newState is @state.on
env.logger.debug "Update state to " + newState
@state.on = newState
@setState(@state)
updateDimlevel: (newDimlevel) =>
unless newDimlevel is @state.brightness
env.logger.debug "Update dimlevel to " + newDimlevel
@state.brightness = newDimlevel
@setState(@state)
setState: (newState)=>
for key,val of newState
@state[key] = val
env.logger.debug "Set smartnora state to: " + JSON.stringify(@state)
try
@device$.pipe(operators.first()).toPromise()
.then (device)=>
device.updateState(@state)
catch err
env.logger.debug("while updating state #{err.message}: #{err.stack}")
getType: () ->
return "light"
getState: () ->
return @state
destroy: ->
return new Promise((resolve,reject) =>
@pimaticDevice.removeListener "state", @pimaticDeviceStateHandler if @pimaticDeviceStateHandler?
@pimaticDevice.removeListener "dimlevel", @pimaticDeviceDimlevelHandler if @pimaticDeviceDimlevelHandler?
@close$.next()
@close$.complete()
resolve(@id)
)
| true | module.exports = (env) ->
Promise = env.require 'bluebird'
assert = env.require 'cassert'
events = require 'events'
rxjs = require("rxjs")
operators = require("rxjs/operators")
connection = require("../node_modules/node-red-contrib-smartnora/build/firebase/connection.js")
device_context = require("../node_modules/node-red-contrib-smartnora/build/firebase/device-context.js")
util = require("../node_modules/node-red-contrib-smartnora/build/nodes/util.js")
_ = require "lodash"
class LightAdapter extends events.EventEmitter
constructor: (config, pimaticDevice, smartnoraConfig) ->
@id = config.pimatic_device_id #pimaticDevice.config.id
@type = "noraf-light"
@pimaticDevice = pimaticDevice
#@subDeviceId = adapterConfig.pimaticSubDeviceId
#@UpdateState = adapterConfig.updateState
@state =
online: true
on: false
brightness: 100
@stateAvailable = @pimaticDevice.hasAction("changeStateTo")
@turnOnOffAvailable = @pimaticDevice.hasAction("turnOn") and @pimaticDevice.hasAction("turnOff")
noraConfig =
email: smartnoraConfig.email
password: PI:PASSWORD:<PASSWORD>END_PI
group: smartnoraConfig.group
valid: true
localExecution: false
env.logger.debug("noraCONFIG:" + JSON.stringify(noraConfig,null,2))
if !(noraConfig? and noraConfig.valid)
return
@close$ = new rxjs.Subject()
ctx = new device_context.DeviceContext(@)
ctx.update(@close$)
deviceConfig =
id: config.pimatic_device_id
type: "action.devices.types.LIGHT"
traits: [
"action.devices.traits.OnOff",
"action.devices.traits.Brightness"
]
name: {
name: config.name
}
roomHint: config.roomHint
willReportState: true
state:
on: false
online: true
brightness: 100
attributes: {}
noraSpecific:
turnOnWhenBrightnessChanges: true
notifyState = (state) =>
stateString = state.on ? "on" : "off"
stateString += " #{state.brightness}"
ctx.state$.next(stateString)
# setup device stream from smartnora cloud
#
@device$ = connection.FirebaseConnection
.withLogger(env.logger)
.fromConfig(noraConfig, ctx)
.pipe(operators.switchMap((connection) => connection.withDevice(deviceConfig, ctx)), util.withLocalExecution(noraConfig), operators.publishReplay(1), operators.refCount(), operators.takeUntil(@close$))
@device$.pipe(operators.switchMap((d) => d.state$), operators.tap((state) => notifyState(state)), operators.takeUntil(@close$)).subscribe()
@device$.pipe(operators.switchMap((d) => d.stateUpdates$), operators.takeUntil(@close$)).subscribe((state) =>
env.logger.debug("received state: " + JSON.stringify(state,null,2))
if state.brightness isnt @state.brightness
@pimaticDevice.changeDimlevelTo(state.brightness)
else if state.on isnt @state.on
# switch dimmer
if @stateAvavailable
@pimaticDevice.changeStateTo(state.on)
.then ()=>
@pimaticDevice.changeDimlevelTo(state.brightness)
else if @turnOnOffAvailable
if state.on
@pimaticDevice.turnOn()
.then ()=>
@pimaticDevice.changeDimlevelTo(state.brightness)
else
@pimaticDevice.turnOff()
else
if state.on
@pimaticDevice.changeDimlevelTo(state.brightness)
else
@pimaticDevice.changeDimlevelTo(0)
@state.on = state.on if state.on?
@state.brightness = state.brightness if state.brightness?
)
# setup device handling in Pimatic
#
@pimaticDevice.on "state", @pimaticDeviceStateHandler if @stateAvailable
@pimaticDevice.on "dimlevel", @pimaticDeviceDimlevelHandler
@pimaticDevice.system = @
initState: ()=>
if @stateAvavailable
@pimaticDevice.getState()
.then((state)=>
@state.on = state
return @pimaticDevice.getDimlevel()
)
.then((dimlevel)=>
@state.brightness = dimlevel
@lastBrightness = dimlevel
@setState(@state)
)
else
@pimaticDevice.getDimlevel()
.then((dimlevel)=>
@state.brightness = dimlevel
@setState(@state)
)
status: (status) =>
env.logger.debug("Light device '#{@id}' status: " + JSON.stringify(status.text,null,2))
if status.text is "connected"
@initState()
pimaticDeviceStateHandler: (state) ->
# device status changed, updating device status in Nora
@system.updateState(state)
pimaticDeviceDimlevelHandler: (dimlevel) ->
# device status changed, updating device status in Nora
@system.updateDimlevel(dimlevel)
updateState: (newState) =>
unless newState is @state.on
env.logger.debug "Update state to " + newState
@state.on = newState
@setState(@state)
updateDimlevel: (newDimlevel) =>
unless newDimlevel is @state.brightness
env.logger.debug "Update dimlevel to " + newDimlevel
@state.brightness = newDimlevel
@setState(@state)
setState: (newState)=>
for key,val of newState
@state[key] = val
env.logger.debug "Set smartnora state to: " + JSON.stringify(@state)
try
@device$.pipe(operators.first()).toPromise()
.then (device)=>
device.updateState(@state)
catch err
env.logger.debug("while updating state #{err.message}: #{err.stack}")
getType: () ->
return "light"
getState: () ->
return @state
destroy: ->
return new Promise((resolve,reject) =>
@pimaticDevice.removeListener "state", @pimaticDeviceStateHandler if @pimaticDeviceStateHandler?
@pimaticDevice.removeListener "dimlevel", @pimaticDeviceDimlevelHandler if @pimaticDeviceDimlevelHandler?
@close$.next()
@close$.complete()
resolve(@id)
)
|
[
{
"context": "e.exports = class Eraser extends Pencil\n\n name: 'Eraser'\n iconName: 'eraser'\n\n constructor: () ->\n @",
"end": 132,
"score": 0.8719095587730408,
"start": 126,
"tag": "NAME",
"value": "Eraser"
}
] | src/tools/Eraser.coffee | juiceinc/literallycanvas | 0 | Pencil = require './Pencil'
{createShape} = require '../core/shapes'
module.exports = class Eraser extends Pencil
name: 'Eraser'
iconName: 'eraser'
constructor: () ->
@strokeWidth = 10
makePoint: (x, y, lc) ->
createShape('Point', {x, y, size: @strokeWidth, color: '#000'})
makeShape: -> createShape('ErasedLinePath')
| 61692 | Pencil = require './Pencil'
{createShape} = require '../core/shapes'
module.exports = class Eraser extends Pencil
name: '<NAME>'
iconName: 'eraser'
constructor: () ->
@strokeWidth = 10
makePoint: (x, y, lc) ->
createShape('Point', {x, y, size: @strokeWidth, color: '#000'})
makeShape: -> createShape('ErasedLinePath')
| true | Pencil = require './Pencil'
{createShape} = require '../core/shapes'
module.exports = class Eraser extends Pencil
name: 'PI:NAME:<NAME>END_PI'
iconName: 'eraser'
constructor: () ->
@strokeWidth = 10
makePoint: (x, y, lc) ->
createShape('Point', {x, y, size: @strokeWidth, color: '#000'})
makeShape: -> createShape('ErasedLinePath')
|
[
{
"context": "olorObject = new SolidColor()\n if keyClass == \"Grsc\"\n colorObject.grey.grey = color.getDouble(ch",
"end": 18949,
"score": 0.5900933742523193,
"start": 18945,
"tag": "KEY",
"value": "Grsc"
},
{
"context": "ouble(charIDToTypeID('Gry '))\n if keyClass == \"RGBC\"\n colorObject.rgb.red = colorDesc.getDouble(",
"end": 19045,
"score": 0.7189309000968933,
"start": 19041,
"tag": "KEY",
"value": "RGBC"
},
{
"context": "ouble(charIDToTypeID('Bl '))\n if keyClass == \"CMYC\"\n colorObject.cmyk.cyan = colorDesc.getDoubl",
"end": 19290,
"score": 0.9242751598358154,
"start": 19286,
"tag": "KEY",
"value": "CMYC"
},
{
"context": "ouble(charIDToTypeID('Blck'))\n if keyClass == \"LbCl\"\n colorObject.lab.l = colorDesc.getDouble(ch",
"end": 19618,
"score": 0.9221140146255493,
"start": 19614,
"tag": "KEY",
"value": "LbCl"
},
{
"context": "ctorMask' )\n keyKind = app.charIDToTypeID( 'Knd ' )\n ref.putEnumerated( app.charIDToTypeID( ",
"end": 23694,
"score": 0.5384049415588379,
"start": 23692,
"tag": "KEY",
"value": "nd"
}
] | PhotoshopScript/Baum.coffee | fum1h1ro/Baum2 | 0 | class Baum
@version = '0.6.1'
@maxLength = 1334
run: ->
@saveFolder = null
if app.documents.length == 0
filePaths = File.openDialog("Select a file", "*", true)
for filePath in filePaths
app.activeDocument = app.open(File(filePath))
@runOneFile(true)
else
@runOneFile(false)
alert('complete!')
runOneFile: (after_close) =>
@saveFolder = Folder.selectDialog("保存先フォルダの選択") if @saveFolder == null
return if @saveFolder == null
@documentName = app.activeDocument.name[0..-5]
copiedDoc = app.activeDocument.duplicate(app.activeDocument.name[..-5] + '.copy.psd')
Util.deselectLayers()
@removeUnvisibleLayers(copiedDoc)
@unlockAll(copiedDoc)
@rasterizeAll(copiedDoc)
@unvisibleAll(copiedDoc)
@layerBlendAll(copiedDoc, copiedDoc)
@removeCommentoutLayers(copiedDoc, copiedDoc) # blendの処理してから消す
@cropLayers(copiedDoc)
@resizePsd(copiedDoc)
@selectDocumentArea(copiedDoc)
@ungroupArtboard(copiedDoc)
@clipping(copiedDoc, copiedDoc)
copiedDoc.selection.deselect()
@psdToJson(copiedDoc)
@psdToImage(copiedDoc)
copiedDoc.close(SaveOptions.DONOTSAVECHANGES)
app.activeDocument.close(SaveOptions.DONOTSAVECHANGES) if after_close
selectDocumentArea: (document) ->
x1 = 0
y1 = 0
x2 = document.width.value
y2 = document.height.value
selReg = [[x1,y1],[x2,y1],[x2,y2],[x1,y2]]
document.selection.select(selReg)
clipping: (document, root) ->
document.resizeImage(document.width, document.height, 72, ResampleMethod.NEARESTNEIGHBOR)
if document.selection.bounds[0].value == 0 && document.selection.bounds[1].value == 0 && document.selection.bounds[2].value == document.width.value && document.selection.bounds[3].value == document.height.value
return
document.selection.invert()
@clearAll(document, root)
document.selection.invert()
x1 = document.selection.bounds[0]
y1 = document.selection.bounds[1]
x2 = document.selection.bounds[2]
y2 = document.selection.bounds[3]
document.resizeCanvas(x2,y2,AnchorPosition.TOPLEFT)
w = x2 - x1
h = y2 - y1
activeDocument.resizeCanvas(w,h,AnchorPosition.BOTTOMRIGHT)
clearAll: (document, root) ->
for layer in root.layers
if layer.typename == 'LayerSet'
@clearAll(document, layer)
else if layer.typename == 'ArtLayer'
if layer.kind != LayerKind.TEXT
document.activeLayer = layer
document.selection.clear()
else
alert(layer)
resizePsd: (doc) ->
width = doc.width
height = doc.height
return if width < Baum.maxLength && height < Baum.maxLength
tmp = 0
if width > height
tmp = width / Baum.maxLength
else
tmp = height / Baum.maxLength
width = width / tmp
height = height / tmp
doc.resizeImage(width, height, doc.resolution, ResampleMethod.NEARESTNEIGHBOR)
removeUnvisibleLayers: (root) ->
removeLayers = []
for layer in root.layers
if layer.visible == false
layer.visible = true
if layer.bounds[0].value == 0 && layer.bounds[1].value == 0 && layer.bounds[2].value == 0 && layer.bounds[3].value == 0
removeLayers.push(layer)
continue
if layer.typename == 'LayerSet'
@removeUnvisibleLayers(layer)
if removeLayers.length > 0
for i in [removeLayers.length-1..0]
removeLayers[i].remove()
removeCommentoutLayers: (document, root) ->
removeLayers = []
for layer in root.layers
if layer.name.startsWith('#')
removeLayers.push(layer)
continue
if layer.typename == 'LayerSet'
@removeCommentoutLayers(document, layer)
if root.typename == 'LayerSet'
document.activeLayer = root
if removeLayers.length > 0
for i in [removeLayers.length-1..0]
removeLayers[i].remove()
cropLayers: (root) ->
bounds = [0,0,root.width,root.height];
root.crop(bounds)
rasterizeAll: (root) ->
for layer in root.layers
if layer.name.startsWith('*')
layer.name = layer.name[1..-1].strip()
if layer.typename == 'LayerSet'
Util.mergeGroup(layer)
else
@rasterize(layer)
else if layer.typename == 'LayerSet'
@rasterizeAll(layer)
else if layer.typename == 'ArtLayer'
if layer.kind != LayerKind.TEXT
@rasterize(layer)
else
alert(layer)
t = 0
while(t < root.layers.length)
if root.layers[t].visible && root.layers[t].grouped
root.layers[t].merge()
else
t += 1
rasterize: (layer) ->
tmp = app.activeDocument.activeLayer
app.activeDocument.activeLayer = layer
# LayerStyle含めてラスタライズ
if layer.blendMode != BlendMode.OVERLAY && layer.kind != LayerKind.HUESATURATION && layer.opacity > 1
Util.rasterizeLayerStyle(layer)
# 普通にラスタライズ
layer.rasterize(RasterizeType.ENTIRELAYER)
# LayerMask
Util.rasterizeLayerMask(layer)
app.activeDocument.activeLayer = tmp
ungroupArtboard: (document) ->
for layer in document.layers
if layer.name.startsWith('Artboard') && layer.typename == 'LayerSet'
@ungroup(layer)
ungroup: (root) ->
layers = for layer in root.layers
layer
for i in [0...layers.length]
layers[i].moveBefore(root)
root.remove()
unlockAll: (root) ->
for layer in root.layers
if layer.typename == 'LayerSet'
@unlockAll(layer)
else
if layer.allLocked
layer.allLocked = false
unvisibleAll: (root) ->
for layer in root.layers
if layer.typename == 'LayerSet'
@unvisibleAll(layer)
else
layer.visible = false
layerBlendAll: (document, root) ->
if root.layers.length == 0
return
for i in [root.layers.length-1..0]
layer = root.layers[i]
if layer.typename == 'LayerSet'
@layerBlendAll(document, layer)
else
layer.visible = true
continue if layer.blendMode != BlendMode.OVERLAY && layer.kind != LayerKind.HUESATURATION
document.activeLayer = layer
try
# LayerKind.HUESATURATIONは0pxなのでエラーになる
Util.selectTransparency()
document.selection.bounds
document.selection.copy(true)
catch
layer.copy(true)
document.paste()
newLayer = document.activeLayer
newLayer.name = layer.name
document.activeLayer = layer
Util.selectTransparency()
document.selection.invert()
document.activeLayer = newLayer
try
document.selection.bounds
document.selection.cut()
layer.remove()
psdToJson: (targetDocument) ->
toJson = new PsdToJson()
json = toJson.run(targetDocument, @documentName)
Util.saveText(@saveFolder + "/" + @documentName + ".layout.txt", json)
psdToImage: (targetDocument) ->
toImage = new PsdToImage()
json = toImage.run(targetDocument, @saveFolder, @documentName)
class PsdToJson
run: (document, documentName) ->
layers = @allLayers(document, document)
imageSize = [document.width.value, document.height.value]
canvasSize = [document.width.value, document.height.value]
canvasBase = [document.width.value/2, document.height.value/2]
canvasLayer = @findLayer(document, '#Canvas')
if canvasLayer
bounds = canvasLayer.bounds
canvasSize = [bounds[2].value - bounds[0].value, bounds[3].value - bounds[1].value]
canvasBase = [(bounds[2].value + bounds[0].value)/2, (bounds[3].value + bounds[1].value)/2]
json = JSON.stringify({
info: {
version: Baum.version
canvas: {
image: {
w: imageSize[0]
h: imageSize[1]
}
size: {
w: canvasSize[0]
h: canvasSize[1]
}
base: {
x: canvasBase[0]
y: canvasBase[1]
}
}
}
root: {
type: 'Root'
name: documentName
elements: layers
}
})
json
findLayer: (root, name) ->
for layer in root.layers
return layer if layer.name == name
null
allLayers: (document, root) ->
layers = []
for layer in root.layers when layer.visible
hash = null
name = layer.name.split("@")[0]
opt = Util.parseOption(layer.name.split("@")[1])
if layer.typename == 'ArtLayer'
hash = @layerToHash(document, name, opt, layer)
else
hash = @groupToHash(document, name, opt, layer)
if hash
hash['name'] = name
layers.push(hash)
layers
parseOption: (text) ->
return {} unless text
opt = {}
for optText in text.split(",")
elements = optText.split("=")
elements[1] = 'true' if elements.length == 1
opt[elements[0].toLowerCase()] = elements[1].toLowerCase()
return opt
layerToHash: (document, name, opt, layer) ->
document.activeLayer = layer
hash = {}
if layer.kind == LayerKind.TEXT
text = layer.textItem
textSize = parseFloat(@getTextSize())
textType = 'paragraph'
scale = Util.getTextYScale(text) / 0.9
if text.kind != TextType.PARAGRAPHTEXT
text.kind = TextType.PARAGRAPHTEXT
textType = 'point'
text.height = textSize * (2.0 / scale)
textCenterOffset = text.size.value
pos = [text.position[0].value, text.position[1].value]
pos[1] = pos[1] - (textCenterOffset / (2.0 / scale))
text.position = pos
originalText = text.contents.replace(/\r\n/g, '__CRLF__').replace(/\r/g, '__CRLF__').replace(/\n/g, '__CRLF__').replace(/__CRLF__/g, '\r\n')
text.contents = "Z"
bounds = Util.getTextExtents(text)
vx = bounds.x
vy = bounds.y
ww = bounds.width
hh = bounds.height
vh = bounds.height
align = ''
textColor = 0x000000
try
align = text.justification.toString()[14..-1].toLowerCase()
textColor = text.color.rgb.hexValue
catch e
align = 'left'
hash = {
type: 'Text'
text: originalText
textType: textType
font: text.font
size: textSize
color: textColor
align: align
x: Math.round(vx * 100.0)/100.0
y: Math.round(vy * 100.0)/100.0
w: Math.round(ww * 100.0)/100.0
h: Math.round(hh * 100.0)/100.0
vh: Math.round(vh * 100.0)/100.0
opacity: Math.round(layer.opacity * 10.0)/10.0
}
if Util.hasStroke(document, layer)
hash['strokeSize'] = Util.getStrokeSize(document, layer)
hash['strokeColor'] = Util.getStrokeColor(document, layer).rgb.hexValue
else if opt['mask']
hash = {
type: 'Mask'
image: Util.layerToImageName(layer)
x: layer.bounds[0].value
y: layer.bounds[1].value
w: layer.bounds[2].value - layer.bounds[0].value
h: layer.bounds[3].value - layer.bounds[1].value
opacity: Math.round(layer.opacity * 10.0)/10.0
}
else
hash = {
type: 'Image'
image: Util.layerToImageName(layer)
x: layer.bounds[0].value
y: layer.bounds[1].value
w: layer.bounds[2].value - layer.bounds[0].value
h: layer.bounds[3].value - layer.bounds[1].value
opacity: Math.round(layer.opacity * 10.0)/10.0
}
hash['prefab'] = opt['prefab'] if opt['prefab']
hash['background'] = true if opt['background']
hash['slice'] = opt['slice'] if opt['slice']
hash['pivot'] = opt['pivot'] if opt['pivot']
hash['stretchx'] = opt['stretchx'] if opt['stretchx']
hash['stretchy'] = opt['stretchy'] if opt['stretchy']
hash['stretchxy'] = opt['stretchxy'] if opt['stretchxy']
hash
angleFromMatrix: (yy, xy) ->
toDegs = 180/Math.PI
return Math.atan2(yy, xy) * toDegs - 90
getActiveLayerTransform: ->
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
if (desc.hasKey(stringIDToTypeID('transform')))
desc = desc.getObjectValue(stringIDToTypeID('transform'))
xx = desc.getDouble(stringIDToTypeID('xx'))
xy = desc.getDouble(stringIDToTypeID('xy'))
yy = desc.getDouble(stringIDToTypeID('yy'))
yx = desc.getDouble(stringIDToTypeID('yx'))
return {xx: xx, xy: xy, yy: yy, yx: yx}
return {xx: 0, xy: 0, yy: 0, yx: 0}
getTextSize: ->
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
textSize = desc.getList(stringIDToTypeID('textStyleRange')).getObjectValue(0).getObjectValue(stringIDToTypeID('textStyle')).getDouble (stringIDToTypeID('size'))
if (desc.hasKey(stringIDToTypeID('transform')))
mFactor = desc.getObjectValue(stringIDToTypeID('transform')).getUnitDoubleValue (stringIDToTypeID("yy") )
textSize = (textSize* mFactor).toFixed(2)
return textSize
groupToHash: (document, name, opt, layer) ->
hash = {}
if name.endsWith('Button')
hash = { type: 'Button' }
else if name.endsWith('List')
hash = { type: 'List' }
hash['scroll'] = opt['scroll'] if opt['scroll']
else if name.endsWith('Slider')
hash = { type: 'Slider' }
hash['scroll'] = opt['scroll'] if opt['scroll']
else if name.endsWith('Scrollbar')
hash = { type: 'Scrollbar' }
hash['scroll'] = opt['scroll'] if opt['scroll']
else if name.endsWith('Toggle')
hash = { type: 'Toggle' }
else
hash = { type: 'Group' }
hash['pivot'] = opt['pivot'] if opt['pivot']
hash['stretchx'] = opt['stretchx'] if opt['stretchx']
hash['stretchy'] = opt['stretchy'] if opt['stretchy']
hash['stretchxy'] = opt['stretchxy'] if opt['stretchxy']
hash['elements'] = @allLayers(document, layer)
hash
class PsdToImage
baseFolder = null
fileNames = []
run: (document, saveFolder, documentName) ->
@baseFolder = Folder(saveFolder + "/" + documentName)
if @baseFolder.exists
removeFiles = @baseFolder.getFiles()
for i in [0...removeFiles.length]
if removeFiles[i].name.startsWith(documentName) && removeFiles[i].name.endsWith('.png')
removeFiles[i].remove()
@baseFolder.remove()
@baseFolder.create()
targets = @allLayers(document)
snapShotId = Util.takeSnapshot(document)
for target in targets
target.visible = true
@outputLayer(document, target)
Util.revertToSnapshot(document, snapShotId)
allLayers: (root) ->
for layer in root.layers when layer.kind == LayerKind.TEXT
layer.visible = false
list = for layer in root.layers when layer.visible
if layer.typename == 'ArtLayer'
layer.visible = false
layer
else
@allLayers(layer)
Array.prototype.concat.apply([], list) # list.flatten()
outputLayer: (doc, layer) ->
if !layer.isBackgroundLayer
layer.translate(-layer.bounds[0], -layer.bounds[1])
doc.resizeCanvas(layer.bounds[2] - layer.bounds[0], layer.bounds[3] - layer.bounds[1], AnchorPosition.TOPLEFT)
doc.trim(TrimType.TRANSPARENT)
layer.opacity = 100.0
fileName = Util.layerToImageName(layer)
opt = Util.parseOption(layer.name.split("@")[1])
if fileName in fileNames
alert("#{fileName}と同名のレイヤーが存在します。レイヤー名を変更してください。")
if opt['slice'] == 'false'
fileName += "-noslice"
fileNames.push(fileName)
saveFile = new File("#{@baseFolder.fsName}/#{fileName}.png")
options = new ExportOptionsSaveForWeb()
options.format = SaveDocumentType.PNG
options.PNG8 = false
options.optimized = true
options.interlaced = false
doc.exportDocument(saveFile, ExportType.SAVEFORWEB, options)
class Util
@saveText: (filePath, text) ->
file = File(filePath)
file.encoding = "UTF8"
file.open("w", "TEXT")
file.write(text)
file.close()
@layerToImageName: (layer) ->
encodeURI(Util.layerToImageNameLoop(layer)).replace(/%/g, '')
@layerToImageNameLoop: (layer) ->
return "" if layer instanceof Document
image = Util.layerToImageName(layer.parent)
imageName = image
if imageName != ""
imageName = imageName + "_"
imageName + layer.name.split("@")[0].replace('_', '').replace(' ', '-').toLowerCase()
@getLastSnapshotID: (doc) ->
hsObj = doc.historyStates
hsLength = hsObj.length
for i in [hsLength-1 .. -1]
if hsObj[i].snapshot
return i
@takeSnapshot: (doc) ->
desc153 = new ActionDescriptor()
ref119 = new ActionReference()
ref119.putClass(charIDToTypeID("SnpS"))
desc153.putReference(charIDToTypeID("null"), ref119 )
ref120 = new ActionReference()
ref120.putProperty(charIDToTypeID("HstS"), charIDToTypeID("CrnH") )
desc153.putReference(charIDToTypeID("From"), ref120 )
executeAction(charIDToTypeID("Mk "), desc153, DialogModes.NO )
return Util.getLastSnapshotID(doc)
@revertToSnapshot: (doc, snapshotID) ->
doc.activeHistoryState = doc.historyStates[snapshotID]
@hasStroke: (doc, layer) ->
doc.activeLayer = layer
res = false
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
hasFX = executeActionGet(ref).hasKey(stringIDToTypeID('layerEffects'))
if hasFX
hasStroke = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).hasKey(stringIDToTypeID('frameFX'))
if hasStroke
desc1 = executeActionGet(ref)
desc2 = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
if desc1.getBoolean(stringIDToTypeID('layerFXVisible')) && desc2.getBoolean(stringIDToTypeID('enabled'))
res = true
return res
@getStrokeSize: (doc, layer) ->
doc.activeLayer = layer
ref = new ActionReference()
ref.putEnumerated(charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt"))
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
return desc.getUnitDoubleValue (stringIDToTypeID('size'))
@getStrokeColor: (doc, layer) ->
doc.activeLayer = layer
ref = new ActionReference()
ref.putEnumerated(charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt"))
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
return Util.getColorFromDescriptor(desc.getObjectValue(stringIDToTypeID("color")), typeIDToCharID(desc.getClass(stringIDToTypeID("color"))))
@getColorFromDescriptor: (colorDesc, keyClass) ->
colorObject = new SolidColor()
if keyClass == "Grsc"
colorObject.grey.grey = color.getDouble(charIDToTypeID('Gry '))
if keyClass == "RGBC"
colorObject.rgb.red = colorDesc.getDouble(charIDToTypeID('Rd '))
colorObject.rgb.green = colorDesc.getDouble(charIDToTypeID('Grn '))
colorObject.rgb.blue = colorDesc.getDouble(charIDToTypeID('Bl '))
if keyClass == "CMYC"
colorObject.cmyk.cyan = colorDesc.getDouble(charIDToTypeID('Cyn '))
colorObject.cmyk.magenta = colorDesc.getDouble(charIDToTypeID('Mgnt'))
colorObject.cmyk.yellow = colorDesc.getDouble(charIDToTypeID('Ylw '))
colorObject.cmyk.black = colorDesc.getDouble(charIDToTypeID('Blck'))
if keyClass == "LbCl"
colorObject.lab.l = colorDesc.getDouble(charIDToTypeID('Lmnc'))
colorObject.lab.a = colorDesc.getDouble(charIDToTypeID('A '))
colorObject.lab.b = colorDesc.getDouble(charIDToTypeID('B '))
return colorObject
  # Deselect all layers in the active document (Action Manager
  # 'selectNoLayers' on the current layer target).
  @deselectLayers: ->
    desc01 = new ActionDescriptor()
    ref01 = new ActionReference()
    ref01.putEnumerated( charIDToTypeID('Lyr '), charIDToTypeID('Ordn'), charIDToTypeID('Trgt') )
    desc01.putReference( charIDToTypeID('null'), ref01 )
    executeAction( stringIDToTypeID('selectNoLayers'), desc01, DialogModes.NO )
  # Load the active layer's transparency channel ("Trsp") into the
  # current selection ("fsel"), i.e. select the layer's opaque pixels.
  @selectTransparency: ->
    idChnl = charIDToTypeID( "Chnl" )
    actionSelect = new ActionReference()
    actionSelect.putProperty( idChnl, charIDToTypeID( "fsel" ) )
    actionTransparent = new ActionReference()
    actionTransparent.putEnumerated( idChnl, idChnl, charIDToTypeID( "Trsp" ) )
    actionDesc = new ActionDescriptor()
    actionDesc.putReference( charIDToTypeID( "null" ), actionSelect )
    actionDesc.putReference( charIDToTypeID( "T   " ), actionTransparent )
    executeAction( charIDToTypeID( "setd" ), actionDesc, DialogModes.NO )
  # Measure a TextItem: returns {x, y, width, height} where x/y are the
  # item's (rounded) document position and width/height come from the
  # textKey 'bounds', multiplied by the text transform's xx/yy factors
  # when present (Photoshop stores scaled type as base size + transform).
  @getTextExtents: (text_item) ->
    app.activeDocument.activeLayer = text_item.parent
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
    bounds = desc.getObjectValue(stringIDToTypeID('bounds'))
    width = bounds.getUnitDoubleValue (stringIDToTypeID('right'))
    height = bounds.getUnitDoubleValue (stringIDToTypeID('bottom'))
    # Default to unscaled text when no transform is attached.
    x_scale = 1
    y_scale = 1
    if desc.hasKey(stringIDToTypeID('transform'))
      transform = desc.getObjectValue(stringIDToTypeID('transform'))
      x_scale = transform.getUnitDoubleValue (stringIDToTypeID('xx'))
      y_scale = transform.getUnitDoubleValue (stringIDToTypeID('yy'))
    return { x:Math.round(text_item.position[0]), y:Math.round(text_item.position[1]) , width:Math.round(width*x_scale), height:Math.round(height*y_scale) }
  # Return only the vertical ('yy') scale factor of a TextItem's
  # transform, or 1 when the text has no transform attached.
  # NOTE(review): width/height/bounds are computed here but never used —
  # likely copied from getTextExtents; left as-is to preserve behavior.
  @getTextYScale: (text_item) ->
    app.activeDocument.activeLayer = text_item.parent
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
    bounds = desc.getObjectValue(stringIDToTypeID('bounds'))
    width = bounds.getUnitDoubleValue (stringIDToTypeID('right'))
    height = bounds.getUnitDoubleValue (stringIDToTypeID('bottom'))
    x_scale = 1
    y_scale = 1
    if desc.hasKey(stringIDToTypeID('transform'))
      transform = desc.getObjectValue(stringIDToTypeID('transform'))
      x_scale = transform.getUnitDoubleValue (stringIDToTypeID('xx'))
      y_scale = transform.getUnitDoubleValue (stringIDToTypeID('yy'))
    return y_scale
@rasterizeLayerStyle: (layer) ->
app.activeDocument.activeLayer = layer
idrasterizeLayer = stringIDToTypeID("rasterizeLayer")
desc5 = new ActionDescriptor()
idnull = charIDToTypeID("null")
ref4 = new ActionReference()
idLyr = charIDToTypeID("Lyr ")
idOrdn = charIDToTypeID("Ordn")
idTrgt = charIDToTypeID("Trgt")
ref4.putEnumerated(idLyr,idOrdn,idTrgt)
desc5.putReference(idnull,ref4)
idWhat = charIDToTypeID("What")
idrasterizeItem = stringIDToTypeID("rasterizeItem")
idlayerStyle = stringIDToTypeID("layerStyle")
desc5.putEnumerated(idWhat,idrasterizeItem,idlayerStyle)
executeAction(idrasterizeLayer,desc5,DialogModes.NO)
  # Bake any mask of `layer` into its pixels: a vector mask is first
  # rasterized (layer, then mask) and applied; a regular layer mask is
  # then rasterized, selected and applied.
  @rasterizeLayerMask: (layer) ->
    app.activeDocument.activeLayer = layer
    if Util.hasVectorMask()
      Util.rasterizeLayer()
      Util.selectVectorMask()
      Util.rasterizeVectorMask()
      Util.applyLayerMask()
    if Util.hasLayerMask()
      Util.rasterizeLayer()
      Util.selectLayerMask()
      Util.applyLayerMask()
  # True when the currently targeted layer has a vector mask. Probes the
  # "vectorMask" path; the mask exists when the returned path
  # descriptor's kind ('Knd ') equals the vectorMask enum. Any Action
  # Manager error is treated as "no vector mask".
  @hasVectorMask: ->
    hasVectorMask = false
    try
      ref = new ActionReference()
      keyVectorMaskEnabled = app.stringIDToTypeID( 'vectorMask' )
      keyKind = app.charIDToTypeID( 'Knd ' )
      ref.putEnumerated( app.charIDToTypeID( 'Path' ), app.charIDToTypeID( 'Ordn' ), keyVectorMaskEnabled )
      desc = executeActionGet( ref )
      if desc.hasKey( keyKind )
        kindValue = desc.getEnumerationValue( keyKind )
        if (kindValue == keyVectorMaskEnabled)
          hasVectorMask = true
    catch e
      hasVectorMask = false
    return hasVectorMask
  # True when the currently targeted layer has a user (pixel) layer
  # mask: checks for the 'UsrM' property on the target layer's
  # descriptor. Any Action Manager error is treated as "no mask".
  @hasLayerMask: ->
    hasLayerMask = false
    try
      ref = new ActionReference()
      keyUserMaskEnabled = app.charIDToTypeID( 'UsrM' )
      ref.putProperty( app.charIDToTypeID( 'Prpr' ), keyUserMaskEnabled )
      ref.putEnumerated( app.charIDToTypeID( 'Lyr ' ), app.charIDToTypeID( 'Ordn' ), app.charIDToTypeID( 'Trgt' ) )
      desc = executeActionGet( ref )
      if desc.hasKey( keyUserMaskEnabled )
        hasLayerMask = true
    catch e
      hasLayerMask = false
    return hasLayerMask
@rasterizeLayer: ->
try
id1242 = stringIDToTypeID( "rasterizeLayer" )
desc245 = new ActionDescriptor()
id1243 = charIDToTypeID( "null" )
ref184 = new ActionReference()
id1244 = charIDToTypeID( "Lyr " )
id1245 = charIDToTypeID( "Ordn" )
id1246 = charIDToTypeID( "Trgt" )
ref184.putEnumerated( id1244, id1245, id1246 )
desc245.putReference( id1243, ref184 )
executeAction( id1242, desc245, DialogModes.NO )
catch
  # Target ("slct") the vector-mask path of the currently selected
  # layer. Failures (e.g. no vector mask present) are ignored.
  @selectVectorMask: ->
    try
      id55 = charIDToTypeID( "slct" )
      desc15 = new ActionDescriptor()
      id56 = charIDToTypeID( "null" )
      ref13 = new ActionReference()
      id57 = charIDToTypeID( "Path" )
      id58 = charIDToTypeID( "Path" )
      id59 = stringIDToTypeID( "vectorMask" )
      ref13.putEnumerated( id57, id58, id59 )
      id60 = charIDToTypeID( "Lyr " )
      id61 = charIDToTypeID( "Ordn" )
      id62 = charIDToTypeID( "Trgt" )
      ref13.putEnumerated( id60, id61, id62 )
      desc15.putReference( id56, ref13 )
      executeAction( id55, desc15, DialogModes.NO )
    catch e
  # Target ("slct") the layer-mask channel ("Msk ") of the current
  # layer without making it visible (MkVs = false). Failures (e.g. no
  # layer mask present) are ignored.
  @selectLayerMask: ->
    try
      id759 = charIDToTypeID( "slct" )
      desc153 = new ActionDescriptor()
      id760 = charIDToTypeID( "null" )
      ref92 = new ActionReference()
      id761 = charIDToTypeID( "Chnl" )
      id762 = charIDToTypeID( "Chnl" )
      id763 = charIDToTypeID( "Msk " )
      ref92.putEnumerated( id761, id762, id763 )
      desc153.putReference( id760, ref92 )
      id764 = charIDToTypeID( "MkVs" )
      desc153.putBoolean( id764, false )
      executeAction( id759, desc153, DialogModes.NO )
    catch e
  # Rasterize the vector mask of the targeted layer into a regular
  # pixel mask ("rasterizeLayer" with What = rasterizeItem/vectorMask).
  # Failures are ignored.
  @rasterizeVectorMask: ->
    try
      id488 = stringIDToTypeID( "rasterizeLayer" )
      desc44 = new ActionDescriptor()
      id489 = charIDToTypeID( "null" )
      ref29 = new ActionReference()
      id490 = charIDToTypeID( "Lyr " )
      id491 = charIDToTypeID( "Ordn" )
      id492 = charIDToTypeID( "Trgt" )
      ref29.putEnumerated( id490, id491, id492 )
      desc44.putReference( id489, ref29 )
      id493 = charIDToTypeID( "What" )
      id494 = stringIDToTypeID( "rasterizeItem" )
      id495 = stringIDToTypeID( "vectorMask" )
      desc44.putEnumerated( id493, id494, id495 )
      executeAction( id488, desc44, DialogModes.NO )
    catch e
  # Delete the targeted mask channel while applying it ("Dlt " with
  # Aply = true), baking the mask into the layer's pixels. Failures are
  # ignored.
  @applyLayerMask: ->
    try
      id765 = charIDToTypeID( "Dlt " )
      desc154 = new ActionDescriptor()
      id766 = charIDToTypeID( "null" )
      ref93 = new ActionReference()
      id767 = charIDToTypeID( "Chnl" )
      id768 = charIDToTypeID( "Ordn" )
      id769 = charIDToTypeID( "Trgt" )
      ref93.putEnumerated( id767, id768, id769 )
      desc154.putReference( id766, ref93 )
      id770 = charIDToTypeID( "Aply" )
      desc154.putBoolean( id770, true )
      executeAction( id765, desc154, DialogModes.NO )
    catch e
@mergeGroup: (layer) ->
app.activeDocument.activeLayer = layer
try
idMrgtwo = charIDToTypeID( "Mrg2" )
desc15 = new ActionDescriptor()
executeAction( idMrgtwo, desc15, DialogModes.NO )
catch e
@parseOption: (text) ->
return {} unless text
opt = {}
for optText in text.split(",")
elements = optText.split("=")
elements[1] = 'true' if elements.length == 1
opt[elements[0].toLowerCase()] = elements[1].toLowerCase()
return opt
# String helpers used throughout the script. ExtendScript's JavaScript
# engine predates the native implementations, so they are defined here.
String.prototype.startsWith = (str) ->
  @slice(0, str.length) == str
String.prototype.endsWith = (suffix) ->
  @indexOf(suffix, @length - suffix.length) != -1
String.prototype.strip = ->
  # Prefer the native trim when the engine provides one.
  if String::trim? then @trim() else @replace /^\s+|\s+$/g, ""
# Script entry point: force pixel ruler/type units so all bounds and
# position math below is done in pixels, then run the exporter.
setup = ->
  preferences.rulerUnits = Units.PIXELS
  preferences.typeUnits = TypeUnits.PIXELS
setup()
baum = new Baum()
baum.run()
class Baum
@version = '0.6.1'
@maxLength = 1334
run: ->
@saveFolder = null
if app.documents.length == 0
filePaths = File.openDialog("Select a file", "*", true)
for filePath in filePaths
app.activeDocument = app.open(File(filePath))
@runOneFile(true)
else
@runOneFile(false)
alert('complete!')
runOneFile: (after_close) =>
@saveFolder = Folder.selectDialog("保存先フォルダの選択") if @saveFolder == null
return if @saveFolder == null
@documentName = app.activeDocument.name[0..-5]
copiedDoc = app.activeDocument.duplicate(app.activeDocument.name[..-5] + '.copy.psd')
Util.deselectLayers()
@removeUnvisibleLayers(copiedDoc)
@unlockAll(copiedDoc)
@rasterizeAll(copiedDoc)
@unvisibleAll(copiedDoc)
@layerBlendAll(copiedDoc, copiedDoc)
@removeCommentoutLayers(copiedDoc, copiedDoc) # blendの処理してから消す
@cropLayers(copiedDoc)
@resizePsd(copiedDoc)
@selectDocumentArea(copiedDoc)
@ungroupArtboard(copiedDoc)
@clipping(copiedDoc, copiedDoc)
copiedDoc.selection.deselect()
@psdToJson(copiedDoc)
@psdToImage(copiedDoc)
copiedDoc.close(SaveOptions.DONOTSAVECHANGES)
app.activeDocument.close(SaveOptions.DONOTSAVECHANGES) if after_close
selectDocumentArea: (document) ->
x1 = 0
y1 = 0
x2 = document.width.value
y2 = document.height.value
selReg = [[x1,y1],[x2,y1],[x2,y2],[x1,y2]]
document.selection.select(selReg)
clipping: (document, root) ->
document.resizeImage(document.width, document.height, 72, ResampleMethod.NEARESTNEIGHBOR)
if document.selection.bounds[0].value == 0 && document.selection.bounds[1].value == 0 && document.selection.bounds[2].value == document.width.value && document.selection.bounds[3].value == document.height.value
return
document.selection.invert()
@clearAll(document, root)
document.selection.invert()
x1 = document.selection.bounds[0]
y1 = document.selection.bounds[1]
x2 = document.selection.bounds[2]
y2 = document.selection.bounds[3]
document.resizeCanvas(x2,y2,AnchorPosition.TOPLEFT)
w = x2 - x1
h = y2 - y1
activeDocument.resizeCanvas(w,h,AnchorPosition.BOTTOMRIGHT)
clearAll: (document, root) ->
for layer in root.layers
if layer.typename == 'LayerSet'
@clearAll(document, layer)
else if layer.typename == 'ArtLayer'
if layer.kind != LayerKind.TEXT
document.activeLayer = layer
document.selection.clear()
else
alert(layer)
resizePsd: (doc) ->
width = doc.width
height = doc.height
return if width < Baum.maxLength && height < Baum.maxLength
tmp = 0
if width > height
tmp = width / Baum.maxLength
else
tmp = height / Baum.maxLength
width = width / tmp
height = height / tmp
doc.resizeImage(width, height, doc.resolution, ResampleMethod.NEARESTNEIGHBOR)
removeUnvisibleLayers: (root) ->
removeLayers = []
for layer in root.layers
if layer.visible == false
layer.visible = true
if layer.bounds[0].value == 0 && layer.bounds[1].value == 0 && layer.bounds[2].value == 0 && layer.bounds[3].value == 0
removeLayers.push(layer)
continue
if layer.typename == 'LayerSet'
@removeUnvisibleLayers(layer)
if removeLayers.length > 0
for i in [removeLayers.length-1..0]
removeLayers[i].remove()
removeCommentoutLayers: (document, root) ->
removeLayers = []
for layer in root.layers
if layer.name.startsWith('#')
removeLayers.push(layer)
continue
if layer.typename == 'LayerSet'
@removeCommentoutLayers(document, layer)
if root.typename == 'LayerSet'
document.activeLayer = root
if removeLayers.length > 0
for i in [removeLayers.length-1..0]
removeLayers[i].remove()
cropLayers: (root) ->
bounds = [0,0,root.width,root.height];
root.crop(bounds)
rasterizeAll: (root) ->
for layer in root.layers
if layer.name.startsWith('*')
layer.name = layer.name[1..-1].strip()
if layer.typename == 'LayerSet'
Util.mergeGroup(layer)
else
@rasterize(layer)
else if layer.typename == 'LayerSet'
@rasterizeAll(layer)
else if layer.typename == 'ArtLayer'
if layer.kind != LayerKind.TEXT
@rasterize(layer)
else
alert(layer)
t = 0
while(t < root.layers.length)
if root.layers[t].visible && root.layers[t].grouped
root.layers[t].merge()
else
t += 1
rasterize: (layer) ->
tmp = app.activeDocument.activeLayer
app.activeDocument.activeLayer = layer
# LayerStyle含めてラスタライズ
if layer.blendMode != BlendMode.OVERLAY && layer.kind != LayerKind.HUESATURATION && layer.opacity > 1
Util.rasterizeLayerStyle(layer)
# 普通にラスタライズ
layer.rasterize(RasterizeType.ENTIRELAYER)
# LayerMask
Util.rasterizeLayerMask(layer)
app.activeDocument.activeLayer = tmp
ungroupArtboard: (document) ->
for layer in document.layers
if layer.name.startsWith('Artboard') && layer.typename == 'LayerSet'
@ungroup(layer)
ungroup: (root) ->
layers = for layer in root.layers
layer
for i in [0...layers.length]
layers[i].moveBefore(root)
root.remove()
unlockAll: (root) ->
for layer in root.layers
if layer.typename == 'LayerSet'
@unlockAll(layer)
else
if layer.allLocked
layer.allLocked = false
unvisibleAll: (root) ->
for layer in root.layers
if layer.typename == 'LayerSet'
@unvisibleAll(layer)
else
layer.visible = false
layerBlendAll: (document, root) ->
if root.layers.length == 0
return
for i in [root.layers.length-1..0]
layer = root.layers[i]
if layer.typename == 'LayerSet'
@layerBlendAll(document, layer)
else
layer.visible = true
continue if layer.blendMode != BlendMode.OVERLAY && layer.kind != LayerKind.HUESATURATION
document.activeLayer = layer
try
# LayerKind.HUESATURATIONは0pxなのでエラーになる
Util.selectTransparency()
document.selection.bounds
document.selection.copy(true)
catch
layer.copy(true)
document.paste()
newLayer = document.activeLayer
newLayer.name = layer.name
document.activeLayer = layer
Util.selectTransparency()
document.selection.invert()
document.activeLayer = newLayer
try
document.selection.bounds
document.selection.cut()
layer.remove()
psdToJson: (targetDocument) ->
toJson = new PsdToJson()
json = toJson.run(targetDocument, @documentName)
Util.saveText(@saveFolder + "/" + @documentName + ".layout.txt", json)
psdToImage: (targetDocument) ->
toImage = new PsdToImage()
json = toImage.run(targetDocument, @saveFolder, @documentName)
class PsdToJson
run: (document, documentName) ->
layers = @allLayers(document, document)
imageSize = [document.width.value, document.height.value]
canvasSize = [document.width.value, document.height.value]
canvasBase = [document.width.value/2, document.height.value/2]
canvasLayer = @findLayer(document, '#Canvas')
if canvasLayer
bounds = canvasLayer.bounds
canvasSize = [bounds[2].value - bounds[0].value, bounds[3].value - bounds[1].value]
canvasBase = [(bounds[2].value + bounds[0].value)/2, (bounds[3].value + bounds[1].value)/2]
json = JSON.stringify({
info: {
version: Baum.version
canvas: {
image: {
w: imageSize[0]
h: imageSize[1]
}
size: {
w: canvasSize[0]
h: canvasSize[1]
}
base: {
x: canvasBase[0]
y: canvasBase[1]
}
}
}
root: {
type: 'Root'
name: documentName
elements: layers
}
})
json
findLayer: (root, name) ->
for layer in root.layers
return layer if layer.name == name
null
allLayers: (document, root) ->
layers = []
for layer in root.layers when layer.visible
hash = null
name = layer.name.split("@")[0]
opt = Util.parseOption(layer.name.split("@")[1])
if layer.typename == 'ArtLayer'
hash = @layerToHash(document, name, opt, layer)
else
hash = @groupToHash(document, name, opt, layer)
if hash
hash['name'] = name
layers.push(hash)
layers
parseOption: (text) ->
return {} unless text
opt = {}
for optText in text.split(",")
elements = optText.split("=")
elements[1] = 'true' if elements.length == 1
opt[elements[0].toLowerCase()] = elements[1].toLowerCase()
return opt
layerToHash: (document, name, opt, layer) ->
document.activeLayer = layer
hash = {}
if layer.kind == LayerKind.TEXT
text = layer.textItem
textSize = parseFloat(@getTextSize())
textType = 'paragraph'
scale = Util.getTextYScale(text) / 0.9
if text.kind != TextType.PARAGRAPHTEXT
text.kind = TextType.PARAGRAPHTEXT
textType = 'point'
text.height = textSize * (2.0 / scale)
textCenterOffset = text.size.value
pos = [text.position[0].value, text.position[1].value]
pos[1] = pos[1] - (textCenterOffset / (2.0 / scale))
text.position = pos
originalText = text.contents.replace(/\r\n/g, '__CRLF__').replace(/\r/g, '__CRLF__').replace(/\n/g, '__CRLF__').replace(/__CRLF__/g, '\r\n')
text.contents = "Z"
bounds = Util.getTextExtents(text)
vx = bounds.x
vy = bounds.y
ww = bounds.width
hh = bounds.height
vh = bounds.height
align = ''
textColor = 0x000000
try
align = text.justification.toString()[14..-1].toLowerCase()
textColor = text.color.rgb.hexValue
catch e
align = 'left'
hash = {
type: 'Text'
text: originalText
textType: textType
font: text.font
size: textSize
color: textColor
align: align
x: Math.round(vx * 100.0)/100.0
y: Math.round(vy * 100.0)/100.0
w: Math.round(ww * 100.0)/100.0
h: Math.round(hh * 100.0)/100.0
vh: Math.round(vh * 100.0)/100.0
opacity: Math.round(layer.opacity * 10.0)/10.0
}
if Util.hasStroke(document, layer)
hash['strokeSize'] = Util.getStrokeSize(document, layer)
hash['strokeColor'] = Util.getStrokeColor(document, layer).rgb.hexValue
else if opt['mask']
hash = {
type: 'Mask'
image: Util.layerToImageName(layer)
x: layer.bounds[0].value
y: layer.bounds[1].value
w: layer.bounds[2].value - layer.bounds[0].value
h: layer.bounds[3].value - layer.bounds[1].value
opacity: Math.round(layer.opacity * 10.0)/10.0
}
else
hash = {
type: 'Image'
image: Util.layerToImageName(layer)
x: layer.bounds[0].value
y: layer.bounds[1].value
w: layer.bounds[2].value - layer.bounds[0].value
h: layer.bounds[3].value - layer.bounds[1].value
opacity: Math.round(layer.opacity * 10.0)/10.0
}
hash['prefab'] = opt['prefab'] if opt['prefab']
hash['background'] = true if opt['background']
hash['slice'] = opt['slice'] if opt['slice']
hash['pivot'] = opt['pivot'] if opt['pivot']
hash['stretchx'] = opt['stretchx'] if opt['stretchx']
hash['stretchy'] = opt['stretchy'] if opt['stretchy']
hash['stretchxy'] = opt['stretchxy'] if opt['stretchxy']
hash
angleFromMatrix: (yy, xy) ->
toDegs = 180/Math.PI
return Math.atan2(yy, xy) * toDegs - 90
getActiveLayerTransform: ->
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
if (desc.hasKey(stringIDToTypeID('transform')))
desc = desc.getObjectValue(stringIDToTypeID('transform'))
xx = desc.getDouble(stringIDToTypeID('xx'))
xy = desc.getDouble(stringIDToTypeID('xy'))
yy = desc.getDouble(stringIDToTypeID('yy'))
yx = desc.getDouble(stringIDToTypeID('yx'))
return {xx: xx, xy: xy, yy: yy, yx: yx}
return {xx: 0, xy: 0, yy: 0, yx: 0}
getTextSize: ->
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
textSize = desc.getList(stringIDToTypeID('textStyleRange')).getObjectValue(0).getObjectValue(stringIDToTypeID('textStyle')).getDouble (stringIDToTypeID('size'))
if (desc.hasKey(stringIDToTypeID('transform')))
mFactor = desc.getObjectValue(stringIDToTypeID('transform')).getUnitDoubleValue (stringIDToTypeID("yy") )
textSize = (textSize* mFactor).toFixed(2)
return textSize
groupToHash: (document, name, opt, layer) ->
hash = {}
if name.endsWith('Button')
hash = { type: 'Button' }
else if name.endsWith('List')
hash = { type: 'List' }
hash['scroll'] = opt['scroll'] if opt['scroll']
else if name.endsWith('Slider')
hash = { type: 'Slider' }
hash['scroll'] = opt['scroll'] if opt['scroll']
else if name.endsWith('Scrollbar')
hash = { type: 'Scrollbar' }
hash['scroll'] = opt['scroll'] if opt['scroll']
else if name.endsWith('Toggle')
hash = { type: 'Toggle' }
else
hash = { type: 'Group' }
hash['pivot'] = opt['pivot'] if opt['pivot']
hash['stretchx'] = opt['stretchx'] if opt['stretchx']
hash['stretchy'] = opt['stretchy'] if opt['stretchy']
hash['stretchxy'] = opt['stretchxy'] if opt['stretchxy']
hash['elements'] = @allLayers(document, layer)
hash
class PsdToImage
baseFolder = null
fileNames = []
run: (document, saveFolder, documentName) ->
@baseFolder = Folder(saveFolder + "/" + documentName)
if @baseFolder.exists
removeFiles = @baseFolder.getFiles()
for i in [0...removeFiles.length]
if removeFiles[i].name.startsWith(documentName) && removeFiles[i].name.endsWith('.png')
removeFiles[i].remove()
@baseFolder.remove()
@baseFolder.create()
targets = @allLayers(document)
snapShotId = Util.takeSnapshot(document)
for target in targets
target.visible = true
@outputLayer(document, target)
Util.revertToSnapshot(document, snapShotId)
allLayers: (root) ->
for layer in root.layers when layer.kind == LayerKind.TEXT
layer.visible = false
list = for layer in root.layers when layer.visible
if layer.typename == 'ArtLayer'
layer.visible = false
layer
else
@allLayers(layer)
Array.prototype.concat.apply([], list) # list.flatten()
outputLayer: (doc, layer) ->
if !layer.isBackgroundLayer
layer.translate(-layer.bounds[0], -layer.bounds[1])
doc.resizeCanvas(layer.bounds[2] - layer.bounds[0], layer.bounds[3] - layer.bounds[1], AnchorPosition.TOPLEFT)
doc.trim(TrimType.TRANSPARENT)
layer.opacity = 100.0
fileName = Util.layerToImageName(layer)
opt = Util.parseOption(layer.name.split("@")[1])
if fileName in fileNames
alert("#{fileName}と同名のレイヤーが存在します。レイヤー名を変更してください。")
if opt['slice'] == 'false'
fileName += "-noslice"
fileNames.push(fileName)
saveFile = new File("#{@baseFolder.fsName}/#{fileName}.png")
options = new ExportOptionsSaveForWeb()
options.format = SaveDocumentType.PNG
options.PNG8 = false
options.optimized = true
options.interlaced = false
doc.exportDocument(saveFile, ExportType.SAVEFORWEB, options)
class Util
@saveText: (filePath, text) ->
file = File(filePath)
file.encoding = "UTF8"
file.open("w", "TEXT")
file.write(text)
file.close()
@layerToImageName: (layer) ->
encodeURI(Util.layerToImageNameLoop(layer)).replace(/%/g, '')
@layerToImageNameLoop: (layer) ->
return "" if layer instanceof Document
image = Util.layerToImageName(layer.parent)
imageName = image
if imageName != ""
imageName = imageName + "_"
imageName + layer.name.split("@")[0].replace('_', '').replace(' ', '-').toLowerCase()
@getLastSnapshotID: (doc) ->
hsObj = doc.historyStates
hsLength = hsObj.length
for i in [hsLength-1 .. -1]
if hsObj[i].snapshot
return i
@takeSnapshot: (doc) ->
desc153 = new ActionDescriptor()
ref119 = new ActionReference()
ref119.putClass(charIDToTypeID("SnpS"))
desc153.putReference(charIDToTypeID("null"), ref119 )
ref120 = new ActionReference()
ref120.putProperty(charIDToTypeID("HstS"), charIDToTypeID("CrnH") )
desc153.putReference(charIDToTypeID("From"), ref120 )
executeAction(charIDToTypeID("Mk "), desc153, DialogModes.NO )
return Util.getLastSnapshotID(doc)
@revertToSnapshot: (doc, snapshotID) ->
doc.activeHistoryState = doc.historyStates[snapshotID]
@hasStroke: (doc, layer) ->
doc.activeLayer = layer
res = false
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
hasFX = executeActionGet(ref).hasKey(stringIDToTypeID('layerEffects'))
if hasFX
hasStroke = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).hasKey(stringIDToTypeID('frameFX'))
if hasStroke
desc1 = executeActionGet(ref)
desc2 = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
if desc1.getBoolean(stringIDToTypeID('layerFXVisible')) && desc2.getBoolean(stringIDToTypeID('enabled'))
res = true
return res
@getStrokeSize: (doc, layer) ->
doc.activeLayer = layer
ref = new ActionReference()
ref.putEnumerated(charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt"))
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
return desc.getUnitDoubleValue (stringIDToTypeID('size'))
@getStrokeColor: (doc, layer) ->
doc.activeLayer = layer
ref = new ActionReference()
ref.putEnumerated(charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt"))
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
return Util.getColorFromDescriptor(desc.getObjectValue(stringIDToTypeID("color")), typeIDToCharID(desc.getClass(stringIDToTypeID("color"))))
@getColorFromDescriptor: (colorDesc, keyClass) ->
colorObject = new SolidColor()
if keyClass == "<KEY>"
colorObject.grey.grey = color.getDouble(charIDToTypeID('Gry '))
if keyClass == "<KEY>"
colorObject.rgb.red = colorDesc.getDouble(charIDToTypeID('Rd '))
colorObject.rgb.green = colorDesc.getDouble(charIDToTypeID('Grn '))
colorObject.rgb.blue = colorDesc.getDouble(charIDToTypeID('Bl '))
if keyClass == "<KEY>"
colorObject.cmyk.cyan = colorDesc.getDouble(charIDToTypeID('Cyn '))
colorObject.cmyk.magenta = colorDesc.getDouble(charIDToTypeID('Mgnt'))
colorObject.cmyk.yellow = colorDesc.getDouble(charIDToTypeID('Ylw '))
colorObject.cmyk.black = colorDesc.getDouble(charIDToTypeID('Blck'))
if keyClass == "<KEY>"
colorObject.lab.l = colorDesc.getDouble(charIDToTypeID('Lmnc'))
colorObject.lab.a = colorDesc.getDouble(charIDToTypeID('A '))
colorObject.lab.b = colorDesc.getDouble(charIDToTypeID('B '))
return colorObject
@deselectLayers: ->
desc01 = new ActionDescriptor()
ref01 = new ActionReference()
ref01.putEnumerated( charIDToTypeID('Lyr '), charIDToTypeID('Ordn'), charIDToTypeID('Trgt') )
desc01.putReference( charIDToTypeID('null'), ref01 )
executeAction( stringIDToTypeID('selectNoLayers'), desc01, DialogModes.NO )
@selectTransparency: ->
idChnl = charIDToTypeID( "Chnl" )
actionSelect = new ActionReference()
actionSelect.putProperty( idChnl, charIDToTypeID( "fsel" ) )
actionTransparent = new ActionReference()
actionTransparent.putEnumerated( idChnl, idChnl, charIDToTypeID( "Trsp" ) )
actionDesc = new ActionDescriptor()
actionDesc.putReference( charIDToTypeID( "null" ), actionSelect )
actionDesc.putReference( charIDToTypeID( "T " ), actionTransparent )
executeAction( charIDToTypeID( "setd" ), actionDesc, DialogModes.NO )
@getTextExtents: (text_item) ->
app.activeDocument.activeLayer = text_item.parent
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
bounds = desc.getObjectValue(stringIDToTypeID('bounds'))
width = bounds.getUnitDoubleValue (stringIDToTypeID('right'))
height = bounds.getUnitDoubleValue (stringIDToTypeID('bottom'))
x_scale = 1
y_scale = 1
if desc.hasKey(stringIDToTypeID('transform'))
transform = desc.getObjectValue(stringIDToTypeID('transform'))
x_scale = transform.getUnitDoubleValue (stringIDToTypeID('xx'))
y_scale = transform.getUnitDoubleValue (stringIDToTypeID('yy'))
return { x:Math.round(text_item.position[0]), y:Math.round(text_item.position[1]) , width:Math.round(width*x_scale), height:Math.round(height*y_scale) }
@getTextYScale: (text_item) ->
app.activeDocument.activeLayer = text_item.parent
ref = new ActionReference()
ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
bounds = desc.getObjectValue(stringIDToTypeID('bounds'))
width = bounds.getUnitDoubleValue (stringIDToTypeID('right'))
height = bounds.getUnitDoubleValue (stringIDToTypeID('bottom'))
x_scale = 1
y_scale = 1
if desc.hasKey(stringIDToTypeID('transform'))
transform = desc.getObjectValue(stringIDToTypeID('transform'))
x_scale = transform.getUnitDoubleValue (stringIDToTypeID('xx'))
y_scale = transform.getUnitDoubleValue (stringIDToTypeID('yy'))
return y_scale
@rasterizeLayerStyle: (layer) ->
app.activeDocument.activeLayer = layer
idrasterizeLayer = stringIDToTypeID("rasterizeLayer")
desc5 = new ActionDescriptor()
idnull = charIDToTypeID("null")
ref4 = new ActionReference()
idLyr = charIDToTypeID("Lyr ")
idOrdn = charIDToTypeID("Ordn")
idTrgt = charIDToTypeID("Trgt")
ref4.putEnumerated(idLyr,idOrdn,idTrgt)
desc5.putReference(idnull,ref4)
idWhat = charIDToTypeID("What")
idrasterizeItem = stringIDToTypeID("rasterizeItem")
idlayerStyle = stringIDToTypeID("layerStyle")
desc5.putEnumerated(idWhat,idrasterizeItem,idlayerStyle)
executeAction(idrasterizeLayer,desc5,DialogModes.NO)
@rasterizeLayerMask: (layer) ->
app.activeDocument.activeLayer = layer
if Util.hasVectorMask()
Util.rasterizeLayer()
Util.selectVectorMask()
Util.rasterizeVectorMask()
Util.applyLayerMask()
if Util.hasLayerMask()
Util.rasterizeLayer()
Util.selectLayerMask()
Util.applyLayerMask()
@hasVectorMask: ->
hasVectorMask = false
try
ref = new ActionReference()
keyVectorMaskEnabled = app.stringIDToTypeID( 'vectorMask' )
keyKind = app.charIDToTypeID( 'K<KEY> ' )
ref.putEnumerated( app.charIDToTypeID( 'Path' ), app.charIDToTypeID( 'Ordn' ), keyVectorMaskEnabled )
desc = executeActionGet( ref )
if desc.hasKey( keyKind )
kindValue = desc.getEnumerationValue( keyKind )
if (kindValue == keyVectorMaskEnabled)
hasVectorMask = true
catch e
hasVectorMask = false
return hasVectorMask
@hasLayerMask: ->
hasLayerMask = false
try
ref = new ActionReference()
keyUserMaskEnabled = app.charIDToTypeID( 'UsrM' )
ref.putProperty( app.charIDToTypeID( 'Prpr' ), keyUserMaskEnabled )
ref.putEnumerated( app.charIDToTypeID( 'Lyr ' ), app.charIDToTypeID( 'Ordn' ), app.charIDToTypeID( 'Trgt' ) )
desc = executeActionGet( ref )
if desc.hasKey( keyUserMaskEnabled )
hasLayerMask = true
catch e
hasLayerMask = false
return hasLayerMask
@rasterizeLayer: ->
try
id1242 = stringIDToTypeID( "rasterizeLayer" )
desc245 = new ActionDescriptor()
id1243 = charIDToTypeID( "null" )
ref184 = new ActionReference()
id1244 = charIDToTypeID( "Lyr " )
id1245 = charIDToTypeID( "Ordn" )
id1246 = charIDToTypeID( "Trgt" )
ref184.putEnumerated( id1244, id1245, id1246 )
desc245.putReference( id1243, ref184 )
executeAction( id1242, desc245, DialogModes.NO )
catch
  # Select the current layer's vector mask path via ActionManager
  # (ScriptListener-generated IDs); silently ignores layers without one.
  @selectVectorMask: ->
    try
      id55 = charIDToTypeID( "slct" )
      desc15 = new ActionDescriptor()
      id56 = charIDToTypeID( "null" )
      ref13 = new ActionReference()
      id57 = charIDToTypeID( "Path" )
      id58 = charIDToTypeID( "Path" )
      id59 = stringIDToTypeID( "vectorMask" )
      ref13.putEnumerated( id57, id58, id59 )
      id60 = charIDToTypeID( "Lyr " )
      id61 = charIDToTypeID( "Ordn" )
      id62 = charIDToTypeID( "Trgt" )
      ref13.putEnumerated( id60, id61, id62 )
      desc15.putReference( id56, ref13 )
      executeAction( id55, desc15, DialogModes.NO )
    catch e
  # Select the current layer's raster (layer) mask channel without
  # toggling its visibility (MkVs=false).
  @selectLayerMask: ->
    try
      id759 = charIDToTypeID( "slct" )
      desc153 = new ActionDescriptor()
      id760 = charIDToTypeID( "null" )
      ref92 = new ActionReference()
      id761 = charIDToTypeID( "Chnl" )
      id762 = charIDToTypeID( "Chnl" )
      id763 = charIDToTypeID( "Msk " )
      ref92.putEnumerated( id761, id762, id763 )
      desc153.putReference( id760, ref92 )
      id764 = charIDToTypeID( "MkVs" )
      desc153.putBoolean( id764, false )
      executeAction( id759, desc153, DialogModes.NO )
    catch e
  # Rasterize the current layer's vector mask ("rasterizeItem" = vectorMask).
  @rasterizeVectorMask: ->
    try
      id488 = stringIDToTypeID( "rasterizeLayer" )
      desc44 = new ActionDescriptor()
      id489 = charIDToTypeID( "null" )
      ref29 = new ActionReference()
      id490 = charIDToTypeID( "Lyr " )
      id491 = charIDToTypeID( "Ordn" )
      id492 = charIDToTypeID( "Trgt" )
      ref29.putEnumerated( id490, id491, id492 )
      desc44.putReference( id489, ref29 )
      id493 = charIDToTypeID( "What" )
      id494 = stringIDToTypeID( "rasterizeItem" )
      id495 = stringIDToTypeID( "vectorMask" )
      desc44.putEnumerated( id493, id494, id495 )
      executeAction( id488, desc44, DialogModes.NO )
    catch e
  # Delete the selected mask channel while applying it to the layer
  # pixels ("Dlt " with Aply=true == Apply Layer Mask).
  @applyLayerMask: ->
    try
      id765 = charIDToTypeID( "Dlt " )
      desc154 = new ActionDescriptor()
      id766 = charIDToTypeID( "null" )
      ref93 = new ActionReference()
      id767 = charIDToTypeID( "Chnl" )
      id768 = charIDToTypeID( "Ordn" )
      id769 = charIDToTypeID( "Trgt" )
      ref93.putEnumerated( id767, id768, id769 )
      desc154.putReference( id766, ref93 )
      id770 = charIDToTypeID( "Aply" )
      desc154.putBoolean( id770, true )
      executeAction( id765, desc154, DialogModes.NO )
    catch e
  # Merge the given group layer down into a single layer ("Mrg2").
  @mergeGroup: (layer) ->
    app.activeDocument.activeLayer = layer
    try
      idMrgtwo = charIDToTypeID( "Mrg2" )
      desc15 = new ActionDescriptor()
      executeAction( idMrgtwo, desc15, DialogModes.NO )
    catch e
  # Parse a layer-name option string like "a=1,b" into {a: '1', b: 'true'}.
  # Keys and values are lower-cased; a bare key defaults to 'true'.
  # NOTE(review): a value containing '=' is truncated at the second '=' —
  # confirm no option syntax needs embedded '='.
  @parseOption: (text) ->
    return {} unless text
    opt = {}
    for optText in text.split(",")
      elements = optText.split("=")
      elements[1] = 'true' if elements.length == 1
      opt[elements[0].toLowerCase()] = elements[1].toLowerCase()
    return opt
# Prefix-check polyfill for ExtendScript's pre-ES5 String.
String.prototype.startsWith = (str) ->
  return this.slice(0, str.length) == str
# Suffix-check polyfill.
String.prototype.endsWith = (suffix) ->
  return this.indexOf(suffix, this.length - suffix.length) != -1
# Trim polyfill: native trim when available, regex fallback otherwise.
String.prototype.strip = ->
  if String::trim? then @trim() else @replace /^\s+|\s+$/g, ""
# Force pixel units so all bounds/position math below is in px.
setup = ->
  preferences.rulerUnits = Units.PIXELS
  preferences.typeUnits = TypeUnits.PIXELS
setup()
baum = new Baum()
baum.run()
| true | class Baum
  # Exporter version stamped into the generated layout JSON.
  @version = '0.6.1'
  # Longest allowed canvas edge (px); larger documents are scaled down.
  @maxLength = 1334
  # Entry point: process every open document, or prompt for files when
  # none are open, then report completion.
  run: ->
    @saveFolder = null
    if app.documents.length == 0
      filePaths = File.openDialog("Select a file", "*", true)
      for filePath in filePaths
        app.activeDocument = app.open(File(filePath))
        @runOneFile(true)
    else
      @runOneFile(false)
    alert('complete!')
  # Convert the active document: work on a duplicate copy, normalize the
  # layer tree, crop/resize, then emit layout JSON and per-layer PNGs.
  # after_close: also close the original document when done.
  runOneFile: (after_close) =>
    @saveFolder = Folder.selectDialog("保存先フォルダの選択") if @saveFolder == null
    return if @saveFolder == null
    # Strip the ".psd" extension from the document name.
    @documentName = app.activeDocument.name[0..-5]
    copiedDoc = app.activeDocument.duplicate(app.activeDocument.name[..-5] + '.copy.psd')
    Util.deselectLayers()
    @removeUnvisibleLayers(copiedDoc)
    @unlockAll(copiedDoc)
    @rasterizeAll(copiedDoc)
    @unvisibleAll(copiedDoc)
    @layerBlendAll(copiedDoc, copiedDoc)
    @removeCommentoutLayers(copiedDoc, copiedDoc) # delete only after blend handling
    @cropLayers(copiedDoc)
    @resizePsd(copiedDoc)
    @selectDocumentArea(copiedDoc)
    @ungroupArtboard(copiedDoc)
    @clipping(copiedDoc, copiedDoc)
    copiedDoc.selection.deselect()
    @psdToJson(copiedDoc)
    @psdToImage(copiedDoc)
    copiedDoc.close(SaveOptions.DONOTSAVECHANGES)
    app.activeDocument.close(SaveOptions.DONOTSAVECHANGES) if after_close
selectDocumentArea: (document) ->
x1 = 0
y1 = 0
x2 = document.width.value
y2 = document.height.value
selReg = [[x1,y1],[x2,y1],[x2,y2],[x1,y2]]
document.selection.select(selReg)
clipping: (document, root) ->
document.resizeImage(document.width, document.height, 72, ResampleMethod.NEARESTNEIGHBOR)
if document.selection.bounds[0].value == 0 && document.selection.bounds[1].value == 0 && document.selection.bounds[2].value == document.width.value && document.selection.bounds[3].value == document.height.value
return
document.selection.invert()
@clearAll(document, root)
document.selection.invert()
x1 = document.selection.bounds[0]
y1 = document.selection.bounds[1]
x2 = document.selection.bounds[2]
y2 = document.selection.bounds[3]
document.resizeCanvas(x2,y2,AnchorPosition.TOPLEFT)
w = x2 - x1
h = y2 - y1
activeDocument.resizeCanvas(w,h,AnchorPosition.BOTTOMRIGHT)
  # Recursively clear the selected region from every non-text art layer
  # under root. Text layers are skipped; unexpected layer types are
  # surfaced via alert for debugging.
  clearAll: (document, root) ->
    for layer in root.layers
      if layer.typename == 'LayerSet'
        @clearAll(document, layer)
      else if layer.typename == 'ArtLayer'
        if layer.kind != LayerKind.TEXT
          document.activeLayer = layer
          document.selection.clear()
      else
        alert(layer)
  # Scale the document down (keeping aspect ratio) so its longest edge
  # does not exceed Baum.maxLength. Smaller documents are left untouched.
  resizePsd: (doc) ->
    width = doc.width
    height = doc.height
    return if width < Baum.maxLength && height < Baum.maxLength
    tmp = 0
    if width > height
      tmp = width / Baum.maxLength
    else
      tmp = height / Baum.maxLength
    width = width / tmp
    height = height / tmp
    doc.resizeImage(width, height, doc.resolution, ResampleMethod.NEARESTNEIGHBOR)
  # Handle hidden layers: each is made visible so its bounds can be read,
  # and layers with empty (all-zero) bounds are deleted. Recurses into
  # groups; deletions are deferred and done in reverse order.
  # NOTE(review): hidden layers with non-empty bounds stay visible after
  # this pass — presumably unvisibleAll later re-hides art layers; confirm.
  removeUnvisibleLayers: (root) ->
    removeLayers = []
    for layer in root.layers
      if layer.visible == false
        layer.visible = true
        if layer.bounds[0].value == 0 && layer.bounds[1].value == 0 && layer.bounds[2].value == 0 && layer.bounds[3].value == 0
          removeLayers.push(layer)
          continue
      if layer.typename == 'LayerSet'
        @removeUnvisibleLayers(layer)
    # Delete in reverse so earlier removals don't invalidate later ones.
    if removeLayers.length > 0
      for i in [removeLayers.length-1..0]
        removeLayers[i].remove()
  # Remove layers whose name starts with '#' (commented-out layers).
  # Recurses into groups; deletion is deferred and done in reverse.
  removeCommentoutLayers: (document, root) ->
    removeLayers = []
    for layer in root.layers
      if layer.name.startsWith('#')
        removeLayers.push(layer)
        continue
      if layer.typename == 'LayerSet'
        @removeCommentoutLayers(document, layer)
    if root.typename == 'LayerSet'
      document.activeLayer = root
    if removeLayers.length > 0
      for i in [removeLayers.length-1..0]
        removeLayers[i].remove()
  # Crop every layer to the document canvas so no pixels hang outside it.
  cropLayers: (root) ->
    bounds = [0,0,root.width,root.height];
    root.crop(bounds)
  # Flatten the tree into plain raster layers:
  #  - names starting with '*' are force-merged/rasterized (marker stripped)
  #  - other groups are recursed into; text layers stay editable
  #  - finally, visible clipping-masked ("grouped") layers are merged
  #    into their base layer.
  rasterizeAll: (root) ->
    for layer in root.layers
      if layer.name.startsWith('*')
        layer.name = layer.name[1..-1].strip()
        if layer.typename == 'LayerSet'
          Util.mergeGroup(layer)
        else
          @rasterize(layer)
      else if layer.typename == 'LayerSet'
        @rasterizeAll(layer)
      else if layer.typename == 'ArtLayer'
        if layer.kind != LayerKind.TEXT
          @rasterize(layer)
      else
        alert(layer)
    # merge() shortens root.layers in place, so only advance the index
    # when nothing was merged at the current position.
    t = 0
    while(t < root.layers.length)
      if root.layers[t].visible && root.layers[t].grouped
        root.layers[t].merge()
      else
        t += 1
  # Rasterize one layer (style and masks baked in), restoring the
  # previously active layer afterwards.
  rasterize: (layer) ->
    tmp = app.activeDocument.activeLayer
    app.activeDocument.activeLayer = layer
    # Rasterize including the layer style (skipped for overlay blend /
    # hue-saturation adjustment layers and near-transparent layers).
    if layer.blendMode != BlendMode.OVERLAY && layer.kind != LayerKind.HUESATURATION && layer.opacity > 1
      Util.rasterizeLayerStyle(layer)
    # Plain rasterize of the layer contents.
    layer.rasterize(RasterizeType.ENTIRELAYER)
    # Bake any vector/raster layer mask into the pixels.
    Util.rasterizeLayerMask(layer)
    app.activeDocument.activeLayer = tmp
  # Dissolve top-level Photoshop artboards ("Artboard..." groups) so
  # their children become ordinary top-level layers.
  ungroupArtboard: (document) ->
    for layer in document.layers
      if layer.name.startsWith('Artboard') && layer.typename == 'LayerSet'
        @ungroup(layer)
  # Move all children of `root` out (order preserved) and delete the
  # now-empty group. The children are snapshotted into an array first
  # because moveBefore mutates root.layers while iterating.
  ungroup: (root) ->
    layers = for layer in root.layers
      layer
    for i in [0...layers.length]
      layers[i].moveBefore(root)
    root.remove()
  # Recursively clear the "all locked" flag on every art layer under root.
  unlockAll: (root) ->
    for layer in root.layers
      if layer.typename == 'LayerSet'
        @unlockAll(layer)
      else
        if layer.allLocked
          layer.allLocked = false
  # Recursively hide every art layer under root (group visibility is
  # left as-is).
  unvisibleAll: (root) ->
    for layer in root.layers
      if layer.typename == 'LayerSet'
        @unvisibleAll(layer)
      else
        layer.visible = false
  # Bake overlay-blend / hue-saturation adjustment layers into the layers
  # below them. Iterates bottom-up; for each such layer the composited
  # result is copied merged, pasted as a new layer, clipped to the
  # original layer's opaque region, and the original layer is removed.
  layerBlendAll: (document, root) ->
    if root.layers.length == 0
      return
    for i in [root.layers.length-1..0]
      layer = root.layers[i]
      if layer.typename == 'LayerSet'
        @layerBlendAll(document, layer)
      else
        layer.visible = true
        continue if layer.blendMode != BlendMode.OVERLAY && layer.kind != LayerKind.HUESATURATION
        document.activeLayer = layer
        try
          # LayerKind.HUESATURATION has 0px bounds, so this raises and we
          # fall back to copying the layer itself.
          Util.selectTransparency()
          document.selection.bounds
          document.selection.copy(true)
        catch
          layer.copy(true)
        document.paste()
        newLayer = document.activeLayer
        newLayer.name = layer.name
        document.activeLayer = layer
        Util.selectTransparency()
        document.selection.invert()
        document.activeLayer = newLayer
        # try without catch: cut only when the inverted selection is
        # non-empty (bounds raises otherwise).
        try
          document.selection.bounds
          document.selection.cut()
        layer.remove()
  # Write <documentName>.layout.txt (the layout JSON) to the save folder.
  psdToJson: (targetDocument) ->
    toJson = new PsdToJson()
    json = toJson.run(targetDocument, @documentName)
    Util.saveText(@saveFolder + "/" + @documentName + ".layout.txt", json)
  # Export each exportable layer as a PNG under the save folder.
  psdToImage: (targetDocument) ->
    toImage = new PsdToImage()
    json = toImage.run(targetDocument, @saveFolder, @documentName)
# Converts a prepared (rasterized/flattened) PSD document into the JSON
# layout consumed by the importer.
class PsdToJson
  # Build the layout JSON string for `document`. An optional '#Canvas'
  # layer overrides the logical canvas size/center; otherwise the full
  # image size is used.
  run: (document, documentName) ->
    layers = @allLayers(document, document)
    imageSize = [document.width.value, document.height.value]
    canvasSize = [document.width.value, document.height.value]
    canvasBase = [document.width.value/2, document.height.value/2]
    canvasLayer = @findLayer(document, '#Canvas')
    if canvasLayer
      bounds = canvasLayer.bounds
      canvasSize = [bounds[2].value - bounds[0].value, bounds[3].value - bounds[1].value]
      canvasBase = [(bounds[2].value + bounds[0].value)/2, (bounds[3].value + bounds[1].value)/2]
    json = JSON.stringify({
      info: {
        version: Baum.version
        canvas: {
          image: {
            w: imageSize[0]
            h: imageSize[1]
          }
          size: {
            w: canvasSize[0]
            h: canvasSize[1]
          }
          base: {
            x: canvasBase[0]
            y: canvasBase[1]
          }
        }
      }
      root: {
        type: 'Root'
        name: documentName
        elements: layers
      }
    })
    json
  # Return the direct child layer of `root` with the given name, or null.
  findLayer: (root, name) ->
    for layer in root.layers
      return layer if layer.name == name
    null
  # Map every visible layer under `root` to its JSON hash. Layer names
  # are split as "<name>@<options>"; art layers and groups are
  # serialized differently.
  allLayers: (document, root) ->
    layers = []
    for layer in root.layers when layer.visible
      hash = null
      name = layer.name.split("@")[0]
      opt = Util.parseOption(layer.name.split("@")[1])
      if layer.typename == 'ArtLayer'
        hash = @layerToHash(document, name, opt, layer)
      else
        hash = @groupToHash(document, name, opt, layer)
      if hash
        hash['name'] = name
        layers.push(hash)
    layers
  # Duplicate of Util.parseOption: "a=1,b" -> {a: '1', b: 'true'} with
  # keys/values lower-cased.
  parseOption: (text) ->
    return {} unless text
    opt = {}
    for optText in text.split(",")
      elements = optText.split("=")
      elements[1] = 'true' if elements.length == 1
      opt[elements[0].toLowerCase()] = elements[1].toLowerCase()
    return opt
  # Serialize a single art layer. Text layers become {type: 'Text'} with
  # font/size/color/alignment metadata; other layers become 'Image'
  # (or 'Mask' when the @mask option is set) referencing their PNG.
  layerToHash: (document, name, opt, layer) ->
    document.activeLayer = layer
    hash = {}
    if layer.kind == LayerKind.TEXT
      text = layer.textItem
      textSize = parseFloat(@getTextSize())
      textType = 'paragraph'
      # divisor 0.9: scale correction factor — TODO confirm derivation
      # (presumably compensates Photoshop's default leading).
      scale = Util.getTextYScale(text) / 0.9
      if text.kind != TextType.PARAGRAPHTEXT
        # Convert point text to paragraph text so it gets a box, shifting
        # the position so the rendered text stays visually in place.
        text.kind = TextType.PARAGRAPHTEXT
        textType = 'point'
        text.height = textSize * (2.0 / scale)
        textCenterOffset = text.size.value
        pos = [text.position[0].value, text.position[1].value]
        pos[1] = pos[1] - (textCenterOffset / (2.0 / scale))
        text.position = pos
      # Normalize all newline flavors to CRLF.
      originalText = text.contents.replace(/\r\n/g, '__CRLF__').replace(/\r/g, '__CRLF__').replace(/\n/g, '__CRLF__').replace(/__CRLF__/g, '\r\n')
      # Measure with a single glyph (contents are not restored — this
      # runs on the throwaway copy document).
      text.contents = "Z"
      bounds = Util.getTextExtents(text)
      vx = bounds.x
      vy = bounds.y
      ww = bounds.width
      hh = bounds.height
      vh = bounds.height
      align = ''
      textColor = 0x000000
      try
        # "Justification.XXXX" -> "xxxx"
        align = text.justification.toString()[14..-1].toLowerCase()
        textColor = text.color.rgb.hexValue
      catch e
        # Reading justification/color can raise (e.g. mixed styles —
        # confirm); fall back to left-aligned black.
        align = 'left'
      hash = {
        type: 'Text'
        text: originalText
        textType: textType
        font: text.font
        size: textSize
        color: textColor
        align: align
        x: Math.round(vx * 100.0)/100.0
        y: Math.round(vy * 100.0)/100.0
        w: Math.round(ww * 100.0)/100.0
        h: Math.round(hh * 100.0)/100.0
        vh: Math.round(vh * 100.0)/100.0
        opacity: Math.round(layer.opacity * 10.0)/10.0
      }
      if Util.hasStroke(document, layer)
        hash['strokeSize'] = Util.getStrokeSize(document, layer)
        hash['strokeColor'] = Util.getStrokeColor(document, layer).rgb.hexValue
    else if opt['mask']
      hash = {
        type: 'Mask'
        image: Util.layerToImageName(layer)
        x: layer.bounds[0].value
        y: layer.bounds[1].value
        w: layer.bounds[2].value - layer.bounds[0].value
        h: layer.bounds[3].value - layer.bounds[1].value
        opacity: Math.round(layer.opacity * 10.0)/10.0
      }
    else
      hash = {
        type: 'Image'
        image: Util.layerToImageName(layer)
        x: layer.bounds[0].value
        y: layer.bounds[1].value
        w: layer.bounds[2].value - layer.bounds[0].value
        h: layer.bounds[3].value - layer.bounds[1].value
        opacity: Math.round(layer.opacity * 10.0)/10.0
      }
    # Pass-through options recognized by the importer.
    hash['prefab'] = opt['prefab'] if opt['prefab']
    hash['background'] = true if opt['background']
    hash['slice'] = opt['slice'] if opt['slice']
    hash['pivot'] = opt['pivot'] if opt['pivot']
    hash['stretchx'] = opt['stretchx'] if opt['stretchx']
    hash['stretchy'] = opt['stretchy'] if opt['stretchy']
    hash['stretchxy'] = opt['stretchxy'] if opt['stretchxy']
    hash
  # Angle (degrees) from a transform matrix's yy/xy terms, offset so an
  # identity matrix yields 0.
  angleFromMatrix: (yy, xy) ->
    toDegs = 180/Math.PI
    return Math.atan2(yy, xy) * toDegs - 90
  # Read the active text layer's transform matrix via ActionManager;
  # returns all zeros when no transform is present.
  getActiveLayerTransform: ->
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
    if (desc.hasKey(stringIDToTypeID('transform')))
      desc = desc.getObjectValue(stringIDToTypeID('transform'))
      xx = desc.getDouble(stringIDToTypeID('xx'))
      xy = desc.getDouble(stringIDToTypeID('xy'))
      yy = desc.getDouble(stringIDToTypeID('yy'))
      yx = desc.getDouble(stringIDToTypeID('yx'))
      return {xx: xx, xy: xy, yy: yy, yx: yx}
    return {xx: 0, xy: 0, yy: 0, yx: 0}
  # Effective font size of the active text layer: the first style-range
  # size, scaled by the transform's yy factor when present.
  getTextSize: ->
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
    textSize = desc.getList(stringIDToTypeID('textStyleRange')).getObjectValue(0).getObjectValue(stringIDToTypeID('textStyle')).getDouble (stringIDToTypeID('size'))
    if (desc.hasKey(stringIDToTypeID('transform')))
      mFactor = desc.getObjectValue(stringIDToTypeID('transform')).getUnitDoubleValue (stringIDToTypeID("yy") )
      textSize = (textSize* mFactor).toFixed(2)
    return textSize
  # Serialize a group layer. The widget type is inferred from the name
  # suffix (Button/List/Slider/Scrollbar/Toggle), defaulting to Group;
  # children are serialized recursively into 'elements'.
  groupToHash: (document, name, opt, layer) ->
    hash = {}
    if name.endsWith('Button')
      hash = { type: 'Button' }
    else if name.endsWith('List')
      hash = { type: 'List' }
      hash['scroll'] = opt['scroll'] if opt['scroll']
    else if name.endsWith('Slider')
      hash = { type: 'Slider' }
      hash['scroll'] = opt['scroll'] if opt['scroll']
    else if name.endsWith('Scrollbar')
      hash = { type: 'Scrollbar' }
      hash['scroll'] = opt['scroll'] if opt['scroll']
    else if name.endsWith('Toggle')
      hash = { type: 'Toggle' }
    else
      hash = { type: 'Group' }
    hash['pivot'] = opt['pivot'] if opt['pivot']
    hash['stretchx'] = opt['stretchx'] if opt['stretchx']
    hash['stretchy'] = opt['stretchy'] if opt['stretchy']
    hash['stretchxy'] = opt['stretchxy'] if opt['stretchxy']
    hash['elements'] = @allLayers(document, layer)
    hash
# Exports every visible art layer of the prepared document as an
# individual trimmed PNG.
class PsdToImage
  baseFolder = null
  fileNames = []
  # Export all target layers into <saveFolder>/<documentName>/ (the
  # folder is recreated). Each export mutates the document, so the state
  # is rolled back via a history snapshot after every layer.
  run: (document, saveFolder, documentName) ->
    @baseFolder = Folder(saveFolder + "/" + documentName)
    if @baseFolder.exists
      removeFiles = @baseFolder.getFiles()
      for i in [0...removeFiles.length]
        if removeFiles[i].name.startsWith(documentName) && removeFiles[i].name.endsWith('.png')
          removeFiles[i].remove()
      @baseFolder.remove()
    @baseFolder.create()
    targets = @allLayers(document)
    snapShotId = Util.takeSnapshot(document)
    for target in targets
      target.visible = true
      @outputLayer(document, target)
      Util.revertToSnapshot(document, snapShotId)
  # Collect all exportable art layers: text layers are hidden (they are
  # not exported as images), and every candidate is hidden so it can be
  # shown alone during export.
  allLayers: (root) ->
    for layer in root.layers when layer.kind == LayerKind.TEXT
      layer.visible = false
    list = for layer in root.layers when layer.visible
      if layer.typename == 'ArtLayer'
        layer.visible = false
        layer
      else
        @allLayers(layer)
    Array.prototype.concat.apply([], list) # list.flatten()
  # Save one layer as PNG: move it to the origin, shrink the canvas to
  # its bounds, trim transparency, then export via Save-for-Web.
  outputLayer: (doc, layer) ->
    if !layer.isBackgroundLayer
      layer.translate(-layer.bounds[0], -layer.bounds[1])
      doc.resizeCanvas(layer.bounds[2] - layer.bounds[0], layer.bounds[3] - layer.bounds[1], AnchorPosition.TOPLEFT)
    doc.trim(TrimType.TRANSPARENT)
    layer.opacity = 100.0
    fileName = Util.layerToImageName(layer)
    opt = Util.parseOption(layer.name.split("@")[1])
    # Warn on duplicate image names (message is user-facing, Japanese).
    if fileName in fileNames
      alert("#{fileName}と同名のレイヤーが存在します。レイヤー名を変更してください。")
    if opt['slice'] == 'false'
      fileName += "-noslice"
    fileNames.push(fileName)
    saveFile = new File("#{@baseFolder.fsName}/#{fileName}.png")
    options = new ExportOptionsSaveForWeb()
    options.format = SaveDocumentType.PNG
    options.PNG8 = false
    options.optimized = true
    options.interlaced = false
    doc.exportDocument(saveFile, ExportType.SAVEFORWEB, options)
class Util
  # Write `text` to `filePath` as UTF-8.
  @saveText: (filePath, text) ->
    file = File(filePath)
    file.encoding = "UTF8"
    file.open("w", "TEXT")
    file.write(text)
    file.close()
  # Build the exported PNG base name for a layer from its ancestor chain,
  # URI-encoding and stripping '%' so the result is filesystem-safe.
  @layerToImageName: (layer) ->
    encodeURI(Util.layerToImageNameLoop(layer)).replace(/%/g, '')
  # Recursive part: "<parent>_<name>" with the '@options' suffix removed.
  # NOTE(review): this recurses via layerToImageName (not ...Loop), so
  # parent segments are encoded/stripped once per nesting level; also
  # replace('_','') and replace(' ','-') only replace the FIRST
  # occurrence — confirm both behaviors are intended before changing.
  @layerToImageNameLoop: (layer) ->
    return "" if layer instanceof Document
    image = Util.layerToImageName(layer.parent)
    imageName = image
    if imageName != ""
      imageName = imageName + "_"
    imageName + layer.name.split("@")[0].replace('_', '').replace(' ', '-').toLowerCase()
@getLastSnapshotID: (doc) ->
hsObj = doc.historyStates
hsLength = hsObj.length
for i in [hsLength-1 .. -1]
if hsObj[i].snapshot
return i
  # Create a history snapshot of the current state ("Mk " of class SnpS
  # from the current history state) and return its index.
  @takeSnapshot: (doc) ->
    desc153 = new ActionDescriptor()
    ref119 = new ActionReference()
    ref119.putClass(charIDToTypeID("SnpS"))
    desc153.putReference(charIDToTypeID("null"), ref119 )
    ref120 = new ActionReference()
    ref120.putProperty(charIDToTypeID("HstS"), charIDToTypeID("CrnH") )
    desc153.putReference(charIDToTypeID("From"), ref120 )
    executeAction(charIDToTypeID("Mk "), desc153, DialogModes.NO )
    return Util.getLastSnapshotID(doc)
  # Roll the document back to a previously taken snapshot index.
  @revertToSnapshot: (doc, snapshotID) ->
    doc.activeHistoryState = doc.historyStates[snapshotID]
  # True when `layer` has a stroke (frameFX) effect that is both enabled
  # and whose layer-effects visibility is on.
  @hasStroke: (doc, layer) ->
    doc.activeLayer = layer
    res = false
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    hasFX = executeActionGet(ref).hasKey(stringIDToTypeID('layerEffects'))
    if hasFX
      hasStroke = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).hasKey(stringIDToTypeID('frameFX'))
      if hasStroke
        desc1 = executeActionGet(ref)
        desc2 = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
        if desc1.getBoolean(stringIDToTypeID('layerFXVisible')) && desc2.getBoolean(stringIDToTypeID('enabled'))
          res = true
    return res
  # Stroke width of the layer's frameFX effect. Only meaningful after
  # hasStroke() returned true (throws otherwise).
  @getStrokeSize: (doc, layer) ->
    doc.activeLayer = layer
    ref = new ActionReference()
    ref.putEnumerated(charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt"))
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
    return desc.getUnitDoubleValue (stringIDToTypeID('size'))
  # Stroke color of the layer's frameFX effect as a SolidColor.
  @getStrokeColor: (doc, layer) ->
    doc.activeLayer = layer
    ref = new ActionReference()
    ref.putEnumerated(charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt"))
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('layerEffects')).getObjectValue(stringIDToTypeID('frameFX'))
    return Util.getColorFromDescriptor(desc.getObjectValue(stringIDToTypeID("color")), typeIDToCharID(desc.getClass(stringIDToTypeID("color"))))
@getColorFromDescriptor: (colorDesc, keyClass) ->
colorObject = new SolidColor()
if keyClass == "PI:KEY:<KEY>END_PI"
colorObject.grey.grey = color.getDouble(charIDToTypeID('Gry '))
if keyClass == "PI:KEY:<KEY>END_PI"
colorObject.rgb.red = colorDesc.getDouble(charIDToTypeID('Rd '))
colorObject.rgb.green = colorDesc.getDouble(charIDToTypeID('Grn '))
colorObject.rgb.blue = colorDesc.getDouble(charIDToTypeID('Bl '))
if keyClass == "PI:KEY:<KEY>END_PI"
colorObject.cmyk.cyan = colorDesc.getDouble(charIDToTypeID('Cyn '))
colorObject.cmyk.magenta = colorDesc.getDouble(charIDToTypeID('Mgnt'))
colorObject.cmyk.yellow = colorDesc.getDouble(charIDToTypeID('Ylw '))
colorObject.cmyk.black = colorDesc.getDouble(charIDToTypeID('Blck'))
if keyClass == "PI:KEY:<KEY>END_PI"
colorObject.lab.l = colorDesc.getDouble(charIDToTypeID('Lmnc'))
colorObject.lab.a = colorDesc.getDouble(charIDToTypeID('A '))
colorObject.lab.b = colorDesc.getDouble(charIDToTypeID('B '))
return colorObject
  # Deselect all layers in the active document ("selectNoLayers").
  @deselectLayers: ->
    desc01 = new ActionDescriptor()
    ref01 = new ActionReference()
    ref01.putEnumerated( charIDToTypeID('Lyr '), charIDToTypeID('Ordn'), charIDToTypeID('Trgt') )
    desc01.putReference( charIDToTypeID('null'), ref01 )
    executeAction( stringIDToTypeID('selectNoLayers'), desc01, DialogModes.NO )
  # Load the active layer's transparency (its opaque pixels) as the
  # current selection (set fsel to the Trsp channel).
  @selectTransparency: ->
    idChnl = charIDToTypeID( "Chnl" )
    actionSelect = new ActionReference()
    actionSelect.putProperty( idChnl, charIDToTypeID( "fsel" ) )
    actionTransparent = new ActionReference()
    actionTransparent.putEnumerated( idChnl, idChnl, charIDToTypeID( "Trsp" ) )
    actionDesc = new ActionDescriptor()
    actionDesc.putReference( charIDToTypeID( "null" ), actionSelect )
    actionDesc.putReference( charIDToTypeID( "T " ), actionTransparent )
    executeAction( charIDToTypeID( "setd" ), actionDesc, DialogModes.NO )
  # Measure a text item's box: its position plus the transform-scaled
  # right/bottom bounds from the textKey descriptor (rounded px).
  @getTextExtents: (text_item) ->
    app.activeDocument.activeLayer = text_item.parent
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
    bounds = desc.getObjectValue(stringIDToTypeID('bounds'))
    width = bounds.getUnitDoubleValue (stringIDToTypeID('right'))
    height = bounds.getUnitDoubleValue (stringIDToTypeID('bottom'))
    x_scale = 1
    y_scale = 1
    if desc.hasKey(stringIDToTypeID('transform'))
      transform = desc.getObjectValue(stringIDToTypeID('transform'))
      x_scale = transform.getUnitDoubleValue (stringIDToTypeID('xx'))
      y_scale = transform.getUnitDoubleValue (stringIDToTypeID('yy'))
    return { x:Math.round(text_item.position[0]), y:Math.round(text_item.position[1]) , width:Math.round(width*x_scale), height:Math.round(height*y_scale) }
  # Vertical scale factor (transform 'yy') of a text item; 1 when the
  # layer has no transform. Mirrors getTextExtents' scale lookup.
  @getTextYScale: (text_item) ->
    app.activeDocument.activeLayer = text_item.parent
    ref = new ActionReference()
    ref.putEnumerated( charIDToTypeID("Lyr "), charIDToTypeID("Ordn"), charIDToTypeID("Trgt") )
    desc = executeActionGet(ref).getObjectValue(stringIDToTypeID('textKey'))
    bounds = desc.getObjectValue(stringIDToTypeID('bounds'))
    width = bounds.getUnitDoubleValue (stringIDToTypeID('right'))
    height = bounds.getUnitDoubleValue (stringIDToTypeID('bottom'))
    x_scale = 1
    y_scale = 1
    if desc.hasKey(stringIDToTypeID('transform'))
      transform = desc.getObjectValue(stringIDToTypeID('transform'))
      x_scale = transform.getUnitDoubleValue (stringIDToTypeID('xx'))
      y_scale = transform.getUnitDoubleValue (stringIDToTypeID('yy'))
    return y_scale
  # Rasterize the given layer's layer style (effects baked into pixels)
  # via "rasterizeLayer" with rasterizeItem = layerStyle.
  @rasterizeLayerStyle: (layer) ->
    app.activeDocument.activeLayer = layer
    idrasterizeLayer = stringIDToTypeID("rasterizeLayer")
    desc5 = new ActionDescriptor()
    idnull = charIDToTypeID("null")
    ref4 = new ActionReference()
    idLyr = charIDToTypeID("Lyr ")
    idOrdn = charIDToTypeID("Ordn")
    idTrgt = charIDToTypeID("Trgt")
    ref4.putEnumerated(idLyr,idOrdn,idTrgt)
    desc5.putReference(idnull,ref4)
    idWhat = charIDToTypeID("What")
    idrasterizeItem = stringIDToTypeID("rasterizeItem")
    idlayerStyle = stringIDToTypeID("layerStyle")
    desc5.putEnumerated(idWhat,idrasterizeItem,idlayerStyle)
    executeAction(idrasterizeLayer,desc5,DialogModes.NO)
  # Bake the layer's masks into its pixels: vector mask first
  # (rasterize, select, rasterize-mask, apply), then any remaining
  # raster layer mask (rasterize, select, apply).
  @rasterizeLayerMask: (layer) ->
    app.activeDocument.activeLayer = layer
    if Util.hasVectorMask()
      Util.rasterizeLayer()
      Util.selectVectorMask()
      Util.rasterizeVectorMask()
      Util.applyLayerMask()
    if Util.hasLayerMask()
      Util.rasterizeLayer()
      Util.selectLayerMask()
      Util.applyLayerMask()
  # True when the active layer has a vector mask (queried via the Path
  # class). Any ActionManager error is treated as "no mask".
  # NOTE(review): the 'KPI:KEY:...' literal below is a redaction artifact
  # in this copy (originally the 4-char path-kind key); restore from
  # upstream before relying on this method.
  @hasVectorMask: ->
    hasVectorMask = false
    try
      ref = new ActionReference()
      keyVectorMaskEnabled = app.stringIDToTypeID( 'vectorMask' )
      keyKind = app.charIDToTypeID( 'KPI:KEY:<KEY>END_PI ' )
      ref.putEnumerated( app.charIDToTypeID( 'Path' ), app.charIDToTypeID( 'Ordn' ), keyVectorMaskEnabled )
      desc = executeActionGet( ref )
      if desc.hasKey( keyKind )
        kindValue = desc.getEnumerationValue( keyKind )
        if (kindValue == keyVectorMaskEnabled)
          hasVectorMask = true
    catch e
      hasVectorMask = false
    return hasVectorMask
  # True when the active layer has a raster layer mask (UsrM property).
  # Errors are treated as "no mask".
  @hasLayerMask: ->
    hasLayerMask = false
    try
      ref = new ActionReference()
      keyUserMaskEnabled = app.charIDToTypeID( 'UsrM' )
      ref.putProperty( app.charIDToTypeID( 'Prpr' ), keyUserMaskEnabled )
      ref.putEnumerated( app.charIDToTypeID( 'Lyr ' ), app.charIDToTypeID( 'Ordn' ), app.charIDToTypeID( 'Trgt' ) )
      desc = executeActionGet( ref )
      if desc.hasKey( keyUserMaskEnabled )
        hasLayerMask = true
    catch e
      hasLayerMask = false
    return hasLayerMask
  # Rasterize the current target layer; failures (e.g. already a raster
  # layer) are silently ignored.
  @rasterizeLayer: ->
    try
      id1242 = stringIDToTypeID( "rasterizeLayer" )
      desc245 = new ActionDescriptor()
      id1243 = charIDToTypeID( "null" )
      ref184 = new ActionReference()
      id1244 = charIDToTypeID( "Lyr " )
      id1245 = charIDToTypeID( "Ordn" )
      id1246 = charIDToTypeID( "Trgt" )
      ref184.putEnumerated( id1244, id1245, id1246 )
      desc245.putReference( id1243, ref184 )
      executeAction( id1242, desc245, DialogModes.NO )
    catch
  # Select the current layer's vector mask path via ActionManager;
  # silently ignores layers without one.
  @selectVectorMask: ->
    try
      id55 = charIDToTypeID( "slct" )
      desc15 = new ActionDescriptor()
      id56 = charIDToTypeID( "null" )
      ref13 = new ActionReference()
      id57 = charIDToTypeID( "Path" )
      id58 = charIDToTypeID( "Path" )
      id59 = stringIDToTypeID( "vectorMask" )
      ref13.putEnumerated( id57, id58, id59 )
      id60 = charIDToTypeID( "Lyr " )
      id61 = charIDToTypeID( "Ordn" )
      id62 = charIDToTypeID( "Trgt" )
      ref13.putEnumerated( id60, id61, id62 )
      desc15.putReference( id56, ref13 )
      executeAction( id55, desc15, DialogModes.NO )
    catch e
  # Select the current layer's raster (layer) mask channel without
  # toggling its visibility (MkVs=false).
  @selectLayerMask: ->
    try
      id759 = charIDToTypeID( "slct" )
      desc153 = new ActionDescriptor()
      id760 = charIDToTypeID( "null" )
      ref92 = new ActionReference()
      id761 = charIDToTypeID( "Chnl" )
      id762 = charIDToTypeID( "Chnl" )
      id763 = charIDToTypeID( "Msk " )
      ref92.putEnumerated( id761, id762, id763 )
      desc153.putReference( id760, ref92 )
      id764 = charIDToTypeID( "MkVs" )
      desc153.putBoolean( id764, false )
      executeAction( id759, desc153, DialogModes.NO )
    catch e
  # Rasterize the current layer's vector mask ("rasterizeItem" = vectorMask).
  @rasterizeVectorMask: ->
    try
      id488 = stringIDToTypeID( "rasterizeLayer" )
      desc44 = new ActionDescriptor()
      id489 = charIDToTypeID( "null" )
      ref29 = new ActionReference()
      id490 = charIDToTypeID( "Lyr " )
      id491 = charIDToTypeID( "Ordn" )
      id492 = charIDToTypeID( "Trgt" )
      ref29.putEnumerated( id490, id491, id492 )
      desc44.putReference( id489, ref29 )
      id493 = charIDToTypeID( "What" )
      id494 = stringIDToTypeID( "rasterizeItem" )
      id495 = stringIDToTypeID( "vectorMask" )
      desc44.putEnumerated( id493, id494, id495 )
      executeAction( id488, desc44, DialogModes.NO )
    catch e
  # Delete the selected mask channel while applying it to the layer
  # pixels ("Dlt " with Aply=true == Apply Layer Mask).
  @applyLayerMask: ->
    try
      id765 = charIDToTypeID( "Dlt " )
      desc154 = new ActionDescriptor()
      id766 = charIDToTypeID( "null" )
      ref93 = new ActionReference()
      id767 = charIDToTypeID( "Chnl" )
      id768 = charIDToTypeID( "Ordn" )
      id769 = charIDToTypeID( "Trgt" )
      ref93.putEnumerated( id767, id768, id769 )
      desc154.putReference( id766, ref93 )
      id770 = charIDToTypeID( "Aply" )
      desc154.putBoolean( id770, true )
      executeAction( id765, desc154, DialogModes.NO )
    catch e
  # Merge the given group layer down into a single layer ("Mrg2").
  @mergeGroup: (layer) ->
    app.activeDocument.activeLayer = layer
    try
      idMrgtwo = charIDToTypeID( "Mrg2" )
      desc15 = new ActionDescriptor()
      executeAction( idMrgtwo, desc15, DialogModes.NO )
    catch e
  # Parse a layer-name option string like "a=1,b" into {a: '1', b: 'true'}.
  # Keys and values are lower-cased; a bare key defaults to 'true'.
  @parseOption: (text) ->
    return {} unless text
    opt = {}
    for optText in text.split(",")
      elements = optText.split("=")
      elements[1] = 'true' if elements.length == 1
      opt[elements[0].toLowerCase()] = elements[1].toLowerCase()
    return opt
# Polyfills for ExtendScript's pre-ES5 String: prefix test, suffix test,
# and whitespace trim.
String.prototype.startsWith = (str) ->
  @slice(0, str.length) is str
String.prototype.endsWith = (suffix) ->
  @indexOf(suffix, @length - suffix.length) isnt -1
String.prototype.strip = ->
  if String::trim? then @trim() else @replace(/^\s+|\s+$/g, "")
# Force ruler/type units to pixels so all measurements are px-based.
setup = ->
  preferences.rulerUnits = Units.PIXELS
  preferences.typeUnits = TypeUnits.PIXELS
setup()
# Kick off the exporter.
baum = new Baum()
baum.run()
|
[
{
"context": "restaurants = [\n {\n name: \"Denver Biscuit Co\"\n address: \"5412 S. Broadway St\"\n coordinat",
"end": 48,
"score": 0.999803364276886,
"start": 31,
"tag": "NAME",
"value": "Denver Biscuit Co"
},
{
"context": " close: \"2300\"\n recommendation:\n who: \"corwin\"\n what: \"The Franklin\"\n why: \"Fresh mad",
"end": 235,
"score": 0.9991617202758789,
"start": 229,
"tag": "NAME",
"value": "corwin"
},
{
"context": ", what more is there to want?\"\n }\n {\n name: \"Illegal Pete's\"\n address: \"1530 16th St\"\n coordinates:\n ",
"end": 374,
"score": 0.9998432397842407,
"start": 360,
"tag": "NAME",
"value": "Illegal Pete's"
},
{
"context": " close: \"2400\"\n recommendation:\n who: \"joshua\"\n what: \"A giant burrito\"\n why: \"Authen",
"end": 554,
"score": 0.9981684684753418,
"start": 548,
"tag": "NAME",
"value": "joshua"
},
{
"context": " Denver. The line moves fast.\"\n }\n {\n name: \"Wynkoop Brewery\"\n address: \"1634 18th St\"\n coordinates:\n ",
"end": 676,
"score": 0.9998922944068909,
"start": 661,
"tag": "NAME",
"value": "Wynkoop Brewery"
},
{
"context": "en: \"1100\"\n close: \"0200\"\n }\n {\n name: \"Ace\"\n address: \"501 E 17th Ave\"\n coordinates:\n ",
"end": 840,
"score": 0.9998729228973389,
"start": 837,
"tag": "NAME",
"value": "Ace"
},
{
"context": "close: \"2400\"\n recommendation:\n who: \"elliot\"\n why: \"It's a hip spot in Uptown with a wel",
"end": 1022,
"score": 0.5573796629905701,
"start": 1020,
"tag": "NAME",
"value": "ot"
},
{
"context": "t bored, play some ping pong.\"\n }\n {\n name: \"D Bar\"\n address: \"19th & Pennsylvania St\"\n coordi",
"end": 1178,
"score": 0.9998713731765747,
"start": 1173,
"tag": "NAME",
"value": "D Bar"
},
{
"context": "h churros with chocolate dip.\"\n }\n {\n name: \"Denver Pizza Company\"\n address: \"309 W 11th Ave\"\n coordinates:\n ",
"end": 1480,
"score": 0.9891992807388306,
"start": 1460,
"tag": "NAME",
"value": "Denver Pizza Company"
},
{
"context": " close: \"2200\"\n recommendation:\n who: \"amy\"\n what: \"Get the 5280 pizza. Soooo good.\"\n ",
"end": 1659,
"score": 0.8243482112884521,
"start": 1656,
"tag": "NAME",
"value": "amy"
},
{
"context": " close: \"1400\"\n recommendation:\n who: \"joshua\"\n why: \"Homestyle vegetarian spot with quite",
"end": 1919,
"score": 0.9866107106208801,
"start": 1913,
"tag": "NAME",
"value": "joshua"
},
{
"context": "uite a few vegan options too.\"\n }\n {\n name: \"Sams No. 3\"\n address: \"1500 Curtis Street\"\n coordinate",
"end": 2025,
"score": 0.9273069500923157,
"start": 2015,
"tag": "NAME",
"value": "Sams No. 3"
},
{
"context": " close: \"2300\"\n recommendation:\n who: \"andrew\"\n what: \"Everything\"\n why: \"HUGE portio",
"end": 2211,
"score": 0.9811984300613403,
"start": 2205,
"tag": "NAME",
"value": "andrew"
},
{
"context": "bly the best diner in Denver.\"\n }\n {\n name: \"Larkburger\"\n address: \"1617 California St\"\n coordinate",
"end": 2370,
"score": 0.999870777130127,
"start": 2360,
"tag": "NAME",
"value": "Larkburger"
},
{
"context": " close: \"1700\"\n recommendation:\n who: \"andrew\"\n what: \"Parmesan Truffle Fries\"\n why: ",
"end": 2556,
"score": 0.9625583291053772,
"start": 2550,
"tag": "NAME",
"value": "andrew"
},
{
"context": "l ingredients and great meat.\"\n }\n {\n name: \"Linger\"\n address: \"2030 W 30th Ave\"\n coordinates:\n",
"end": 2708,
"score": 0.9997830986976624,
"start": 2702,
"tag": "NAME",
"value": "Linger"
},
{
"context": " close: \"1700\"\n recommendation:\n who: \"rachel\"\n why: \"Old mortuary that is now one of the ",
"end": 2891,
"score": 0.9976816177368164,
"start": 2885,
"tag": "NAME",
"value": "rachel"
},
{
"context": "e best restaurants in Denver.\"\n }\n {\n name: \"Hops & Pie\"\n address: \"3920 Tennyson St\"\n coordinates:",
"end": 2999,
"score": 0.9997633099555969,
"start": 2989,
"tag": "NAME",
"value": "Hops & Pie"
},
{
"context": " close: \"2300\"\n recommendation:\n who: \"andrew\"\n what: \"Slice of the day\"\n why: \"Best ",
"end": 3183,
"score": 0.9856456518173218,
"start": 3177,
"tag": "NAME",
"value": "andrew"
},
{
"context": "zza slices that change daily.\"\n }\n {\n name: \"Biju’s Little Curry Shop\"\n address: \"1441 26th St\"\n coordinates:\n ",
"end": 3353,
"score": 0.9531182050704956,
"start": 3329,
"tag": "NAME",
"value": "Biju’s Little Curry Shop"
},
{
"context": " close: \"2100\"\n recommendation:\n who: \"andrew\"\n why: \"Fantastic curry by one of the top pr",
"end": 3533,
"score": 0.967731237411499,
"start": 3527,
"tag": "NAME",
"value": "andrew"
},
{
"context": "althy and absurdly delicious.\"\n }\n {\n name: \"Zoe Ma Ma’s\"\n address: \"1625 Wynkoop St\"\n coordinates:\n",
"end": 3676,
"score": 0.9998007416725159,
"start": 3665,
"tag": "NAME",
"value": "Zoe Ma Ma’s"
},
{
"context": " close: \"2000\"\n recommendation:\n who: \"andrew\"\n what: \"Za Jiang Mian\"\n why: \"Some of ",
"end": 3859,
"score": 0.9889649152755737,
"start": 3853,
"tag": "NAME",
"value": "andrew"
},
{
"context": " recommendation:\n who: \"andrew\"\n what: \"Za Jiang Mian\"\n why: \"Some of the best authentic Chinese f",
"end": 3887,
"score": 0.9934276342391968,
"start": 3874,
"tag": "NAME",
"value": "Za Jiang Mian"
},
{
"context": " right next to Union Station!\"\n }\n {\n name: \"Kitchen Next Door\"\n address: \"1701 Wynkoop St\"\n ",
"end": 4006,
"score": 0.5838943719863892,
"start": 4005,
"tag": "NAME",
"value": "K"
},
{
"context": " close: \"2300\"\n recommendation:\n who: \"andrew\"\n what: \"Salmon Salad Sandwich & Garlic Smas",
"end": 4205,
"score": 0.9677767157554626,
"start": 4199,
"tag": "NAME",
"value": "andrew"
},
{
"context": "ices, and solid beer choices.\"\n }\n {\n name: \"Avanti\"\n address: \"3200 Pecos St\"\n coordinates:\n ",
"end": 4367,
"score": 0.9994615316390991,
"start": 4361,
"tag": "NAME",
"value": "Avanti"
},
{
"context": " close: \"0200\"\n recommendation:\n who: \"corwin\"\n what: \"Arepas\"\n why: \"Good street foo",
"end": 4548,
"score": 0.9981441497802734,
"start": 4542,
"tag": "NAME",
"value": "corwin"
},
{
"context": " recommendation:\n who: \"corwin\"\n what: \"Arepas\"\n why: \"Good street food in a trendy envi",
"end": 4566,
"score": 0.6637327075004578,
"start": 4563,
"tag": "NAME",
"value": "Are"
},
{
"context": " of the best views in Denver.\"\n }\n {\n name: \"Gaetano’s Italian Restaurant\"\n address: \"3760 Tejon St",
"end": 4685,
"score": 0.9030148386955261,
"start": 4678,
"tag": "NAME",
"value": "Gaetano"
},
{
"context": "best views in Denver.\"\n }\n {\n name: \"Gaetano’s Italian Restaurant\"\n address: \"3760 Tejon St\"\n",
"end": 4687,
"score": 0.5531919598579407,
"start": 4686,
"tag": "NAME",
"value": "s"
},
{
"context": " close: \"2400\"\n recommendation:\n who: \"andrew\"\n what: \"Lasagne\"\n why: \"The hip Highla",
"end": 4887,
"score": 0.9633985161781311,
"start": 4881,
"tag": "NAME",
"value": "andrew"
},
{
"context": "ral hub for their operations.\"\n }\n {\n name: \"Rosenberg’s Bagels\"\n address: \"725 E 26th Ave\"\n coordinates:\n ",
"end": 5100,
"score": 0.9994868040084839,
"start": 5082,
"tag": "NAME",
"value": "Rosenberg’s Bagels"
},
{
"context": " close: \"1500\"\n recommendation:\n who: \"andrew\"\n what: \"Taylor Pork Roll, Cheddar Cheese, F",
"end": 5282,
"score": 0.9767483472824097,
"start": 5276,
"tag": "NAME",
"value": "andrew"
},
{
"context": "ges the water to match NYC’s.\"\n }\n {\n name: \"Brava! Pizzeria della Strada\"\n address: \"1601 Arapaho",
"end": 5487,
"score": 0.8869457244873047,
"start": 5482,
"tag": "NAME",
"value": "Brava"
},
{
"context": "e water to match NYC’s.\"\n }\n {\n name: \"Brava! Pizzeria della Strada\"\n address: \"1601 Arapahoe St\"\n coordinates:",
"end": 5510,
"score": 0.9930181503295898,
"start": 5489,
"tag": "NAME",
"value": "Pizzeria della Strada"
},
{
"context": " close: \"1500\"\n recommendation:\n who: \"joshua\"\n what: \"Fun Guy pizza with truffle oil\"\n ",
"end": 5694,
"score": 0.9637351036071777,
"start": 5688,
"tag": "NAME",
"value": "joshua"
},
{
"context": " some great wood-fired pizza.\"\n }\n {\n name: \"Snooze\"\n address: \"2262 Larimer St\"\n coordinates:\n",
"end": 5837,
"score": 0.9997697472572327,
"start": 5831,
"tag": "NAME",
"value": "Snooze"
},
{
"context": " close: \"1430\"\n recommendation:\n who: \"joshua\"\n why: \"A hipster favorite. There's always a",
"end": 6020,
"score": 0.8454484939575195,
"start": 6014,
"tag": "NAME",
"value": "joshua"
},
{
"context": " always a crowd, so go early.\"\n }\n {\n name: \"Biker Jim's Gourmet Dogs\"\n address: \"2148 Larimer St\"\n coordinates:\n",
"end": 6135,
"score": 0.998500406742096,
"start": 6111,
"tag": "NAME",
"value": "Biker Jim's Gourmet Dogs"
}
] | source/assets/javascripts/picks/restaurants.coffee | isabella232/summit-guide-2015 | 1 | restaurants = [
{
name: "Denver Biscuit Co"
address: "5412 S. Broadway St"
coordinates:
lat: 39.794618
long: -104.987326
hours:
open: "0900"
close: "2300"
recommendation:
who: "corwin"
what: "The Franklin"
why: "Fresh made biscuits, fried chicken, what more is there to want?"
}
{
name: "Illegal Pete's"
address: "1530 16th St"
coordinates:
lat: 39.750870
long: -104.999999
hours:
open: "0700"
close: "2400"
recommendation:
who: "joshua"
what: "A giant burrito"
why: "Authentically Denver. The line moves fast."
}
{
name: "Wynkoop Brewery"
address: "1634 18th St"
coordinates:
lat: 39.753394
long: -104.998427
hours:
open: "1100"
close: "0200"
}
{
name: "Ace"
address: "501 E 17th Ave"
coordinates:
lat: 39.743502
long: -104.980749
hours:
open: "1100"
close: "2400"
recommendation:
who: "elliot"
why: "It's a hip spot in Uptown with a welcoming atmosphere. Eat, drink, laugh, and if you get bored, play some ping pong."
}
{
name: "D Bar"
address: "19th & Pennsylvania St"
coordinates:
lat: 39.746184
long: -104.981086
hours:
open: "1100"
close: "2200"
recommendation:
who: "amy"
what: "Amazing Desserts! Try the fresh churros with chocolate dip."
}
{
name: "Denver Pizza Company"
address: "309 W 11th Ave"
coordinates:
lat: 39.734038
long: -104.992046
hours:
open: "1100"
close: "2200"
recommendation:
who: "amy"
what: "Get the 5280 pizza. Soooo good."
}
{
name: "City O City"
address: "206 E 13th Ave"
coordinates:
lat: 39.736647
long: -104.984549
hours:
open: "0700"
close: "1400"
recommendation:
who: "joshua"
why: "Homestyle vegetarian spot with quite a few vegan options too."
}
{
name: "Sams No. 3"
address: "1500 Curtis Street"
coordinates:
lat: 39.736647
long: -104.984549
hours:
open: "1730"
close: "2300"
recommendation:
who: "andrew"
what: "Everything"
why: "HUGE portions. Featured on Diners Drive-ins and Dives. Probably the best diner in Denver."
}
{
name: "Larkburger"
address: "1617 California St"
coordinates:
lat: 39.745693
long: -104.992055
hours:
open: "1100"
close: "1700"
recommendation:
who: "andrew"
what: "Parmesan Truffle Fries"
why: "A burger that’s very Colorado with all natural ingredients and great meat."
}
{
name: "Linger"
address: "2030 W 30th Ave"
coordinates:
lat: 39.759456
long: -105.011358
hours:
open: "1100"
close: "1700"
recommendation:
who: "rachel"
why: "Old mortuary that is now one of the best restaurants in Denver."
}
{
name: "Hops & Pie"
address: "3920 Tennyson St"
coordinates:
lat: 39.771347
long: -105.043775
hours:
open: "1130"
close: "2300"
recommendation:
who: "andrew"
what: "Slice of the day"
why: "Best place to try rare craft beers and eat crazy pizza slices that change daily."
}
{
name: "Biju’s Little Curry Shop"
address: "1441 26th St"
coordinates:
lat: 39.759525
long: -104.986551
hours:
open: "1100"
close: "2100"
recommendation:
who: "andrew"
why: "Fantastic curry by one of the top pro cycling chefs in the world. Healthy and absurdly delicious."
}
{
name: "Zoe Ma Ma’s"
address: "1625 Wynkoop St"
coordinates:
lat: 39.752168
long: -105.000720
hours:
open: "1100"
close: "2000"
recommendation:
who: "andrew"
what: "Za Jiang Mian"
why: "Some of the best authentic Chinese food in Denver, and right next to Union Station!"
}
{
name: "Kitchen Next Door"
address: "1701 Wynkoop St"
coordinates:
lat: 39.753122
long: -105.000145
hours:
open: "1100"
close: "2300"
recommendation:
who: "andrew"
what: "Salmon Salad Sandwich & Garlic Smashersn"
why: "Very Colorado. Healthy food, great prices, and solid beer choices."
}
{
name: "Avanti"
address: "3200 Pecos St"
coordinates:
lat: 39.762246
long: -105.006119
hours:
open: "1100"
close: "0200"
recommendation:
who: "corwin"
what: "Arepas"
why: "Good street food in a trendy environemnt. One of the best views in Denver."
}
{
name: "Gaetano’s Italian Restaurant"
address: "3760 Tejon St"
coordinates:
lat: 39.769083
long: -105.010928
hours:
open: "1100"
close: "2400"
recommendation:
who: "andrew"
what: "Lasagne"
why: "The hip Highlands neighborhood used to be the home of the Italian mafia in Denver. This restaurant was a central hub for their operations."
}
{
name: "Rosenberg’s Bagels"
address: "725 E 26th Ave"
coordinates:
lat: 39.754811
long: -104.977389
hours:
open: "0600"
close: "1500"
recommendation:
who: "andrew"
what: "Taylor Pork Roll, Cheddar Cheese, Fried Egg, Everything Bagel Sandwich"
why: "The best bagels outside of NYC. He molecularly changes the water to match NYC’s."
}
{
name: "Brava! Pizzeria della Strada"
address: "1601 Arapahoe St"
coordinates:
lat: 39.748099
long: -104.995687
hours:
open: "1100"
close: "1500"
recommendation:
who: "joshua"
what: "Fun Guy pizza with truffle oil"
why: "Get some fresh air and enjoy some great wood-fired pizza."
}
{
name: "Snooze"
address: "2262 Larimer St"
coordinates:
lat: 39.755494
long: -104.988906
hours:
open: "0630"
close: "1430"
recommendation:
who: "joshua"
why: "A hipster favorite. There's always a crowd, so go early."
}
{
name: "Biker Jim's Gourmet Dogs"
address: "2148 Larimer St"
coordinates:
lat: 39.7544491
long: -104.9902803
hours:
open: "1100"
close: "2000"
recommendation:
who: "elliot"
why: "A Denver staple. Put on your culinary adventurer hat and try some exotic offerings like rattlesnake, pheasant, and elk."
}
]
localStorage.setItem("restaurants", JSON.stringify(restaurants))
| 49499 | restaurants = [
{
name: "<NAME>"
address: "5412 S. Broadway St"
coordinates:
lat: 39.794618
long: -104.987326
hours:
open: "0900"
close: "2300"
recommendation:
who: "<NAME>"
what: "The Franklin"
why: "Fresh made biscuits, fried chicken, what more is there to want?"
}
{
name: "<NAME>"
address: "1530 16th St"
coordinates:
lat: 39.750870
long: -104.999999
hours:
open: "0700"
close: "2400"
recommendation:
who: "<NAME>"
what: "A giant burrito"
why: "Authentically Denver. The line moves fast."
}
{
name: "<NAME>"
address: "1634 18th St"
coordinates:
lat: 39.753394
long: -104.998427
hours:
open: "1100"
close: "0200"
}
{
name: "<NAME>"
address: "501 E 17th Ave"
coordinates:
lat: 39.743502
long: -104.980749
hours:
open: "1100"
close: "2400"
recommendation:
who: "elli<NAME>"
why: "It's a hip spot in Uptown with a welcoming atmosphere. Eat, drink, laugh, and if you get bored, play some ping pong."
}
{
name: "<NAME>"
address: "19th & Pennsylvania St"
coordinates:
lat: 39.746184
long: -104.981086
hours:
open: "1100"
close: "2200"
recommendation:
who: "amy"
what: "Amazing Desserts! Try the fresh churros with chocolate dip."
}
{
name: "<NAME>"
address: "309 W 11th Ave"
coordinates:
lat: 39.734038
long: -104.992046
hours:
open: "1100"
close: "2200"
recommendation:
who: "<NAME>"
what: "Get the 5280 pizza. Soooo good."
}
{
name: "City O City"
address: "206 E 13th Ave"
coordinates:
lat: 39.736647
long: -104.984549
hours:
open: "0700"
close: "1400"
recommendation:
who: "<NAME>"
why: "Homestyle vegetarian spot with quite a few vegan options too."
}
{
name: "<NAME>"
address: "1500 Curtis Street"
coordinates:
lat: 39.736647
long: -104.984549
hours:
open: "1730"
close: "2300"
recommendation:
who: "<NAME>"
what: "Everything"
why: "HUGE portions. Featured on Diners Drive-ins and Dives. Probably the best diner in Denver."
}
{
name: "<NAME>"
address: "1617 California St"
coordinates:
lat: 39.745693
long: -104.992055
hours:
open: "1100"
close: "1700"
recommendation:
who: "<NAME>"
what: "Parmesan Truffle Fries"
why: "A burger that’s very Colorado with all natural ingredients and great meat."
}
{
name: "<NAME>"
address: "2030 W 30th Ave"
coordinates:
lat: 39.759456
long: -105.011358
hours:
open: "1100"
close: "1700"
recommendation:
who: "<NAME>"
why: "Old mortuary that is now one of the best restaurants in Denver."
}
{
name: "<NAME>"
address: "3920 Tennyson St"
coordinates:
lat: 39.771347
long: -105.043775
hours:
open: "1130"
close: "2300"
recommendation:
who: "<NAME>"
what: "Slice of the day"
why: "Best place to try rare craft beers and eat crazy pizza slices that change daily."
}
{
name: "<NAME>"
address: "1441 26th St"
coordinates:
lat: 39.759525
long: -104.986551
hours:
open: "1100"
close: "2100"
recommendation:
who: "<NAME>"
why: "Fantastic curry by one of the top pro cycling chefs in the world. Healthy and absurdly delicious."
}
{
name: "<NAME>"
address: "1625 Wynkoop St"
coordinates:
lat: 39.752168
long: -105.000720
hours:
open: "1100"
close: "2000"
recommendation:
who: "<NAME>"
what: "<NAME>"
why: "Some of the best authentic Chinese food in Denver, and right next to Union Station!"
}
{
name: "<NAME>itchen Next Door"
address: "1701 Wynkoop St"
coordinates:
lat: 39.753122
long: -105.000145
hours:
open: "1100"
close: "2300"
recommendation:
who: "<NAME>"
what: "Salmon Salad Sandwich & Garlic Smashersn"
why: "Very Colorado. Healthy food, great prices, and solid beer choices."
}
{
name: "<NAME>"
address: "3200 Pecos St"
coordinates:
lat: 39.762246
long: -105.006119
hours:
open: "1100"
close: "0200"
recommendation:
who: "<NAME>"
what: "<NAME>pas"
why: "Good street food in a trendy environemnt. One of the best views in Denver."
}
{
name: "<NAME>’<NAME> Italian Restaurant"
address: "3760 Tejon St"
coordinates:
lat: 39.769083
long: -105.010928
hours:
open: "1100"
close: "2400"
recommendation:
who: "<NAME>"
what: "Lasagne"
why: "The hip Highlands neighborhood used to be the home of the Italian mafia in Denver. This restaurant was a central hub for their operations."
}
{
name: "<NAME>"
address: "725 E 26th Ave"
coordinates:
lat: 39.754811
long: -104.977389
hours:
open: "0600"
close: "1500"
recommendation:
who: "<NAME>"
what: "Taylor Pork Roll, Cheddar Cheese, Fried Egg, Everything Bagel Sandwich"
why: "The best bagels outside of NYC. He molecularly changes the water to match NYC’s."
}
{
name: "<NAME>! <NAME>"
address: "1601 Arapahoe St"
coordinates:
lat: 39.748099
long: -104.995687
hours:
open: "1100"
close: "1500"
recommendation:
who: "<NAME>"
what: "Fun Guy pizza with truffle oil"
why: "Get some fresh air and enjoy some great wood-fired pizza."
}
{
name: "<NAME>"
address: "2262 Larimer St"
coordinates:
lat: 39.755494
long: -104.988906
hours:
open: "0630"
close: "1430"
recommendation:
who: "<NAME>"
why: "A hipster favorite. There's always a crowd, so go early."
}
{
name: "<NAME>"
address: "2148 Larimer St"
coordinates:
lat: 39.7544491
long: -104.9902803
hours:
open: "1100"
close: "2000"
recommendation:
who: "elliot"
why: "A Denver staple. Put on your culinary adventurer hat and try some exotic offerings like rattlesnake, pheasant, and elk."
}
]
localStorage.setItem("restaurants", JSON.stringify(restaurants))
| true | restaurants = [
{
name: "PI:NAME:<NAME>END_PI"
address: "5412 S. Broadway St"
coordinates:
lat: 39.794618
long: -104.987326
hours:
open: "0900"
close: "2300"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "The Franklin"
why: "Fresh made biscuits, fried chicken, what more is there to want?"
}
{
name: "PI:NAME:<NAME>END_PI"
address: "1530 16th St"
coordinates:
lat: 39.750870
long: -104.999999
hours:
open: "0700"
close: "2400"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "A giant burrito"
why: "Authentically Denver. The line moves fast."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "1634 18th St"
coordinates:
lat: 39.753394
long: -104.998427
hours:
open: "1100"
close: "0200"
}
{
name: "PI:NAME:<NAME>END_PI"
address: "501 E 17th Ave"
coordinates:
lat: 39.743502
long: -104.980749
hours:
open: "1100"
close: "2400"
recommendation:
who: "elliPI:NAME:<NAME>END_PI"
why: "It's a hip spot in Uptown with a welcoming atmosphere. Eat, drink, laugh, and if you get bored, play some ping pong."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "19th & Pennsylvania St"
coordinates:
lat: 39.746184
long: -104.981086
hours:
open: "1100"
close: "2200"
recommendation:
who: "amy"
what: "Amazing Desserts! Try the fresh churros with chocolate dip."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "309 W 11th Ave"
coordinates:
lat: 39.734038
long: -104.992046
hours:
open: "1100"
close: "2200"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Get the 5280 pizza. Soooo good."
}
{
name: "City O City"
address: "206 E 13th Ave"
coordinates:
lat: 39.736647
long: -104.984549
hours:
open: "0700"
close: "1400"
recommendation:
who: "PI:NAME:<NAME>END_PI"
why: "Homestyle vegetarian spot with quite a few vegan options too."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "1500 Curtis Street"
coordinates:
lat: 39.736647
long: -104.984549
hours:
open: "1730"
close: "2300"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Everything"
why: "HUGE portions. Featured on Diners Drive-ins and Dives. Probably the best diner in Denver."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "1617 California St"
coordinates:
lat: 39.745693
long: -104.992055
hours:
open: "1100"
close: "1700"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Parmesan Truffle Fries"
why: "A burger that’s very Colorado with all natural ingredients and great meat."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "2030 W 30th Ave"
coordinates:
lat: 39.759456
long: -105.011358
hours:
open: "1100"
close: "1700"
recommendation:
who: "PI:NAME:<NAME>END_PI"
why: "Old mortuary that is now one of the best restaurants in Denver."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "3920 Tennyson St"
coordinates:
lat: 39.771347
long: -105.043775
hours:
open: "1130"
close: "2300"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Slice of the day"
why: "Best place to try rare craft beers and eat crazy pizza slices that change daily."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "1441 26th St"
coordinates:
lat: 39.759525
long: -104.986551
hours:
open: "1100"
close: "2100"
recommendation:
who: "PI:NAME:<NAME>END_PI"
why: "Fantastic curry by one of the top pro cycling chefs in the world. Healthy and absurdly delicious."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "1625 Wynkoop St"
coordinates:
lat: 39.752168
long: -105.000720
hours:
open: "1100"
close: "2000"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "PI:NAME:<NAME>END_PI"
why: "Some of the best authentic Chinese food in Denver, and right next to Union Station!"
}
{
name: "PI:NAME:<NAME>END_PIitchen Next Door"
address: "1701 Wynkoop St"
coordinates:
lat: 39.753122
long: -105.000145
hours:
open: "1100"
close: "2300"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Salmon Salad Sandwich & Garlic Smashersn"
why: "Very Colorado. Healthy food, great prices, and solid beer choices."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "3200 Pecos St"
coordinates:
lat: 39.762246
long: -105.006119
hours:
open: "1100"
close: "0200"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "PI:NAME:<NAME>END_PIpas"
why: "Good street food in a trendy environemnt. One of the best views in Denver."
}
{
name: "PI:NAME:<NAME>END_PI’PI:NAME:<NAME>END_PI Italian Restaurant"
address: "3760 Tejon St"
coordinates:
lat: 39.769083
long: -105.010928
hours:
open: "1100"
close: "2400"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Lasagne"
why: "The hip Highlands neighborhood used to be the home of the Italian mafia in Denver. This restaurant was a central hub for their operations."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "725 E 26th Ave"
coordinates:
lat: 39.754811
long: -104.977389
hours:
open: "0600"
close: "1500"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Taylor Pork Roll, Cheddar Cheese, Fried Egg, Everything Bagel Sandwich"
why: "The best bagels outside of NYC. He molecularly changes the water to match NYC’s."
}
{
name: "PI:NAME:<NAME>END_PI! PI:NAME:<NAME>END_PI"
address: "1601 Arapahoe St"
coordinates:
lat: 39.748099
long: -104.995687
hours:
open: "1100"
close: "1500"
recommendation:
who: "PI:NAME:<NAME>END_PI"
what: "Fun Guy pizza with truffle oil"
why: "Get some fresh air and enjoy some great wood-fired pizza."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "2262 Larimer St"
coordinates:
lat: 39.755494
long: -104.988906
hours:
open: "0630"
close: "1430"
recommendation:
who: "PI:NAME:<NAME>END_PI"
why: "A hipster favorite. There's always a crowd, so go early."
}
{
name: "PI:NAME:<NAME>END_PI"
address: "2148 Larimer St"
coordinates:
lat: 39.7544491
long: -104.9902803
hours:
open: "1100"
close: "2000"
recommendation:
who: "elliot"
why: "A Denver staple. Put on your culinary adventurer hat and try some exotic offerings like rattlesnake, pheasant, and elk."
}
]
localStorage.setItem("restaurants", JSON.stringify(restaurants))
|
[
{
"context": "isualization', '1', {'packages':['corechart']})\n\n# alex's logging function that he likes to use.\nl = (",
"end": 66,
"score": 0.8907890319824219,
"start": 65,
"tag": "NAME",
"value": "a"
},
{
"context": "ualization', '1', {'packages':['corechart']})\n\n# alex's logging function that he likes to use.\nl = (mes",
"end": 69,
"score": 0.47606533765792847,
"start": 66,
"tag": "NAME",
"value": "lex"
},
{
"context": "d this Semester')\n\n firebase_key = dept_name or 'depts'\n # get pre-computed data from firebase, put in ",
"end": 1367,
"score": 0.6649020910263062,
"start": 1362,
"tag": "KEY",
"value": "depts"
}
] | coffee/main.coffee | gterrono/coursegrapher | 1 | google.load('visualization', '1', {'packages':['corechart']})
# alex's logging function that he likes to use.
l = (message, objs...) ->
now = new Date()
hours = now.getHours()
mins = now.getMinutes()
secs = now.getSeconds()
console.log(["#{ hours }:#{ mins }.#{ secs }", message, objs...])
return
window.drawChart = (only_offered) ->
# get dept name if its there
hash = window.location.hash.toUpperCase()
dept_name = if hash.length > 1 then hash.substr(1) else ''
root_view = if dept_name then false else true
# set up basic google datatable
data = new google.visualization.DataTable()
data.addColumn('string', 'Major')
data.addColumn('date', 'Date')
data.addColumn('number', 'Instructor Quality')
data.addColumn('number', 'Difficulty')
data.addColumn('number', 'Course Quality')
data.addColumn('number', 'Ability to Stimulate Interest')
data.addColumn('number', 'Access to Instructor')
data.addColumn('number', 'Amount Learned')
data.addColumn('number', 'Amount of Work')
data.addColumn('number', 'Instructor\'s Communication')
data.addColumn('number', 'Recommended for Majors')
data.addColumn('number', 'Recommended for Non-Majors')
if root_view
data.addColumn('number', '# of Courses Offered This Semester')
else
data.addColumn('string', 'Offerred this Semester')
firebase_key = dept_name or 'depts'
# get pre-computed data from firebase, put in chart.
$.get "https://coursegrapher.firebaseio.com/#{firebase_key}.json", (json) ->
l 'got data', json
row_from_firebase = (id, dept) ->
return null unless dept.averages?
if not root_view and only_offered == true and not dept.offered
return null
get = (field) ->
n = dept.averages[field] or 0
parseFloat n.toFixed(2)
name = if dept.num? then "#{dept.name} (#{id})" else "[#{dept_name} #{id}] #{dept.name}"
if dept.num < 20
l 'skipping dept because to small', name, dept.num
return null
return [
name
new Date()
get('rInstructorQuality')
get('rDifficulty') or null # filters out the zeroes, I think
get('rCourseQuality')
get('rStimulateInterest')
get('rInstructorAccess')
get('rAmountLearned')
get('rWorkRequired')
get('rCommAbility')
get('rRecommendMajor')
get('rRecommendNonMajor')
if root_view then dept.course_count else dept.offered.toString()
]
# turn firebase into arrays using above func
a = (row_from_firebase(k, v) for k, v of json)
data.addRows(_.filter(a, _.isArray))
# set up options, draw chart.
chart_div = document.getElementById('chart_div')
window.chart = new google.visualization.MotionChart(chart_div)
width = $('#main_container').width()
height = Math.min(width / 2, $(window).height() - 100)
options =
showYScalePicker: false
showXScalePicker: false
showChartButtons: false
width: width
height: height
state: '{"showTrails":false,"playDuration":15000,"iconType":"BUBBLE","xLambda":1,"yZoomedDataMin":null,"xZoomedDataMin":null,"yLambda":1,"yZoomedIn":false,"nonSelectedAlpha":0.4,"orderedByY":false,"uniColorForNonSelected":false,"xZoomedIn":false,"time":"notime","yAxisOption":"3","xZoomedDataMax":null,"dimensions":{"iconDimensions":["dim0"]},"sizeOption":' + (if dept_name then '"_UNISIZE"' else '"12"') + ',"duration":{"multiplier":1,"timeUnit":"D"},"yZoomedDataMax":null,"xAxisOption":"4","iconKeySettings":[],"orderedByX":false,"colorOption":"2"};'
chart.draw(data, options)
unless window.location.hash
google.visualization.events.addListener chart, 'statechange', (e) ->
return if chart.getState() == null
# when someone clicks a bubble,
# parse what they clicked
# and take them to that dept.
state = JSON.parse(chart.getState())
longtitle = state.iconKeySettings[0].key.dim0
pat = /[(]([\w]{1,5})[)]$/
m = longtitle.match(pat)
dept = m[1]
window.location = "##{dept}"
l 'drew chart'
fix_headline = () ->
hash = window.location.hash
if hash.length > 1
dept_name = hash.substr(1)
$('#graph-title').text("Courses in #{dept_name}")
$extra = $('#extra-info')
$extra.text('Click and drag over an area to zoom in (hit enter after clicking zoom)')
$extra.after('<div id="only-offered-checkbox" style="float:right;padding-top:20px;padding-right:20px;font-size:17px"><input type="checkbox" name="only-offered" id="only-offered"><label for="only-offered" style="padding-left:5px">Display offered courses only</label></div>')
$extra.after('<span id="back" style="float:right;padding-top:20px" class="little"><a href="#">back to depts</a></span>')
else
$('#graph-title').text('Departments at Penn')
$('#extra-info').text('Clicking on a bubble will take you to the course page for that department')
$('#only-offered-checkbox').remove()
$('#back').remove()
fix_headline()
window.onhashchange = () ->
fix_headline()
drawChart()
google.setOnLoadCallback(drawChart())
$ ->
$(document).on 'change', "#only-offered-checkbox :checkbox", ->
drawChart(this.checked)
| 209903 | google.load('visualization', '1', {'packages':['corechart']})
# <NAME> <NAME>'s logging function that he likes to use.
l = (message, objs...) ->
now = new Date()
hours = now.getHours()
mins = now.getMinutes()
secs = now.getSeconds()
console.log(["#{ hours }:#{ mins }.#{ secs }", message, objs...])
return
window.drawChart = (only_offered) ->
# get dept name if its there
hash = window.location.hash.toUpperCase()
dept_name = if hash.length > 1 then hash.substr(1) else ''
root_view = if dept_name then false else true
# set up basic google datatable
data = new google.visualization.DataTable()
data.addColumn('string', 'Major')
data.addColumn('date', 'Date')
data.addColumn('number', 'Instructor Quality')
data.addColumn('number', 'Difficulty')
data.addColumn('number', 'Course Quality')
data.addColumn('number', 'Ability to Stimulate Interest')
data.addColumn('number', 'Access to Instructor')
data.addColumn('number', 'Amount Learned')
data.addColumn('number', 'Amount of Work')
data.addColumn('number', 'Instructor\'s Communication')
data.addColumn('number', 'Recommended for Majors')
data.addColumn('number', 'Recommended for Non-Majors')
if root_view
data.addColumn('number', '# of Courses Offered This Semester')
else
data.addColumn('string', 'Offerred this Semester')
firebase_key = dept_name or '<KEY>'
# get pre-computed data from firebase, put in chart.
$.get "https://coursegrapher.firebaseio.com/#{firebase_key}.json", (json) ->
l 'got data', json
row_from_firebase = (id, dept) ->
return null unless dept.averages?
if not root_view and only_offered == true and not dept.offered
return null
get = (field) ->
n = dept.averages[field] or 0
parseFloat n.toFixed(2)
name = if dept.num? then "#{dept.name} (#{id})" else "[#{dept_name} #{id}] #{dept.name}"
if dept.num < 20
l 'skipping dept because to small', name, dept.num
return null
return [
name
new Date()
get('rInstructorQuality')
get('rDifficulty') or null # filters out the zeroes, I think
get('rCourseQuality')
get('rStimulateInterest')
get('rInstructorAccess')
get('rAmountLearned')
get('rWorkRequired')
get('rCommAbility')
get('rRecommendMajor')
get('rRecommendNonMajor')
if root_view then dept.course_count else dept.offered.toString()
]
# turn firebase into arrays using above func
a = (row_from_firebase(k, v) for k, v of json)
data.addRows(_.filter(a, _.isArray))
# set up options, draw chart.
chart_div = document.getElementById('chart_div')
window.chart = new google.visualization.MotionChart(chart_div)
width = $('#main_container').width()
height = Math.min(width / 2, $(window).height() - 100)
options =
showYScalePicker: false
showXScalePicker: false
showChartButtons: false
width: width
height: height
state: '{"showTrails":false,"playDuration":15000,"iconType":"BUBBLE","xLambda":1,"yZoomedDataMin":null,"xZoomedDataMin":null,"yLambda":1,"yZoomedIn":false,"nonSelectedAlpha":0.4,"orderedByY":false,"uniColorForNonSelected":false,"xZoomedIn":false,"time":"notime","yAxisOption":"3","xZoomedDataMax":null,"dimensions":{"iconDimensions":["dim0"]},"sizeOption":' + (if dept_name then '"_UNISIZE"' else '"12"') + ',"duration":{"multiplier":1,"timeUnit":"D"},"yZoomedDataMax":null,"xAxisOption":"4","iconKeySettings":[],"orderedByX":false,"colorOption":"2"};'
chart.draw(data, options)
unless window.location.hash
google.visualization.events.addListener chart, 'statechange', (e) ->
return if chart.getState() == null
# when someone clicks a bubble,
# parse what they clicked
# and take them to that dept.
state = JSON.parse(chart.getState())
longtitle = state.iconKeySettings[0].key.dim0
pat = /[(]([\w]{1,5})[)]$/
m = longtitle.match(pat)
dept = m[1]
window.location = "##{dept}"
l 'drew chart'
fix_headline = () ->
hash = window.location.hash
if hash.length > 1
dept_name = hash.substr(1)
$('#graph-title').text("Courses in #{dept_name}")
$extra = $('#extra-info')
$extra.text('Click and drag over an area to zoom in (hit enter after clicking zoom)')
$extra.after('<div id="only-offered-checkbox" style="float:right;padding-top:20px;padding-right:20px;font-size:17px"><input type="checkbox" name="only-offered" id="only-offered"><label for="only-offered" style="padding-left:5px">Display offered courses only</label></div>')
$extra.after('<span id="back" style="float:right;padding-top:20px" class="little"><a href="#">back to depts</a></span>')
else
$('#graph-title').text('Departments at Penn')
$('#extra-info').text('Clicking on a bubble will take you to the course page for that department')
$('#only-offered-checkbox').remove()
$('#back').remove()
fix_headline()
window.onhashchange = () ->
fix_headline()
drawChart()
google.setOnLoadCallback(drawChart())
$ ->
$(document).on 'change', "#only-offered-checkbox :checkbox", ->
drawChart(this.checked)
| true | google.load('visualization', '1', {'packages':['corechart']})
# PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI's logging function that he likes to use.
l = (message, objs...) ->
now = new Date()
hours = now.getHours()
mins = now.getMinutes()
secs = now.getSeconds()
console.log(["#{ hours }:#{ mins }.#{ secs }", message, objs...])
return
window.drawChart = (only_offered) ->
# get dept name if its there
hash = window.location.hash.toUpperCase()
dept_name = if hash.length > 1 then hash.substr(1) else ''
root_view = if dept_name then false else true
# set up basic google datatable
data = new google.visualization.DataTable()
data.addColumn('string', 'Major')
data.addColumn('date', 'Date')
data.addColumn('number', 'Instructor Quality')
data.addColumn('number', 'Difficulty')
data.addColumn('number', 'Course Quality')
data.addColumn('number', 'Ability to Stimulate Interest')
data.addColumn('number', 'Access to Instructor')
data.addColumn('number', 'Amount Learned')
data.addColumn('number', 'Amount of Work')
data.addColumn('number', 'Instructor\'s Communication')
data.addColumn('number', 'Recommended for Majors')
data.addColumn('number', 'Recommended for Non-Majors')
if root_view
data.addColumn('number', '# of Courses Offered This Semester')
else
data.addColumn('string', 'Offerred this Semester')
firebase_key = dept_name or 'PI:KEY:<KEY>END_PI'
# get pre-computed data from firebase, put in chart.
$.get "https://coursegrapher.firebaseio.com/#{firebase_key}.json", (json) ->
l 'got data', json
row_from_firebase = (id, dept) ->
return null unless dept.averages?
if not root_view and only_offered == true and not dept.offered
return null
get = (field) ->
n = dept.averages[field] or 0
parseFloat n.toFixed(2)
name = if dept.num? then "#{dept.name} (#{id})" else "[#{dept_name} #{id}] #{dept.name}"
if dept.num < 20
l 'skipping dept because to small', name, dept.num
return null
return [
name
new Date()
get('rInstructorQuality')
get('rDifficulty') or null # filters out the zeroes, I think
get('rCourseQuality')
get('rStimulateInterest')
get('rInstructorAccess')
get('rAmountLearned')
get('rWorkRequired')
get('rCommAbility')
get('rRecommendMajor')
get('rRecommendNonMajor')
if root_view then dept.course_count else dept.offered.toString()
]
# turn firebase into arrays using above func
a = (row_from_firebase(k, v) for k, v of json)
data.addRows(_.filter(a, _.isArray))
# set up options, draw chart.
chart_div = document.getElementById('chart_div')
window.chart = new google.visualization.MotionChart(chart_div)
width = $('#main_container').width()
height = Math.min(width / 2, $(window).height() - 100)
options =
showYScalePicker: false
showXScalePicker: false
showChartButtons: false
width: width
height: height
state: '{"showTrails":false,"playDuration":15000,"iconType":"BUBBLE","xLambda":1,"yZoomedDataMin":null,"xZoomedDataMin":null,"yLambda":1,"yZoomedIn":false,"nonSelectedAlpha":0.4,"orderedByY":false,"uniColorForNonSelected":false,"xZoomedIn":false,"time":"notime","yAxisOption":"3","xZoomedDataMax":null,"dimensions":{"iconDimensions":["dim0"]},"sizeOption":' + (if dept_name then '"_UNISIZE"' else '"12"') + ',"duration":{"multiplier":1,"timeUnit":"D"},"yZoomedDataMax":null,"xAxisOption":"4","iconKeySettings":[],"orderedByX":false,"colorOption":"2"};'
chart.draw(data, options)
unless window.location.hash
google.visualization.events.addListener chart, 'statechange', (e) ->
return if chart.getState() == null
# when someone clicks a bubble,
# parse what they clicked
# and take them to that dept.
state = JSON.parse(chart.getState())
longtitle = state.iconKeySettings[0].key.dim0
pat = /[(]([\w]{1,5})[)]$/
m = longtitle.match(pat)
dept = m[1]
window.location = "##{dept}"
l 'drew chart'
fix_headline = () ->
hash = window.location.hash
if hash.length > 1
dept_name = hash.substr(1)
$('#graph-title').text("Courses in #{dept_name}")
$extra = $('#extra-info')
$extra.text('Click and drag over an area to zoom in (hit enter after clicking zoom)')
$extra.after('<div id="only-offered-checkbox" style="float:right;padding-top:20px;padding-right:20px;font-size:17px"><input type="checkbox" name="only-offered" id="only-offered"><label for="only-offered" style="padding-left:5px">Display offered courses only</label></div>')
$extra.after('<span id="back" style="float:right;padding-top:20px" class="little"><a href="#">back to depts</a></span>')
else
$('#graph-title').text('Departments at Penn')
$('#extra-info').text('Clicking on a bubble will take you to the course page for that department')
$('#only-offered-checkbox').remove()
$('#back').remove()
fix_headline()
window.onhashchange = () ->
fix_headline()
drawChart()
google.setOnLoadCallback(drawChart())
$ ->
$(document).on 'change', "#only-offered-checkbox :checkbox", ->
drawChart(this.checked)
|
[
{
"context": "p; shut.png\" />'\n equal tag(\"div\", data: {name: 'Stephen', city_state: ['Chicago', 'IL']}), '<div data-cit",
"end": 1317,
"score": 0.9974467754364014,
"start": 1310,
"tag": "NAME",
"value": "Stephen"
},
{
"context": "\"["Chicago","IL"]\" data-name=\"Stephen\" />'\n\ntest \"content_tag\", ->\n equal content_tag(",
"end": 1432,
"score": 0.9985710978507996,
"start": 1425,
"tag": "NAME",
"value": "Stephen"
}
] | test/javascripts/tests/helpers/tag_test.js.coffee | evrone/ultimate-helpers | 2 | #= require ultimate/underscore/underscore
#= require ultimate/underscore/underscore.string
#= require ultimate/helpers/tag
module "Ultimate.Helpers.Tag"
_.extend @, Ultimate.Helpers.Tag
test "tag_options", ->
strictEqual tag_options(), ""
strictEqual tag_options({}), ""
ok /title="Some title"/.test tag_options(class: "some-class", title: "Some title")
equal tag_options(class: ["song", "play>"]), ' class="song play>"'
equal tag_options(disabled: true, itemscope: false, multiple: true, readonly: true), ' disabled="disabled" multiple="multiple" readonly="readonly"'
equal tag_options(data: {remote: true}, role: "ajax"), ' data-remote="true" role="ajax"'
equal tag_options(data: {inner: {section: true}}), ' data-inner="{"section":true}"'
equal tag_options(data: {inner: {section: true}}, false), ' data-inner="{"section":true}"'
equal tag_options(included: ''), ' included=""'
test "tag", ->
equal tag('br'), '<br />'
equal tag('br', null, true), '<br>'
equal tag('input', type: 'text', disabled: true), '<input disabled="disabled" type="text" />'
equal tag('img', src: 'open & shut.png'), '<img src="open & shut.png" />'
equal tag("img", {src: "open & shut.png"}, false, false), '<img src="open & shut.png" />'
equal tag("div", data: {name: 'Stephen', city_state: ['Chicago', 'IL']}), '<div data-city-state="["Chicago","IL"]" data-name="Stephen" />'
test "content_tag", ->
equal content_tag('div', '', class: ['some', 'class']), '<div class="some class"></div>'
equal content_tag('div', '<Inner content>', class: 'some class'), '<div class="some class"><Inner content></div>'
equal content_tag('div', '<Inner content>', class: 'some class', false), '<div class="some class"><Inner content></div>'
equal content_tag('div', class: 'some class', -> '<Inner content>'), '<div class="some class"><Inner content></div>'
equal content_tag('div', class: 'some class', false, -> '<Inner content>'), '<div class="some class"><Inner content></div>'
test "cdata_section", ->
equal cdata_section("<hello world>"), "<![CDATA[<hello world>]]>"
equal cdata_section("hello]]>world"), "<![CDATA[hello]]]]><![CDATA[>world]]>"
equal cdata_section("hello]]>world]]>again"), "<![CDATA[hello]]]]><![CDATA[>world]]]]><![CDATA[>again]]>"
test "concat_class", ->
strictEqual concat_class(), ''
strictEqual concat_class(null), ''
strictEqual concat_class([]), ''
equal concat_class('lol', 0), 'lol'
equal concat_class(' lol ', false, ' wow '), 'lol wow'
equal concat_class('lol', null, 'rofl lol wow ', ' wow '), 'lol rofl wow'
equal concat_class([['lol', [null]], 'rofl lol wow '], ' wow '), 'lol rofl wow'
test "selectorToHtml", ->
equal selectorToHtml(''), ''
equal selectorToHtml('.lol'), '<div class="lol"></div>'
equal selectorToHtml('.mega-lol'), '<div class="mega-lol"></div>'
equal selectorToHtml('tr.lol'), '<tr class="lol"></tr>'
equal selectorToHtml('tr.lol#rofl'), '<tr class="lol" id="rofl"></tr>'
equal selectorToHtml('#rofl.lol'), '<div class="lol" id="rofl"></div>'
equal selectorToHtml('.lol.wow'), '<div class="lol wow"></div>'
equal selectorToHtml('.wow#rofl.lol'), '<div class="wow lol" id="rofl"></div>'
# equal selectorToHtml('h1.one+p.two'), '<h1 class="one"></h1><p class="two"></p>'
| 181974 | #= require ultimate/underscore/underscore
#= require ultimate/underscore/underscore.string
#= require ultimate/helpers/tag
module "Ultimate.Helpers.Tag"
_.extend @, Ultimate.Helpers.Tag
test "tag_options", ->
strictEqual tag_options(), ""
strictEqual tag_options({}), ""
ok /title="Some title"/.test tag_options(class: "some-class", title: "Some title")
equal tag_options(class: ["song", "play>"]), ' class="song play>"'
equal tag_options(disabled: true, itemscope: false, multiple: true, readonly: true), ' disabled="disabled" multiple="multiple" readonly="readonly"'
equal tag_options(data: {remote: true}, role: "ajax"), ' data-remote="true" role="ajax"'
equal tag_options(data: {inner: {section: true}}), ' data-inner="{"section":true}"'
equal tag_options(data: {inner: {section: true}}, false), ' data-inner="{"section":true}"'
equal tag_options(included: ''), ' included=""'
test "tag", ->
equal tag('br'), '<br />'
equal tag('br', null, true), '<br>'
equal tag('input', type: 'text', disabled: true), '<input disabled="disabled" type="text" />'
equal tag('img', src: 'open & shut.png'), '<img src="open & shut.png" />'
equal tag("img", {src: "open & shut.png"}, false, false), '<img src="open & shut.png" />'
equal tag("div", data: {name: '<NAME>', city_state: ['Chicago', 'IL']}), '<div data-city-state="["Chicago","IL"]" data-name="<NAME>" />'
test "content_tag", ->
equal content_tag('div', '', class: ['some', 'class']), '<div class="some class"></div>'
equal content_tag('div', '<Inner content>', class: 'some class'), '<div class="some class"><Inner content></div>'
equal content_tag('div', '<Inner content>', class: 'some class', false), '<div class="some class"><Inner content></div>'
equal content_tag('div', class: 'some class', -> '<Inner content>'), '<div class="some class"><Inner content></div>'
equal content_tag('div', class: 'some class', false, -> '<Inner content>'), '<div class="some class"><Inner content></div>'
test "cdata_section", ->
equal cdata_section("<hello world>"), "<![CDATA[<hello world>]]>"
equal cdata_section("hello]]>world"), "<![CDATA[hello]]]]><![CDATA[>world]]>"
equal cdata_section("hello]]>world]]>again"), "<![CDATA[hello]]]]><![CDATA[>world]]]]><![CDATA[>again]]>"
test "concat_class", ->
strictEqual concat_class(), ''
strictEqual concat_class(null), ''
strictEqual concat_class([]), ''
equal concat_class('lol', 0), 'lol'
equal concat_class(' lol ', false, ' wow '), 'lol wow'
equal concat_class('lol', null, 'rofl lol wow ', ' wow '), 'lol rofl wow'
equal concat_class([['lol', [null]], 'rofl lol wow '], ' wow '), 'lol rofl wow'
test "selectorToHtml", ->
equal selectorToHtml(''), ''
equal selectorToHtml('.lol'), '<div class="lol"></div>'
equal selectorToHtml('.mega-lol'), '<div class="mega-lol"></div>'
equal selectorToHtml('tr.lol'), '<tr class="lol"></tr>'
equal selectorToHtml('tr.lol#rofl'), '<tr class="lol" id="rofl"></tr>'
equal selectorToHtml('#rofl.lol'), '<div class="lol" id="rofl"></div>'
equal selectorToHtml('.lol.wow'), '<div class="lol wow"></div>'
equal selectorToHtml('.wow#rofl.lol'), '<div class="wow lol" id="rofl"></div>'
# equal selectorToHtml('h1.one+p.two'), '<h1 class="one"></h1><p class="two"></p>'
| true | #= require ultimate/underscore/underscore
#= require ultimate/underscore/underscore.string
#= require ultimate/helpers/tag
module "Ultimate.Helpers.Tag"
_.extend @, Ultimate.Helpers.Tag
test "tag_options", ->
strictEqual tag_options(), ""
strictEqual tag_options({}), ""
ok /title="Some title"/.test tag_options(class: "some-class", title: "Some title")
equal tag_options(class: ["song", "play>"]), ' class="song play>"'
equal tag_options(disabled: true, itemscope: false, multiple: true, readonly: true), ' disabled="disabled" multiple="multiple" readonly="readonly"'
equal tag_options(data: {remote: true}, role: "ajax"), ' data-remote="true" role="ajax"'
equal tag_options(data: {inner: {section: true}}), ' data-inner="{"section":true}"'
equal tag_options(data: {inner: {section: true}}, false), ' data-inner="{"section":true}"'
equal tag_options(included: ''), ' included=""'
test "tag", ->
equal tag('br'), '<br />'
equal tag('br', null, true), '<br>'
equal tag('input', type: 'text', disabled: true), '<input disabled="disabled" type="text" />'
equal tag('img', src: 'open & shut.png'), '<img src="open & shut.png" />'
equal tag("img", {src: "open & shut.png"}, false, false), '<img src="open & shut.png" />'
equal tag("div", data: {name: 'PI:NAME:<NAME>END_PI', city_state: ['Chicago', 'IL']}), '<div data-city-state="["Chicago","IL"]" data-name="PI:NAME:<NAME>END_PI" />'
test "content_tag", ->
equal content_tag('div', '', class: ['some', 'class']), '<div class="some class"></div>'
equal content_tag('div', '<Inner content>', class: 'some class'), '<div class="some class"><Inner content></div>'
equal content_tag('div', '<Inner content>', class: 'some class', false), '<div class="some class"><Inner content></div>'
equal content_tag('div', class: 'some class', -> '<Inner content>'), '<div class="some class"><Inner content></div>'
equal content_tag('div', class: 'some class', false, -> '<Inner content>'), '<div class="some class"><Inner content></div>'
test "cdata_section", ->
equal cdata_section("<hello world>"), "<![CDATA[<hello world>]]>"
equal cdata_section("hello]]>world"), "<![CDATA[hello]]]]><![CDATA[>world]]>"
equal cdata_section("hello]]>world]]>again"), "<![CDATA[hello]]]]><![CDATA[>world]]]]><![CDATA[>again]]>"
test "concat_class", ->
strictEqual concat_class(), ''
strictEqual concat_class(null), ''
strictEqual concat_class([]), ''
equal concat_class('lol', 0), 'lol'
equal concat_class(' lol ', false, ' wow '), 'lol wow'
equal concat_class('lol', null, 'rofl lol wow ', ' wow '), 'lol rofl wow'
equal concat_class([['lol', [null]], 'rofl lol wow '], ' wow '), 'lol rofl wow'
test "selectorToHtml", ->
equal selectorToHtml(''), ''
equal selectorToHtml('.lol'), '<div class="lol"></div>'
equal selectorToHtml('.mega-lol'), '<div class="mega-lol"></div>'
equal selectorToHtml('tr.lol'), '<tr class="lol"></tr>'
equal selectorToHtml('tr.lol#rofl'), '<tr class="lol" id="rofl"></tr>'
equal selectorToHtml('#rofl.lol'), '<div class="lol" id="rofl"></div>'
equal selectorToHtml('.lol.wow'), '<div class="lol wow"></div>'
equal selectorToHtml('.wow#rofl.lol'), '<div class="wow lol" id="rofl"></div>'
# equal selectorToHtml('h1.one+p.two'), '<h1 class="one"></h1><p class="two"></p>'
|
[
{
"context": "(Darwin)\nComment: GPGTools - https://gpgtools.org\n\nmQENBFN+YH8BCACpPJOdWTS5T8fobGFOHyGXJi5sxX8Jr35+XVI3qebEgV9k9NA4\nq0DgjWYamWKm6kB4sLsKL2HFF/2Mm9jHAkRChnO6BlaSbbdg/OstxvPXarkzCNmH\nXhxQenqJRTpg7Tv/6LVq8dvrzKoRRzaruzrIE041WXX+viTdxIE4+uL2ibuA9Kly\nERbrSkDrBHf/4ufFDI7zPEX1pTq90GgkQQajukPPbI95AgnslbAUCyL/Q+qezY1y\nNZ46QhrxG+Q44xex1hZtI7E4B23FtLybx1yzGkuE74c9Zi1OJUziS5UWkE06W38g\nYY+tlA7jhlCPrhN0cKppBYkfPHlaimndnmUFABEBAAG0H0Rvbm5pZSA8ZG9ubmll\nQG91dG9mZWxlbWVudC5pbz6JATgEEwEKACIFAlN+YH8CGwMGCwkIBwMCBhUIAgkK\nCwQWAgMBAh4BAheAAAoJEIl2syWmeuaP4h4H/3hA0nTwS0g9nVkHctDNgPBVasJr\na+lQMZ4UyBmISf1r8iqbQ/AfmcFgjlga8E+IyGKUrKIBZ8Cv0Dl/SbQNgCv0PkfU\nCfogeTi4ad2unaj+zYlhvCClvCMlpXHo1w8RqwTXkBRnzM6AHSEJ7ac4K/WSdnEo\nTG0zKZsDZDsqy3JwqOhtRiLu9R1Ru6qZ8xsQkA7KURx1nvfhB",
"end": 789,
"score": 0.9993927478790283,
"start": 140,
"tag": "KEY",
"value": "mQENBFN+YH8BCACpPJOdWTS5T8fobGFOHyGXJi5sxX8Jr35+XVI3qebEgV9k9NA4\nq0DgjWYamWKm6kB4sLsKL2HFF/2Mm9jHAkRChnO6BlaSbbdg/OstxvPXarkzCNmH\nXhxQenqJRTpg7Tv/6LVq8dvrzKoRRzaruzrIE041WXX+viTdxIE4+uL2ibuA9Kly\nERbrSkDrBHf/4ufFDI7zPEX1pTq90GgkQQajukPPbI95AgnslbAUCyL/Q+qezY1y\nNZ46QhrxG+Q44xex1hZtI7E4B23FtLybx1yzGkuE74c9Zi1OJUziS5UWkE06W38g\nYY+tlA7jhlCPrhN0cKppBYkfPHlaimndnmUFABEBAAG0H0Rvbm5pZSA8ZG9ubmll\nQG91dG9mZWxlbWVudC5pbz6JATgEEwEKACIFAlN+YH8CGwMGCwkIBwMCBhUIAgkK\nCwQWAgMBAh4BAheAAAoJEIl2syWmeuaP4h4H/3hA0nTwS0g9nVkHctDNgPBVasJr\na+lQMZ4UyBmISf1r8iqbQ/AfmcFgjlga8E+IyGKUrKIBZ8Cv0Dl/SbQNgCv0PkfU\nCfogeTi4ad2unaj+zYlhvCClvCMlpXHo1w8RqwTXkBRnzM6AHSEJ7ac4K/WSdnEo"
},
{
"context": "YlhvCClvCMlpXHo1w8RqwTXkBRnzM6AHSEJ7ac4K/WSdnEo\nTG0zKZsDZDsqy3JwqOhtRiLu9R1Ru6qZ8xsQkA7KURx1nvfhBssaEzMKrXHVXkpV\nWs+LCi8X02L/GyXqUWmlynrOhpR9QmU7rK0Gu9VfR61SGPklMWFZtYWaaJs/xMWM\nWNU0b06iHX1YsHdeXXNlulg9gblkgwuOl58sGzGrQxhSvbKhPdRvpByfTuS5AQ0E\nU35gfwEIAMmGh9YczcghtVeuXK+Qnk48t5U3PPGdF1fAu2qQVv10xnXpbZpAKc+z\nMoCgCfE+eeM1nxbK5R3pU+zfbeEeRuEVBUtDknEdu3QJ7y7ELeqczzLn7EsupyyV\nz54r9cOiMJv1NS3c3wKK61/sj1QbPW0rSqkxN/f3K+9Arg9wZinQi4DgjL7VSZuV\nt9RacIJ2+77FHHXs65hSVQekN779CrQQEoJi7CUBaoVyyL5rBWEkbHyB/ki1JW0A\noMZ6AtWWgYkA/G6WEEvb+rMBEq9GEkPMmxwonOs1/gmjEvjeGk4dgDnKcpWXnByX\n2/4mMyNuQN2k0yzYzKFK91Py+q0y6Q8AEQEAAYkBHwQYAQoACQUCU35gfwIbDAAK\nCRCJdrMlpnrmj1ouB/4sWefDp29qM/AA0HaVCb/DVSuIeVbzo6nGEAFOAoPRhEwX\nXHnS7z6EutYwFV97QgYo27eoYKGDDozNLoi5qUQqq6J/ALNFQUnVcOO74SrznJDQ\nzx5ilG9AlmBJiWIu2XzgDEKXhSOZJMCNdTIY8PybAKc/D+pRDoQTY5SxypDDn15Y\nTwUqX/3/i2S9LpIGbNWepA1ZuQvmMdhPbB3GyX/+z16tZ+6irs787dvZ+/E9uABR\neeWvuBoFu+q3DLL2q9zHMac3ZsjFP8zePZi1QaazLuBapmJsdVeWLbh2m/hf79/p\nLEbxeJK54YE4IHCZJNhV7BEbXcPju7pzmTP4xXDv\n=57/x\n-----END PGP PUBLIC KEY BLOCK-----\n\"\"\"\n",
"end": 1746,
"score": 0.9994040131568909,
"start": 792,
"tag": "KEY",
"value": "0zKZsDZDsqy3JwqOhtRiLu9R1Ru6qZ8xsQkA7KURx1nvfhBssaEzMKrXHVXkpV\nWs+LCi8X02L/GyXqUWmlynrOhpR9QmU7rK0Gu9VfR61SGPklMWFZtYWaaJs/xMWM\nWNU0b06iHX1YsHdeXXNlulg9gblkgwuOl58sGzGrQxhSvbKhPdRvpByfTuS5AQ0E\nU35gfwEIAMmGh9YczcghtVeuXK+Qnk48t5U3PPGdF1fAu2qQVv10xnXpbZpAKc+z\nMoCgCfE+eeM1nxbK5R3pU+zfbeEeRuEVBUtDknEdu3QJ7y7ELeqczzLn7EsupyyV\nz54r9cOiMJv1NS3c3wKK61/sj1QbPW0rSqkxN/f3K+9Arg9wZinQi4DgjL7VSZuV\nt9RacIJ2+77FHHXs65hSVQekN779CrQQEoJi7CUBaoVyyL5rBWEkbHyB/ki1JW0A\noMZ6AtWWgYkA/G6WEEvb+rMBEq9GEkPMmxwonOs1/gmjEvjeGk4dgDnKcpWXnByX\n2/4mMyNuQN2k0yzYzKFK91Py+q0y6Q8AEQEAAYkBHwQYAQoACQUCU35gfwIbDAAK\nCRCJdrMlpnrmj1ouB/4sWefDp29qM/AA0HaVCb/DVSuIeVbzo6nGEAFOAoPRhEwX\nXHnS7z6EutYwFV97QgYo27eoYKGDDozNLoi5qUQqq6J/ALNFQUnVcOO74SrznJDQ\nzx5ilG9AlmBJiWIu2XzgDEKXhSOZJMCNdTIY8PybAKc/D+pRDoQTY5SxypDDn15Y\nTwUqX/3/i2S9LpIGbNWepA1ZuQvmMdhPbB3GyX/+z16tZ+6irs787dvZ+/E9uABR\neeWvuBoFu+q3DLL2q9zHMac3ZsjFP8zePZi1QaazLuBapmJsdVeWLbh2m/hf79/p\nLEbxeJK54YE4IHCZJNhV7BEbXcPju7pzmTP4xXDv\n=57/x"
}
] | test/publickey.iced | taterbase/node-keybase | 10 | module.exports = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
mQENBFN+YH8BCACpPJOdWTS5T8fobGFOHyGXJi5sxX8Jr35+XVI3qebEgV9k9NA4
q0DgjWYamWKm6kB4sLsKL2HFF/2Mm9jHAkRChnO6BlaSbbdg/OstxvPXarkzCNmH
XhxQenqJRTpg7Tv/6LVq8dvrzKoRRzaruzrIE041WXX+viTdxIE4+uL2ibuA9Kly
ERbrSkDrBHf/4ufFDI7zPEX1pTq90GgkQQajukPPbI95AgnslbAUCyL/Q+qezY1y
NZ46QhrxG+Q44xex1hZtI7E4B23FtLybx1yzGkuE74c9Zi1OJUziS5UWkE06W38g
YY+tlA7jhlCPrhN0cKppBYkfPHlaimndnmUFABEBAAG0H0Rvbm5pZSA8ZG9ubmll
QG91dG9mZWxlbWVudC5pbz6JATgEEwEKACIFAlN+YH8CGwMGCwkIBwMCBhUIAgkK
CwQWAgMBAh4BAheAAAoJEIl2syWmeuaP4h4H/3hA0nTwS0g9nVkHctDNgPBVasJr
a+lQMZ4UyBmISf1r8iqbQ/AfmcFgjlga8E+IyGKUrKIBZ8Cv0Dl/SbQNgCv0PkfU
CfogeTi4ad2unaj+zYlhvCClvCMlpXHo1w8RqwTXkBRnzM6AHSEJ7ac4K/WSdnEo
TG0zKZsDZDsqy3JwqOhtRiLu9R1Ru6qZ8xsQkA7KURx1nvfhBssaEzMKrXHVXkpV
Ws+LCi8X02L/GyXqUWmlynrOhpR9QmU7rK0Gu9VfR61SGPklMWFZtYWaaJs/xMWM
WNU0b06iHX1YsHdeXXNlulg9gblkgwuOl58sGzGrQxhSvbKhPdRvpByfTuS5AQ0E
U35gfwEIAMmGh9YczcghtVeuXK+Qnk48t5U3PPGdF1fAu2qQVv10xnXpbZpAKc+z
MoCgCfE+eeM1nxbK5R3pU+zfbeEeRuEVBUtDknEdu3QJ7y7ELeqczzLn7EsupyyV
z54r9cOiMJv1NS3c3wKK61/sj1QbPW0rSqkxN/f3K+9Arg9wZinQi4DgjL7VSZuV
t9RacIJ2+77FHHXs65hSVQekN779CrQQEoJi7CUBaoVyyL5rBWEkbHyB/ki1JW0A
oMZ6AtWWgYkA/G6WEEvb+rMBEq9GEkPMmxwonOs1/gmjEvjeGk4dgDnKcpWXnByX
2/4mMyNuQN2k0yzYzKFK91Py+q0y6Q8AEQEAAYkBHwQYAQoACQUCU35gfwIbDAAK
CRCJdrMlpnrmj1ouB/4sWefDp29qM/AA0HaVCb/DVSuIeVbzo6nGEAFOAoPRhEwX
XHnS7z6EutYwFV97QgYo27eoYKGDDozNLoi5qUQqq6J/ALNFQUnVcOO74SrznJDQ
zx5ilG9AlmBJiWIu2XzgDEKXhSOZJMCNdTIY8PybAKc/D+pRDoQTY5SxypDDn15Y
TwUqX/3/i2S9LpIGbNWepA1ZuQvmMdhPbB3GyX/+z16tZ+6irs787dvZ+/E9uABR
eeWvuBoFu+q3DLL2q9zHMac3ZsjFP8zePZi1QaazLuBapmJsdVeWLbh2m/hf79/p
LEbxeJK54YE4IHCZJNhV7BEbXcPju7pzmTP4xXDv
=57/x
-----END PGP PUBLIC KEY BLOCK-----
"""
| 6603 | module.exports = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
<KEY>
TG<KEY>
-----END PGP PUBLIC KEY BLOCK-----
"""
| true | module.exports = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - https://gpgtools.org
PI:KEY:<KEY>END_PI
TGPI:KEY:<KEY>END_PI
-----END PGP PUBLIC KEY BLOCK-----
"""
|
[
{
"context": " values.appendChild value\n\n key = document.createElement 'div'\n key.classList.add 'text-padded'\n ",
"end": 722,
"score": 0.7473018765449524,
"start": 709,
"tag": "KEY",
"value": "createElement"
},
{
"context": "hild value\n\n key = document.createElement 'div'\n key.classList.add 'text-padded'\n ",
"end": 727,
"score": 0.4658680260181427,
"start": 724,
"tag": "KEY",
"value": "div"
}
] | lib/environment.coffee | deprint/build-tools-pty | 0 | pty = null
{TextEditorView, View} = require 'atom-space-pen-views'
module.exports =
name: 'Spawn in Pseudo-Terminal'
info:
class PtyInfoPane
constructor: (command) ->
@element = document.createElement 'div'
@element.classList.add 'module'
keys = document.createElement 'div'
values = document.createElement 'div'
key = document.createElement 'div'
key.classList.add 'text-padded'
key.innerText = 'Rows:'
value = document.createElement 'div'
value.classList.add 'text-padded'
value.innerText = command.environment.config.rows
keys.appendChild key
values.appendChild value
key = document.createElement 'div'
key.classList.add 'text-padded'
key.innerText = 'Columns:'
value = document.createElement 'div'
value.classList.add 'text-padded'
value.innerText = command.environment.config.cols
keys.appendChild key
values.appendChild value
@element.appendChild keys
@element.appendChild values
edit:
class PtyEditPane extends View
@content: ->
@div class: 'panel-body', =>
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Output Streams'
@div =>
@span class: 'inline-block text-subtle', 'Configure standard output/error stream'
@select class: 'form-control', outlet: 'streams', =>
@option value: 'pty-stdout', 'Use pty.js + redirect stderr in stdout'
@option value: 'pty-stderr', 'Use pty.js + redirect stdout in stderr'
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Number of Rows'
@div =>
@span class: 'inline-block text-subtle', 'Dimensions of pseudo terminal (for pty.js)'
@subview 'pty_rows', new TextEditorView(mini: true, placeholderText: '25')
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Number of Columns'
@div =>
@span class: 'inline-block text-subtle', 'Dimensions of pseudo terminal (for pty.js)'
@subview 'pty_cols', new TextEditorView(mini: true, placeholderText: '80')
set: (command, sourceFile) ->
if command?.environment.name is 'ptyw'
for option, id in @streams.children()
if option.attributes.getNamedItem('value').nodeValue is command.environment.config.stdoe
@streams[0].selectedIndex = id
break
@pty_rows.getModel().setText('' + command.environment.config.rows)
@pty_cols.getModel().setText('' + command.environment.config.cols)
else
@streams[0].selectedIndex = 0
@pty_rows.getModel().setText('')
@pty_cols.getModel().setText('')
get: (command) ->
value = @streams.children()[@streams[0].selectedIndex].attributes.getNamedItem('value').nodeValue
r = 0
c = 0
if @pty_cols.getModel().getText() is ''
c = 80
else
c = parseInt(@pty_cols.getModel().getText())
if Number.isNaN(c)
return "cols: #{@pty_cols.getModel().getText()} is not a number"
if @pty_rows.getModel().getText() is ''
r = 25
else
r = parseInt(@pty_rows.getModel().getText())
if Number.isNaN(r)
return "rows: #{@pty_rows.getModel().getText()} is not a number"
command.environment =
name: 'ptyw'
config:
stdoe: value
rows: r
cols: c
return null
mod:
class Ptyw
constructor: (@command, manager, @config) ->
{command, args, env} = @command
pty = require 'ptyw.js'
@promise = new Promise((@resolve, @reject) =>
@process = pty.spawn(command, args, {
name: 'xterm-color'
cols: @config.cols
rows: @config.rows
cwd: @command.getWD()
env: env
}
)
if @config.stdoe is 'pty-stdout'
@process.on 'data', (data) =>
return unless @process?
return if @killed
data = data.replace /\r/g, ''
manager.stdout.in(data)
else
@process.on 'data', (data) =>
return unless @process?
return if @killed
data = data.replace /\r/g, ''
manager.stderr.in(data)
@process.on 'exit', (exitcode, signal) =>
return unless exitcode? and signal?
if signal isnt 0
exitcode = null
signal = 128 + signal
else if exitcode >= 128
signal = exitcode
exitcode = null
else
signal = null
@killed = true
manager.finish({exitcode, signal})
@resolve({exitcode, signal})
manager.setInput(@process)
)
@promise.then(
=>
@destroy()
=>
@destroy()
)
getPromise: ->
@promise
isKilled: ->
@killed
sigterm: ->
@process?.write '\x03', 'utf8'
sigkill: ->
@process?.kill('SIGKILL')
destroy: ->
@killed = true
@promise = null
@process = null
@reject = (e) -> console.log "Received reject for finished process: #{e}"
@resolve = (e) -> console.log "Received resolve for finished process: #{e}"
| 113797 | pty = null
{TextEditorView, View} = require 'atom-space-pen-views'
module.exports =
name: 'Spawn in Pseudo-Terminal'
info:
class PtyInfoPane
constructor: (command) ->
@element = document.createElement 'div'
@element.classList.add 'module'
keys = document.createElement 'div'
values = document.createElement 'div'
key = document.createElement 'div'
key.classList.add 'text-padded'
key.innerText = 'Rows:'
value = document.createElement 'div'
value.classList.add 'text-padded'
value.innerText = command.environment.config.rows
keys.appendChild key
values.appendChild value
key = document.<KEY> '<KEY>'
key.classList.add 'text-padded'
key.innerText = 'Columns:'
value = document.createElement 'div'
value.classList.add 'text-padded'
value.innerText = command.environment.config.cols
keys.appendChild key
values.appendChild value
@element.appendChild keys
@element.appendChild values
edit:
class PtyEditPane extends View
@content: ->
@div class: 'panel-body', =>
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Output Streams'
@div =>
@span class: 'inline-block text-subtle', 'Configure standard output/error stream'
@select class: 'form-control', outlet: 'streams', =>
@option value: 'pty-stdout', 'Use pty.js + redirect stderr in stdout'
@option value: 'pty-stderr', 'Use pty.js + redirect stdout in stderr'
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Number of Rows'
@div =>
@span class: 'inline-block text-subtle', 'Dimensions of pseudo terminal (for pty.js)'
@subview 'pty_rows', new TextEditorView(mini: true, placeholderText: '25')
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Number of Columns'
@div =>
@span class: 'inline-block text-subtle', 'Dimensions of pseudo terminal (for pty.js)'
@subview 'pty_cols', new TextEditorView(mini: true, placeholderText: '80')
set: (command, sourceFile) ->
if command?.environment.name is 'ptyw'
for option, id in @streams.children()
if option.attributes.getNamedItem('value').nodeValue is command.environment.config.stdoe
@streams[0].selectedIndex = id
break
@pty_rows.getModel().setText('' + command.environment.config.rows)
@pty_cols.getModel().setText('' + command.environment.config.cols)
else
@streams[0].selectedIndex = 0
@pty_rows.getModel().setText('')
@pty_cols.getModel().setText('')
get: (command) ->
value = @streams.children()[@streams[0].selectedIndex].attributes.getNamedItem('value').nodeValue
r = 0
c = 0
if @pty_cols.getModel().getText() is ''
c = 80
else
c = parseInt(@pty_cols.getModel().getText())
if Number.isNaN(c)
return "cols: #{@pty_cols.getModel().getText()} is not a number"
if @pty_rows.getModel().getText() is ''
r = 25
else
r = parseInt(@pty_rows.getModel().getText())
if Number.isNaN(r)
return "rows: #{@pty_rows.getModel().getText()} is not a number"
command.environment =
name: 'ptyw'
config:
stdoe: value
rows: r
cols: c
return null
mod:
class Ptyw
constructor: (@command, manager, @config) ->
{command, args, env} = @command
pty = require 'ptyw.js'
@promise = new Promise((@resolve, @reject) =>
@process = pty.spawn(command, args, {
name: 'xterm-color'
cols: @config.cols
rows: @config.rows
cwd: @command.getWD()
env: env
}
)
if @config.stdoe is 'pty-stdout'
@process.on 'data', (data) =>
return unless @process?
return if @killed
data = data.replace /\r/g, ''
manager.stdout.in(data)
else
@process.on 'data', (data) =>
return unless @process?
return if @killed
data = data.replace /\r/g, ''
manager.stderr.in(data)
@process.on 'exit', (exitcode, signal) =>
return unless exitcode? and signal?
if signal isnt 0
exitcode = null
signal = 128 + signal
else if exitcode >= 128
signal = exitcode
exitcode = null
else
signal = null
@killed = true
manager.finish({exitcode, signal})
@resolve({exitcode, signal})
manager.setInput(@process)
)
@promise.then(
=>
@destroy()
=>
@destroy()
)
getPromise: ->
@promise
isKilled: ->
@killed
sigterm: ->
@process?.write '\x03', 'utf8'
sigkill: ->
@process?.kill('SIGKILL')
destroy: ->
@killed = true
@promise = null
@process = null
@reject = (e) -> console.log "Received reject for finished process: #{e}"
@resolve = (e) -> console.log "Received resolve for finished process: #{e}"
| true | pty = null
{TextEditorView, View} = require 'atom-space-pen-views'
module.exports =
name: 'Spawn in Pseudo-Terminal'
info:
class PtyInfoPane
constructor: (command) ->
@element = document.createElement 'div'
@element.classList.add 'module'
keys = document.createElement 'div'
values = document.createElement 'div'
key = document.createElement 'div'
key.classList.add 'text-padded'
key.innerText = 'Rows:'
value = document.createElement 'div'
value.classList.add 'text-padded'
value.innerText = command.environment.config.rows
keys.appendChild key
values.appendChild value
key = document.PI:KEY:<KEY>END_PI 'PI:KEY:<KEY>END_PI'
key.classList.add 'text-padded'
key.innerText = 'Columns:'
value = document.createElement 'div'
value.classList.add 'text-padded'
value.innerText = command.environment.config.cols
keys.appendChild key
values.appendChild value
@element.appendChild keys
@element.appendChild values
edit:
class PtyEditPane extends View
@content: ->
@div class: 'panel-body', =>
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Output Streams'
@div =>
@span class: 'inline-block text-subtle', 'Configure standard output/error stream'
@select class: 'form-control', outlet: 'streams', =>
@option value: 'pty-stdout', 'Use pty.js + redirect stderr in stdout'
@option value: 'pty-stderr', 'Use pty.js + redirect stdout in stderr'
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Number of Rows'
@div =>
@span class: 'inline-block text-subtle', 'Dimensions of pseudo terminal (for pty.js)'
@subview 'pty_rows', new TextEditorView(mini: true, placeholderText: '25')
@div class: 'block', =>
@label =>
@div class: 'settings-name', 'Number of Columns'
@div =>
@span class: 'inline-block text-subtle', 'Dimensions of pseudo terminal (for pty.js)'
@subview 'pty_cols', new TextEditorView(mini: true, placeholderText: '80')
set: (command, sourceFile) ->
if command?.environment.name is 'ptyw'
for option, id in @streams.children()
if option.attributes.getNamedItem('value').nodeValue is command.environment.config.stdoe
@streams[0].selectedIndex = id
break
@pty_rows.getModel().setText('' + command.environment.config.rows)
@pty_cols.getModel().setText('' + command.environment.config.cols)
else
@streams[0].selectedIndex = 0
@pty_rows.getModel().setText('')
@pty_cols.getModel().setText('')
get: (command) ->
value = @streams.children()[@streams[0].selectedIndex].attributes.getNamedItem('value').nodeValue
r = 0
c = 0
if @pty_cols.getModel().getText() is ''
c = 80
else
c = parseInt(@pty_cols.getModel().getText())
if Number.isNaN(c)
return "cols: #{@pty_cols.getModel().getText()} is not a number"
if @pty_rows.getModel().getText() is ''
r = 25
else
r = parseInt(@pty_rows.getModel().getText())
if Number.isNaN(r)
return "rows: #{@pty_rows.getModel().getText()} is not a number"
command.environment =
name: 'ptyw'
config:
stdoe: value
rows: r
cols: c
return null
mod:
class Ptyw
constructor: (@command, manager, @config) ->
{command, args, env} = @command
pty = require 'ptyw.js'
@promise = new Promise((@resolve, @reject) =>
@process = pty.spawn(command, args, {
name: 'xterm-color'
cols: @config.cols
rows: @config.rows
cwd: @command.getWD()
env: env
}
)
if @config.stdoe is 'pty-stdout'
@process.on 'data', (data) =>
return unless @process?
return if @killed
data = data.replace /\r/g, ''
manager.stdout.in(data)
else
@process.on 'data', (data) =>
return unless @process?
return if @killed
data = data.replace /\r/g, ''
manager.stderr.in(data)
@process.on 'exit', (exitcode, signal) =>
return unless exitcode? and signal?
if signal isnt 0
exitcode = null
signal = 128 + signal
else if exitcode >= 128
signal = exitcode
exitcode = null
else
signal = null
@killed = true
manager.finish({exitcode, signal})
@resolve({exitcode, signal})
manager.setInput(@process)
)
@promise.then(
=>
@destroy()
=>
@destroy()
)
getPromise: ->
@promise
isKilled: ->
@killed
sigterm: ->
@process?.write '\x03', 'utf8'
sigkill: ->
@process?.kill('SIGKILL')
destroy: ->
@killed = true
@promise = null
@process = null
@reject = (e) -> console.log "Received reject for finished process: #{e}"
@resolve = (e) -> console.log "Received resolve for finished process: #{e}"
|
[
{
"context": "http://alac.macosforge.org\n#\n# Javascript port by Jens Nockert and Devon Govett of OFMLabs, https://github.com/o",
"end": 101,
"score": 0.9998438954353333,
"start": 89,
"tag": "NAME",
"value": "Jens Nockert"
},
{
"context": "forge.org\n#\n# Javascript port by Jens Nockert and Devon Govett of OFMLabs, https://github.com/ofmlabs/alac.js\n# ",
"end": 118,
"score": 0.9998832941055298,
"start": 106,
"tag": "NAME",
"value": "Devon Govett"
},
{
"context": "t and Devon Govett of OFMLabs, https://github.com/ofmlabs/alac.js\n# \n# Licensed under the Apache License, ",
"end": 157,
"score": 0.9885515570640564,
"start": 150,
"tag": "USERNAME",
"value": "ofmlabs"
}
] | src/decoder.coffee | h0rn3z0r/alac.js | 0 | #
# Original C(++) version by Apple, http://alac.macosforge.org
#
# Javascript port by Jens Nockert and Devon Govett of OFMLabs, https://github.com/ofmlabs/alac.js
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
AV = require 'av'
Aglib = require './ag_dec'
Dplib = require './dp_dec'
Matrixlib = require './matrix_dec'
class ALACDecoder extends AV.Decoder
AV.Decoder.register('alac', ALACDecoder)
ID_SCE = 0 # Single Channel Element
ID_CPE = 1 # Channel Pair Element
ID_CCE = 2 # Coupling Channel Element
ID_LFE = 3 # LFE Channel Element
ID_DSE = 4 # not yet supported
ID_PCE = 5
ID_FIL = 6
ID_END = 7
setCookie: (cookie) ->
data = AV.Stream.fromBuffer(cookie)
# For historical reasons the decoder needs to be resilient to magic cookies vended by older encoders.
# There may be additional data encapsulating the ALACSpecificConfig.
# This would consist of format ('frma') and 'alac' atoms which precede the ALACSpecificConfig.
# See ALACMagicCookieDescription.txt in the original Apple decoder for additional documentation
# concerning the 'magic cookie'
# skip format ('frma') atom if present
if data.peekString(4, 4) is 'frma'
data.advance(12)
# skip 'alac' atom header if present
if data.peekString(4, 4) is 'alac'
data.advance(12)
# read the ALACSpecificConfig
@config =
frameLength: data.readUInt32()
compatibleVersion: data.readUInt8()
bitDepth: data.readUInt8()
pb: data.readUInt8()
mb: data.readUInt8()
kb: data.readUInt8()
numChannels: data.readUInt8()
maxRun: data.readUInt16()
maxFrameBytes: data.readUInt32()
avgBitRate: data.readUInt32()
sampleRate: data.readUInt32()
# CAF files don't encode the bitsPerChannel
@format.bitsPerChannel ||= @config.bitDepth
# allocate mix buffers
@mixBuffers = [
new Int32Array(@config.frameLength) # left channel
new Int32Array(@config.frameLength) # right channel
]
# allocate dynamic predictor buffer
predictorBuffer = new ArrayBuffer(@config.frameLength * 4)
@predictor = new Int32Array(predictorBuffer)
# "shift off" buffer shares memory with predictor buffer
@shiftBuffer = new Int16Array(predictorBuffer)
readChunk: (data) ->
return unless @stream.available(4)
data = @bitstream
samples = @config.frameLength
numChannels = @config.numChannels
channelIndex = 0
output = new ArrayBuffer(samples * numChannels * @config.bitDepth / 8)
end = false
while not end
# read element tag
tag = data.read(3)
switch tag
when ID_SCE, ID_LFE, ID_CPE
channels = if tag is ID_CPE then 2 else 1
# if decoding this would take us over the max channel limit, bail
if channelIndex + channels > numChannels
throw new Error 'Too many channels!'
# no idea what this is for... doesn't seem used anywhere
elementInstanceTag = data.read(4)
# read the 12 unused header bits
unused = data.read(12)
unless unused is 0
throw new Error 'Unused part of header does not contain 0, it should'
# read the 1-bit "partial frame" flag, 2-bit "shift-off" flag & 1-bit "escape" flag
partialFrame = data.read(1)
bytesShifted = data.read(2)
escapeFlag = data.read(1)
if bytesShifted is 3
throw new Error "Bytes are shifted by 3, they shouldn't be"
# check for partial frame to override requested samples
if partialFrame
samples = data.read(32)
if escapeFlag is 0
shift = bytesShifted * 8
chanBits = @config.bitDepth - shift + channels - 1
# compressed frame, read rest of parameters
mixBits = data.read(8)
mixRes = data.read(8)
mode = []
denShift = []
pbFactor = []
num = []
coefs = []
for ch in [0...channels] by 1
mode[ch] = data.read(4)
denShift[ch] = data.read(4)
pbFactor[ch] = data.read(3)
num[ch] = data.read(5)
table = coefs[ch] = new Int16Array(32)
for i in [0...num[ch]] by 1
table[i] = data.read(16)
# if shift active, skip the the shift buffer but remember where it starts
if bytesShifted
shiftbits = data.copy()
data.advance(shift * channels * samples)
# decompress and run predictors
{mb, pb, kb, maxRun} = @config
for ch in [0...channels] by 1
params = Aglib.ag_params(mb, (pb * pbFactor[ch]) / 4, kb, samples, samples, maxRun)
status = Aglib.dyn_decomp(params, data, @predictor, samples, chanBits)
unless status
throw new Error 'Error in Aglib.dyn_decomp'
if mode[ch] is 0
Dplib.unpc_block(@predictor, @mixBuffers[ch], samples, coefs[ch], num[ch], chanBits, denShift[ch])
else
# the special "numActive == 31" mode can be done in-place
Dplib.unpc_block(@predictor, @predictor, samples, null, 31, chanBits, 0)
Dplib.unpc_block(@predictor, @mixBuffers[ch], samples, coefs[ch], num[ch], chanBits, denShift[ch])
else
# uncompressed frame, copy data into the mix buffer to use common output code
chanBits = @config.bitDepth
shift = 32 - chanBits
for i in [0...samples] by 1
for ch in [0...channels] by 1
val = (data.read(chanBits) << shift) >> shift
@mixBuffers[ch][i] = val
mixBits = mixRes = 0
bytesShifted = 0
# now read the shifted values into the shift buffer
if bytesShifted
shift = bytesShifted * 8
for i in [0...samples * channels] by 1
@shiftBuffer[i] = shiftbits.read(shift)
# un-mix the data and convert to output format
# - note that mixRes = 0 means just interleave so we use that path for uncompressed frames
switch @config.bitDepth
when 16
out16 = new Int16Array(output, channelIndex)
if channels is 2
Matrixlib.unmix16(@mixBuffers[0], @mixBuffers[1], out16, numChannels, samples, mixBits, mixRes)
else
j = 0
buf = @mixBuffers[0]
for i in [0...samples] by 1
out16[j] = buf[i]
j += numChannels
else
throw new Error 'Only supports 16-bit samples right now'
channelIndex += channels
when ID_CCE, ID_PCE
throw new Error "Unsupported element: #{tag}"
when ID_DSE
# the tag associates this data stream element with a given audio element
elementInstanceTag = data.read(4)
dataByteAlignFlag = data.read(1)
# 8-bit count or (8-bit + 8-bit count) if 8-bit count == 255
count = data.read(8)
if count is 255
count += data.read(8)
# the align flag means the bitstream should be byte-aligned before reading the following data bytes
if dataByteAlignFlag
data.align()
# skip the data bytes
data.advance(count * 8)
unless data.pos < data.length
throw new Error 'buffer overrun'
when ID_FIL
# 4-bit count or (4-bit + 8-bit count) if 4-bit count == 15
# - plus this weird -1 thing I still don't fully understand
count = data.read(4)
if count is 15
count += data.read(8) - 1
data.advance(count * 8)
unless data.pos < data.length
throw new Error 'buffer overrun'
when ID_END
data.align()
end = true
else
throw new Error "Unknown element: #{tag}"
if channelIndex > numChannels
throw new Error 'Channel index too large.'
return new Int16Array(output)
module.exports = ALACDecoder
| 81016 | #
# Original C(++) version by Apple, http://alac.macosforge.org
#
# Javascript port by <NAME> and <NAME> of OFMLabs, https://github.com/ofmlabs/alac.js
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
AV = require 'av'
Aglib = require './ag_dec'
Dplib = require './dp_dec'
Matrixlib = require './matrix_dec'
class ALACDecoder extends AV.Decoder
AV.Decoder.register('alac', ALACDecoder)
ID_SCE = 0 # Single Channel Element
ID_CPE = 1 # Channel Pair Element
ID_CCE = 2 # Coupling Channel Element
ID_LFE = 3 # LFE Channel Element
ID_DSE = 4 # not yet supported
ID_PCE = 5
ID_FIL = 6
ID_END = 7
setCookie: (cookie) ->
data = AV.Stream.fromBuffer(cookie)
# For historical reasons the decoder needs to be resilient to magic cookies vended by older encoders.
# There may be additional data encapsulating the ALACSpecificConfig.
# This would consist of format ('frma') and 'alac' atoms which precede the ALACSpecificConfig.
# See ALACMagicCookieDescription.txt in the original Apple decoder for additional documentation
# concerning the 'magic cookie'
# skip format ('frma') atom if present
if data.peekString(4, 4) is 'frma'
data.advance(12)
# skip 'alac' atom header if present
if data.peekString(4, 4) is 'alac'
data.advance(12)
# read the ALACSpecificConfig
@config =
frameLength: data.readUInt32()
compatibleVersion: data.readUInt8()
bitDepth: data.readUInt8()
pb: data.readUInt8()
mb: data.readUInt8()
kb: data.readUInt8()
numChannels: data.readUInt8()
maxRun: data.readUInt16()
maxFrameBytes: data.readUInt32()
avgBitRate: data.readUInt32()
sampleRate: data.readUInt32()
# CAF files don't encode the bitsPerChannel
@format.bitsPerChannel ||= @config.bitDepth
# allocate mix buffers
@mixBuffers = [
new Int32Array(@config.frameLength) # left channel
new Int32Array(@config.frameLength) # right channel
]
# allocate dynamic predictor buffer
predictorBuffer = new ArrayBuffer(@config.frameLength * 4)
@predictor = new Int32Array(predictorBuffer)
# "shift off" buffer shares memory with predictor buffer
@shiftBuffer = new Int16Array(predictorBuffer)
readChunk: (data) ->
return unless @stream.available(4)
data = @bitstream
samples = @config.frameLength
numChannels = @config.numChannels
channelIndex = 0
output = new ArrayBuffer(samples * numChannels * @config.bitDepth / 8)
end = false
while not end
# read element tag
tag = data.read(3)
switch tag
when ID_SCE, ID_LFE, ID_CPE
channels = if tag is ID_CPE then 2 else 1
# if decoding this would take us over the max channel limit, bail
if channelIndex + channels > numChannels
throw new Error 'Too many channels!'
# no idea what this is for... doesn't seem used anywhere
elementInstanceTag = data.read(4)
# read the 12 unused header bits
unused = data.read(12)
unless unused is 0
throw new Error 'Unused part of header does not contain 0, it should'
# read the 1-bit "partial frame" flag, 2-bit "shift-off" flag & 1-bit "escape" flag
partialFrame = data.read(1)
bytesShifted = data.read(2)
escapeFlag = data.read(1)
if bytesShifted is 3
throw new Error "Bytes are shifted by 3, they shouldn't be"
# check for partial frame to override requested samples
if partialFrame
samples = data.read(32)
if escapeFlag is 0
shift = bytesShifted * 8
chanBits = @config.bitDepth - shift + channels - 1
# compressed frame, read rest of parameters
mixBits = data.read(8)
mixRes = data.read(8)
mode = []
denShift = []
pbFactor = []
num = []
coefs = []
for ch in [0...channels] by 1
mode[ch] = data.read(4)
denShift[ch] = data.read(4)
pbFactor[ch] = data.read(3)
num[ch] = data.read(5)
table = coefs[ch] = new Int16Array(32)
for i in [0...num[ch]] by 1
table[i] = data.read(16)
# if shift active, skip the the shift buffer but remember where it starts
if bytesShifted
shiftbits = data.copy()
data.advance(shift * channels * samples)
# decompress and run predictors
{mb, pb, kb, maxRun} = @config
for ch in [0...channels] by 1
params = Aglib.ag_params(mb, (pb * pbFactor[ch]) / 4, kb, samples, samples, maxRun)
status = Aglib.dyn_decomp(params, data, @predictor, samples, chanBits)
unless status
throw new Error 'Error in Aglib.dyn_decomp'
if mode[ch] is 0
Dplib.unpc_block(@predictor, @mixBuffers[ch], samples, coefs[ch], num[ch], chanBits, denShift[ch])
else
# the special "numActive == 31" mode can be done in-place
Dplib.unpc_block(@predictor, @predictor, samples, null, 31, chanBits, 0)
Dplib.unpc_block(@predictor, @mixBuffers[ch], samples, coefs[ch], num[ch], chanBits, denShift[ch])
else
# uncompressed frame, copy data into the mix buffer to use common output code
chanBits = @config.bitDepth
shift = 32 - chanBits
for i in [0...samples] by 1
for ch in [0...channels] by 1
val = (data.read(chanBits) << shift) >> shift
@mixBuffers[ch][i] = val
mixBits = mixRes = 0
bytesShifted = 0
# now read the shifted values into the shift buffer
if bytesShifted
shift = bytesShifted * 8
for i in [0...samples * channels] by 1
@shiftBuffer[i] = shiftbits.read(shift)
# un-mix the data and convert to output format
# - note that mixRes = 0 means just interleave so we use that path for uncompressed frames
switch @config.bitDepth
when 16
out16 = new Int16Array(output, channelIndex)
if channels is 2
Matrixlib.unmix16(@mixBuffers[0], @mixBuffers[1], out16, numChannels, samples, mixBits, mixRes)
else
j = 0
buf = @mixBuffers[0]
for i in [0...samples] by 1
out16[j] = buf[i]
j += numChannels
else
throw new Error 'Only supports 16-bit samples right now'
channelIndex += channels
when ID_CCE, ID_PCE
throw new Error "Unsupported element: #{tag}"
when ID_DSE
# the tag associates this data stream element with a given audio element
elementInstanceTag = data.read(4)
dataByteAlignFlag = data.read(1)
# 8-bit count or (8-bit + 8-bit count) if 8-bit count == 255
count = data.read(8)
if count is 255
count += data.read(8)
# the align flag means the bitstream should be byte-aligned before reading the following data bytes
if dataByteAlignFlag
data.align()
# skip the data bytes
data.advance(count * 8)
unless data.pos < data.length
throw new Error 'buffer overrun'
when ID_FIL
# 4-bit count or (4-bit + 8-bit count) if 4-bit count == 15
# - plus this weird -1 thing I still don't fully understand
count = data.read(4)
if count is 15
count += data.read(8) - 1
data.advance(count * 8)
unless data.pos < data.length
throw new Error 'buffer overrun'
when ID_END
data.align()
end = true
else
throw new Error "Unknown element: #{tag}"
if channelIndex > numChannels
throw new Error 'Channel index too large.'
return new Int16Array(output)
module.exports = ALACDecoder
| true | #
# Original C(++) version by Apple, http://alac.macosforge.org
#
# Javascript port by PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI of OFMLabs, https://github.com/ofmlabs/alac.js
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
AV = require 'av'
Aglib = require './ag_dec'
Dplib = require './dp_dec'
Matrixlib = require './matrix_dec'
class ALACDecoder extends AV.Decoder
AV.Decoder.register('alac', ALACDecoder)
ID_SCE = 0 # Single Channel Element
ID_CPE = 1 # Channel Pair Element
ID_CCE = 2 # Coupling Channel Element
ID_LFE = 3 # LFE Channel Element
ID_DSE = 4 # not yet supported
ID_PCE = 5
ID_FIL = 6
ID_END = 7
setCookie: (cookie) ->
data = AV.Stream.fromBuffer(cookie)
# For historical reasons the decoder needs to be resilient to magic cookies vended by older encoders.
# There may be additional data encapsulating the ALACSpecificConfig.
# This would consist of format ('frma') and 'alac' atoms which precede the ALACSpecificConfig.
# See ALACMagicCookieDescription.txt in the original Apple decoder for additional documentation
# concerning the 'magic cookie'
# skip format ('frma') atom if present
if data.peekString(4, 4) is 'frma'
data.advance(12)
# skip 'alac' atom header if present
if data.peekString(4, 4) is 'alac'
data.advance(12)
# read the ALACSpecificConfig
@config =
frameLength: data.readUInt32()
compatibleVersion: data.readUInt8()
bitDepth: data.readUInt8()
pb: data.readUInt8()
mb: data.readUInt8()
kb: data.readUInt8()
numChannels: data.readUInt8()
maxRun: data.readUInt16()
maxFrameBytes: data.readUInt32()
avgBitRate: data.readUInt32()
sampleRate: data.readUInt32()
# CAF files don't encode the bitsPerChannel
@format.bitsPerChannel ||= @config.bitDepth
# allocate mix buffers
@mixBuffers = [
new Int32Array(@config.frameLength) # left channel
new Int32Array(@config.frameLength) # right channel
]
# allocate dynamic predictor buffer
predictorBuffer = new ArrayBuffer(@config.frameLength * 4)
@predictor = new Int32Array(predictorBuffer)
# "shift off" buffer shares memory with predictor buffer
@shiftBuffer = new Int16Array(predictorBuffer)
readChunk: (data) ->
return unless @stream.available(4)
data = @bitstream
samples = @config.frameLength
numChannels = @config.numChannels
channelIndex = 0
output = new ArrayBuffer(samples * numChannels * @config.bitDepth / 8)
end = false
while not end
# read element tag
tag = data.read(3)
switch tag
when ID_SCE, ID_LFE, ID_CPE
channels = if tag is ID_CPE then 2 else 1
# if decoding this would take us over the max channel limit, bail
if channelIndex + channels > numChannels
throw new Error 'Too many channels!'
# no idea what this is for... doesn't seem used anywhere
elementInstanceTag = data.read(4)
# read the 12 unused header bits
unused = data.read(12)
unless unused is 0
throw new Error 'Unused part of header does not contain 0, it should'
# read the 1-bit "partial frame" flag, 2-bit "shift-off" flag & 1-bit "escape" flag
partialFrame = data.read(1)
bytesShifted = data.read(2)
escapeFlag = data.read(1)
if bytesShifted is 3
throw new Error "Bytes are shifted by 3, they shouldn't be"
# check for partial frame to override requested samples
if partialFrame
samples = data.read(32)
if escapeFlag is 0
shift = bytesShifted * 8
chanBits = @config.bitDepth - shift + channels - 1
# compressed frame, read rest of parameters
mixBits = data.read(8)
mixRes = data.read(8)
mode = []
denShift = []
pbFactor = []
num = []
coefs = []
for ch in [0...channels] by 1
mode[ch] = data.read(4)
denShift[ch] = data.read(4)
pbFactor[ch] = data.read(3)
num[ch] = data.read(5)
table = coefs[ch] = new Int16Array(32)
for i in [0...num[ch]] by 1
table[i] = data.read(16)
# if shift active, skip the the shift buffer but remember where it starts
if bytesShifted
shiftbits = data.copy()
data.advance(shift * channels * samples)
# decompress and run predictors
{mb, pb, kb, maxRun} = @config
for ch in [0...channels] by 1
params = Aglib.ag_params(mb, (pb * pbFactor[ch]) / 4, kb, samples, samples, maxRun)
status = Aglib.dyn_decomp(params, data, @predictor, samples, chanBits)
unless status
throw new Error 'Error in Aglib.dyn_decomp'
if mode[ch] is 0
Dplib.unpc_block(@predictor, @mixBuffers[ch], samples, coefs[ch], num[ch], chanBits, denShift[ch])
else
# the special "numActive == 31" mode can be done in-place
Dplib.unpc_block(@predictor, @predictor, samples, null, 31, chanBits, 0)
Dplib.unpc_block(@predictor, @mixBuffers[ch], samples, coefs[ch], num[ch], chanBits, denShift[ch])
else
# uncompressed frame, copy data into the mix buffer to use common output code
chanBits = @config.bitDepth
shift = 32 - chanBits
for i in [0...samples] by 1
for ch in [0...channels] by 1
val = (data.read(chanBits) << shift) >> shift
@mixBuffers[ch][i] = val
mixBits = mixRes = 0
bytesShifted = 0
# now read the shifted values into the shift buffer
if bytesShifted
shift = bytesShifted * 8
for i in [0...samples * channels] by 1
@shiftBuffer[i] = shiftbits.read(shift)
# un-mix the data and convert to output format
# - note that mixRes = 0 means just interleave so we use that path for uncompressed frames
switch @config.bitDepth
when 16
out16 = new Int16Array(output, channelIndex)
if channels is 2
Matrixlib.unmix16(@mixBuffers[0], @mixBuffers[1], out16, numChannels, samples, mixBits, mixRes)
else
j = 0
buf = @mixBuffers[0]
for i in [0...samples] by 1
out16[j] = buf[i]
j += numChannels
else
throw new Error 'Only supports 16-bit samples right now'
channelIndex += channels
when ID_CCE, ID_PCE
throw new Error "Unsupported element: #{tag}"
when ID_DSE
# the tag associates this data stream element with a given audio element
elementInstanceTag = data.read(4)
dataByteAlignFlag = data.read(1)
# 8-bit count or (8-bit + 8-bit count) if 8-bit count == 255
count = data.read(8)
if count is 255
count += data.read(8)
# the align flag means the bitstream should be byte-aligned before reading the following data bytes
if dataByteAlignFlag
data.align()
# skip the data bytes
data.advance(count * 8)
unless data.pos < data.length
throw new Error 'buffer overrun'
when ID_FIL
# 4-bit count or (4-bit + 8-bit count) if 4-bit count == 15
# - plus this weird -1 thing I still don't fully understand
count = data.read(4)
if count is 15
count += data.read(8) - 1
data.advance(count * 8)
unless data.pos < data.length
throw new Error 'buffer overrun'
when ID_END
data.align()
end = true
else
throw new Error "Unknown element: #{tag}"
if channelIndex > numChannels
throw new Error 'Channel index too large.'
return new Int16Array(output)
module.exports = ALACDecoder
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999127388000488,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/_components/notification-banner.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div } from 'react-dom-factories'
el = React.createElement
bn = 'notification-banner-v2'
notificationBanners = document.getElementsByClassName('js-notification-banners')
export class NotificationBanner extends React.PureComponent
constructor: (props) ->
super props
@eventId = "notification-banner-#{osu.uuid()}"
@createPortalContainer()
componentDidMount: =>
$(document).on "turbolinks:before-cache.#{@eventId}", @removePortalContainer
componentWillUnmount: =>
$(document).off ".#{@eventId}"
@removePortalContainer()
render: =>
notification =
div className: "#{bn} #{bn}--#{@props.type}",
div className: "#{bn}__col #{bn}__col--icon"
div className: "#{bn}__col #{bn}__col--label",
div className: "#{bn}__type", @props.type
div className: "#{bn}__text", @props.title
div className: "#{bn}__col",
div className: "#{bn}__text", @props.message
ReactDOM.createPortal notification, @portalContainer
removePortalContainer: =>
notificationBanners[0].removeChild @portalContainer
createPortalContainer: =>
@portalContainer = document.createElement 'div'
notificationBanners[0].appendChild @portalContainer
| 93114 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div } from 'react-dom-factories'
el = React.createElement
bn = 'notification-banner-v2'
notificationBanners = document.getElementsByClassName('js-notification-banners')
export class NotificationBanner extends React.PureComponent
constructor: (props) ->
super props
@eventId = "notification-banner-#{osu.uuid()}"
@createPortalContainer()
componentDidMount: =>
$(document).on "turbolinks:before-cache.#{@eventId}", @removePortalContainer
componentWillUnmount: =>
$(document).off ".#{@eventId}"
@removePortalContainer()
render: =>
notification =
div className: "#{bn} #{bn}--#{@props.type}",
div className: "#{bn}__col #{bn}__col--icon"
div className: "#{bn}__col #{bn}__col--label",
div className: "#{bn}__type", @props.type
div className: "#{bn}__text", @props.title
div className: "#{bn}__col",
div className: "#{bn}__text", @props.message
ReactDOM.createPortal notification, @portalContainer
removePortalContainer: =>
notificationBanners[0].removeChild @portalContainer
createPortalContainer: =>
@portalContainer = document.createElement 'div'
notificationBanners[0].appendChild @portalContainer
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div } from 'react-dom-factories'
el = React.createElement
bn = 'notification-banner-v2'
notificationBanners = document.getElementsByClassName('js-notification-banners')
export class NotificationBanner extends React.PureComponent
constructor: (props) ->
super props
@eventId = "notification-banner-#{osu.uuid()}"
@createPortalContainer()
componentDidMount: =>
$(document).on "turbolinks:before-cache.#{@eventId}", @removePortalContainer
componentWillUnmount: =>
$(document).off ".#{@eventId}"
@removePortalContainer()
render: =>
notification =
div className: "#{bn} #{bn}--#{@props.type}",
div className: "#{bn}__col #{bn}__col--icon"
div className: "#{bn}__col #{bn}__col--label",
div className: "#{bn}__type", @props.type
div className: "#{bn}__text", @props.title
div className: "#{bn}__col",
div className: "#{bn}__text", @props.message
ReactDOM.createPortal notification, @portalContainer
removePortalContainer: =>
notificationBanners[0].removeChild @portalContainer
createPortalContainer: =>
@portalContainer = document.createElement 'div'
notificationBanners[0].appendChild @portalContainer
|
[
{
"context": "= context.robot.brain.userForId '1',\n name: 'mocha'\n room: '#mocha'\n\n done(context)\n\n conte",
"end": 795,
"score": 0.9363828301429749,
"start": 790,
"tag": "NAME",
"value": "mocha"
}
] | test/test_context.coffee | klarna/hubot-stash-poll | 8 | fs = require('fs')
nock = require('nock')
path = require('path')
sinon = require('sinon')
Robot = require('hubot/src/robot')
module.exports = (done) ->
context =
sandbox: sinon.sandbox.create()
robot: undefined
adapter: undefined
user: undefined
# to avoid "possible EventEmitter memory leak detected" warning
context.sandbox.stub process, 'on', -> null
# stop all requests by default
nock.activate() if not nock.isActive()
nock.disableNetConnect()
nock.enableNetConnect('localhost')
context.robot = new Robot(null, 'mock-adapter', false, 'MOCKBOT')
context.sandbox.spy context.robot, 'respond'
context.sandbox.spy context.robot, 'hear'
context.robot.adapter.on 'connected', ->
context.user = context.robot.brain.userForId '1',
name: 'mocha'
room: '#mocha'
done(context)
context.robot.run()
| 124169 | fs = require('fs')
nock = require('nock')
path = require('path')
sinon = require('sinon')
Robot = require('hubot/src/robot')
module.exports = (done) ->
context =
sandbox: sinon.sandbox.create()
robot: undefined
adapter: undefined
user: undefined
# to avoid "possible EventEmitter memory leak detected" warning
context.sandbox.stub process, 'on', -> null
# stop all requests by default
nock.activate() if not nock.isActive()
nock.disableNetConnect()
nock.enableNetConnect('localhost')
context.robot = new Robot(null, 'mock-adapter', false, 'MOCKBOT')
context.sandbox.spy context.robot, 'respond'
context.sandbox.spy context.robot, 'hear'
context.robot.adapter.on 'connected', ->
context.user = context.robot.brain.userForId '1',
name: '<NAME>'
room: '#mocha'
done(context)
context.robot.run()
| true | fs = require('fs')
nock = require('nock')
path = require('path')
sinon = require('sinon')
Robot = require('hubot/src/robot')
module.exports = (done) ->
context =
sandbox: sinon.sandbox.create()
robot: undefined
adapter: undefined
user: undefined
# to avoid "possible EventEmitter memory leak detected" warning
context.sandbox.stub process, 'on', -> null
# stop all requests by default
nock.activate() if not nock.isActive()
nock.disableNetConnect()
nock.enableNetConnect('localhost')
context.robot = new Robot(null, 'mock-adapter', false, 'MOCKBOT')
context.sandbox.spy context.robot, 'respond'
context.sandbox.spy context.robot, 'hear'
context.robot.adapter.on 'connected', ->
context.user = context.robot.brain.userForId '1',
name: 'PI:NAME:<NAME>END_PI'
room: '#mocha'
done(context)
context.robot.run()
|
[
{
"context": "up logger\r\nlogger = bunyan.createLogger\r\n name: 'mobitor-bot'\r\n streams: [\r\n level: 'info',\r\n stream: p",
"end": 307,
"score": 0.9862768054008484,
"start": 296,
"tag": "USERNAME",
"value": "mobitor-bot"
},
{
"context": "ice}<br />\"\r\n\r\n utilities.sendEmail\r\n from: '\"lightbringer-bot\" <lightbringer_bot@zoho.com>'\r\n to: _.join(nco",
"end": 1434,
"score": 0.9974409937858582,
"start": 1418,
"tag": "USERNAME",
"value": "lightbringer-bot"
},
{
"context": "ilities.sendEmail\r\n from: '\"lightbringer-bot\" <lightbringer_bot@zoho.com>'\r\n to: _.join(nconf.get('subscriptions'), ', ",
"end": 1462,
"score": 0.9999334216117859,
"start": 1437,
"tag": "EMAIL",
"value": "lightbringer_bot@zoho.com"
}
] | src/index.coffee | lightbringer1991/DellMonitorBot | 0 | _ = require 'lodash'
nconf = require 'nconf'
tinyreq = require 'tinyreq'
cheerio = require 'cheerio'
bunyan = require 'bunyan'
utilities = require './utilities'
# setup nconf
nconf.argv()
.env()
.file {file: 'config.json'}
# setup logger
logger = bunyan.createLogger
name: 'mobitor-bot'
streams: [
level: 'info',
stream: process.stdout
,
level: 'error',
path: 'logs/error.log'
,
level: 'debug'
path: 'logs/app.log'
]
# setup required variables
url = 'http://www1.ap.dell.com/content/topics/segtopic.aspx/products/quickship/au/en/monitors?c=au&l=en&s=dfo'
# configure search
modelRegex = /27"|P27|U27/g;
tinyreq url, (err, body) ->
if err then return logger.error err
$ = cheerio.load body
trList = $('#maincontentcnt form table table table table tbody tr').get()
# remove the first 3 rows, one is header, 2 are empty
trList.shift()
trList.shift()
trList.shift()
matchedProducts = utilities.getMatchedProducts $, trList, logger, { modelRegex }
if matchedProducts.length is 0
logger.info "No matched found"
return
# build and send email
logger.info "Sending Email to #{_.join(nconf.get('subscriptions'), ', ')}"
body = ''
_.forEach matchedProducts, ({model, price, condition}) ->
body += "Found #{model}(#{condition}) with price #{price}<br />"
utilities.sendEmail
from: '"lightbringer-bot" <lightbringer_bot@zoho.com>'
to: _.join(nconf.get('subscriptions'), ', ')
subject: 'Matched DELL monitor watcher'
html: body
, (err, info) ->
if error then logger.error "Email sent failed: #{err}"
else logger.info "Email sent successfully"
| 87806 | _ = require 'lodash'
nconf = require 'nconf'
tinyreq = require 'tinyreq'
cheerio = require 'cheerio'
bunyan = require 'bunyan'
utilities = require './utilities'
# setup nconf
nconf.argv()
.env()
.file {file: 'config.json'}
# setup logger
logger = bunyan.createLogger
name: 'mobitor-bot'
streams: [
level: 'info',
stream: process.stdout
,
level: 'error',
path: 'logs/error.log'
,
level: 'debug'
path: 'logs/app.log'
]
# setup required variables
url = 'http://www1.ap.dell.com/content/topics/segtopic.aspx/products/quickship/au/en/monitors?c=au&l=en&s=dfo'
# configure search
modelRegex = /27"|P27|U27/g;
tinyreq url, (err, body) ->
if err then return logger.error err
$ = cheerio.load body
trList = $('#maincontentcnt form table table table table tbody tr').get()
# remove the first 3 rows, one is header, 2 are empty
trList.shift()
trList.shift()
trList.shift()
matchedProducts = utilities.getMatchedProducts $, trList, logger, { modelRegex }
if matchedProducts.length is 0
logger.info "No matched found"
return
# build and send email
logger.info "Sending Email to #{_.join(nconf.get('subscriptions'), ', ')}"
body = ''
_.forEach matchedProducts, ({model, price, condition}) ->
body += "Found #{model}(#{condition}) with price #{price}<br />"
utilities.sendEmail
from: '"lightbringer-bot" <<EMAIL>>'
to: _.join(nconf.get('subscriptions'), ', ')
subject: 'Matched DELL monitor watcher'
html: body
, (err, info) ->
if error then logger.error "Email sent failed: #{err}"
else logger.info "Email sent successfully"
| true | _ = require 'lodash'
nconf = require 'nconf'
tinyreq = require 'tinyreq'
cheerio = require 'cheerio'
bunyan = require 'bunyan'
utilities = require './utilities'
# setup nconf
nconf.argv()
.env()
.file {file: 'config.json'}
# setup logger
logger = bunyan.createLogger
name: 'mobitor-bot'
streams: [
level: 'info',
stream: process.stdout
,
level: 'error',
path: 'logs/error.log'
,
level: 'debug'
path: 'logs/app.log'
]
# setup required variables
url = 'http://www1.ap.dell.com/content/topics/segtopic.aspx/products/quickship/au/en/monitors?c=au&l=en&s=dfo'
# configure search
modelRegex = /27"|P27|U27/g;
tinyreq url, (err, body) ->
if err then return logger.error err
$ = cheerio.load body
trList = $('#maincontentcnt form table table table table tbody tr').get()
# remove the first 3 rows, one is header, 2 are empty
trList.shift()
trList.shift()
trList.shift()
matchedProducts = utilities.getMatchedProducts $, trList, logger, { modelRegex }
if matchedProducts.length is 0
logger.info "No matched found"
return
# build and send email
logger.info "Sending Email to #{_.join(nconf.get('subscriptions'), ', ')}"
body = ''
_.forEach matchedProducts, ({model, price, condition}) ->
body += "Found #{model}(#{condition}) with price #{price}<br />"
utilities.sendEmail
from: '"lightbringer-bot" <PI:EMAIL:<EMAIL>END_PI>'
to: _.join(nconf.get('subscriptions'), ', ')
subject: 'Matched DELL monitor watcher'
html: body
, (err, info) ->
if error then logger.error "Email sent failed: #{err}"
else logger.info "Email sent successfully"
|
[
{
"context": "CardSetId(CardSet.Coreshatter)\n\t\t\tcard.name = \"Oak in the Nemeton\"\n\t\t\tcard.setDescription(\"Trial: Have ",
"end": 4791,
"score": 0.5062854290008545,
"start": 4789,
"tag": "NAME",
"value": "in"
},
{
"context": "CardSet.Coreshatter)\n\t\t\tcard.name = \"Oak in the Nemeton\"\n\t\t\tcard.setDescription(\"Trial: Have 5 token mini",
"end": 4803,
"score": 0.5930821299552917,
"start": 4798,
"tag": "NAME",
"value": "meton"
},
{
"context": "ard.factionId = Factions.Faction6\n\t\t\tcard.name = \"Cloudcaller\"\n\t\t\tcard.setDescription(\"Opening Gambit: Your Blo",
"end": 6713,
"score": 0.9955613613128662,
"start": 6702,
"tag": "NAME",
"value": "Cloudcaller"
},
{
"context": "ard.setIsHiddenInCollection(true)\n\t\t\tcard.name = \"Lesser Waterball\"\n\t\t\tcard.setDescription(\"Deal 4 damage to an enem",
"end": 8184,
"score": 0.988372802734375,
"start": 8168,
"tag": "NAME",
"value": "Lesser Waterball"
},
{
"context": "ard.factionId = Factions.Faction6\n\t\t\tcard.name = \"Malicious Wisp\"\n\t\t\tcard.setDescription(\"Opening Gambit: Take a m",
"end": 8847,
"score": 0.8814266324043274,
"start": 8833,
"tag": "NAME",
"value": "Malicious Wisp"
},
{
"context": "= Cards.Spell.EnterThunderdome\n\t\t\tcard.name = \"Ice Age\"\n\t\t\tcard.setDescription(\"Summon Blazing Spines al",
"end": 10351,
"score": 0.7415649890899658,
"start": 10348,
"tag": "NAME",
"value": "Age"
},
{
"context": "ard.factionId = Factions.Faction6\n\t\t\tcard.name = \"Thicket Augur\"\n\t\t\tcard.setDescription(\"Intensify: Summon 1 Trea",
"end": 11106,
"score": 0.9998527765274048,
"start": 11093,
"tag": "NAME",
"value": "Thicket Augur"
},
{
"context": "ard.factionId = Factions.Faction6\n\t\t\tcard.name = \"Fenrir Berserker\"\n\t\t\tcard.setDescription(\"Dying Wish: Summon a Fen",
"end": 12471,
"score": 0.9998562932014465,
"start": 12455,
"tag": "NAME",
"value": "Fenrir Berserker"
},
{
"context": "ard.factionId = Factions.Faction6\n\t\t\tcard.name = \"Meltwater Moose\"\n\t\t\tcard.setDescription(\"When this minion is summ",
"end": 13752,
"score": 0.9987834095954895,
"start": 13737,
"tag": "NAME",
"value": "Meltwater Moose"
},
{
"context": "setCardSetId(CardSet.Coreshatter)\n\t\t\tcard.name = \"Wanderlust\"\n\t\t\tcard.setDescription(\"Intensify: Teleport 1 ra",
"end": 16114,
"score": 0.9926726818084717,
"start": 16104,
"tag": "NAME",
"value": "Wanderlust"
},
{
"context": "ard.factionId = Factions.Faction6\n\t\t\tcard.name = \"Disciple of Yggdra\"\n\t\t\tcard.setDescription(\"When an enemy is Stunned",
"end": 16919,
"score": 0.9974626302719116,
"start": 16901,
"tag": "NAME",
"value": "Disciple of Yggdra"
},
{
"context": "ard.setIsHiddenInCollection(true)\n\t\t\tcard.name = \"Yggdra's Voracity\"\n\t\t\tcard.setDescription(\"Whenever an enemy is Stu",
"end": 18435,
"score": 0.9996744990348816,
"start": 18418,
"tag": "NAME",
"value": "Yggdra's Voracity"
},
{
"context": "setCardSetId(CardSet.Coreshatter)\n\t\t\tcard.name = \"Endless Hunt\"\n\t\t\tcard.setDescription(\"Summon a 3/3 Vespyr Nigh",
"end": 19873,
"score": 0.9966808557510376,
"start": 19861,
"tag": "NAME",
"value": "Endless Hunt"
},
{
"context": "d.id = Cards.Artifact.SnowChipper\n\t\t\tcard.name = \"Resonance Scythe\"\n\t\t\tcard.setDescription(\"Your General has +1 Atta",
"end": 20704,
"score": 0.999370276927948,
"start": 20688,
"tag": "NAME",
"value": "Resonance Scythe"
},
{
"context": "setCardSetId(CardSet.Coreshatter)\n\t\t\tcard.name = \"Permafrost\"\n\t\t\tcard.setDescription(\"Stun an enemy minion.\\nS",
"end": 22387,
"score": 0.9860305190086365,
"start": 22377,
"tag": "NAME",
"value": "Permafrost"
}
] | app/sdk/cards/factory/coreshatter/faction6.coffee | willroberts/duelyst | 5 | # do not add this file to a package
# it is specifically parsed by the package generation script
_ = require 'underscore'
moment = require 'moment'
Logger = require 'app/common/logger'
CONFIG = require('app/common/config')
RSX = require('app/data/resources')
Card = require 'app/sdk/cards/card'
Cards = require 'app/sdk/cards/cardsLookupComplete'
CardType = require 'app/sdk/cards/cardType'
Factions = require 'app/sdk/cards/factionsLookup'
FactionFactory = require 'app/sdk/cards/factionFactory'
Races = require 'app/sdk/cards/racesLookup'
Rarity = require 'app/sdk/cards/rarityLookup'
Unit = require 'app/sdk/entities/unit'
CardSet = require 'app/sdk/cards/cardSetLookup'
Artifact = require 'app/sdk/artifacts/artifact'
SpellFilterType = require 'app/sdk/spells/spellFilterType'
SpellSpawnEntitiesOnEdgeSpaces = require 'app/sdk/spells/spellSpawnEntitiesOnEdgeSpaces'
SpellApplyModifiersToExhaustedMinion = require 'app/sdk/spells/spellApplyModifiersToExhaustedMinion'
SpellApplyModifiers = require 'app/sdk/spells/spellApplyModifiers'
SpellIntensifyTeleportOwnSide = require 'app/sdk/spells/spellIntensifyTeleportOwnSide'
SpellInfiniteHowlers = require 'app/sdk/spells/spellInfiniteHowlers'
SpellTransformSameManaCost = require 'app/sdk/spells/spellTransformSameManaCost'
SpellCreepingFrost = require 'app/sdk/spells/spellCreepingFrost'
SpellDamage = require 'app/sdk/spells/spellDamage'
Modifier = require 'app/sdk/modifiers/modifier'
ModifierStunned = require 'app/sdk/modifiers/modifierStunned'
ModifierStun = require 'app/sdk/modifiers/modifierStun'
ModifierStunnedVanar = require 'app/sdk/modifiers/modifierStunnedVanar'
ModifierOpeningGambit = require 'app/sdk/modifiers/modifierOpeningGambit'
ModifierDyingWish = require 'app/sdk/modifiers/modifierDyingWish'
ModifierProvoke = require 'app/sdk/modifiers/modifierProvoke'
ModifierDyingWishSpawnEntity = require 'app/sdk/modifiers/modifierDyingWishSpawnEntity'
ModifierEntersBattlefieldWatchApplyModifiers = require 'app/sdk/modifiers/modifierEntersBattlefieldWatchApplyModifiers'
ModifierOpeningGambitDestroyManaCrystal = require 'app/sdk/modifiers/modifierOpeningGambitDestroyManaCrystal'
ModifierOpeningGambitBonusManaCrystal = require 'app/sdk/modifiers/modifierOpeningGambitBonusManaCrystal'
ModifierDyingWishDestroyManaCrystal = require 'app/sdk/modifiers/modifierDyingWishDestroyManaCrystal'
ModifierDyingWishBonusManaCrystal = require 'app/sdk/modifiers/modifierDyingWishBonusManaCrystal'
ModifierStartsInHand = require 'app/sdk/modifiers/modifierStartsInHand'
ModifierEnemyStunWatch = require 'app/sdk/modifiers/modifierEnemyStunWatch'
ModifierEnemyStunWatchTransformThis = require 'app/sdk/modifiers/modifierEnemyStunWatchTransformThis'
ModifierEnemyStunWatchDamageNearbyEnemies = require 'app/sdk/modifiers/modifierEnemyStunWatchDamageNearbyEnemies'
ModifierIntensifySpawnEntitiesNearby = require 'app/sdk/modifiers/modifierIntensifySpawnEntitiesNearby'
ModifierKillWatchRefreshExhaustionIfTargetStunned = require 'app/sdk/modifiers/modifierKillWatchRefreshExhaustionIfTargetStunned'
ModifierEnemyStunWatchFullyHeal = require 'app/sdk/modifiers/modifierEnemyStunWatchFullyHeal'
ModifierOnSummonFromHandApplyEmblems = require 'app/sdk/modifiers/modifierOnSummonFromHandApplyEmblems'
ModifierOpeningGambitChangeSignatureCardForThisTurn = require 'app/sdk/modifiers/modifierOpeningGambitChangeSignatureCardForThisTurn'
ModifierOpeningGambitRefreshSignatureCard = require 'app/sdk/modifiers/modifierOpeningGambitRefreshSignatureCard'
ModifierTokenCreator = require 'app/sdk/modifiers/modifierTokenCreator'
ModifierToken = require 'app/sdk/modifiers/modifierToken'
ModifierFateVanarTokenQuest = require 'app/sdk/modifiers/modifierFateVanarTokenQuest'
ModifierCannotBeReplaced = require 'app/sdk/modifiers/modifierCannotBeReplaced'
ModifierIntensify = require 'app/sdk/modifiers/modifierIntensify'
ModifierCounterIntensify = require 'app/sdk/modifiers/modifierCounterIntensify'
ModifierCannotBeRemovedFromHand = require 'app/sdk/modifiers/modifierCannotBeRemovedFromHand'
ModifierQuestBuffVanar = require 'app/sdk/modifiers/modifierQuestBuffVanar'
PlayerModifierEmblemSummonWatchVanarTokenQuest = require 'app/sdk/playerModifiers/playerModifierEmblemSummonWatchVanarTokenQuest'
i18next = require 'i18next'
# If i18next has not been initialized with translations (e.g. when this file
# is parsed by the package generation script), install an identity translator
# so card names and descriptions pass through unchanged.
if typeof i18next.t() is 'undefined'
  i18next.t = (text) -> text
# Factory for Faction 6 (Vanar) cards in the Coreshatter card set.
# Each branch of cardForIdentifier builds one fully configured card:
# identity/stats, inherent modifier context objects, keyword classes,
# and FX / sound / animation resources.
class CardFactory_CoreshatterSet_Faction6

  ###*
   * Returns a card that matches the identifier.
   * @param {Number|String} identifier
   * @param {GameSession} gameSession
   * @returns {Card} the constructed card, or null when the identifier is not handled here
  ###
  @cardForIdentifier: (identifier,gameSession) ->
    card = null

    # "Oak in the Nemeton" — Mythron quest unit.
    # Trial: control 5 token minions with different names.
    # Destiny: applies an emblem that buffs friendly token minions +4/+4.
    if (identifier == Cards.Faction6.VanarQuest)
      card = new Unit(gameSession)
      card.factionId = Factions.Faction6
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Oak in the Nemeton"
      card.setDescription("Trial: Have 5 token minions with different names.\nDestiny: Friendly token minions have +4/+4.")
      card.atk = 6
      card.maxHP = 6
      card.manaCost = 6
      card.rarityId = Rarity.Mythron
      statContextObject = ModifierQuestBuffVanar.createContextObjectWithAttributeBuffs(4,4)
      statContextObject.appliedName = "Might of the Oak"
      emblemModifier = PlayerModifierEmblemSummonWatchVanarTokenQuest.createContextObject([statContextObject])
      emblemModifier.appliedName = "Nemeton's Protection"
      emblemModifier.appliedDescription = "Your token minions have +4/+4."
      # Quest cards start in hand, cannot be replaced, and cannot be removed from hand.
      card.setInherentModifiersContextObjects([
        ModifierStartsInHand.createContextObject(),
        ModifierCannotBeReplaced.createContextObject(),
        ModifierFateVanarTokenQuest.createContextObject(5),
        ModifierOnSummonFromHandApplyEmblems.createContextObject([emblemModifier], true, false),
        ModifierCannotBeRemovedFromHand.createContextObject()
      ])
      card.setFXResource(["FX.Cards.Neutral.TwilightMage"])
      card.setBoundingBoxWidth(50)
      card.setBoundingBoxHeight(75)
      card.setBaseSoundResource(
        apply : RSX.sfx_ui_booster_packexplode.audio
        walk : RSX.sfx_singe2.audio
        attack : RSX.sfx_f2_jadeogre_attack_swing.audio
        receiveDamage : RSX.sfx_f3_dunecaster_hit.audio
        attackDamage : RSX.sfx_f3_dunecaster_impact.audio
        death : RSX.sfx_f3_dunecaster_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6MythronquestBreathing.name
        idle : RSX.f6MythronquestIdle.name
        walk : RSX.f6MythronquestRun.name
        attack : RSX.f6MythronquestAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.3
        damage : RSX.f6MythronquestHit.name
        death : RSX.f6MythronquestDeath.name
      )

    # "Cloudcaller" — Opening Gambit: refreshes the Bloodbound Spell and
    # swaps it to Lesser Waterball (SnowballBBS) for this turn.
    if (identifier == Cards.Faction6.Snowballer)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "Cloudcaller"
      card.setDescription("Opening Gambit: Your Bloodbound Spell refreshes and is Lesser Waterball this turn.")
      card.atk = 4
      card.maxHP = 5
      card.manaCost = 5
      card.rarityId = Rarity.Legendary
      card.setInherentModifiersContextObjects([
        ModifierOpeningGambitRefreshSignatureCard.createContextObject(),
        ModifierOpeningGambitChangeSignatureCardForThisTurn.createContextObject({id: Cards.Spell.SnowballBBS})
      ])
      card.setFXResource(["FX.Cards.Neutral.ZenRui"])
      card.setBoundingBoxWidth(70)
      card.setBoundingBoxHeight(90)
      card.setBaseSoundResource(
        apply : RSX.sfx_summonlegendary.audio
        walk : RSX.sfx_singe2.audio
        attack : RSX.sfx_neutral_bloodtearalchemist_death.audio
        receiveDamage : RSX.sfx_neutral_archonspellbinder_hit.audio
        attackDamage : RSX.sfx_neutral_archonspellbinder_attack_impact.audio
        death : RSX.sfx_neutral_archonspellbinder_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6YnuytTrackerBreathing.name
        idle : RSX.f6YnuytTrackerIdle.name
        walk : RSX.f6YnuytTrackerRun.name
        attack : RSX.f6YnuytTrackerAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.1
        damage : RSX.f6YnuytTrackerHit.name
        death : RSX.f6YnuytTrackerDeath.name
      )

    # "Lesser Waterball" — hidden replacement Bloodbound Spell granted by
    # Cloudcaller; deals 4 damage to an enemy minion (cannot target Generals).
    if (identifier == Cards.Spell.SnowballBBS)
      card = new SpellDamage(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.SnowballBBS
      card.setIsHiddenInCollection(true)
      card.name = "Lesser Waterball"
      card.setDescription("Deal 4 damage to an enemy minion.")
      card.manaCost = 1
      card.damageAmount = 4
      card.rarityId = Rarity.Fixed
      card.spellFilterType = SpellFilterType.EnemyDirect
      card.canTargetGeneral = false
      card.setFXResource(["FX.Cards.Spell.LesserWaterball"])
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_icepillar.audio
      )
      card.setBaseAnimResource(
        idle: RSX.iconWaterballIdle.name
        active: RSX.iconWaterballActive.name
      )

    # "Malicious Wisp" — Opening Gambit: moves one mana crystal from the
    # opponent to its owner; Dying Wish: reverses the transfer.
    if (identifier == Cards.Faction6.ManaThief)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "Malicious Wisp"
      card.setDescription("Opening Gambit: Take a mana crystal from your opponent.\nDying Wish: Give back the mana crystal.")
      card.atk = 4
      card.maxHP = 3
      card.manaCost = 4
      card.rarityId = Rarity.Epic
      # Boolean flag selects whose crystals are affected; see the modifier classes.
      card.setInherentModifiersContextObjects([
        ModifierOpeningGambitDestroyManaCrystal.createContextObject(false,1),
        ModifierOpeningGambitBonusManaCrystal.createContextObject(true,1),
        ModifierDyingWishDestroyManaCrystal.createContextObject(true,1),
        ModifierDyingWishBonusManaCrystal.createContextObject(false,1)
      ])
      card.setFXResource(["FX.Cards.Neutral.EXun"])
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_diretidefrenzy.audio
        walk : RSX.sfx_neutral_komodocharger_hit.audio
        attack : RSX.sfx_neutral_sunelemental_death.audio
        receiveDamage : RSX.sfx_neutral_swornavenger_hit.audio
        attackDamage : RSX.sfx_f2lanternfox_death.audio
        death : RSX.sfx_neutral_daggerkiri_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6EvilWispBreathing.name
        idle : RSX.f6EvilWispIdle.name
        walk : RSX.f6EvilWispRun.name
        attack : RSX.f6EvilWispAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.2
        damage : RSX.f6EvilWispHit.name
        death : RSX.f6EvilWispDeath.name
      )

    # "Ice Age" — summons Blazing Spines on every edge space of the battlefield.
    if (identifier == Cards.Spell.EnterThunderdome)
      card = new SpellSpawnEntitiesOnEdgeSpaces(gameSession)
      card.factionId = Factions.Faction6
      card.setCardSetId(CardSet.Coreshatter)
      card.id = Cards.Spell.EnterThunderdome
      card.name = "Ice Age"
      card.setDescription("Summon Blazing Spines along the outside of the battlefield.")
      card.cardDataOrIndexToSpawn = {id: Cards.Faction6.BlazingSpines}
      card.manaCost = 8
      card.rarityId = Rarity.Legendary
      card.spellFilterType = SpellFilterType.None
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Spell.IceAge"])
      card.setBaseAnimResource(
        idle : RSX.iconEnterIcedomeIdle.name
        active : RSX.iconEnterIcedomeActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_ghostlightning.audio
      )

    # "Thicket Augur" — Intensify: summons 1 Treant nearby per cast.
    if (identifier == Cards.Faction6.Rootmancer)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "Thicket Augur"
      card.setDescription("Intensify: Summon 1 Treant with Provoke nearby.")
      card.atk = 1
      card.maxHP = 1
      card.manaCost = 2
      card.rarityId = Rarity.Common
      card.setInherentModifiersContextObjects([
        ModifierIntensifySpawnEntitiesNearby.createContextObject({id: Cards.Faction6.Treant}, 1),
        ModifierCounterIntensify.createContextObject()
      ])
      card.addKeywordClassToInclude(ModifierProvoke)
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Neutral.Amu"])
      card.setBaseSoundResource(
        apply : RSX.sfx_f6_voiceofthewind_attack_swing.audio
        walk : RSX.sfx_spell_polymorph.audio
        attack : RSX.sfx_neutral_amu_attack_swing.audio
        receiveDamage : RSX.sfx_neutral_amu_hit.audio
        attackDamage : RSX.sfx_neutral_amu_attack_impact.audio
        death : RSX.sfx_neutral_amu_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6RootmancerBreathing.name
        idle : RSX.f6RootmancerIdle.name
        walk : RSX.f6RootmancerRun.name
        attack : RSX.f6RootmancerAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.9
        damage : RSX.f6RootmancerHit.name
        death : RSX.f6RootmancerDeath.name
      )

    # "Fenrir Berserker" — Dying Wish: summons a Fenrir Warmaster on its space.
    if (identifier == Cards.Faction6.SuperFenrir)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "Fenrir Berserker"
      card.setDescription("Dying Wish: Summon a Fenrir Warmaster on this space.")
      card.atk = 4
      card.maxHP = 3
      card.manaCost = 5
      card.rarityId = Rarity.Rare
      card.setInherentModifiersContextObjects([ ModifierDyingWishSpawnEntity.createContextObject({id: Cards.Faction6.FenrirWarmaster}) ])
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Neutral.Shuffler"])
      card.setBaseSoundResource(
        apply : RSX.sfx_summonlegendary.audio
        walk : RSX.sfx_spell_icepillar_melt.audio
        attack : RSX.sfx_neutral_windstopper_attack_impact.audio
        receiveDamage : RSX.sfx_f6_icedryad_hit.audio
        attackDamage : RSX.sfx_neutral_spelljammer_attack_impact.audio
        death : RSX.sfx_neutral_windstopper_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6SuperFenrirBreathing.name
        idle : RSX.f6SuperFenrirIdle.name
        walk : RSX.f6SuperFenrirRun.name
        attack : RSX.f6SuperFenrirAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.2
        damage : RSX.f6SuperFenrirHit.name
        death : RSX.f6SuperFenrirDeath.name
      )

    # "Meltwater Moose" — oversized Vespyr that Stuns itself on summon
    # (applies the Vanar-flavored Stunned modifier when it enters play).
    if (identifier == Cards.Faction6.SnowWinkle)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "Meltwater Moose"
      card.setDescription("When this minion is summoned, Stun it.")
      card.atk = 6
      card.maxHP = 7
      card.manaCost = 4
      card.rarityId = Rarity.Common
      card.raceId = Races.Vespyr
      card.setInherentModifiersContextObjects([
        ModifierEntersBattlefieldWatchApplyModifiers.createContextObject([ModifierStunnedVanar.createContextObject()])
      ])
      card.addKeywordClassToInclude(ModifierStun)
      card.setFXResource(["FX.Cards.Neutral.DragoneboneGolem"])
      card.setBoundingBoxWidth(95)
      card.setBoundingBoxHeight(95)
      card.setBaseSoundResource(
        apply : RSX.sfx_unit_deploy_2.audio
        walk : RSX.sfx_unit_physical_4.audio
        attack : RSX.sfx_neutral_golemdragonbone_attack_swing.audio
        receiveDamage : RSX.sfx_neutral_golemdragonbone_hit.audio
        attackDamage : RSX.sfx_neutral_golemdragonbone_impact.audio
        death : RSX.sfx_neutral_golemdragonbone_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6ElkodonBreathing.name
        idle : RSX.f6ElkodonIdle.name
        walk : RSX.f6ElkodonRun.name
        attack : RSX.f6ElkodonAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.5
        damage : RSX.f6ElkodonHit.name
        death : RSX.f6ElkodonDeath.name
      )

    # "Cryonic Potential" — gives an exhausted friendly minion +6/+6.
    if (identifier == Cards.Spell.IceCapsule)
      card = new SpellApplyModifiersToExhaustedMinion(gameSession)
      card.factionId = Factions.Faction6
      card.setCardSetId(CardSet.Coreshatter)
      card.id = Cards.Spell.IceCapsule
      card.name = "Cryonic Potential"
      card.setDescription("Give an exhausted friendly minion +6/+6.")
      card.manaCost = 3
      card.rarityId = Rarity.Common
      card.spellFilterType = SpellFilterType.AllyDirect
      card.canTargetGeneral = false
      buffContextObject = Modifier.createContextObjectWithAttributeBuffs(6,6)
      buffContextObject.appliedName = "Potential Realized"
      card.setTargetModifiersContextObjects([
        buffContextObject
      ])
      card.setFXResource(["FX.Cards.Spell.CryonicPotential"])
      card.setBaseAnimResource(
        idle: RSX.iconIceCapsuleIdle.name
        active: RSX.iconIceCapsuleActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_icepillar.audio
      )

    # "Wanderlust" — Intensify: teleports 1 random enemy minion per cast to
    # the caster's starting side of the battlefield.
    if (identifier == Cards.Spell.OwnSideTeleport)
      card = new SpellIntensifyTeleportOwnSide(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.OwnSideTeleport
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Wanderlust"
      card.setDescription("Intensify: Teleport 1 random enemy minion to a space on your starting side of the battlefield.")
      card.spellFilterType = SpellFilterType.None
      card.manaCost = 1
      card.rarityId = Rarity.Common
      card.addKeywordClassToInclude(ModifierIntensify)
      card.setInherentModifiersContextObjects([ModifierCounterIntensify.createContextObject()])
      # Use the accessor (was a direct `_fxResource` assignment) for
      # consistency with every other card in this factory.
      card.setFXResource(["FX.Cards.Spell.Wanderlust"])
      card.setBaseAnimResource(
        idle: RSX.iconIceHooksIdle.name
        active: RSX.iconIceHooksActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_tranquility.audio
      )

    # "Disciple of Yggdra" — transforms into Yggdra's Voracity (StunBeast)
    # whenever an enemy is Stunned.
    if (identifier == Cards.Faction6.StunWarlock)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "Disciple of Yggdra"
      card.setDescription("When an enemy is Stunned, transform this minion into Yggdra's Voracity.")
      card.atk = 2
      card.maxHP = 3
      card.manaCost = 3
      card.rarityId = Rarity.Legendary
      card.setInherentModifiersContextObjects([
        ModifierEnemyStunWatchTransformThis.createContextObject({id: Cards.Faction6.StunBeast})
      ])
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.addKeywordClassToInclude(ModifierStunned)
      card.setFXResource(["FX.Cards.Neutral.AzureHornShaman"])
      card.setBoundingBoxWidth(70)
      card.setBoundingBoxHeight(105)
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_fractalreplication.audio
        walk : RSX.sfx_unit_run_magical_3.audio
        attack : RSX.sfx_neutral_prophetofthewhite_attack_swing.audio
        receiveDamage : RSX.sfx_neutral_alcuinloremaster_hit.audio
        attackDamage : RSX.sfx_neutral_alcuinloremaster_attack_impact.audio
        death : RSX.sfx_neutral_alcuinloremaster_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6YnuytWarlockBreathing.name
        idle : RSX.f6YnuytWarlockIdle.name
        walk : RSX.f6YnuytWarlockRun.name
        attack : RSX.f6YnuytWarlockAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.2
        damage : RSX.f6YnuytWarlockHit.name
        death : RSX.f6YnuytWarlockDeath.name
      )

    # "Yggdra's Voracity" — hidden token form of Disciple of Yggdra; fully
    # heals itself whenever an enemy is Stunned.
    if (identifier == Cards.Faction6.StunBeast)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.setIsHiddenInCollection(true)
      card.name = "Yggdra's Voracity"
      card.setDescription("Whenever an enemy is Stunned, fully heal this minion.")
      card.atk = 5
      card.maxHP = 5
      card.manaCost = 3
      card.rarityId = Rarity.TokenUnit
      card.setInherentModifiersContextObjects([
        ModifierEnemyStunWatchFullyHeal.createContextObject()
      ])
      card.addKeywordClassToInclude(ModifierToken)
      card.addKeywordClassToInclude(ModifierStunned)
      card.setFXResource(["FX.Cards.Neutral.OwlbeastSage"])
      card.setBoundingBoxWidth(85)
      card.setBoundingBoxHeight(80)
      card.setBaseSoundResource(
        apply : RSX.sfx_unit_deploy_1.audio
        walk : RSX.sfx_singe2.audio
        attack : RSX.sfx_neutral_arcanelimiter_attack_impact.audio
        receiveDamage : RSX.sfx_f4_engulfingshadow_attack_impact.audio
        attackDamage : RSX.sfx_f4_engulfingshadow_hit.audio
        death : RSX.sfx_f4_engulfingshadow_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6YnuytUnleashedBreathing.name
        idle : RSX.f6YnuytUnleashedIdle.name
        walk : RSX.f6YnuytUnleashedRun.name
        attack : RSX.f6YnuytUnleashedAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.5
        damage : RSX.f6YnuytUnleashedHit.name
        death : RSX.f6YnuytUnleashedDeath.name
      )

    # "Endless Hunt" — summons a Night Howler token and recycles itself
    # (spell logic in SpellInfiniteHowlers puts a copy back in hand).
    if (identifier == Cards.Spell.InfiniteHowlers)
      card = new SpellInfiniteHowlers(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.InfiniteHowlers
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Endless Hunt"
      card.setDescription("Summon a 3/3 Vespyr Night Howler.\nPut an Endless Hunt into your action bar.")
      card.manaCost = 4
      card.spellFilterType = SpellFilterType.SpawnSource
      card.rarityId = Rarity.Rare
      card.cardDataOrIndexToSpawn = {id: Cards.Faction6.ShadowVespyr}
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Spell.EndlessHunt"])
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_entropicdecay.audio
      )
      card.setBaseAnimResource(
        idle : RSX.iconInfiniteHowlersIdle.name
        active : RSX.iconInfiniteHowlersActive.name
      )

    # "Resonance Scythe" — artifact: +1 General Attack and reactivates the
    # General when they destroy a Stunned enemy.
    if (identifier == Cards.Artifact.SnowChipper)
      card = new Artifact(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.id = Cards.Artifact.SnowChipper
      card.name = "Resonance Scythe"
      card.setDescription("Your General has +1 Attack.\nReactivate your General whenever they destroy a Stunned enemy.")
      card.manaCost = 2
      card.rarityId = Rarity.Epic
      card.durability = 3
      card.setTargetModifiersContextObjects([
        Modifier.createContextObjectWithAttributeBuffs(1,undefined),
        ModifierKillWatchRefreshExhaustionIfTargetStunned.createContextObject(false, true)
      ])
      card.addKeywordClassToInclude(ModifierStunned)
      card.setFXResource(["FX.Cards.Artifact.OblivionSickle"])
      card.setBaseAnimResource(
        idle: RSX.iconSnowchipperIdle.name
        active: RSX.iconSnowchipperActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_victory_crest.audio
      )

    # "Aspect of Ego" — transforms ANY minion (friend or foe) into a random
    # minion of the same mana cost.
    if (identifier == Cards.Spell.AspectOfIdentity)
      card = new SpellTransformSameManaCost(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.AspectOfIdentity
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Aspect of Ego"
      card.setDescription("Transform ANY minion into a random minion of the same cost.")
      card.manaCost = 1
      card.spellFilterType = SpellFilterType.NeutralDirect
      card.canTargetGeneral = false
      card.rarityId = Rarity.Rare
      card.setFXResource(["FX.Cards.Spell.AspectOfEgo"])
      card.setBaseAnimResource(
        idle : RSX.iconAspectIdentityIdle.name
        active : RSX.iconAspectIdentityActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_neutral_crossbones_hit.audio
      )

    # "Permafrost" — Stuns an enemy minion; spell logic makes Stunned
    # enemies spread Stun to a nearby enemy.
    if (identifier == Cards.Spell.CreepingFrost)
      card = new SpellCreepingFrost(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.CreepingFrost
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Permafrost"
      card.setDescription("Stun an enemy minion.\nStunned enemy minions Stun a nearby enemy.")
      card.manaCost = 3
      card.spellFilterType = SpellFilterType.EnemyDirect
      card.canTargetGeneral = false
      card.rarityId = Rarity.Epic
      card.addKeywordClassToInclude(ModifierStunned)
      card.addKeywordClassToInclude(ModifierStun)
      card.setFXResource(["FX.Cards.Spell.Permafrost"])
      card.setBaseSoundResource(
        apply : RSX.sfx_neutral_windstopper_attack_impact.audio
      )
      card.setBaseAnimResource(
        idle : RSX.iconPermaFrostIdle.name
        active : RSX.iconPermaFrostActive.name
      )

    return card

module.exports = CardFactory_CoreshatterSet_Faction6
| 157882 | # do not add this file to a package
# it is specifically parsed by the package generation script
_ = require 'underscore'
moment = require 'moment'
Logger = require 'app/common/logger'
CONFIG = require('app/common/config')
RSX = require('app/data/resources')
Card = require 'app/sdk/cards/card'
Cards = require 'app/sdk/cards/cardsLookupComplete'
CardType = require 'app/sdk/cards/cardType'
Factions = require 'app/sdk/cards/factionsLookup'
FactionFactory = require 'app/sdk/cards/factionFactory'
Races = require 'app/sdk/cards/racesLookup'
Rarity = require 'app/sdk/cards/rarityLookup'
Unit = require 'app/sdk/entities/unit'
CardSet = require 'app/sdk/cards/cardSetLookup'
Artifact = require 'app/sdk/artifacts/artifact'
SpellFilterType = require 'app/sdk/spells/spellFilterType'
SpellSpawnEntitiesOnEdgeSpaces = require 'app/sdk/spells/spellSpawnEntitiesOnEdgeSpaces'
SpellApplyModifiersToExhaustedMinion = require 'app/sdk/spells/spellApplyModifiersToExhaustedMinion'
SpellApplyModifiers = require 'app/sdk/spells/spellApplyModifiers'
SpellIntensifyTeleportOwnSide = require 'app/sdk/spells/spellIntensifyTeleportOwnSide'
SpellInfiniteHowlers = require 'app/sdk/spells/spellInfiniteHowlers'
SpellTransformSameManaCost = require 'app/sdk/spells/spellTransformSameManaCost'
SpellCreepingFrost = require 'app/sdk/spells/spellCreepingFrost'
SpellDamage = require 'app/sdk/spells/spellDamage'
Modifier = require 'app/sdk/modifiers/modifier'
ModifierStunned = require 'app/sdk/modifiers/modifierStunned'
ModifierStun = require 'app/sdk/modifiers/modifierStun'
ModifierStunnedVanar = require 'app/sdk/modifiers/modifierStunnedVanar'
ModifierOpeningGambit = require 'app/sdk/modifiers/modifierOpeningGambit'
ModifierDyingWish = require 'app/sdk/modifiers/modifierDyingWish'
ModifierProvoke = require 'app/sdk/modifiers/modifierProvoke'
ModifierDyingWishSpawnEntity = require 'app/sdk/modifiers/modifierDyingWishSpawnEntity'
ModifierEntersBattlefieldWatchApplyModifiers = require 'app/sdk/modifiers/modifierEntersBattlefieldWatchApplyModifiers'
ModifierOpeningGambitDestroyManaCrystal = require 'app/sdk/modifiers/modifierOpeningGambitDestroyManaCrystal'
ModifierOpeningGambitBonusManaCrystal = require 'app/sdk/modifiers/modifierOpeningGambitBonusManaCrystal'
ModifierDyingWishDestroyManaCrystal = require 'app/sdk/modifiers/modifierDyingWishDestroyManaCrystal'
ModifierDyingWishBonusManaCrystal = require 'app/sdk/modifiers/modifierDyingWishBonusManaCrystal'
ModifierStartsInHand = require 'app/sdk/modifiers/modifierStartsInHand'
ModifierEnemyStunWatch = require 'app/sdk/modifiers/modifierEnemyStunWatch'
ModifierEnemyStunWatchTransformThis = require 'app/sdk/modifiers/modifierEnemyStunWatchTransformThis'
ModifierEnemyStunWatchDamageNearbyEnemies = require 'app/sdk/modifiers/modifierEnemyStunWatchDamageNearbyEnemies'
ModifierIntensifySpawnEntitiesNearby = require 'app/sdk/modifiers/modifierIntensifySpawnEntitiesNearby'
ModifierKillWatchRefreshExhaustionIfTargetStunned = require 'app/sdk/modifiers/modifierKillWatchRefreshExhaustionIfTargetStunned'
ModifierEnemyStunWatchFullyHeal = require 'app/sdk/modifiers/modifierEnemyStunWatchFullyHeal'
ModifierOnSummonFromHandApplyEmblems = require 'app/sdk/modifiers/modifierOnSummonFromHandApplyEmblems'
ModifierOpeningGambitChangeSignatureCardForThisTurn = require 'app/sdk/modifiers/modifierOpeningGambitChangeSignatureCardForThisTurn'
ModifierOpeningGambitRefreshSignatureCard = require 'app/sdk/modifiers/modifierOpeningGambitRefreshSignatureCard'
ModifierTokenCreator = require 'app/sdk/modifiers/modifierTokenCreator'
ModifierToken = require 'app/sdk/modifiers/modifierToken'
ModifierFateVanarTokenQuest = require 'app/sdk/modifiers/modifierFateVanarTokenQuest'
ModifierCannotBeReplaced = require 'app/sdk/modifiers/modifierCannotBeReplaced'
ModifierIntensify = require 'app/sdk/modifiers/modifierIntensify'
ModifierCounterIntensify = require 'app/sdk/modifiers/modifierCounterIntensify'
ModifierCannotBeRemovedFromHand = require 'app/sdk/modifiers/modifierCannotBeRemovedFromHand'
ModifierQuestBuffVanar = require 'app/sdk/modifiers/modifierQuestBuffVanar'
PlayerModifierEmblemSummonWatchVanarTokenQuest = require 'app/sdk/playerModifiers/playerModifierEmblemSummonWatchVanarTokenQuest'
i18next = require 'i18next'
# If i18next has not been initialized with translations (e.g. when this file
# is parsed by the package generation script), install an identity translator
# so card names and descriptions pass through unchanged.
if typeof i18next.t() is 'undefined'
  i18next.t = (text) -> text
# Factory for Faction 6 (Vanar) cards belonging to the Coreshatter card set.
# Each branch below fully configures one card: identity (name, faction, set,
# rarity), stats, gameplay modifiers, and presentation resources (FX, sound,
# animation). NOTE(review): several card name literals contain "<NAME>"
# redaction placeholders — the original names must be restored from the
# game's card database before shipping; do not guess them here.
class CardFactory_CoreshatterSet_Faction6
  ###*
   * Returns a card that matches the identifier.
   * @param {Number|String} identifier
   * @param {GameSession} gameSession
   * @returns {Card} the configured card, or null if the identifier is not in this set
  ###
  @cardForIdentifier: (identifier,gameSession) ->
    card = null
    # Vanar Mythron Quest unit: trial/destiny quest that rewards token minions
    # with a +4/+4 emblem once 5 differently-named tokens have been played.
    if (identifier == Cards.Faction6.VanarQuest)
      card = new Unit(gameSession)
      card.factionId = Factions.Faction6
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Oak <NAME> the Ne<NAME>"
      card.setDescription("Trial: Have 5 token minions with different names.\nDestiny: Friendly token minions have +4/+4.")
      card.atk = 6
      card.maxHP = 6
      card.manaCost = 6
      card.rarityId = Rarity.Mythron
      # Buff applied to each friendly token minion once the quest completes.
      statContextObject = ModifierQuestBuffVanar.createContextObjectWithAttributeBuffs(4,4)
      statContextObject.appliedName = "Might of the Oak"
      # Player-wide emblem that watches summons and applies the buff above.
      emblemModifier = PlayerModifierEmblemSummonWatchVanarTokenQuest.createContextObject([statContextObject])
      emblemModifier.appliedName = "Nemeton's Protection"
      emblemModifier.appliedDescription = "Your token minions have +4/+4."
      card.setInherentModifiersContextObjects([
        ModifierStartsInHand.createContextObject(),
        ModifierCannotBeReplaced.createContextObject(),
        ModifierFateVanarTokenQuest.createContextObject(5),
        ModifierOnSummonFromHandApplyEmblems.createContextObject([emblemModifier], true, false),
        ModifierCannotBeRemovedFromHand.createContextObject()
      ])
      card.setFXResource(["FX.Cards.Neutral.TwilightMage"])
      card.setBoundingBoxWidth(50)
      card.setBoundingBoxHeight(75)
      card.setBaseSoundResource(
        apply : RSX.sfx_ui_booster_packexplode.audio
        walk : RSX.sfx_singe2.audio
        attack : RSX.sfx_f2_jadeogre_attack_swing.audio
        receiveDamage : RSX.sfx_f3_dunecaster_hit.audio
        attackDamage : RSX.sfx_f3_dunecaster_impact.audio
        death : RSX.sfx_f3_dunecaster_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6MythronquestBreathing.name
        idle : RSX.f6MythronquestIdle.name
        walk : RSX.f6MythronquestRun.name
        attack : RSX.f6MythronquestAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.3
        damage : RSX.f6MythronquestHit.name
        death : RSX.f6MythronquestDeath.name
      )
    # Legendary unit: Opening Gambit refreshes the Bloodbound Spell and swaps
    # it to Lesser Waterball (SnowballBBS, defined below) for this turn.
    if (identifier == Cards.Faction6.Snowballer)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "<NAME>"
      card.setDescription("Opening Gambit: Your Bloodbound Spell refreshes and is Lesser Waterball this turn.")
      card.atk = 4
      card.maxHP = 5
      card.manaCost = 5
      card.rarityId = Rarity.Legendary
      card.setInherentModifiersContextObjects([
        ModifierOpeningGambitRefreshSignatureCard.createContextObject()
        ModifierOpeningGambitChangeSignatureCardForThisTurn.createContextObject({id: Cards.Spell.SnowballBBS})
      ])
      card.setFXResource(["FX.Cards.Neutral.ZenRui"])
      card.setBoundingBoxWidth(70)
      card.setBoundingBoxHeight(90)
      card.setBaseSoundResource(
        apply : RSX.sfx_summonlegendary.audio
        walk : RSX.sfx_singe2.audio
        attack : RSX.sfx_neutral_bloodtearalchemist_death.audio
        receiveDamage : RSX.sfx_neutral_archonspellbinder_hit.audio
        attackDamage : RSX.sfx_neutral_archonspellbinder_attack_impact.audio
        death : RSX.sfx_neutral_archonspellbinder_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6YnuytTrackerBreathing.name
        idle : RSX.f6YnuytTrackerIdle.name
        walk : RSX.f6YnuytTrackerRun.name
        attack : RSX.f6YnuytTrackerAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.1
        damage : RSX.f6YnuytTrackerHit.name
        death : RSX.f6YnuytTrackerDeath.name
      )
    # Replacement Bloodbound Spell granted by Snowballer: 4 damage to an enemy
    # minion. Hidden from collection since it only exists via that effect.
    if (identifier == Cards.Spell.SnowballBBS)
      card = new SpellDamage(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.SnowballBBS
      card.setIsHiddenInCollection(true)
      card.name = "<NAME>"
      card.setDescription("Deal 4 damage to an enemy minion.")
      card.manaCost = 1
      card.damageAmount = 4
      card.rarityId = Rarity.Fixed
      card.spellFilterType = SpellFilterType.EnemyDirect
      card.canTargetGeneral = false
      card.setFXResource(["FX.Cards.Spell.LesserWaterball"])
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_icepillar.audio
      )
      card.setBaseAnimResource(
        idle: RSX.iconWaterballIdle.name
        active: RSX.iconWaterballActive.name
      )
    # Epic unit: temporarily steals a mana crystal — Opening Gambit destroys
    # one of the opponent's and gains one; Dying Wish reverses both.
    if (identifier == Cards.Faction6.ManaThief)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "<NAME>"
      card.setDescription("Opening Gambit: Take a mana crystal from your opponent.\nDying Wish: Give back the mana crystal.")
      card.atk = 4
      card.maxHP = 3
      card.manaCost = 4
      card.rarityId = Rarity.Epic
      card.setInherentModifiersContextObjects([
        ModifierOpeningGambitDestroyManaCrystal.createContextObject(false,1),
        ModifierOpeningGambitBonusManaCrystal.createContextObject(true,1),
        ModifierDyingWishDestroyManaCrystal.createContextObject(true,1),
        ModifierDyingWishBonusManaCrystal.createContextObject(false,1)
      ])
      card.setFXResource(["FX.Cards.Neutral.EXun"])
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_diretidefrenzy.audio
        walk : RSX.sfx_neutral_komodocharger_hit.audio
        attack : RSX.sfx_neutral_sunelemental_death.audio
        receiveDamage : RSX.sfx_neutral_swornavenger_hit.audio
        attackDamage : RSX.sfx_f2lanternfox_death.audio
        death : RSX.sfx_neutral_daggerkiri_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6EvilWispBreathing.name
        idle : RSX.f6EvilWispIdle.name
        walk : RSX.f6EvilWispRun.name
        attack : RSX.f6EvilWispAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.2
        damage : RSX.f6EvilWispHit.name
        death : RSX.f6EvilWispDeath.name
      )
    # Legendary spell: rings the battlefield's edge spaces with Blazing Spines.
    if (identifier == Cards.Spell.EnterThunderdome)
      card = new SpellSpawnEntitiesOnEdgeSpaces(gameSession)
      card.factionId = Factions.Faction6
      card.setCardSetId(CardSet.Coreshatter)
      card.id = Cards.Spell.EnterThunderdome
      card.name = "Ice <NAME>"
      card.setDescription("Summon Blazing Spines along the outside of the battlefield.")
      card.cardDataOrIndexToSpawn = {id: Cards.Faction6.BlazingSpines}
      card.manaCost = 8
      card.rarityId = Rarity.Legendary
      card.spellFilterType = SpellFilterType.None
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Spell.IceAge"])
      card.setBaseAnimResource(
        idle : RSX.iconEnterIcedomeIdle.name
        active : RSX.iconEnterIcedomeActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_ghostlightning.audio
      )
    # Common unit: each Intensify trigger summons a Provoke Treant nearby.
    if (identifier == Cards.Faction6.Rootmancer)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "<NAME>"
      card.setDescription("Intensify: Summon 1 Treant with Provoke nearby.")
      card.atk = 1
      card.maxHP = 1
      card.manaCost = 2
      card.rarityId = Rarity.Common
      card.setInherentModifiersContextObjects([
        ModifierIntensifySpawnEntitiesNearby.createContextObject({id: Cards.Faction6.Treant}, 1),
        ModifierCounterIntensify.createContextObject()
      ])
      card.addKeywordClassToInclude(ModifierProvoke)
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Neutral.Amu"])
      card.setBaseSoundResource(
        apply : RSX.sfx_f6_voiceofthewind_attack_swing.audio
        walk : RSX.sfx_spell_polymorph.audio
        attack : RSX.sfx_neutral_amu_attack_swing.audio
        receiveDamage : RSX.sfx_neutral_amu_hit.audio
        attackDamage : RSX.sfx_neutral_amu_attack_impact.audio
        death : RSX.sfx_neutral_amu_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6RootmancerBreathing.name
        idle : RSX.f6RootmancerIdle.name
        walk : RSX.f6RootmancerRun.name
        attack : RSX.f6RootmancerAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.9
        damage : RSX.f6RootmancerHit.name
        death : RSX.f6RootmancerDeath.name
      )
    # Rare unit: Dying Wish re-summons a Fenrir Warmaster on its own space.
    if (identifier == Cards.Faction6.SuperFenrir)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "<NAME>"
      card.setDescription("Dying Wish: Summon a Fenrir Warmaster on this space.")
      card.atk = 4
      card.maxHP = 3
      card.manaCost = 5
      card.rarityId = Rarity.Rare
      card.setInherentModifiersContextObjects([ ModifierDyingWishSpawnEntity.createContextObject({id: Cards.Faction6.FenrirWarmaster}) ])
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Neutral.Shuffler"])
      card.setBaseSoundResource(
        apply : RSX.sfx_summonlegendary.audio
        walk : RSX.sfx_spell_icepillar_melt.audio
        attack : RSX.sfx_neutral_windstopper_attack_impact.audio
        receiveDamage : RSX.sfx_f6_icedryad_hit.audio
        attackDamage : RSX.sfx_neutral_spelljammer_attack_impact.audio
        death : RSX.sfx_neutral_windstopper_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6SuperFenrirBreathing.name
        idle : RSX.f6SuperFenrirIdle.name
        walk : RSX.f6SuperFenrirRun.name
        attack : RSX.f6SuperFenrirAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.2
        damage : RSX.f6SuperFenrirHit.name
        death : RSX.f6SuperFenrirDeath.name
      )
    # Common Vespyr: over-statted for cost, but Stuns itself when summoned.
    if (identifier == Cards.Faction6.SnowWinkle)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "<NAME>"
      card.setDescription("When this minion is summoned, Stun it.")
      card.atk = 6
      card.maxHP = 7
      card.manaCost = 4
      card.rarityId = Rarity.Common
      card.raceId = Races.Vespyr
      card.setInherentModifiersContextObjects([
        ModifierEntersBattlefieldWatchApplyModifiers.createContextObject([ModifierStunnedVanar.createContextObject()])
      ])
      card.addKeywordClassToInclude(ModifierStun)
      card.setFXResource(["FX.Cards.Neutral.DragoneboneGolem"])
      card.setBoundingBoxWidth(95)
      card.setBoundingBoxHeight(95)
      card.setBaseSoundResource(
        apply : RSX.sfx_unit_deploy_2.audio
        walk : RSX.sfx_unit_physical_4.audio
        attack : RSX.sfx_neutral_golemdragonbone_attack_swing.audio
        receiveDamage : RSX.sfx_neutral_golemdragonbone_hit.audio
        attackDamage : RSX.sfx_neutral_golemdragonbone_impact.audio
        death : RSX.sfx_neutral_golemdragonbone_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6ElkodonBreathing.name
        idle : RSX.f6ElkodonIdle.name
        walk : RSX.f6ElkodonRun.name
        attack : RSX.f6ElkodonAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.5
        damage : RSX.f6ElkodonHit.name
        death : RSX.f6ElkodonDeath.name
      )
    # Common spell: +6/+6 buff restricted to exhausted friendly minions.
    if (identifier == Cards.Spell.IceCapsule)
      card = new SpellApplyModifiersToExhaustedMinion(gameSession)
      card.factionId = Factions.Faction6
      card.setCardSetId(CardSet.Coreshatter)
      card.id = Cards.Spell.IceCapsule
      card.name = "Cryonic Potential"
      card.setDescription("Give an exhausted friendly minion +6/+6.")
      card.manaCost = 3
      card.rarityId = Rarity.Common
      card.spellFilterType = SpellFilterType.AllyDirect
      card.canTargetGeneral = false
      buffContextObject = Modifier.createContextObjectWithAttributeBuffs(6,6)
      buffContextObject.appliedName = "Potential Realized"
      card.setTargetModifiersContextObjects([
        buffContextObject
      ])
      card.setFXResource(["FX.Cards.Spell.CryonicPotential"])
      card.setBaseAnimResource(
        idle: RSX.iconIceCapsuleIdle.name
        active: RSX.iconIceCapsuleActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_icepillar.audio
      )
    # Common Intensify spell: drags 1 random enemy minion per stack to the
    # caster's starting side of the battlefield.
    if (identifier == Cards.Spell.OwnSideTeleport)
      card = new SpellIntensifyTeleportOwnSide(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.OwnSideTeleport
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "<NAME>"
      card.setDescription("Intensify: Teleport 1 random enemy minion to a space on your starting side of the battlefield.")
      card.spellFilterType = SpellFilterType.None
      card.manaCost = 1
      card.rarityId = Rarity.Common
      card.addKeywordClassToInclude(ModifierIntensify)
      card.setInherentModifiersContextObjects([ModifierCounterIntensify.createContextObject()])
      # NOTE(review): direct assignment to the private _fxResource field —
      # every other card in this factory goes through setFXResource; confirm
      # whether this bypass is intentional.
      card._fxResource = ["FX.Cards.Spell.Wanderlust"]
      card.setBaseAnimResource(
        idle: RSX.iconIceHooksIdle.name
        active: RSX.iconIceHooksActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_tranquility.audio
      )
    # Legendary unit: transforms into StunBeast (below) when any enemy is Stunned.
    if (identifier == Cards.Faction6.StunWarlock)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.name = "<NAME>"
      card.setDescription("When an enemy is Stunned, transform this minion into Yggdra's Voracity.")
      card.atk = 2
      card.maxHP = 3
      card.manaCost = 3
      card.rarityId = Rarity.Legendary
      card.setInherentModifiersContextObjects([
        ModifierEnemyStunWatchTransformThis.createContextObject({id: Cards.Faction6.StunBeast})
      ])
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.addKeywordClassToInclude(ModifierStunned)
      card.setFXResource(["FX.Cards.Neutral.AzureHornShaman"])
      card.setBoundingBoxWidth(70)
      card.setBoundingBoxHeight(105)
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_fractalreplication.audio
        walk : RSX.sfx_unit_run_magical_3.audio
        attack : RSX.sfx_neutral_prophetofthewhite_attack_swing.audio
        receiveDamage : RSX.sfx_neutral_alcuinloremaster_hit.audio
        attackDamage : RSX.sfx_neutral_alcuinloremaster_attack_impact.audio
        death : RSX.sfx_neutral_alcuinloremaster_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6YnuytWarlockBreathing.name
        idle : RSX.f6YnuytWarlockIdle.name
        walk : RSX.f6YnuytWarlockRun.name
        attack : RSX.f6YnuytWarlockAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 1.2
        damage : RSX.f6YnuytWarlockHit.name
        death : RSX.f6YnuytWarlockDeath.name
      )
    # Token unit produced by StunWarlock's transform; fully heals itself
    # whenever an enemy is Stunned. Hidden from collection.
    if (identifier == Cards.Faction6.StunBeast)
      card = new Unit(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.setIsHiddenInCollection(true)
      card.name = "<NAME>"
      card.setDescription("Whenever an enemy is Stunned, fully heal this minion.")
      card.atk = 5
      card.maxHP = 5
      card.manaCost = 3
      card.rarityId = Rarity.TokenUnit
      card.setInherentModifiersContextObjects([
        ModifierEnemyStunWatchFullyHeal.createContextObject()
      ])
      card.addKeywordClassToInclude(ModifierToken)
      card.addKeywordClassToInclude(ModifierStunned)
      card.setFXResource(["FX.Cards.Neutral.OwlbeastSage"])
      card.setBoundingBoxWidth(85)
      card.setBoundingBoxHeight(80)
      card.setBaseSoundResource(
        apply : RSX.sfx_unit_deploy_1.audio
        walk : RSX.sfx_singe2.audio
        attack : RSX.sfx_neutral_arcanelimiter_attack_impact.audio
        receiveDamage : RSX.sfx_f4_engulfingshadow_attack_impact.audio
        attackDamage : RSX.sfx_f4_engulfingshadow_hit.audio
        death : RSX.sfx_f4_engulfingshadow_death.audio
      )
      card.setBaseAnimResource(
        breathing : RSX.f6YnuytUnleashedBreathing.name
        idle : RSX.f6YnuytUnleashedIdle.name
        walk : RSX.f6YnuytUnleashedRun.name
        attack : RSX.f6YnuytUnleashedAttack.name
        attackReleaseDelay: 0.0
        attackDelay: 0.5
        damage : RSX.f6YnuytUnleashedHit.name
        death : RSX.f6YnuytUnleashedDeath.name
      )
    # Rare spell: summons a 3/3 Vespyr and adds a copy of itself ("an Endless
    # Hunt") to the action bar — recursion handled by SpellInfiniteHowlers.
    if (identifier == Cards.Spell.InfiniteHowlers)
      card = new SpellInfiniteHowlers(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.InfiniteHowlers
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "<NAME>"
      card.setDescription("Summon a 3/3 Vespyr Night Howler.\nPut an Endless Hunt into your action bar.")
      card.manaCost = 4
      card.spellFilterType = SpellFilterType.SpawnSource
      card.rarityId = Rarity.Rare
      card.cardDataOrIndexToSpawn = {id: Cards.Faction6.ShadowVespyr}
      card.addKeywordClassToInclude(ModifierTokenCreator)
      card.setFXResource(["FX.Cards.Spell.EndlessHunt"])
      card.setBaseSoundResource(
        apply : RSX.sfx_spell_entropicdecay.audio
      )
      card.setBaseAnimResource(
        idle : RSX.iconInfiniteHowlersIdle.name
        active : RSX.iconInfiniteHowlersActive.name
      )
    # Epic artifact: +1 General Attack, and reactivates the General whenever
    # they destroy a Stunned enemy.
    if (identifier == Cards.Artifact.SnowChipper)
      card = new Artifact(gameSession)
      card.setCardSetId(CardSet.Coreshatter)
      card.factionId = Factions.Faction6
      card.id = Cards.Artifact.SnowChipper
      card.name = "<NAME>"
      card.setDescription("Your General has +1 Attack.\nReactivate your General whenever they destroy a Stunned enemy.")
      card.manaCost = 2
      card.rarityId = Rarity.Epic
      card.durability = 3
      card.setTargetModifiersContextObjects([
        Modifier.createContextObjectWithAttributeBuffs(1,undefined),
        ModifierKillWatchRefreshExhaustionIfTargetStunned.createContextObject(false, true)
      ])
      card.addKeywordClassToInclude(ModifierStunned)
      card.setFXResource(["FX.Cards.Artifact.OblivionSickle"])
      card.setBaseAnimResource(
        idle: RSX.iconSnowchipperIdle.name
        active: RSX.iconSnowchipperActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_victory_crest.audio
      )
    # Rare spell: transforms any minion (friendly or enemy) into a random
    # minion of the same mana cost.
    if (identifier == Cards.Spell.AspectOfIdentity)
      card = new SpellTransformSameManaCost(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.AspectOfIdentity
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "Aspect of Ego"
      card.setDescription("Transform ANY minion into a random minion of the same cost.")
      card.manaCost = 1
      card.spellFilterType = SpellFilterType.NeutralDirect
      card.canTargetGeneral = false
      card.rarityId = Rarity.Rare
      card.setFXResource(["FX.Cards.Spell.AspectOfEgo"])
      card.setBaseAnimResource(
        idle : RSX.iconAspectIdentityIdle.name
        active : RSX.iconAspectIdentityActive.name
      )
      card.setBaseSoundResource(
        apply : RSX.sfx_neutral_crossbones_hit.audio
      )
    # Epic spell: Stuns an enemy minion; Stunned enemies spread the Stun to a
    # nearby enemy (see SpellCreepingFrost).
    if (identifier == Cards.Spell.CreepingFrost)
      card = new SpellCreepingFrost(gameSession)
      card.factionId = Factions.Faction6
      card.id = Cards.Spell.CreepingFrost
      card.setCardSetId(CardSet.Coreshatter)
      card.name = "<NAME>"
      card.setDescription("Stun an enemy minion.\nStunned enemy minions Stun a nearby enemy.")
      card.manaCost = 3
      card.spellFilterType = SpellFilterType.EnemyDirect
      card.canTargetGeneral = false
      card.rarityId = Rarity.Epic
      card.addKeywordClassToInclude(ModifierStunned)
      card.addKeywordClassToInclude(ModifierStun)
      card.setFXResource(["FX.Cards.Spell.Permafrost"])
      card.setBaseSoundResource(
        apply : RSX.sfx_neutral_windstopper_attack_impact.audio
      )
      card.setBaseAnimResource(
        idle : RSX.iconPermaFrostIdle.name
        active : RSX.iconPermaFrostActive.name
      )
    # Falls through to null when the identifier is not part of this set.
    return card
module.exports = CardFactory_CoreshatterSet_Faction6
# do not add this file to a package
# it is specifically parsed by the package generation script
_ = require 'underscore'
moment = require 'moment'
Logger = require 'app/common/logger'
CONFIG = require('app/common/config')
RSX = require('app/data/resources')
Card = require 'app/sdk/cards/card'
Cards = require 'app/sdk/cards/cardsLookupComplete'
CardType = require 'app/sdk/cards/cardType'
Factions = require 'app/sdk/cards/factionsLookup'
FactionFactory = require 'app/sdk/cards/factionFactory'
Races = require 'app/sdk/cards/racesLookup'
Rarity = require 'app/sdk/cards/rarityLookup'
Unit = require 'app/sdk/entities/unit'
CardSet = require 'app/sdk/cards/cardSetLookup'
Artifact = require 'app/sdk/artifacts/artifact'
SpellFilterType = require 'app/sdk/spells/spellFilterType'
SpellSpawnEntitiesOnEdgeSpaces = require 'app/sdk/spells/spellSpawnEntitiesOnEdgeSpaces'
SpellApplyModifiersToExhaustedMinion = require 'app/sdk/spells/spellApplyModifiersToExhaustedMinion'
SpellApplyModifiers = require 'app/sdk/spells/spellApplyModifiers'
SpellIntensifyTeleportOwnSide = require 'app/sdk/spells/spellIntensifyTeleportOwnSide'
SpellInfiniteHowlers = require 'app/sdk/spells/spellInfiniteHowlers'
SpellTransformSameManaCost = require 'app/sdk/spells/spellTransformSameManaCost'
SpellCreepingFrost = require 'app/sdk/spells/spellCreepingFrost'
SpellDamage = require 'app/sdk/spells/spellDamage'
Modifier = require 'app/sdk/modifiers/modifier'
ModifierStunned = require 'app/sdk/modifiers/modifierStunned'
ModifierStun = require 'app/sdk/modifiers/modifierStun'
ModifierStunnedVanar = require 'app/sdk/modifiers/modifierStunnedVanar'
ModifierOpeningGambit = require 'app/sdk/modifiers/modifierOpeningGambit'
ModifierDyingWish = require 'app/sdk/modifiers/modifierDyingWish'
ModifierProvoke = require 'app/sdk/modifiers/modifierProvoke'
ModifierDyingWishSpawnEntity = require 'app/sdk/modifiers/modifierDyingWishSpawnEntity'
ModifierEntersBattlefieldWatchApplyModifiers = require 'app/sdk/modifiers/modifierEntersBattlefieldWatchApplyModifiers'
ModifierOpeningGambitDestroyManaCrystal = require 'app/sdk/modifiers/modifierOpeningGambitDestroyManaCrystal'
ModifierOpeningGambitBonusManaCrystal = require 'app/sdk/modifiers/modifierOpeningGambitBonusManaCrystal'
ModifierDyingWishDestroyManaCrystal = require 'app/sdk/modifiers/modifierDyingWishDestroyManaCrystal'
ModifierDyingWishBonusManaCrystal = require 'app/sdk/modifiers/modifierDyingWishBonusManaCrystal'
ModifierStartsInHand = require 'app/sdk/modifiers/modifierStartsInHand'
ModifierEnemyStunWatch = require 'app/sdk/modifiers/modifierEnemyStunWatch'
ModifierEnemyStunWatchTransformThis = require 'app/sdk/modifiers/modifierEnemyStunWatchTransformThis'
ModifierEnemyStunWatchDamageNearbyEnemies = require 'app/sdk/modifiers/modifierEnemyStunWatchDamageNearbyEnemies'
ModifierIntensifySpawnEntitiesNearby = require 'app/sdk/modifiers/modifierIntensifySpawnEntitiesNearby'
ModifierKillWatchRefreshExhaustionIfTargetStunned = require 'app/sdk/modifiers/modifierKillWatchRefreshExhaustionIfTargetStunned'
ModifierEnemyStunWatchFullyHeal = require 'app/sdk/modifiers/modifierEnemyStunWatchFullyHeal'
ModifierOnSummonFromHandApplyEmblems = require 'app/sdk/modifiers/modifierOnSummonFromHandApplyEmblems'
ModifierOpeningGambitChangeSignatureCardForThisTurn = require 'app/sdk/modifiers/modifierOpeningGambitChangeSignatureCardForThisTurn'
ModifierOpeningGambitRefreshSignatureCard = require 'app/sdk/modifiers/modifierOpeningGambitRefreshSignatureCard'
ModifierTokenCreator = require 'app/sdk/modifiers/modifierTokenCreator'
ModifierToken = require 'app/sdk/modifiers/modifierToken'
ModifierFateVanarTokenQuest = require 'app/sdk/modifiers/modifierFateVanarTokenQuest'
ModifierCannotBeReplaced = require 'app/sdk/modifiers/modifierCannotBeReplaced'
ModifierIntensify = require 'app/sdk/modifiers/modifierIntensify'
ModifierCounterIntensify = require 'app/sdk/modifiers/modifierCounterIntensify'
ModifierCannotBeRemovedFromHand = require 'app/sdk/modifiers/modifierCannotBeRemovedFromHand'
ModifierQuestBuffVanar = require 'app/sdk/modifiers/modifierQuestBuffVanar'
PlayerModifierEmblemSummonWatchVanarTokenQuest = require 'app/sdk/playerModifiers/playerModifierEmblemSummonWatchVanarTokenQuest'
i18next = require 'i18next'
if i18next.t() is undefined
i18next.t = (text) ->
return text
class CardFactory_CoreshatterSet_Faction6
###*
* Returns a card that matches the identifier.
* @param {Number|String} identifier
* @param {GameSession} gameSession
* @returns {Card}
###
@cardForIdentifier: (identifier,gameSession) ->
card = null
if (identifier == Cards.Faction6.VanarQuest)
card = new Unit(gameSession)
card.factionId = Factions.Faction6
card.setCardSetId(CardSet.Coreshatter)
card.name = "Oak PI:NAME:<NAME>END_PI the NePI:NAME:<NAME>END_PI"
card.setDescription("Trial: Have 5 token minions with different names.\nDestiny: Friendly token minions have +4/+4.")
card.atk = 6
card.maxHP = 6
card.manaCost = 6
card.rarityId = Rarity.Mythron
statContextObject = ModifierQuestBuffVanar.createContextObjectWithAttributeBuffs(4,4)
statContextObject.appliedName = "Might of the Oak"
emblemModifier = PlayerModifierEmblemSummonWatchVanarTokenQuest.createContextObject([statContextObject])
emblemModifier.appliedName = "Nemeton's Protection"
emblemModifier.appliedDescription = "Your token minions have +4/+4."
card.setInherentModifiersContextObjects([
ModifierStartsInHand.createContextObject(),
ModifierCannotBeReplaced.createContextObject(),
ModifierFateVanarTokenQuest.createContextObject(5),
ModifierOnSummonFromHandApplyEmblems.createContextObject([emblemModifier], true, false),
ModifierCannotBeRemovedFromHand.createContextObject()
])
card.setFXResource(["FX.Cards.Neutral.TwilightMage"])
card.setBoundingBoxWidth(50)
card.setBoundingBoxHeight(75)
card.setBaseSoundResource(
apply : RSX.sfx_ui_booster_packexplode.audio
walk : RSX.sfx_singe2.audio
attack : RSX.sfx_f2_jadeogre_attack_swing.audio
receiveDamage : RSX.sfx_f3_dunecaster_hit.audio
attackDamage : RSX.sfx_f3_dunecaster_impact.audio
death : RSX.sfx_f3_dunecaster_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6MythronquestBreathing.name
idle : RSX.f6MythronquestIdle.name
walk : RSX.f6MythronquestRun.name
attack : RSX.f6MythronquestAttack.name
attackReleaseDelay: 0.0
attackDelay: 0.3
damage : RSX.f6MythronquestHit.name
death : RSX.f6MythronquestDeath.name
)
if (identifier == Cards.Faction6.Snowballer)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Opening Gambit: Your Bloodbound Spell refreshes and is Lesser Waterball this turn.")
card.atk = 4
card.maxHP = 5
card.manaCost = 5
card.rarityId = Rarity.Legendary
card.setInherentModifiersContextObjects([
ModifierOpeningGambitRefreshSignatureCard.createContextObject()
ModifierOpeningGambitChangeSignatureCardForThisTurn.createContextObject({id: Cards.Spell.SnowballBBS})
])
card.setFXResource(["FX.Cards.Neutral.ZenRui"])
card.setBoundingBoxWidth(70)
card.setBoundingBoxHeight(90)
card.setBaseSoundResource(
apply : RSX.sfx_summonlegendary.audio
walk : RSX.sfx_singe2.audio
attack : RSX.sfx_neutral_bloodtearalchemist_death.audio
receiveDamage : RSX.sfx_neutral_archonspellbinder_hit.audio
attackDamage : RSX.sfx_neutral_archonspellbinder_attack_impact.audio
death : RSX.sfx_neutral_archonspellbinder_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6YnuytTrackerBreathing.name
idle : RSX.f6YnuytTrackerIdle.name
walk : RSX.f6YnuytTrackerRun.name
attack : RSX.f6YnuytTrackerAttack.name
attackReleaseDelay: 0.0
attackDelay: 1.1
damage : RSX.f6YnuytTrackerHit.name
death : RSX.f6YnuytTrackerDeath.name
)
if (identifier == Cards.Spell.SnowballBBS)
card = new SpellDamage(gameSession)
card.factionId = Factions.Faction6
card.id = Cards.Spell.SnowballBBS
card.setIsHiddenInCollection(true)
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Deal 4 damage to an enemy minion.")
card.manaCost = 1
card.damageAmount = 4
card.rarityId = Rarity.Fixed
card.spellFilterType = SpellFilterType.EnemyDirect
card.canTargetGeneral = false
card.setFXResource(["FX.Cards.Spell.LesserWaterball"])
card.setBaseSoundResource(
apply : RSX.sfx_spell_icepillar.audio
)
card.setBaseAnimResource(
idle: RSX.iconWaterballIdle.name
active: RSX.iconWaterballActive.name
)
if (identifier == Cards.Faction6.ManaThief)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Opening Gambit: Take a mana crystal from your opponent.\nDying Wish: Give back the mana crystal.")
card.atk = 4
card.maxHP = 3
card.manaCost = 4
card.rarityId = Rarity.Epic
card.setInherentModifiersContextObjects([
ModifierOpeningGambitDestroyManaCrystal.createContextObject(false,1),
ModifierOpeningGambitBonusManaCrystal.createContextObject(true,1),
ModifierDyingWishDestroyManaCrystal.createContextObject(true,1),
ModifierDyingWishBonusManaCrystal.createContextObject(false,1)
])
card.setFXResource(["FX.Cards.Neutral.EXun"])
card.setBaseSoundResource(
apply : RSX.sfx_spell_diretidefrenzy.audio
walk : RSX.sfx_neutral_komodocharger_hit.audio
attack : RSX.sfx_neutral_sunelemental_death.audio
receiveDamage : RSX.sfx_neutral_swornavenger_hit.audio
attackDamage : RSX.sfx_f2lanternfox_death.audio
death : RSX.sfx_neutral_daggerkiri_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6EvilWispBreathing.name
idle : RSX.f6EvilWispIdle.name
walk : RSX.f6EvilWispRun.name
attack : RSX.f6EvilWispAttack.name
attackReleaseDelay: 0.0
attackDelay: 1.2
damage : RSX.f6EvilWispHit.name
death : RSX.f6EvilWispDeath.name
)
if (identifier == Cards.Spell.EnterThunderdome)
card = new SpellSpawnEntitiesOnEdgeSpaces(gameSession)
card.factionId = Factions.Faction6
card.setCardSetId(CardSet.Coreshatter)
card.id = Cards.Spell.EnterThunderdome
card.name = "Ice PI:NAME:<NAME>END_PI"
card.setDescription("Summon Blazing Spines along the outside of the battlefield.")
card.cardDataOrIndexToSpawn = {id: Cards.Faction6.BlazingSpines}
card.manaCost = 8
card.rarityId = Rarity.Legendary
card.spellFilterType = SpellFilterType.None
card.addKeywordClassToInclude(ModifierTokenCreator)
card.setFXResource(["FX.Cards.Spell.IceAge"])
card.setBaseAnimResource(
idle : RSX.iconEnterIcedomeIdle.name
active : RSX.iconEnterIcedomeActive.name
)
card.setBaseSoundResource(
apply : RSX.sfx_spell_ghostlightning.audio
)
if (identifier == Cards.Faction6.Rootmancer)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Intensify: Summon 1 Treant with Provoke nearby.")
card.atk = 1
card.maxHP = 1
card.manaCost = 2
card.rarityId = Rarity.Common
card.setInherentModifiersContextObjects([
ModifierIntensifySpawnEntitiesNearby.createContextObject({id: Cards.Faction6.Treant}, 1),
ModifierCounterIntensify.createContextObject()
])
card.addKeywordClassToInclude(ModifierProvoke)
card.addKeywordClassToInclude(ModifierTokenCreator)
card.setFXResource(["FX.Cards.Neutral.Amu"])
card.setBaseSoundResource(
apply : RSX.sfx_f6_voiceofthewind_attack_swing.audio
walk : RSX.sfx_spell_polymorph.audio
attack : RSX.sfx_neutral_amu_attack_swing.audio
receiveDamage : RSX.sfx_neutral_amu_hit.audio
attackDamage : RSX.sfx_neutral_amu_attack_impact.audio
death : RSX.sfx_neutral_amu_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6RootmancerBreathing.name
idle : RSX.f6RootmancerIdle.name
walk : RSX.f6RootmancerRun.name
attack : RSX.f6RootmancerAttack.name
attackReleaseDelay: 0.0
attackDelay: 0.9
damage : RSX.f6RootmancerHit.name
death : RSX.f6RootmancerDeath.name
)
if (identifier == Cards.Faction6.SuperFenrir)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Dying Wish: Summon a Fenrir Warmaster on this space.")
card.atk = 4
card.maxHP = 3
card.manaCost = 5
card.rarityId = Rarity.Rare
card.setInherentModifiersContextObjects([ ModifierDyingWishSpawnEntity.createContextObject({id: Cards.Faction6.FenrirWarmaster}) ])
card.addKeywordClassToInclude(ModifierTokenCreator)
card.setFXResource(["FX.Cards.Neutral.Shuffler"])
card.setBaseSoundResource(
apply : RSX.sfx_summonlegendary.audio
walk : RSX.sfx_spell_icepillar_melt.audio
attack : RSX.sfx_neutral_windstopper_attack_impact.audio
receiveDamage : RSX.sfx_f6_icedryad_hit.audio
attackDamage : RSX.sfx_neutral_spelljammer_attack_impact.audio
death : RSX.sfx_neutral_windstopper_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6SuperFenrirBreathing.name
idle : RSX.f6SuperFenrirIdle.name
walk : RSX.f6SuperFenrirRun.name
attack : RSX.f6SuperFenrirAttack.name
attackReleaseDelay: 0.0
attackDelay: 1.2
damage : RSX.f6SuperFenrirHit.name
death : RSX.f6SuperFenrirDeath.name
)
if (identifier == Cards.Faction6.SnowWinkle)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("When this minion is summoned, Stun it.")
card.atk = 6
card.maxHP = 7
card.manaCost = 4
card.rarityId = Rarity.Common
card.raceId = Races.Vespyr
card.setInherentModifiersContextObjects([
ModifierEntersBattlefieldWatchApplyModifiers.createContextObject([ModifierStunnedVanar.createContextObject()])
])
card.addKeywordClassToInclude(ModifierStun)
card.setFXResource(["FX.Cards.Neutral.DragoneboneGolem"])
card.setBoundingBoxWidth(95)
card.setBoundingBoxHeight(95)
card.setBaseSoundResource(
apply : RSX.sfx_unit_deploy_2.audio
walk : RSX.sfx_unit_physical_4.audio
attack : RSX.sfx_neutral_golemdragonbone_attack_swing.audio
receiveDamage : RSX.sfx_neutral_golemdragonbone_hit.audio
attackDamage : RSX.sfx_neutral_golemdragonbone_impact.audio
death : RSX.sfx_neutral_golemdragonbone_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6ElkodonBreathing.name
idle : RSX.f6ElkodonIdle.name
walk : RSX.f6ElkodonRun.name
attack : RSX.f6ElkodonAttack.name
attackReleaseDelay: 0.0
attackDelay: 0.5
damage : RSX.f6ElkodonHit.name
death : RSX.f6ElkodonDeath.name
)
if (identifier == Cards.Spell.IceCapsule)
card = new SpellApplyModifiersToExhaustedMinion(gameSession)
card.factionId = Factions.Faction6
card.setCardSetId(CardSet.Coreshatter)
card.id = Cards.Spell.IceCapsule
card.name = "Cryonic Potential"
card.setDescription("Give an exhausted friendly minion +6/+6.")
card.manaCost = 3
card.rarityId = Rarity.Common
card.spellFilterType = SpellFilterType.AllyDirect
card.canTargetGeneral = false
buffContextObject = Modifier.createContextObjectWithAttributeBuffs(6,6)
buffContextObject.appliedName = "Potential Realized"
card.setTargetModifiersContextObjects([
buffContextObject
])
card.setFXResource(["FX.Cards.Spell.CryonicPotential"])
card.setBaseAnimResource(
idle: RSX.iconIceCapsuleIdle.name
active: RSX.iconIceCapsuleActive.name
)
card.setBaseSoundResource(
apply : RSX.sfx_spell_icepillar.audio
)
if (identifier == Cards.Spell.OwnSideTeleport)
card = new SpellIntensifyTeleportOwnSide(gameSession)
card.factionId = Factions.Faction6
card.id = Cards.Spell.OwnSideTeleport
card.setCardSetId(CardSet.Coreshatter)
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Intensify: Teleport 1 random enemy minion to a space on your starting side of the battlefield.")
card.spellFilterType = SpellFilterType.None
card.manaCost = 1
card.rarityId = Rarity.Common
card.addKeywordClassToInclude(ModifierIntensify)
card.setInherentModifiersContextObjects([ModifierCounterIntensify.createContextObject()])
card._fxResource = ["FX.Cards.Spell.Wanderlust"]
card.setBaseAnimResource(
idle: RSX.iconIceHooksIdle.name
active: RSX.iconIceHooksActive.name
)
card.setBaseSoundResource(
apply : RSX.sfx_spell_tranquility.audio
)
if (identifier == Cards.Faction6.StunWarlock)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("When an enemy is Stunned, transform this minion into Yggdra's Voracity.")
card.atk = 2
card.maxHP = 3
card.manaCost = 3
card.rarityId = Rarity.Legendary
card.setInherentModifiersContextObjects([
ModifierEnemyStunWatchTransformThis.createContextObject({id: Cards.Faction6.StunBeast})
])
card.addKeywordClassToInclude(ModifierTokenCreator)
card.addKeywordClassToInclude(ModifierStunned)
card.setFXResource(["FX.Cards.Neutral.AzureHornShaman"])
card.setBoundingBoxWidth(70)
card.setBoundingBoxHeight(105)
card.setBaseSoundResource(
apply : RSX.sfx_spell_fractalreplication.audio
walk : RSX.sfx_unit_run_magical_3.audio
attack : RSX.sfx_neutral_prophetofthewhite_attack_swing.audio
receiveDamage : RSX.sfx_neutral_alcuinloremaster_hit.audio
attackDamage : RSX.sfx_neutral_alcuinloremaster_attack_impact.audio
death : RSX.sfx_neutral_alcuinloremaster_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6YnuytWarlockBreathing.name
idle : RSX.f6YnuytWarlockIdle.name
walk : RSX.f6YnuytWarlockRun.name
attack : RSX.f6YnuytWarlockAttack.name
attackReleaseDelay: 0.0
attackDelay: 1.2
damage : RSX.f6YnuytWarlockHit.name
death : RSX.f6YnuytWarlockDeath.name
)
if (identifier == Cards.Faction6.StunBeast)
card = new Unit(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.setIsHiddenInCollection(true)
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Whenever an enemy is Stunned, fully heal this minion.")
card.atk = 5
card.maxHP = 5
card.manaCost = 3
card.rarityId = Rarity.TokenUnit
card.setInherentModifiersContextObjects([
ModifierEnemyStunWatchFullyHeal.createContextObject()
])
card.addKeywordClassToInclude(ModifierToken)
card.addKeywordClassToInclude(ModifierStunned)
card.setFXResource(["FX.Cards.Neutral.OwlbeastSage"])
card.setBoundingBoxWidth(85)
card.setBoundingBoxHeight(80)
card.setBaseSoundResource(
apply : RSX.sfx_unit_deploy_1.audio
walk : RSX.sfx_singe2.audio
attack : RSX.sfx_neutral_arcanelimiter_attack_impact.audio
receiveDamage : RSX.sfx_f4_engulfingshadow_attack_impact.audio
attackDamage : RSX.sfx_f4_engulfingshadow_hit.audio
death : RSX.sfx_f4_engulfingshadow_death.audio
)
card.setBaseAnimResource(
breathing : RSX.f6YnuytUnleashedBreathing.name
idle : RSX.f6YnuytUnleashedIdle.name
walk : RSX.f6YnuytUnleashedRun.name
attack : RSX.f6YnuytUnleashedAttack.name
attackReleaseDelay: 0.0
attackDelay: 0.5
damage : RSX.f6YnuytUnleashedHit.name
death : RSX.f6YnuytUnleashedDeath.name
)
if (identifier == Cards.Spell.InfiniteHowlers)
card = new SpellInfiniteHowlers(gameSession)
card.factionId = Factions.Faction6
card.id = Cards.Spell.InfiniteHowlers
card.setCardSetId(CardSet.Coreshatter)
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Summon a 3/3 Vespyr Night Howler.\nPut an Endless Hunt into your action bar.")
card.manaCost = 4
card.spellFilterType = SpellFilterType.SpawnSource
card.rarityId = Rarity.Rare
card.cardDataOrIndexToSpawn = {id: Cards.Faction6.ShadowVespyr}
card.addKeywordClassToInclude(ModifierTokenCreator)
card.setFXResource(["FX.Cards.Spell.EndlessHunt"])
card.setBaseSoundResource(
apply : RSX.sfx_spell_entropicdecay.audio
)
card.setBaseAnimResource(
idle : RSX.iconInfiniteHowlersIdle.name
active : RSX.iconInfiniteHowlersActive.name
)
if (identifier == Cards.Artifact.SnowChipper)
card = new Artifact(gameSession)
card.setCardSetId(CardSet.Coreshatter)
card.factionId = Factions.Faction6
card.id = Cards.Artifact.SnowChipper
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Your General has +1 Attack.\nReactivate your General whenever they destroy a Stunned enemy.")
card.manaCost = 2
card.rarityId = Rarity.Epic
card.durability = 3
card.setTargetModifiersContextObjects([
Modifier.createContextObjectWithAttributeBuffs(1,undefined),
ModifierKillWatchRefreshExhaustionIfTargetStunned.createContextObject(false, true)
])
card.addKeywordClassToInclude(ModifierStunned)
card.setFXResource(["FX.Cards.Artifact.OblivionSickle"])
card.setBaseAnimResource(
idle: RSX.iconSnowchipperIdle.name
active: RSX.iconSnowchipperActive.name
)
card.setBaseSoundResource(
apply : RSX.sfx_victory_crest.audio
)
if (identifier == Cards.Spell.AspectOfIdentity)
card = new SpellTransformSameManaCost(gameSession)
card.factionId = Factions.Faction6
card.id = Cards.Spell.AspectOfIdentity
card.setCardSetId(CardSet.Coreshatter)
card.name = "Aspect of Ego"
card.setDescription("Transform ANY minion into a random minion of the same cost.")
card.manaCost = 1
card.spellFilterType = SpellFilterType.NeutralDirect
card.canTargetGeneral = false
card.rarityId = Rarity.Rare
card.setFXResource(["FX.Cards.Spell.AspectOfEgo"])
card.setBaseAnimResource(
idle : RSX.iconAspectIdentityIdle.name
active : RSX.iconAspectIdentityActive.name
)
card.setBaseSoundResource(
apply : RSX.sfx_neutral_crossbones_hit.audio
)
if (identifier == Cards.Spell.CreepingFrost)
card = new SpellCreepingFrost(gameSession)
card.factionId = Factions.Faction6
card.id = Cards.Spell.CreepingFrost
card.setCardSetId(CardSet.Coreshatter)
card.name = "PI:NAME:<NAME>END_PI"
card.setDescription("Stun an enemy minion.\nStunned enemy minions Stun a nearby enemy.")
card.manaCost = 3
card.spellFilterType = SpellFilterType.EnemyDirect
card.canTargetGeneral = false
card.rarityId = Rarity.Epic
card.addKeywordClassToInclude(ModifierStunned)
card.addKeywordClassToInclude(ModifierStun)
card.setFXResource(["FX.Cards.Spell.Permafrost"])
card.setBaseSoundResource(
apply : RSX.sfx_neutral_windstopper_attack_impact.audio
)
card.setBaseAnimResource(
idle : RSX.iconPermaFrostIdle.name
active : RSX.iconPermaFrostActive.name
)
return card
module.exports = CardFactory_CoreshatterSet_Faction6
|
[
{
"context": "ason_from_value(@file_path)\n\n key_match = /tree\\.(\\S+)\\.(\\S+)\\.bmp/.exec(path.basename(@file_path))\n if k",
"end": 592,
"score": 0.6010903716087341,
"start": 585,
"tag": "KEY",
"value": "S+)\\.(\\"
},
{
"context": "lue(@file_path)\n\n key_match = /tree\\.(\\S+)\\.(\\S+)\\.bmp/.exec(path.basename(@file_path))\n if key_match\n ",
"end": 606,
"score": 0.7064823508262634,
"start": 597,
"tag": "KEY",
"value": "bmp/.exec"
}
] | src/land/tree/tree-texture.coffee | starpeace-project/starpeace-website-client-assets | 0 | _ = require('lodash')
path = require('path')
crypto = require('crypto')
Jimp = require('jimp')
LandAttributes = require('../land-attributes')
Texture = require('../../common/texture')
ConsoleProgressUpdater = require('../../utils/console-progress-updater')
FileUtils = require('../../utils/file-utils')
module.exports = class TreeTexture extends Texture
constructor: (@directory, @file_path, image) ->
super(null, image)
@planet_type = LandAttributes.planet_type_from_value(@file_path)
@season = LandAttributes.season_from_value(@file_path)
key_match = /tree\.(\S+)\.(\S+)\.bmp/.exec(path.basename(@file_path))
if key_match
@variant = key_match[2]
@zone = LandAttributes.zone_from_value(key_match[1])
else
@variant = Number.NaN
@zone = LandAttributes.ZONES.other
ideal_file_name: () -> "tree.#{@zone}.#{@variant.toString().padStart(2, '0')}.bmp"
key_for_spritesheet: () -> "#{@season}.#{@zone}.#{@variant.toString().padStart(2, '0')}"
filter_mode: () -> { blue: true, white: true, grey: true }
@load: (land_dir) ->
console.log "loading tree textures from #{land_dir}\n"
image_file_paths = _.filter(FileUtils.read_all_files_sync(land_dir), (file_path) -> path.basename(file_path).startsWith('tree') && file_path.endsWith('.bmp'))
progress = new ConsoleProgressUpdater(image_file_paths.length)
images = await Promise.all(_.map(image_file_paths, (file_path) ->
img = Jimp.read(file_path)
progress.next()
img
))
_.map(_.zip(image_file_paths, images), (pair) -> new TreeTexture(land_dir, pair[0].substring(land_dir.length + 1), pair[1]))
| 67132 | _ = require('lodash')
path = require('path')
crypto = require('crypto')
Jimp = require('jimp')
LandAttributes = require('../land-attributes')
Texture = require('../../common/texture')
ConsoleProgressUpdater = require('../../utils/console-progress-updater')
FileUtils = require('../../utils/file-utils')
module.exports = class TreeTexture extends Texture
constructor: (@directory, @file_path, image) ->
super(null, image)
@planet_type = LandAttributes.planet_type_from_value(@file_path)
@season = LandAttributes.season_from_value(@file_path)
key_match = /tree\.(\<KEY>S+)\.<KEY>(path.basename(@file_path))
if key_match
@variant = key_match[2]
@zone = LandAttributes.zone_from_value(key_match[1])
else
@variant = Number.NaN
@zone = LandAttributes.ZONES.other
ideal_file_name: () -> "tree.#{@zone}.#{@variant.toString().padStart(2, '0')}.bmp"
key_for_spritesheet: () -> "#{@season}.#{@zone}.#{@variant.toString().padStart(2, '0')}"
filter_mode: () -> { blue: true, white: true, grey: true }
@load: (land_dir) ->
console.log "loading tree textures from #{land_dir}\n"
image_file_paths = _.filter(FileUtils.read_all_files_sync(land_dir), (file_path) -> path.basename(file_path).startsWith('tree') && file_path.endsWith('.bmp'))
progress = new ConsoleProgressUpdater(image_file_paths.length)
images = await Promise.all(_.map(image_file_paths, (file_path) ->
img = Jimp.read(file_path)
progress.next()
img
))
_.map(_.zip(image_file_paths, images), (pair) -> new TreeTexture(land_dir, pair[0].substring(land_dir.length + 1), pair[1]))
| true | _ = require('lodash')
path = require('path')
crypto = require('crypto')
Jimp = require('jimp')
LandAttributes = require('../land-attributes')
Texture = require('../../common/texture')
ConsoleProgressUpdater = require('../../utils/console-progress-updater')
FileUtils = require('../../utils/file-utils')
module.exports = class TreeTexture extends Texture
constructor: (@directory, @file_path, image) ->
super(null, image)
@planet_type = LandAttributes.planet_type_from_value(@file_path)
@season = LandAttributes.season_from_value(@file_path)
key_match = /tree\.(\PI:KEY:<KEY>END_PIS+)\.PI:KEY:<KEY>END_PI(path.basename(@file_path))
if key_match
@variant = key_match[2]
@zone = LandAttributes.zone_from_value(key_match[1])
else
@variant = Number.NaN
@zone = LandAttributes.ZONES.other
ideal_file_name: () -> "tree.#{@zone}.#{@variant.toString().padStart(2, '0')}.bmp"
key_for_spritesheet: () -> "#{@season}.#{@zone}.#{@variant.toString().padStart(2, '0')}"
filter_mode: () -> { blue: true, white: true, grey: true }
@load: (land_dir) ->
console.log "loading tree textures from #{land_dir}\n"
image_file_paths = _.filter(FileUtils.read_all_files_sync(land_dir), (file_path) -> path.basename(file_path).startsWith('tree') && file_path.endsWith('.bmp'))
progress = new ConsoleProgressUpdater(image_file_paths.length)
images = await Promise.all(_.map(image_file_paths, (file_path) ->
img = Jimp.read(file_path)
progress.next()
img
))
_.map(_.zip(image_file_paths, images), (pair) -> new TreeTexture(land_dir, pair[0].substring(land_dir.length + 1), pair[1]))
|
[
{
"context": "et: (params)->\n cookie = new Cookie(key: params.name, value: params.value, domain: params.domain || \"l",
"end": 1669,
"score": 0.9579185843467712,
"start": 1665,
"tag": "KEY",
"value": "name"
}
] | src/zombie/cookies.coffee | scoky/zombie | 0 | # See [RFC 2109](http://tools.ietf.org/html/rfc2109.html) and
# [document.cookie](http://developer.mozilla.org/en/document.cookie)
assert = require("assert")
HTML = require("jsdom").defaultLevel
{ isArray } = require("util")
Tough = require("tough-cookie")
Cookie = Tough.Cookie
# Lists all available cookies.
module.exports = class Cookies extends Array
constructor: ->
# Used to dump state to console (debugging)
dump: ->
for cookie in @sort(Tough.cookieCompare)
process.stdout.write cookie.toString() + "\n"
# Serializes all selected cookies into a single string. Used to generate a cookies header.
#
# domain - Request hostname
# path - Request pathname
serialize: (domain, path)->
return @select(domain: domain, path: path)
.map((cookie)-> cookie.cookieString()).join("; ")
# Returns all cookies that match the identifier (name, domain and path).
# This is used for retrieving cookies.
select: (identifier)->
cookies = @filter((cookie)-> cookie.TTL() > 0)
if identifier.name
cookies = cookies.filter((cookie)-> cookie.key == identifier.name)
if identifier.path
cookies = cookies.filter((cookie)-> Tough.pathMatch(identifier.path, cookie.path))
if identifier.domain
cookies = cookies.filter((cookie)-> Tough.domainMatch(identifier.domain, cookie.domain))
return cookies
.sort((a, b)-> return (b.domain.length - a.domain.length))
.sort(Tough.cookieCompare)
# Adds a new cookie, updates existing cookie (same name, domain and path), or
# deletes a cookie (if expires in the past).
set: (params)->
cookie = new Cookie(key: params.name, value: params.value, domain: params.domain || "localhost", path: params.path || "/")
if params.expires
cookie.setExpires(params.expires)
else if params.hasOwnProperty("max-age")
cookie.setMaxAge(params["max-age"])
cookie.secure = !!params.secure
cookie.httpOnly = !!params.httpOnly
# Delete cookie before setting it, so we only store one cookie (per
# domain/path/name)
deleteIfExists = @filter((c)-> c.key == cookie.key && c.domain == cookie.domain && c.path == cookie.path)[0]
@delete(deleteIfExists)
if cookie.TTL() > 0
@push(cookie)
return
# Delete the specified cookie.
delete: (cookie)->
index = @indexOf(cookie)
if ~index
@splice(index, 1)
# Deletes all cookies.
deleteAll: ->
@length = 0
# Update cookies with HTTP response
#
# httpHeader - Value of HTTP Set-Cookie header (string/array)
# domain - Set from hostname
# path - Set from pathname
update: (httpHeader, domain, path)->
# One Set-Cookie is a string, multiple is an array
cookies = if isArray(httpHeader) then httpHeader else [httpHeader]
for cookie in cookies
cookie = Cookie.parse(cookie)
if cookie
cookie.domain ||= domain
cookie.path ||= Tough.defaultPath(path)
# Delete cookie before setting it, so we only store one cookie (per
# domain/path/name)
deleteIfExists = @filter((c)-> c.key == cookie.key && c.domain == cookie.domain && c.path == cookie.path)[0]
@delete(deleteIfExists)
if cookie.TTL() > 0
@push(cookie)
return
# Returns name=value pairs
HTML.HTMLDocument.prototype.__defineGetter__ "cookie", ->
return @window.browser.cookies.select(domain: @location.hostname, path: @location.pathname)
.filter((cookie)-> !cookie.httpOnly)
.map((cookie)-> "#{cookie.key}=#{cookie.value}")
.join("; ")
# Accepts serialized form (same as Set-Cookie header) and updates cookie from
# new values.
HTML.HTMLDocument.prototype.__defineSetter__ "cookie", (cookie)->
@window.browser.cookies.update(cookie.toString(), @location.hostname, @location.pathname)
| 59023 | # See [RFC 2109](http://tools.ietf.org/html/rfc2109.html) and
# [document.cookie](http://developer.mozilla.org/en/document.cookie)
assert = require("assert")
HTML = require("jsdom").defaultLevel
{ isArray } = require("util")
Tough = require("tough-cookie")
Cookie = Tough.Cookie
# Lists all available cookies.
module.exports = class Cookies extends Array
constructor: ->
# Used to dump state to console (debugging)
dump: ->
for cookie in @sort(Tough.cookieCompare)
process.stdout.write cookie.toString() + "\n"
# Serializes all selected cookies into a single string. Used to generate a cookies header.
#
# domain - Request hostname
# path - Request pathname
serialize: (domain, path)->
return @select(domain: domain, path: path)
.map((cookie)-> cookie.cookieString()).join("; ")
# Returns all cookies that match the identifier (name, domain and path).
# This is used for retrieving cookies.
select: (identifier)->
cookies = @filter((cookie)-> cookie.TTL() > 0)
if identifier.name
cookies = cookies.filter((cookie)-> cookie.key == identifier.name)
if identifier.path
cookies = cookies.filter((cookie)-> Tough.pathMatch(identifier.path, cookie.path))
if identifier.domain
cookies = cookies.filter((cookie)-> Tough.domainMatch(identifier.domain, cookie.domain))
return cookies
.sort((a, b)-> return (b.domain.length - a.domain.length))
.sort(Tough.cookieCompare)
# Adds a new cookie, updates existing cookie (same name, domain and path), or
# deletes a cookie (if expires in the past).
set: (params)->
cookie = new Cookie(key: params.<KEY>, value: params.value, domain: params.domain || "localhost", path: params.path || "/")
if params.expires
cookie.setExpires(params.expires)
else if params.hasOwnProperty("max-age")
cookie.setMaxAge(params["max-age"])
cookie.secure = !!params.secure
cookie.httpOnly = !!params.httpOnly
# Delete cookie before setting it, so we only store one cookie (per
# domain/path/name)
deleteIfExists = @filter((c)-> c.key == cookie.key && c.domain == cookie.domain && c.path == cookie.path)[0]
@delete(deleteIfExists)
if cookie.TTL() > 0
@push(cookie)
return
# Delete the specified cookie.
delete: (cookie)->
index = @indexOf(cookie)
if ~index
@splice(index, 1)
# Deletes all cookies.
deleteAll: ->
@length = 0
# Update cookies with HTTP response
#
# httpHeader - Value of HTTP Set-Cookie header (string/array)
# domain - Set from hostname
# path - Set from pathname
update: (httpHeader, domain, path)->
# One Set-Cookie is a string, multiple is an array
cookies = if isArray(httpHeader) then httpHeader else [httpHeader]
for cookie in cookies
cookie = Cookie.parse(cookie)
if cookie
cookie.domain ||= domain
cookie.path ||= Tough.defaultPath(path)
# Delete cookie before setting it, so we only store one cookie (per
# domain/path/name)
deleteIfExists = @filter((c)-> c.key == cookie.key && c.domain == cookie.domain && c.path == cookie.path)[0]
@delete(deleteIfExists)
if cookie.TTL() > 0
@push(cookie)
return
# Returns name=value pairs
HTML.HTMLDocument.prototype.__defineGetter__ "cookie", ->
return @window.browser.cookies.select(domain: @location.hostname, path: @location.pathname)
.filter((cookie)-> !cookie.httpOnly)
.map((cookie)-> "#{cookie.key}=#{cookie.value}")
.join("; ")
# Accepts serialized form (same as Set-Cookie header) and updates cookie from
# new values.
HTML.HTMLDocument.prototype.__defineSetter__ "cookie", (cookie)->
@window.browser.cookies.update(cookie.toString(), @location.hostname, @location.pathname)
| true | # See [RFC 2109](http://tools.ietf.org/html/rfc2109.html) and
# [document.cookie](http://developer.mozilla.org/en/document.cookie)
assert = require("assert")
HTML = require("jsdom").defaultLevel
{ isArray } = require("util")
Tough = require("tough-cookie")
Cookie = Tough.Cookie
# Lists all available cookies.
module.exports = class Cookies extends Array
constructor: ->
# Used to dump state to console (debugging)
dump: ->
for cookie in @sort(Tough.cookieCompare)
process.stdout.write cookie.toString() + "\n"
# Serializes all selected cookies into a single string. Used to generate a cookies header.
#
# domain - Request hostname
# path - Request pathname
serialize: (domain, path)->
return @select(domain: domain, path: path)
.map((cookie)-> cookie.cookieString()).join("; ")
# Returns all cookies that match the identifier (name, domain and path).
# This is used for retrieving cookies.
select: (identifier)->
cookies = @filter((cookie)-> cookie.TTL() > 0)
if identifier.name
cookies = cookies.filter((cookie)-> cookie.key == identifier.name)
if identifier.path
cookies = cookies.filter((cookie)-> Tough.pathMatch(identifier.path, cookie.path))
if identifier.domain
cookies = cookies.filter((cookie)-> Tough.domainMatch(identifier.domain, cookie.domain))
return cookies
.sort((a, b)-> return (b.domain.length - a.domain.length))
.sort(Tough.cookieCompare)
# Adds a new cookie, updates existing cookie (same name, domain and path), or
# deletes a cookie (if expires in the past).
set: (params)->
cookie = new Cookie(key: params.PI:KEY:<KEY>END_PI, value: params.value, domain: params.domain || "localhost", path: params.path || "/")
if params.expires
cookie.setExpires(params.expires)
else if params.hasOwnProperty("max-age")
cookie.setMaxAge(params["max-age"])
cookie.secure = !!params.secure
cookie.httpOnly = !!params.httpOnly
# Delete cookie before setting it, so we only store one cookie (per
# domain/path/name)
deleteIfExists = @filter((c)-> c.key == cookie.key && c.domain == cookie.domain && c.path == cookie.path)[0]
@delete(deleteIfExists)
if cookie.TTL() > 0
@push(cookie)
return
# Delete the specified cookie.
delete: (cookie)->
index = @indexOf(cookie)
if ~index
@splice(index, 1)
# Deletes all cookies.
deleteAll: ->
@length = 0
# Update cookies with HTTP response
#
# httpHeader - Value of HTTP Set-Cookie header (string/array)
# domain - Set from hostname
# path - Set from pathname
update: (httpHeader, domain, path)->
# One Set-Cookie is a string, multiple is an array
cookies = if isArray(httpHeader) then httpHeader else [httpHeader]
for cookie in cookies
cookie = Cookie.parse(cookie)
if cookie
cookie.domain ||= domain
cookie.path ||= Tough.defaultPath(path)
# Delete cookie before setting it, so we only store one cookie (per
# domain/path/name)
deleteIfExists = @filter((c)-> c.key == cookie.key && c.domain == cookie.domain && c.path == cookie.path)[0]
@delete(deleteIfExists)
if cookie.TTL() > 0
@push(cookie)
return
# Returns name=value pairs
HTML.HTMLDocument.prototype.__defineGetter__ "cookie", ->
return @window.browser.cookies.select(domain: @location.hostname, path: @location.pathname)
.filter((cookie)-> !cookie.httpOnly)
.map((cookie)-> "#{cookie.key}=#{cookie.value}")
.join("; ")
# Accepts serialized form (same as Set-Cookie header) and updates cookie from
# new values.
HTML.HTMLDocument.prototype.__defineSetter__ "cookie", (cookie)->
@window.browser.cookies.update(cookie.toString(), @location.hostname, @location.pathname)
|
[
{
"context": "\nGulp task sass compile\n@create 2014-10-07\n@author KoutarouYabe <idolm@ster.pw>\n###\n\nmodule.exports = (gulp, plug",
"end": 66,
"score": 0.9998921751976013,
"start": 54,
"tag": "NAME",
"value": "KoutarouYabe"
},
{
"context": " compile\n@create 2014-10-07\n@author KoutarouYabe <idolm@ster.pw>\n###\n\nmodule.exports = (gulp, plugins)->\n gulp.t",
"end": 81,
"score": 0.9999326467514038,
"start": 68,
"tag": "EMAIL",
"value": "idolm@ster.pw"
}
] | tasks/config/sass.coffee | moorvin/Sea-Fight | 1 | ###
Gulp task sass compile
@create 2014-10-07
@author KoutarouYabe <idolm@ster.pw>
###
module.exports = (gulp, plugins)->
gulp.task "sass", ->
gulp.src [
"assets/css/**/*.sass",
"assets/css/**/*.scss"
]
.pipe plugins.plumber()
.pipe plugins.sass bare: true
.on 'error', plugins.error
.pipe gulp.dest plugins.config.destPath + "css/"
| 95040 | ###
Gulp task sass compile
@create 2014-10-07
@author <NAME> <<EMAIL>>
###
module.exports = (gulp, plugins)->
gulp.task "sass", ->
gulp.src [
"assets/css/**/*.sass",
"assets/css/**/*.scss"
]
.pipe plugins.plumber()
.pipe plugins.sass bare: true
.on 'error', plugins.error
.pipe gulp.dest plugins.config.destPath + "css/"
| true | ###
Gulp task sass compile
@create 2014-10-07
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
module.exports = (gulp, plugins)->
gulp.task "sass", ->
gulp.src [
"assets/css/**/*.sass",
"assets/css/**/*.scss"
]
.pipe plugins.plumber()
.pipe plugins.sass bare: true
.on 'error', plugins.error
.pipe gulp.dest plugins.config.destPath + "css/"
|
[
{
"context": "io/v3/d7abd15cee994b058394679cfe2eff7a\"\n\napiKey: \"zcrttestnet\"\n\ninvoices:\n minConfirmations: 1\n\npublic:\n cont",
"end": 128,
"score": 0.9994784593582153,
"start": 117,
"tag": "KEY",
"value": "zcrttestnet"
}
] | config/default.cson | mosqueiro/zcrtpay | 0 | listen: 443
provider:
type: "rpc"
uri: "https://ropsten.infura.io/v3/d7abd15cee994b058394679cfe2eff7a"
apiKey: "zcrttestnet"
invoices:
minConfirmations: 1
public:
contracts:
ZCRT:
address: "0x28858bb770de1e7689045117ff99c286ed24ea55" | 21901 | listen: 443
provider:
type: "rpc"
uri: "https://ropsten.infura.io/v3/d7abd15cee994b058394679cfe2eff7a"
apiKey: "<KEY>"
invoices:
minConfirmations: 1
public:
contracts:
ZCRT:
address: "0x28858bb770de1e7689045117ff99c286ed24ea55" | true | listen: 443
provider:
type: "rpc"
uri: "https://ropsten.infura.io/v3/d7abd15cee994b058394679cfe2eff7a"
apiKey: "PI:KEY:<KEY>END_PI"
invoices:
minConfirmations: 1
public:
contracts:
ZCRT:
address: "0x28858bb770de1e7689045117ff99c286ed24ea55" |
[
{
"context": "gth\n\n message =\n body: content\n authorName: authorName\n attachments: [\n category: 'quote'\n ",
"end": 401,
"score": 0.7541109919548035,
"start": 395,
"tag": "NAME",
"value": "author"
},
{
"context": "/icons/swathub@2x.png'\n\n @_fields.push\n key: 'webhookUrl'\n type: 'text'\n readOnly: true\n descr",
"end": 1173,
"score": 0.6981943845748901,
"start": 1166,
"tag": "KEY",
"value": "webhook"
}
] | src/services/swathub.coffee | jianliaoim/talk-services | 40 | _ = require 'lodash'
util = require '../util'
_receiveWebhook = ({query, body}) ->
payload = _.assign {}
, query or {}
, body or {}
{
content
authorName
title
text
redirectUrl
imageUrl
} = payload
throw new Error("Title and text can not be empty") unless title?.length or text?.length or content?.length
message =
body: content
authorName: authorName
attachments: [
category: 'quote'
data:
title: title
text: text
redirectUrl: redirectUrl
imageUrl: imageUrl
]
message
module.exports = ->
@title = 'SWATHub'
@template = 'webhook'
@summary = util.i18n
zh: '简单、高效的云端自动化测试平台。'
en: 'Simple and Efficient Test Automation on Cloud'
@description = util.i18n
zh: '无需学习任何编程语言,SWATHub让你在云端快速创建和实施自动化测试。添加SWATHub聚合服务之后,你可以在简聊中收取自动化测试的状态信息和结果报告。'
en: 'SWATHub enables building automated test scenarios on cloud in a code-less way. You can receive the test automation status messages, and execution reports in Talk.ai, by means of this SWATHub integration.'
@iconUrl = util.static 'images/icons/swathub@2x.png'
@_fields.push
key: 'webhookUrl'
type: 'text'
readOnly: true
description: util.i18n
zh: 'Webhook url'
en: 'Webhook url'
@registerEvent 'service.webhook', _receiveWebhook
| 42239 | _ = require 'lodash'
util = require '../util'
_receiveWebhook = ({query, body}) ->
payload = _.assign {}
, query or {}
, body or {}
{
content
authorName
title
text
redirectUrl
imageUrl
} = payload
throw new Error("Title and text can not be empty") unless title?.length or text?.length or content?.length
message =
body: content
authorName: <NAME>Name
attachments: [
category: 'quote'
data:
title: title
text: text
redirectUrl: redirectUrl
imageUrl: imageUrl
]
message
module.exports = ->
@title = 'SWATHub'
@template = 'webhook'
@summary = util.i18n
zh: '简单、高效的云端自动化测试平台。'
en: 'Simple and Efficient Test Automation on Cloud'
@description = util.i18n
zh: '无需学习任何编程语言,SWATHub让你在云端快速创建和实施自动化测试。添加SWATHub聚合服务之后,你可以在简聊中收取自动化测试的状态信息和结果报告。'
en: 'SWATHub enables building automated test scenarios on cloud in a code-less way. You can receive the test automation status messages, and execution reports in Talk.ai, by means of this SWATHub integration.'
@iconUrl = util.static 'images/icons/swathub@2x.png'
@_fields.push
key: '<KEY>Url'
type: 'text'
readOnly: true
description: util.i18n
zh: 'Webhook url'
en: 'Webhook url'
@registerEvent 'service.webhook', _receiveWebhook
| true | _ = require 'lodash'
util = require '../util'
_receiveWebhook = ({query, body}) ->
payload = _.assign {}
, query or {}
, body or {}
{
content
authorName
title
text
redirectUrl
imageUrl
} = payload
throw new Error("Title and text can not be empty") unless title?.length or text?.length or content?.length
message =
body: content
authorName: PI:NAME:<NAME>END_PIName
attachments: [
category: 'quote'
data:
title: title
text: text
redirectUrl: redirectUrl
imageUrl: imageUrl
]
message
module.exports = ->
@title = 'SWATHub'
@template = 'webhook'
@summary = util.i18n
zh: '简单、高效的云端自动化测试平台。'
en: 'Simple and Efficient Test Automation on Cloud'
@description = util.i18n
zh: '无需学习任何编程语言,SWATHub让你在云端快速创建和实施自动化测试。添加SWATHub聚合服务之后,你可以在简聊中收取自动化测试的状态信息和结果报告。'
en: 'SWATHub enables building automated test scenarios on cloud in a code-less way. You can receive the test automation status messages, and execution reports in Talk.ai, by means of this SWATHub integration.'
@iconUrl = util.static 'images/icons/swathub@2x.png'
@_fields.push
key: 'PI:KEY:<KEY>END_PIUrl'
type: 'text'
readOnly: true
description: util.i18n
zh: 'Webhook url'
en: 'Webhook url'
@registerEvent 'service.webhook', _receiveWebhook
|
[
{
"context": "### ^\nBSD 3-Clause License\n\nCopyright (c) 2017, Stephan Jorek\nAll rights reserved.\n\nRedistribution and use in s",
"end": 61,
"score": 0.9998365640640259,
"start": 48,
"tag": "NAME",
"value": "Stephan Jorek"
}
] | src/Expressions.coffee | sjorek/goatee-rules.js | 0 | ### ^
BSD 3-Clause License
Copyright (c) 2017, Stephan Jorek
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
###
# # ExpressionMap …
# ---------------
#
# ExpressionMaps look like “attribute-key: expression; another-key: expression”.
# They provide a implementation of normalized to dash-seperated RuleMap.
#
###
###*
# -------------
# A helping wrapper …
#
# @function _create
# @param {Function} ParentClass
# @private
###
_create = (ParentClass) ->
  ###*
  # -------------
  # @class ExpressionMap
  # @namespace GoateeRules
  ###
  class ExpressionMap extends ParentClass
    # lazily bound reference to **Parser.parse**
    parse = null
    ###*
    # -------------
    # Compatibility layer for expressions
    #
    # @property operator
    # @type {Object}
    ###
    operator:
      name: 'rules'
    ###*
    # -------------
    # Parses the given string and injects the resulting map into this one,
    # honouring priorities.
    #
    # @method apply
    # @param {String} string
    # @return {RuleMap}
    ###
    apply: (string) ->
      # delayed require - avoids the circular dependency during parser creation
      parse ?= require('./Parser').parse
      @inject parse(string, this)
    ###*
    # -------------
    # Expressions are stored as given - no normalization is applied.
    #
    # @method normalizeValue
    # @param {Expression} expression
    # @return {Expression}
    ###
    normalizeValue: (expression) ->
      expression
    ###*
    # -------------
    # Serializes this map; a given callback takes precedence over flattening.
    #
    # @method toJSON
    # @param {Function} callback (optional)
    # @return {Array}
    ###
    toJSON: (callback) ->
      if callback then callback this else @flatten()
    ###*
    # -------------
    # Forwards the given callback to every contained expression.
    #
    # @method callback
    # @param {Function} callback (optional)
    # @return {ExpressionMap}
    ###
    callback: (callback) ->
      @each (key, expression) ->
        expression.callback callback
    ###*
    # -------------
    # Evaluates every contained expression into a plain object keyed by rule.
    #
    # @method evaluate
    # @param {Object} context (optional)
    # @param {Object} variables (optional)
    # @param {Array} scope (optional)
    # @param {Array} stack (optional)
    # @return {Object}
    ###
    evaluate: (context={}, variables={}, scope, stack) ->
      evaluated = {}
      @map (key, expression) ->
        evaluated[key] = expression.evaluate context, variables, scope, stack
      evaluated
  ExpressionMap
module.exports = _create require('./Ordered/PropertyMap')
# Map all possible implementations for development purposes
for _kind in ['Attribute','Property', 'Rule']
  module.exports["Ordered#{_kind}Expressions"] = _create require("./Ordered/#{_kind}Map")
  module.exports["Unordered#{_kind}Expressions"] = _create require("./Unordered/#{_kind}Map")
| 153107 | ### ^
BSD 3-Clause License
Copyright (c) 2017, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
###
# # ExpressionMap …
# ---------------
#
# ExpressionMaps look like “attribute-key: expression; another-key: expression”.
# They provide an implementation of a normalized, dash-separated RuleMap.
#
###
###*
# -------------
# A helping wrapper …
#
# @function _create
# @param {Function} ParentClass
# @private
###
_create = (ParentClass) ->
  ###*
  # -------------
  # @class ExpressionMap
  # @namespace GoateeRules
  ###
  class ExpressionMap extends ParentClass
    # lazy reference to **Parser.parse** (class-body closure variable shared
    # by all instances; resolved on first `apply`)
    parse = null
    ###*
    # -------------
    # Compatibility layer for expressions
    #
    # @property operator
    # @type {Object}
    ###
    operator:
      name: 'rules'
    ###*
    # -------------
    # Parses the given string and applies the resulting map to this map, taking
    # priorities into consideration.
    #
    # @method apply
    # @param {String} string
    # @return {RuleMap}
    ###
    apply: (string) ->
      # Delayed require to allow circular dependency during parser creation
      parse ?= require('./Parser').parse
      @inject parse(string, this)
    ###*
    # -------------
    # Identity normalization - expressions are stored unchanged.
    #
    # @method normalizeValue
    # @param {Expression} expression
    # @return {Expression}
    ###
    normalizeValue: (expression) ->
      expression
    ###*
    # -------------
    # Serializes this map; when a callback is given, its result is returned
    # instead of the flattened representation.
    #
    # @method toJSON
    # @param {Function} callback (optional)
    # @return {Array}
    ###
    toJSON: (callback) ->
      return callback this if callback
      @flatten()
    ###*
    # -------------
    # Forwards the given callback to every contained expression.
    #
    # @method callback
    # @param {Function} callback (optional)
    # @return {ExpressionMap}
    ###
    callback: (callback) ->
      @each (key, expression, important) ->
        expression.callback(callback)
    ###*
    # -------------
    # Evaluates every contained expression into a plain object keyed by rule.
    #
    # @method evaluate
    # @param {Object} context (optional)
    # @param {Object} variables (optional)
    # @param {Array} scope (optional)
    # @param {Array} stack (optional)
    # @return {Object}
    ###
    evaluate: (context={}, variables={}, scope, stack) ->
      rules = {}
      @map (key, expression, important) ->
        rules[key] = expression.evaluate(context, variables, scope, stack)
      rules
  # the wrapper returns the freshly bound class
  ExpressionMap
module.exports = _create require('./Ordered/PropertyMap')
# Map all possible implementations for development purposes
for _kind in ['Attribute','Property', 'Rule']
  module.exports["Ordered#{_kind}Expressions"] = \
    _create require("./Ordered/#{_kind}Map")
  module.exports["Unordered#{_kind}Expressions"] = \
    _create require("./Unordered/#{_kind}Map")
| true | ### ^
BSD 3-Clause License
Copyright (c) 2017, PI:NAME:<NAME>END_PI
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
###
# # ExpressionMap …
# ---------------
#
# ExpressionMaps look like “attribute-key: expression; another-key: expression”.
# They provide an implementation of a normalized, dash-separated RuleMap.
#
###
###*
# -------------
# A helping wrapper …
#
# @function _create
# @param {Function} ParentClass
# @private
###
_create = (ParentClass) ->
  ###*
  # -------------
  # @class ExpressionMap
  # @namespace GoateeRules
  ###
  class ExpressionMap extends ParentClass
    # lazy reference to **Parser.parse** (class-body closure variable shared
    # by all instances; resolved on first `apply`)
    parse = null
    ###*
    # -------------
    # Compatibility layer for expressions
    #
    # @property operator
    # @type {Object}
    ###
    operator:
      name: 'rules'
    ###*
    # -------------
    # Parses the given string and applies the resulting map to this map, taking
    # priorities into consideration.
    #
    # @method apply
    # @param {String} string
    # @return {RuleMap}
    ###
    apply: (string) ->
      # Delayed require to allow circular dependency during parser creation
      parse ?= require('./Parser').parse
      @inject parse(string, this)
    ###*
    # -------------
    # Identity normalization - expressions are stored unchanged.
    #
    # @method normalizeValue
    # @param {Expression} expression
    # @return {Expression}
    ###
    normalizeValue: (expression) ->
      expression
    ###*
    # -------------
    # Serializes this map; when a callback is given, its result is returned
    # instead of the flattened representation.
    #
    # @method toJSON
    # @param {Function} callback (optional)
    # @return {Array}
    ###
    toJSON: (callback) ->
      return callback this if callback
      @flatten()
    ###*
    # -------------
    # Forwards the given callback to every contained expression.
    #
    # @method callback
    # @param {Function} callback (optional)
    # @return {ExpressionMap}
    ###
    callback: (callback) ->
      @each (key, expression, important) ->
        expression.callback(callback)
    ###*
    # -------------
    # Evaluates every contained expression into a plain object keyed by rule.
    #
    # @method evaluate
    # @param {Object} context (optional)
    # @param {Object} variables (optional)
    # @param {Array} scope (optional)
    # @param {Array} stack (optional)
    # @return {Object}
    ###
    evaluate: (context={}, variables={}, scope, stack) ->
      rules = {}
      @map (key, expression, important) ->
        rules[key] = expression.evaluate(context, variables, scope, stack)
      rules
  # the wrapper returns the freshly bound class
  ExpressionMap
module.exports = _create require('./Ordered/PropertyMap')
# Map all possible implementations for development purposes
for _kind in ['Attribute','Property', 'Rule']
  module.exports["Ordered#{_kind}Expressions"] = \
    _create require("./Ordered/#{_kind}Map")
  module.exports["Unordered#{_kind}Expressions"] = \
    _create require("./Unordered/#{_kind}Map")
|
[
{
"context": "nse_type') is -1\n # https://github.com/OAButton/discussion/issues/1793\n #sub = 'https:",
"end": 6356,
"score": 0.9994385838508606,
"start": 6348,
"tag": "USERNAME",
"value": "OAButton"
},
{
"context": "graphy+and+Recording+beyond+the+Phonograph&author=Pierce%2C+J+Mackenzie&issn=01482076&title=Nineteenth+Cen",
"end": 16963,
"score": 0.9998128414154053,
"start": 16957,
"tag": "NAME",
"value": "Pierce"
},
{
"context": "+Recording+beyond+the+Phonograph&author=Pierce%2C+J+Mackenzie&issn=01482076&title=Nineteenth+Century+Music&volu",
"end": 16978,
"score": 0.9333859086036682,
"start": 16967,
"tag": "NAME",
"value": "J+Mackenzie"
},
{
"context": " service: 'openaccessbutton',\n from: 'natalia.norori@openaccessbutton.org',\n to: ['joe@righttoresearch.org','s.barro",
"end": 21471,
"score": 0.999929666519165,
"start": 21436,
"tag": "EMAIL",
"value": "natalia.norori@openaccessbutton.org"
},
{
"context": "talia.norori@openaccessbutton.org',\n to: ['joe@righttoresearch.org','s.barron@imperial.ac.uk'],\n subject: 'EX",
"end": 21511,
"score": 0.9999297261238098,
"start": 21488,
"tag": "EMAIL",
"value": "joe@righttoresearch.org"
},
{
"context": "ton.org',\n to: ['joe@righttoresearch.org','s.barron@imperial.ac.uk'],\n subject: 'EXAMPLE ILL TRIGGER',\n ",
"end": 21537,
"score": 0.9999300837516785,
"start": 21514,
"tag": "EMAIL",
"value": "s.barron@imperial.ac.uk"
},
{
"context": "antill_create.html'}, to: eml, from: \"InstantILL <InstantILL@openaccessbutton.org>\", subject: \"ILL request \" + vars.illid})\n\n ",
"end": 25379,
"score": 0.9999291896820068,
"start": 25348,
"tag": "EMAIL",
"value": "InstantILL@openaccessbutton.org"
},
{
"context": " \" + vars.illid})\n\n # send msg to mark and joe for testing (can be removed later)\n txt = va",
"end": 25455,
"score": 0.6054789423942566,
"start": 25454,
"tag": "NAME",
"value": "e"
},
{
"context": "e: 'openaccessbutton',\n from: 'InstantILL <InstantILL@openaccessbutton.org>',\n to: ['mark@cottagelabs.com','joe@right",
"end": 25718,
"score": 0.9999318718910217,
"start": 25687,
"tag": "EMAIL",
"value": "InstantILL@openaccessbutton.org"
},
{
"context": "<InstantILL@openaccessbutton.org>',\n to: ['mark@cottagelabs.com','joe@righttoresearch.org'],\n subject: 'IL",
"end": 25756,
"score": 0.9999315142631531,
"start": 25736,
"tag": "EMAIL",
"value": "mark@cottagelabs.com"
},
{
"context": "utton.org>',\n to: ['mark@cottagelabs.com','joe@righttoresearch.org'],\n subject: 'ILL CREATED',\n html: ",
"end": 25782,
"score": 0.9999290108680725,
"start": 25759,
"tag": "EMAIL",
"value": "joe@righttoresearch.org"
},
{
"context": " # need to set a config on live for the IUPUI user ajrfnwswdr4my8kgd\n # the URL params they need are like\n # https:/",
"end": 26059,
"score": 0.9812943935394287,
"start": 26042,
"tag": "USERNAME",
"value": "ajrfnwswdr4my8kgd"
},
{
"context": " 'aulast' # this is what iupui needs\n author: 'aulast' # author should actually be au, but aulast works",
"end": 30863,
"score": 0.999481201171875,
"start": 30857,
"tag": "USERNAME",
"value": "aulast"
}
] | noddy/service/v2/ill.coffee | oaworks/API | 2 |
import crypto from 'crypto'
import { Random } from 'meteor/random'
# Look up subscription access for the article described by the query params.
# The user is taken from auth when present, otherwise from an explicit ?uid=.
API.add 'service/oab/subscription',
  get:
    #roleRequired: 'openaccessbutton.user'
    authOptional: true
    action: () ->
      # prefer the logged-in user; otherwise accept an explicit uid param
      # (removed from queryParams so it is not treated as article metadata)
      if this.user
        uid = this.user._id
      else if this.queryParams.uid
        uid = this.queryParams.uid
        delete this.queryParams.uid
      #if this.queryParams.uid and this.user and API.accounts.auth 'openaccessbutton.admin', this.user
      #  uid = this.queryParams.uid
      #  delete this.queryParams.uid
      # nothing useful can be done without a user or without any metadata params
      if not uid? or _.isEmpty this.queryParams
        return {}
      else
        # resolve article metadata first, then check the user's resolver for it
        res = {metadata: API.service.oab.metadata this.queryParams}
        res.subscription = API.service.oab.ill.subscription uid, res.metadata
        return res
# Create an ILL request. GET is a simple liveness/identity response; POST
# accepts the request details in the body and/or query string.
API.add 'service/oab/ill',
  get: () ->
    return {data: 'ILL service'}
  post:
    authOptional: true
    action: () ->
      # merge query params over the posted body
      opts = this.request.body ? {}
      opts[k] = v for k, v of this.queryParams
      # authenticated callers are recorded as the requester
      if this.user
        opts.from ?= this.userId
        opts.api = true
      return API.service.oab.ill.start API.tdm.clean opts
# Forward the (cleaned) query params to a Google Apps Script exec endpoint
# identified by :sid, tagging the call with a fresh uuid.
API.add 'service/oab/ill/collect/:sid',
  get: () ->
    # example sid: AKfycbwPq7xWoTLwnqZHv7gJAwtsHRkreJ1hMJVeeplxDG_MipdIamU6
    base = 'https://script.google.com/macros/s/' + this.urlParams.sid + '/exec?'
    parts = []
    for q of this.queryParams
      parts.push q + '=' + API.tdm.clean(decodeURIComponent(this.queryParams[q]))
    parts.push 'uuid=' + Random.id()
    HTTP.call 'GET', base + parts.join('&')
    return true
# Build an OpenURL query string for a user's configured link resolver from
# posted article metadata.
API.add 'service/oab/ill/openurl',
  get: () ->
    return 'Will eventually redirect after reading openurl params passed here, somehow. For now a POST of metadata here by a user with an open url registered will build their openurl'
  post:
    authOptional: true
    action: () ->
      # merge query params over the posted body
      opts = this.request.body ? {}
      for o of this.queryParams
        opts[o] = this.queryParams[o]
      # accept `config` as an alias for `uid`
      if opts.config?
        opts.uid ?= opts.config
        delete opts.config
      # flatten nested metadata up into top-level opts (existing keys win)
      if opts.metadata?
        for m of opts.metadata
          opts[m] ?= opts.metadata[m]
        delete opts.metadata
      if not opts.uid and not this.user?
        return 404
      else
        opts = API.tdm.clean opts
        # NOTE(review): opts.config was deleted above when present, so this
        # `opts.config ?` fallback always resolves via ill.config - confirm intended
        config = opts.config ? API.service.oab.ill.config(opts.uid ? this.userId)
        # prefix with the institution's ILL form URL when one is configured
        return (if config?.ill_form then config.ill_form + '?' else '') + API.service.oab.ill.openurl config ? opts.uid ? this.userId, opts
# Return the ILL base URL for the given uid (or the logged-in user).
API.add 'service/oab/ill/url',
  get:
    authOptional: true
    action: () ->
      uid = if this.queryParams.uid? then this.queryParams.uid else this.userId
      return API.service.oab.ill.url uid
# Read (GET) or save (POST) a user's ILL resolver configuration.
API.add 'service/oab/ill/config',
  get:
    authOptional: true
    action: () ->
      # resolve a config by explicit uid, the logged-in user, or a url param;
      # only an exception in the lookup falls through to the 404
      try
        return API.service.oab.ill.config this.queryParams.uid ? this.userId ? this.queryParams.url
      return 404
  post:
    authRequired: 'openaccessbutton.user'
    action: () ->
      # merge query params over the posted body
      opts = this.request.body ? {}
      for o of this.queryParams
        opts[o] = this.queryParams[o]
      # admins may save a config on behalf of another user via opts.uid
      if opts.uid and API.accounts.auth 'openaccessbutton.admin', this.user
        user = Users.get opts.uid
        delete opts.uid
      else
        user = this.user
      opts = API.tdm.clean opts
      return API.service.oab.ill.config user, opts
# Search ILL records. Admins may pass `all` to search every record;
# everyone else is restricted to ILLs they created.
API.add 'service/oab/ills',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      admin = API.accounts.auth 'openaccessbutton.admin', this.user
      restrict = if admin and this.queryParams.all then [] else [{term:{from:this.userId}}]
      delete this.queryParams.all if this.queryParams.all?
      return oab_ill.search this.queryParams, {restrict:restrict}
  post:
    roleRequired:'openaccessbutton.user'
    action: () ->
      admin = API.accounts.auth 'openaccessbutton.admin', this.user
      restrict = if admin and this.queryParams.all then [] else [{term:{from:this.userId}}]
      delete this.queryParams.all if this.queryParams.all?
      return oab_ill.search this.bodyParams, {restrict:restrict}
# namespace for the ILL service functions
API.service.oab.ill = {}
# Check whether the given user (or a resolver config object passed directly as
# `uid`) appears to have subscription access to the article described by
# `meta`, by querying the institution's link resolver (serials solutions, sfx,
# eds/ebsco, or exlibris) and scraping the response for a full-text link.
# Successful results are cached under an md5 signature of uid+meta.
# Returns {findings, lookups, error, contents, ...} plus `found` and `url`
# when a full-text link was located.
# NOTE(review): the cache is only READ when `refresh` is some value other than
# true/false/0 (presumably a max-age) - confirm this gate is intended.
API.service.oab.ill.subscription = (uid, meta={}, refresh=false) ->
  if typeof uid is 'string'
    sig = uid + JSON.stringify meta
    sig = crypto.createHash('md5').update(sig, 'utf8').digest('base64')
    res = API.http.cache(sig, 'oab_ill_subs', undefined, refresh) if refresh and refresh isnt true and refresh isnt 0
  if not res?
    res = {findings:{}, lookups:[], error:[], contents: []}
    if typeof uid is 'string'
      res.uid = uid
      user = API.accounts.retrieve uid
      config = user?.service?.openaccessbutton?.ill?.config
    else
      # a config object may be passed directly instead of a user ID
      config = uid
    if config?.subscription?
      if config.ill_redirect_params
        config.ill_added_params ?= config.ill_redirect_params
      # need to get their subscriptions link from their config - and need to know how to build the query string for it
      openurl = API.service.oab.ill.openurl config, meta
      openurl = openurl.replace(config.ill_added_params.replace('?',''),'') if config.ill_added_params
      # subscription / subscription_type may be stored as comma-separated strings
      if typeof config.subscription is 'string'
        config.subscription = config.subscription.split(',')
      if typeof config.subscription_type is 'string'
        config.subscription_type = config.subscription_type.split(',')
      config.subscription_type ?= []
      for s of config.subscription
        sub = config.subscription[s]
        if typeof sub is 'object'
          subtype = sub.type
          sub = sub.url
        else
          subtype = config.subscription_type[s] ? 'unknown'
        sub = sub.trim()
        if sub
          # normalize the resolver base URL per provider so we hit an XML or
          # otherwise parsable endpoint where one is known
          if subtype is 'serialssolutions' or sub.indexOf('serialssolutions') isnt -1 # and sub.indexOf('.xml.') is -1
            tid = sub.split('.search')[0]
            tid = tid.split('//')[1] if tid.indexOf('//') isnt -1
            #bs = if sub.indexOf('://') isnt -1 then sub.split('://')[0] else 'http' # always use http because https on the xml endpoint fails
            sub = 'http://' + tid + '.openurl.xml.serialssolutions.com/openurlxml?version=1.0&genre=article&'
          else if (subtype is 'sfx' or sub.indexOf('sfx.') isnt -1) and sub.indexOf('sfx.response_type=simplexml') is -1
            sub += (if sub.indexOf('?') is -1 then '?' else '&') + 'sfx.response_type=simplexml'
          else if (subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1) and sub.indexOf('response_type') is -1
            # https://github.com/OAButton/discussion/issues/1793
            #sub = 'https://trails-msu.userservices.exlibrisgroup.com/view/uresolver/01TRAILS_MSU/openurl?svc_dat=CTO&response_type=xml&sid=InstantILL&'
            sub = sub.split('?')[0] + '?svc_dat=CTO&response_type=xml&sid=InstantILL&'
          #ID=doi:10.1108%2FNFS-09-2019-0293&genre=article&atitle=Impact%20of%20processing%20and%20packaging%20on%20the%20quality%20of%20murici%20jelly%20%5BByrsonima%20crassifolia%20(L.)%20rich%5D%20during%20storage.&title=Nutrition%20&%20Food%20Science&issn=00346659&volume=50&issue=5&date=20200901&au=Da%20Cunha,%20Mariana%20Crivelari&spage=871&pages=871-883
          url = sub + (if sub.indexOf('?') is -1 then '?' else '&') + openurl
          url = url.split('snc.idm.oclc.org/login?url=')[1] if url.indexOf('snc.idm.oclc.org/login?url=') isnt -1
          url = url.replace 'cache=true', ''
          # provider-specific DOI parameter fixups
          if subtype is 'sfx' or sub.indexOf('sfx.') isnt -1 and url.indexOf('=10.') isnt -1
            url = url.replace '=10.', '=doi:10.'
          if subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1 and url.indexOf('doi=10.') isnt -1
            url = url.replace 'doi=10.', 'ID=doi:10.'
          # need to use the proxy as some subscriptions endpoints need a registered IP address, and ours is registered for some of them already
          # but having a problem passing proxy details through, so ignore for now
          # BUT AGAIN eds definitely does NOT work without puppeteer so going to have to use that again for now and figure out the proxy problem later
          #pg = API.http.puppeteer url #, undefined, API.settings.proxy
          # then get that link
          # then in that link find various content, depending on what kind of service it is
          # try doing without puppeteer and see how that goes
          API.log 'Using OAB subscription check for ' + url
          pg = ''
          spg = ''
          error = false
          res.lookups.push url
          try
            #pg = HTTP.call('GET', url, {timeout:15000, npmRequestOptions:{proxy:API.settings.proxy}}).content
            # XML-capable endpoints can be fetched directly; everything else needs a headless browser
            pg = if url.indexOf('.xml.serialssolutions') isnt -1 or url.indexOf('sfx.response_type=simplexml') isnt -1 or url.indexOf('response_type=xml') isnt -1 then HTTP.call('GET',url).content else API.http.puppeteer url #, undefined, API.settings.proxy
            # spg is the lowercased body of the page (or the whole response when there is no body tag)
            spg = if pg.indexOf('<body') isnt -1 then pg.toLowerCase().split('<body')[1].split('</body')[0] else pg
            res.contents.push spg
          catch err
            console.log(err) if API.settings.log?.level is 'debug'
            API.log {msg: 'ILL subscription check error when looking up ' + url, level:'warn', url: url, error: err}
            error = true
          #res.u ?= []
          #res.u.push url
          #res.pg = pg
          # sfx
          # with access:
          # https://cricksfx.hosted.exlibrisgroup.com/crick?sid=Elsevier:Scopus&_service_type=getFullTxt&issn=00225193&isbn=&volume=467&issue=&spage=7&epage=14&pages=7-14&artnum=&date=2019&id=doi:10.1016%2fj.jtbi.2019.01.031&title=Journal+of+Theoretical+Biology&atitle=Potential+relations+between+post-spliced+introns+and+mature+mRNAs+in+the+Caenorhabditis+elegans+genome&aufirst=S.&auinit=S.&auinit1=S&aulast=Bo
          # which will contain a link like:
          # <A title="Navigate to target in new window" HREF="javascript:openSFXMenuLink(this, 'basic1', undefined, '_blank');">Go to Journal website at</A>
          # but the content can be different on different sfx language pages, so need to find this link via the tag attributes, then trigger it, then get the page it opens
          # can test this with 10.1016/j.jtbi.2019.01.031 on instantill page
          # note there is also now an sfx xml endpoint that we have found to check
          if subtype is 'sfx' or url.indexOf('sfx.') isnt -1
            res.error.push 'sfx' if error
            if spg.indexOf('getFullTxt') isnt -1 and spg.indexOf('<target_url>') isnt -1
              try
                # this will get the first target that has a getFullTxt type and has a target_url element with a value in it, or will error
                res.url = spg.split('getFullTxt')[1].split('</target>')[0].split('<target_url>')[1].split('</target_url>')[0].trim()
                res.findings.sfx = res.url
                if res.url?
                  # getitnow links are pay-per-view, not a real subscription hit
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'sfx'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    res.url = undefined
                    res.findings.sfx = undefined
            else
              if spg.indexOf('<a title="navigate to target in new window') isnt -1 and spg.split('<a title="navigate to target in new window')[1].split('">')[0].indexOf('basic1') isnt -1
                # tried to get the next link after the click through, but was not worth putting more time into it. For now, seems like this will have to do
                res.url = url
                res.findings.sfx = res.url
                if res.url?
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'sfx'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    res.url = undefined
                    res.findings.sfx = undefined
          # eds
          # note eds does need a login, but IP address range is supposed to get round that
          # our IP is supposed to be registered with the library as being one of their internal ones so should not need login
          # however a curl from our IP to it still does not seem to work - will try with puppeteer to see if it is blocking in other ways
          # not sure why the links here are via an oclc login - tested, and we will use without it
          # with access:
          # https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=RE&aulast=Marx&atitle=Platelet-rich+plasma:+growth+factor+enhancement+for+bone+grafts&id=doi:10.1016/S1079-2104(98)90029-4&title=Oral+Surgery,+Oral+Medicine,+Oral+Pathology,+Oral+Radiology,+and+Endodontology&volume=85&issue=6&date=1998&spage=638&issn=1079-2104
          # can be tested on instantill page with 10.1016/S1079-2104(98)90029-4
          # without:
          # https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=MP&aulast=Newton&atitle=Librarian+roles+in+institutional+repository+data+set+collecting:+outcomes+of+a+research+library+task+force&id=doi:10.1080/01462679.2011.530546
          else if subtype is 'eds' or url.indexOf('ebscohost.') isnt -1
            res.error.push 'eds' if error
            if spg.indexOf('view this ') isnt -1 and pg.indexOf('<a data-auto="menu-link" href="') isnt -1
              # prepend the scheme+host of the lookup url to the relative menu link
              res.url = url.replace('://','______').split('/')[0].replace('______','://') + pg.split('<a data-auto="menu-link" href="')[1].split('" title="')[0]
              res.findings.eds = res.url
              if res.url?
                if res.url.indexOf('getitnow') is -1
                  res.found = 'eds'
                  API.http.cache(sig, 'oab_ill_subs', res)
                  return res
                else
                  res.url = undefined
          # serials solutions
          # the HTML source code for the No Results page includes a span element with the class SS_NoResults. This class is only found on the No Results page (confirmed by serialssolutions)
          # does not appear to need proxy or password
          # with:
          # https://rx8kl6yf4x.search.serialssolutions.com/?genre=article&issn=14085348&title=Annales%3A%20Series%20Historia%20et%20Sociologia&volume=28&issue=1&date=20180101&atitle=HOW%20TO%20UNDERSTAND%20THE%20WAR%20IN%20SYRIA.&spage=13&PAGES=13-28&AUTHOR=%C5%A0TERBENC%2C%20Primo%C5%BE&&aufirst=&aulast=&sid=EBSCO:aph&pid=
          # can test this on instantill page with How to understand the war in Syria - Annales Series Historia et Sociologia 2018
          # but the with link has a suppressed link that has to be clicked to get the actual page with the content on it
          # <a href="?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick">Click here</a>
          # which is the only link with the showsuppressedlinks param and the clickhere content
          # then the page with the content link is like:
          # https://rx8kl6yf4x.search.serialssolutions.com/?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick
          # and the content is found in a link like this:
          # <div id="ArticleCL" class="cl">
          # <a target="_blank" href="./log?L=RX8KL6YF4X&D=EAP&J=TC0000940997&P=Link&PT=EZProxy&A=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&H=c7306f7121&U=http%3A%2F%2Fwww.ulib.iupui.edu%2Fcgi-bin%2Fproxy.pl%3Furl%3Dhttp%3A%2F%2Fopenurl.ebscohost.com%2Flinksvc%2Flinking.aspx%3Fgenre%3Darticle%26issn%3D1408-5348%26title%3DAnnales%2BSeries%2Bhistoria%2Bet%2Bsociologia%26date%3D2018%26volume%3D28%26issue%3D1%26spage%3D13%26atitle%3DHOW%2BTO%2BUNDERSTAND%2BTHE%2BWAR%2BIN%2BSYRIA.%26aulast%3D%25C5%25A0TERBENC%26aufirst%3DPrimo%C5%BE">Article</a>
          # </div>
          # without:
          # https://rx8kl6yf4x.search.serialssolutions.com/directLink?&atitle=Writing+at+the+Speed+of+Sound%3A+Music+Stenography+and+Recording+beyond+the+Phonograph&author=Pierce%2C+J+Mackenzie&issn=01482076&title=Nineteenth+Century+Music&volume=41&issue=2&date=2017-10-01&spage=121&id=doi:&sid=ProQ_ss&genre=article
          # we also have an xml alternative for serials solutions
          # see https://journal.code4lib.org/articles/108
          else if subtype is 'serialssolutions' or url.indexOf('serialssolutions.') isnt -1
            res.error.push 'serialssolutions' if error
            if spg.indexOf('<ssopenurl:url type="article">') isnt -1
              fnd = spg.split('<ssopenurl:url type="article">')[1].split('</ssopenurl:url>')[0].trim() # this gets us something that has an empty accountid param - do we need that for it to work?
              if fnd.length
                res.url = fnd
                res.findings.serials = res.url
              if res.url?
                if res.url.indexOf('getitnow') is -1
                  res.found = 'serials'
                  API.http.cache(sig, 'oab_ill_subs', res)
                  return res
                else
                  res.url = undefined
                  res.findings.serials = undefined
            # disable journal matching for now until we have time to get it more accurate - some things get journal links but are not subscribed
            #else if spg.indexOf('<ssopenurl:result format="journal">') isnt -1
            #  # we assume if there is a journal result but not a URL that it means the institution has a journal subscription but we don't have a link
            #  res.journal = true
            #  res.found = 'serials'
            #  API.http.cache(sig, 'oab_ill_subs', res)
            #  return res
            else
              if spg.indexOf('ss_noresults') is -1
                try
                  # follow the "ShowSupressedLinks" click-through to the page that holds the article link
                  surl = url.split('?')[0] + '?ShowSupressedLinks' + pg.split('?ShowSupressedLinks')[1].split('">')[0]
                  #npg = API.http.puppeteer surl #, undefined, API.settings.proxy
                  API.log 'Using OAB subscription unsuppress for ' + surl
                  npg = HTTP.call('GET', surl, {timeout: 15000, npmRequestOptions:{proxy:API.settings.proxy}}).content
                  if npg.indexOf('ArticleCL') isnt -1 and npg.split('DatabaseCL')[0].indexOf('href="./log') isnt -1
                    res.url = surl.split('?')[0] + npg.split('ArticleCL')[1].split('DatabaseCL')[0].split('href="')[1].split('">')[0]
                    res.findings.serials = res.url
                    if res.url?
                      if res.url.indexOf('getitnow') is -1
                        res.found = 'serials'
                        API.http.cache(sig, 'oab_ill_subs', res)
                        return res
                      else
                        res.url = undefined
                        res.findings.serials = undefined
                catch
                  res.error.push 'serialssolutions' if error
          else if subtype is 'exlibris' or url.indexOf('.exlibris') isnt -1
            res.error.push 'exlibris' if error
            if spg.indexOf('full_text_indicator') isnt -1 and spg.split('full_text_indicator')[1].replace('">', '').indexOf('true') is 0 and spg.indexOf('resolution_url') isnt -1
              # NOTE(review): replace(/&/g, '&') is a no-op - likely meant to
              # unescape /&amp;/g; confirm against the upstream source
              res.url = spg.split('<resolution_url>')[1].split('</resolution_url>')[0].replace(/&/g, '&')
              res.findings.exlibris = res.url
              res.found = 'exlibris'
              API.http.cache(sig, 'oab_ill_subs', res)
              return res
      API.http.cache(sig, 'oab_ill_subs', res) if res.uid and not _.isEmpty res.findings
  # return cached or empty result if nothing else found
  else
    res.cache = true
  return res
API.service.oab.ill.start = (opts={}) ->
# opts should include a key called metadata at this point containing all metadata known about the object
# but if not, and if needed for the below stages, it is looked up again
opts.metadata ?= {}
meta = API.service.oab.metadata opts
for m of meta
opts.metadata[m] ?= meta[m]
opts.pilot = Date.now() if opts.pilot is true
opts.live = Date.now() if opts.live is true
if opts.library is 'imperial'
# TODO for now we are just going to send an email when a user creates an ILL
# until we have a script endpoint at the library to hit
# library POST URL: https://www.imperial.ac.uk/library/dynamic/oabutton/oabutton3.php
if not opts.forwarded and not opts.resolved
API.mail.send {
service: 'openaccessbutton',
from: 'natalia.norori@openaccessbutton.org',
to: ['joe@righttoresearch.org','s.barron@imperial.ac.uk'],
subject: 'EXAMPLE ILL TRIGGER',
text: JSON.stringify(opts,undefined,2)
}
API.service.oab.mail({template:{filename:'imperial_confirmation_example.txt'},to:opts.id})
HTTP.call('POST','https://www.imperial.ac.uk/library/dynamic/oabutton/oabutton3.php',{data:opts})
return oab_ill.insert opts
else if opts.from? or opts.config?
user = API.accounts.retrieve(opts.from) if opts.from isnt 'anonymous'
if user? or opts.config?
config = opts.config ? user?.service?.openaccessbutton?.ill?.config ? {}
if config.requests
config.requests_off ?= config.requests
delete opts.config if opts.config?
vars = {}
vars.name = user?.profile?.firstname ? 'librarian'
vars.details = ''
ordered = ['title','author','volume','issue','date','pages']
for o of opts
if o is 'metadata'
for m of opts[o]
if m isnt 'email'
opts[m] = opts[o][m]
ordered.push(m) if m not in ordered
delete opts.metadata
else
ordered.push(o) if o not in ordered
for r in ordered
if opts[r]
vars[r] = opts[r]
if r is 'author'
authors = '<p>Authors:<br>'
first = true
ats = []
for a in opts[r]
if a.family
if first
first = false
else
authors += ', '
atidy = a.family + (if a.given then ' ' + a.given else '')
authors += atidy
ats.push atidy
vars.details += authors + '</p>'
vars[r] = ats
else if ['started','ended','took'].indexOf(r) is -1
vars.details += '<p>' + r + ':<br>' + opts[r] + '</p>'
#vars.details += '<p>' + o + ':<br>' + opts[o] + '</p>'
opts.requests_off = true if config.requests_off
delete opts.author if opts.author? # remove author metadata due to messy provisions causing save issues
delete opts.metadata.author if opts.metadata?.author?
vars.illid = oab_ill.insert opts
vars.details += '<p>Open access button ILL ID:<br>' + vars.illid + '</p>';
eml = if config.email and config.email.length then config.email else if user?.email then user?.email else if user?.emails? and user.emails.length then user.emails[0].address else false
# such as https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in=in%3A&si0qs=0021-9231&si1in=au%3A&si1op=AND&si2in=kw%3A&si2op=AND&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch
# could be provided as: (unless other params are mandatory)
# https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?si0qs=0021-9231
if config.search and config.search.length and (opts.issn or opts.journal)
if config.search.indexOf('worldcat') isnt -1
su = config.search.split('?')[0] + '?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in='
su += if opts.issn? then 'in%3A' else 'ti%3A'
su += '&si0qs=' + (opts.issn ? opts.journal)
su += '&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch'
else
su = config.search
su += if opts.issn then opts.issn else opts.journal
vars.details += '<p>Search URL:<br><a href="' + su + '">' + su + '</a></p>'
vars.worldcatsearchurl = su
if not opts.forwarded and not opts.resolved and eml
API.service.oab.mail({vars: vars, template: {filename:'instantill_create.html'}, to: eml, from: "InstantILL <InstantILL@openaccessbutton.org>", subject: "ILL request " + vars.illid})
# send msg to mark and joe for testing (can be removed later)
txt = vars.details
delete vars.details
txt += '<br><br>' + JSON.stringify(vars,undefined,2)
API.mail.send {
service: 'openaccessbutton',
from: 'InstantILL <InstantILL@openaccessbutton.org>',
to: ['mark@cottagelabs.com','joe@righttoresearch.org'],
subject: 'ILL CREATED',
html: txt,
text: txt
}
return vars.illid
else
return 401
else
return 404
# Get or set the ILL config for a user.
# user - an account ID string, an account object, or a URL string (contains a
#   dot) identifying a page where an instantill embed ran; in the URL case the
#   config stored against that embed is returned.
# config - when supplied alongside a resolvable user, the config is sanitised
#   and persisted (preserving the previously stored config as old_config the
#   first time it is overwritten).
# Returns the effective config object, or {} when none can be found.
API.service.oab.ill.config = (user, config) ->
  # need to set a config on live for the IUPUI user ajrfnwswdr4my8kgd
  # the URL params they need are like
  # https://ill.ulib.iupui.edu/ILLiad/IUP/illiad.dll?Action=10&Form=30&sid=OABILL&genre=InstantILL&aulast=Sapon-Shevin&aufirst=Mara&issn=10478248&title=Journal+of+Educational+Foundations&atitle=Cooperative+Learning%3A+Liberatory+Praxis+or+Hamburger+Helper&volume=5&part=&issue=3&spage=5&epage=&date=1991-07-01&pmid
  # and their openurl config https://docs.google.com/spreadsheets/d/1wGQp7MofLh40JJK32Rp9di7pEkbwOpQ0ioigbqsufU0/edit#gid=806496802
  # tested it and set values as below defaults, but also noted that it has year and month boxes, but these do not correspond to year and month params, or date params
  if typeof user is 'string' and user.indexOf('.') isnt -1 # user is actually url where an embed has been called from
    try
      # find the most recent instantill usage recorded from this embed url
      res = oab_find.search 'plugin.exact:instantill AND config:* AND embedded:"' + user.split('?')[0].split('#')[0] + '"'
      return JSON.parse res.hits.hits[0]._source.config
    catch
      return {}
  else
    user = Users.get(user) if typeof user is 'string'
    if typeof user is 'object' and config?
      # legacy param name support
      if config.ill_redirect_base_url
        config.ill_form = config.ill_redirect_base_url
      # ['institution','ill_form','ill_added_params','method','sid','title','doi','pmid','pmcid','author','journal','issn','volume','issue','page','published','year','notes','terms','book','other','cost','time','email','problem','account','subscription','subscription_type','val','search','autorun_off','autorunparams','intro_off','requests_off','ill_info','ill_if_oa_off','ill_if_sub_off','say_paper','pilot','live','advanced_ill_form']
      # pilot/live flags are stored as timestamps of when they were enabled
      config.pilot = Date.now() if config.pilot is true
      config.live = Date.now() if config.live is true
      # normalise known ILL form URLs (illiad, relais) to a usable openurl endpoint
      if typeof config.ill_form is 'string'
        if config.ill_form.indexOf('illiad.dll') isnt -1 and config.ill_form.toLowerCase().indexOf('action=') is -1
          config.ill_form = config.ill_form.split('?')[0]
          if config.ill_form.indexOf('/openurl') is -1
            config.ill_form = config.ill_form.split('#')[0] + '/openurl'
            # NOTE(review): the fragment was stripped on the line above, so this
            # condition is always true and only ever appends the empty string -
            # confirm whether the fragment was meant to be preserved
            config.ill_form += if config.ill_form.indexOf('#') is -1 then '' else '#' + config.ill_form.split('#')[1].split('?')[0]
          config.ill_form += '?genre=article'
        else if config.ill_form.indexOf('relais') isnt -1 and config.ill_form.toLowerCase().indexOf('genre=') is -1
          config.ill_form = config.ill_form.split('?')[0]
          config.ill_form += '?genre=article'
      # basic script-injection guard before persisting anything
      if JSON.stringify(config).indexOf('<script') is -1
        # create whichever level of the nested service structure is missing
        if not user.service?
          Users.update user._id, {service: {openaccessbutton: {ill: {config: config, had_old: false}}}}
        else if not user.service.openaccessbutton?
          Users.update user._id, {'service.openaccessbutton': {ill: {config: config, had_old: false}}}
        else if not user.service.openaccessbutton.ill?
          Users.update user._id, {'service.openaccessbutton.ill': {config: config, had_old: false}}
        else
          upd = {'service.openaccessbutton.ill.config': config}
          # keep a copy of the first overwritten config as old_config
          if user.service.openaccessbutton.ill.config? and not user.service.openaccessbutton.ill.old_config? and user.service.openaccessbutton.ill.had_old isnt false
            upd['service.openaccessbutton.ill.old_config'] = user.service.openaccessbutton.ill.config
          Users.update user._id, upd
    try
      # fall back to whatever config is already stored for the user
      config ?= user.service.openaccessbutton.ill?.config ? {}
      try config.owner ?= user.email ? user.emails[0].address
      return config
    catch
      return {}
API.service.oab.ill.resolver = (user, resolve, config) ->
  # Placeholder for per-link-resolver settings. The intent is to configure and
  # return link resolver settings for a given user: shaped like the user's ILL
  # config but overridable per resolver URL, defaulting to the ILL config.
  # Really this should be keyed on the resolver address rather than being a
  # user setting at all. Not implemented yet.
  return false
# Build an OpenURL query string for the given user from the provided item
# metadata, mapping our metadata keys onto whatever param names the user's
# ILL config declares (defaulting to the iupui / openURL names below).
# uid - account ID string, or an already-fetched config object
# meta - item metadata (title, doi, author, journal, issn, volume, etc)
# Returns the query string only; callers prepend the configured ill_form URL.
API.service.oab.ill.openurl = (uid, meta={}) ->
  config = if typeof uid is 'object' then uid else API.service.oab.ill.config uid
  config ?= {}
  if config.ill_redirect_base_url
    config.ill_form ?= config.ill_redirect_base_url
  if config.ill_redirect_params
    config.ill_added_params ?= config.ill_redirect_params
  # add iupui / openURL defaults to config
  defaults =
    sid: 'sid'
    title: 'atitle' # this is what iupui needs (title is also acceptable, but would clash with using title for journal title, which we set below, as iupui do that
    doi: 'rft_id' # don't know yet what this should be
    #pmid: 'pmid' # same as iupui ill url format
    pmcid: 'pmcid' # don't know yet what this should be
    #aufirst: 'aufirst' # this is what iupui needs
    #aulast: 'aulast' # this is what iupui needs
    author: 'aulast' # author should actually be au, but aulast works even if contains the whole author, using aufirst just concatenates
    journal: 'title' # this is what iupui needs
    #issn: 'issn' # same as iupui ill url format
    #volume: 'volume' # same as iupui ill url format
    #issue: 'issue' # same as iupui ill url format
    #spage: 'spage' # this is what iupui needs
    #epage: 'epage' # this is what iupui needs
    page: 'pages' # iupui uses the spage and epage for start and end pages, but pages is allowed in openurl, check if this will work for iupui
    published: 'date' # this is what iupui needs, but in format 1991-07-01 - date format may be a problem
    year: 'rft.year' # this is what IUPUI uses
    # IUPUI also has a month field, but there is nothing to match to that
  for d of defaults
    config[d] = defaults[d] if not config[d]
  url = ''
  url += config.ill_added_params.replace('?','') + '&' if config.ill_added_params
  url += config.sid + '=InstantILL&'
  for k of meta
    v = false
    if k is 'author'
      # authors may arrive as a string, a list of strings/objects, or a single
      # object with family/given name parts - flatten to one display string
      try
        if typeof meta.author is 'string'
          v = meta.author
        else if _.isArray meta.author
          v = ''
          for author in meta.author
            v += ', ' if v.length
            if typeof author is 'string'
              v += author
            else if author.family
              v += author.family + if author.given then ', ' + author.given else ''
        else
          if meta.author.family
            v = meta.author.family + if meta.author.given then ', ' + meta.author.given else ''
          else
            v = JSON.stringify meta.author
    else if k in ['doi','pmid','pmc','pmcid','url','journal','title','year','issn','volume','issue','page','crossref_type','publisher','published','notes']
      v = meta[k]
    if v
      # use the configured param name for this key where one exists
      url += (if config[k] then config[k] else k) + '=' + encodeURIComponent(v) + '&'
  if meta.usermetadata
    # flag in the notes field that the user supplied extra metadata themselves
    nfield = if config.notes then config.notes else 'notes'
    url = url.replace('usermetadata=true','')
    if url.indexOf(nfield+'=') is -1
      url += '&' + nfield + '=The user provided some metadata.'
    else
      url = url.replace(nfield+'=',nfield+'=The user provided some metadata. ')
  # collapse runs of ampersands left by the removal above. (The original code
  # passed the STRING '/&&/g' to replace, which matched nothing - it must be a
  # real regex literal for this cleanup to ever fire.)
  return url.replace(/&&+/g, '&')
API.service.oab.ill.url = (uid) ->
  # Find the external embed URL this user most recently ran an instantill
  # availability check from, skipping pages hosted on our own domains.
  query =
    size: 0
    query: {filtered: {query: {bool: {must: [{term: {plugin: "instantill"}}, {term: {"from.exact": uid}}]}}}}
  query.aggregations = {embeds: {terms: {field: "embedded.exact"}}}
  result = oab_find.search query
  for bucket in result.aggregations.embeds.buckets
    stripped = bucket.key.split('?')[0].split('#')[0]
    # only return a url that is not one of our own service pages
    continue if stripped.indexOf('instantill.org') isnt -1 or stripped.indexOf('openaccessbutton.org') isnt -1
    return stripped
  return false
API.service.oab.ill.terms = (uid) ->
  # Return the ILL terms, either directly from a supplied config object or
  # from the stored config of the given user ID.
  return uid.terms if typeof uid is 'object'
  return API.service.oab.ill.config(uid).terms
API.service.oab.ill.progress = () ->
  # TODO: look up ILL progress from the library systems once an integration
  # exists. Currently a no-op returning undefined.
  return
import crypto from 'crypto'
import { Random } from 'meteor/random'
# GET service/oab/subscription - look up metadata for the query params, then
# check the (authenticated or uid-identified) user's subscriptions for access.
API.add 'service/oab/subscription',
  get:
    #roleRequired: 'openaccessbutton.user'
    authOptional: true
    action: () ->
      # prefer the authenticated user; otherwise accept an explicit uid param
      if this.user
        uid = this.user._id
      else if this.queryParams.uid
        uid = this.queryParams.uid
        delete this.queryParams.uid
      #if this.queryParams.uid and this.user and API.accounts.auth 'openaccessbutton.admin', this.user
      #  uid = this.queryParams.uid
      #  delete this.queryParams.uid
      if not uid? or _.isEmpty this.queryParams
        return {}
      else
        # resolve metadata for the query, then run the subscription check on it
        res = {metadata: API.service.oab.metadata this.queryParams}
        res.subscription = API.service.oab.ill.subscription uid, res.metadata
        return res
# service/oab/ill - GET describes the service; POST starts a new ILL request
# built from the posted body merged with any query params.
API.add 'service/oab/ill',
  get: () ->
    return {data: 'ILL service'}
  post:
    authOptional: true
    action: () ->
      opts = this.request.body ? {}
      # query params extend/override the posted body
      for o of this.queryParams
        opts[o] = this.queryParams[o]
      if this.user
        opts.from ?= this.userId
      # mark that this request arrived via the API rather than the UI
      opts.api = true
      opts = API.tdm.clean opts
      return API.service.oab.ill.start opts
# GET service/oab/ill/collect/:sid - forward an ILL collection callback to the
# google apps script identified by :sid, passing through cleaned query params.
API.add 'service/oab/ill/collect/:sid',
  get: () ->
    # example AKfycbwPq7xWoTLwnqZHv7gJAwtsHRkreJ1hMJVeeplxDG_MipdIamU6
    url = 'https://script.google.com/macros/s/' + this.urlParams.sid + '/exec?'
    # NOTE(review): values are decoded and cleaned but not re-encoded before
    # being appended to the url - confirm the script endpoint tolerates this
    for q of this.queryParams
      url += q + '=' + API.tdm.clean(decodeURIComponent(this.queryParams[q])) + '&'
    # append a random uuid so each forwarded call is unique
    url += 'uuid=' + Random.id()
    HTTP.call 'GET', url
    return true
# service/oab/ill/openurl - POST metadata to build the caller's configured
# openurl (ill_form base plus the query string from ...ill.openurl).
API.add 'service/oab/ill/openurl',
  get: () ->
    return 'Will eventually redirect after reading openurl params passed here, somehow. For now a POST of metadata here by a user with an open url registered will build their openurl'
  post:
    authOptional: true
    action: () ->
      opts = this.request.body ? {}
      for o of this.queryParams
        opts[o] = this.queryParams[o]
      # a supplied config value is treated as the uid to load config for
      if opts.config?
        opts.uid ?= opts.config
        delete opts.config
      # flatten any nested metadata object into the top-level opts
      if opts.metadata?
        for m of opts.metadata
          opts[m] ?= opts.metadata[m]
        delete opts.metadata
      if not opts.uid and not this.user?
        return 404
      else
        opts = API.tdm.clean opts
        # NOTE(review): opts.config was deleted above, so this always falls
        # through to the stored config lookup - confirm that is intended
        config = opts.config ? API.service.oab.ill.config(opts.uid ? this.userId)
        return (if config?.ill_form then config.ill_form + '?' else '') + API.service.oab.ill.openurl config ? opts.uid ? this.userId, opts
# GET service/oab/ill/url - the most recent external embed URL the user ran an
# instantill availability check from (false if none).
API.add 'service/oab/ill/url',
  get:
    authOptional: true
    action: () ->
      return API.service.oab.ill.url this.queryParams.uid ? this.userId
# service/oab/ill/config - GET the effective ILL config for a uid, the current
# user, or an embed url; POST saves a new config (admins may target another
# user via the uid param).
API.add 'service/oab/ill/config',
  get:
    authOptional: true
    action: () ->
      try
        return API.service.oab.ill.config this.queryParams.uid ? this.userId ? this.queryParams.url
      # only reached if the lookup above throws
      return 404
  post:
    authRequired: 'openaccessbutton.user'
    action: () ->
      opts = this.request.body ? {}
      for o of this.queryParams
        opts[o] = this.queryParams[o]
      # admins may set config on behalf of another user via the uid param
      if opts.uid and API.accounts.auth 'openaccessbutton.admin', this.user
        user = Users.get opts.uid
        delete opts.uid
      else
        user = this.user
      opts = API.tdm.clean opts
      return API.service.oab.ill.config user, opts
# service/oab/ills - search ILL records. Admins passing ?all see everything;
# everyone else is restricted to their own ILLs. GET searches by query params,
# POST by body params.
API.add 'service/oab/ills',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      restrict = if API.accounts.auth('openaccessbutton.admin', this.user) and this.queryParams.all then [] else [{term:{from:this.userId}}]
      delete this.queryParams.all if this.queryParams.all?
      return oab_ill.search this.queryParams, {restrict:restrict}
  post:
    roleRequired:'openaccessbutton.user'
    action: () ->
      restrict = if API.accounts.auth('openaccessbutton.admin', this.user) and this.queryParams.all then [] else [{term:{from:this.userId}}]
      delete this.queryParams.all if this.queryParams.all?
      return oab_ill.search this.bodyParams, {restrict:restrict}
# Namespace for InstantILL / interlibrary loan service functions.
API.service.oab.ill = {}
# Check whether the given user's institutional subscriptions can provide the
# item described by meta, by querying whichever link resolver endpoints are
# listed in the user's ILL config (sfx, eds/ebsco, serials solutions,
# exlibris). Returns a result object with findings/lookups/error/contents
# always present, plus url and found when a full text link was located.
# String uids get their results cached in the oab_ill_subs index.
# uid - account ID string, or a config object (no caching in that case)
# meta - item metadata used to build the openurl query string
# refresh - cache control value passed through to API.http.cache
API.service.oab.ill.subscription = (uid, meta={}, refresh=false) ->
  if typeof uid is 'string'
    # cache signature is an md5 of the uid plus the metadata
    sig = uid + JSON.stringify meta
    sig = crypto.createHash('md5').update(sig, 'utf8').digest('base64')
    # NOTE(review): the cache is only consulted when refresh is truthy but
    # neither true nor 0 - confirm this is the intended read condition
    res = API.http.cache(sig, 'oab_ill_subs', undefined, refresh) if refresh and refresh isnt true and refresh isnt 0
  if not res?
    res = {findings:{}, lookups:[], error:[], contents: []}
    if typeof uid is 'string'
      res.uid = uid
      user = API.accounts.retrieve uid
      config = user?.service?.openaccessbutton?.ill?.config
    else
      config = uid
    if config?.subscription?
      if config.ill_redirect_params
        config.ill_added_params ?= config.ill_redirect_params
      # need to get their subscriptions link from their config - and need to know how to build the query string for it
      openurl = API.service.oab.ill.openurl config, meta
      openurl = openurl.replace(config.ill_added_params.replace('?',''),'') if config.ill_added_params
      # subscription / subscription_type may be comma-separated strings or lists
      if typeof config.subscription is 'string'
        config.subscription = config.subscription.split(',')
      if typeof config.subscription_type is 'string'
        config.subscription_type = config.subscription_type.split(',')
      config.subscription_type ?= []
      for s of config.subscription
        sub = config.subscription[s]
        if typeof sub is 'object'
          subtype = sub.type
          sub = sub.url
        else
          subtype = config.subscription_type[s] ? 'unknown'
        sub = sub.trim()
        if sub
          # normalise the resolver base URL per vendor so we hit a parsable
          # (preferably XML) endpoint where one is known
          if subtype is 'serialssolutions' or sub.indexOf('serialssolutions') isnt -1 # and sub.indexOf('.xml.') is -1
            tid = sub.split('.search')[0]
            tid = tid.split('//')[1] if tid.indexOf('//') isnt -1
            #bs = if sub.indexOf('://') isnt -1 then sub.split('://')[0] else 'http' # always use htto because https on the xml endpoint fails
            sub = 'http://' + tid + '.openurl.xml.serialssolutions.com/openurlxml?version=1.0&genre=article&'
          else if (subtype is 'sfx' or sub.indexOf('sfx.') isnt -1) and sub.indexOf('sfx.response_type=simplexml') is -1
            sub += (if sub.indexOf('?') is -1 then '?' else '&') + 'sfx.response_type=simplexml'
          else if (subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1) and sub.indexOf('response_type') is -1
            # https://github.com/OAButton/discussion/issues/1793
            #sub = 'https://trails-msu.userservices.exlibrisgroup.com/view/uresolver/01TRAILS_MSU/openurl?svc_dat=CTO&response_type=xml&sid=InstantILL&'
            sub = sub.split('?')[0] + '?svc_dat=CTO&response_type=xml&sid=InstantILL&'
          url = sub + (if sub.indexOf('?') is -1 then '?' else '&') + openurl
          # strip any oclc login wrapper around the real resolver url
          url = url.split('snc.idm.oclc.org/login?url=')[1] if url.indexOf('snc.idm.oclc.org/login?url=') isnt -1
          url = url.replace 'cache=true', ''
          # NOTE(review): `and` binds tighter than `or` below, so the url
          # indexOf checks only guard the second alternative - harmless since
          # replacing an absent substring is a no-op, but confirm the intent
          if subtype is 'sfx' or sub.indexOf('sfx.') isnt -1 and url.indexOf('=10.') isnt -1
            url = url.replace '=10.', '=doi:10.'
          if subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1 and url.indexOf('doi=10.') isnt -1
            url = url.replace 'doi=10.', 'ID=doi:10.'
          # need to use the proxy as some subscriptions endpoints need a registered IP address, and ours is registered for some of them already
          # but having a problem passing proxy details through, so ignore for now
          # BUT AGAIN eds definitely does NOT work without puppeteer so going to have to use that again for now and figure out the proxy problem later
          API.log 'Using OAB subscription check for ' + url
          pg = ''
          spg = ''
          error = false
          res.lookups.push url
          try
            # XML endpoints can be fetched directly; anything else is rendered
            # via a headless browser
            #pg = HTTP.call('GET', url, {timeout:15000, npmRequestOptions:{proxy:API.settings.proxy}}).content
            pg = if url.indexOf('.xml.serialssolutions') isnt -1 or url.indexOf('sfx.response_type=simplexml') isnt -1 or url.indexOf('response_type=xml') isnt -1 then HTTP.call('GET',url).content else API.http.puppeteer url #, undefined, API.settings.proxy
            # spg is the lowercased body content, for case-insensitive matching
            spg = if pg.indexOf('<body') isnt -1 then pg.toLowerCase().split('<body')[1].split('</body')[0] else pg
            res.contents.push spg
          catch err
            console.log(err) if API.settings.log?.level is 'debug'
            API.log {msg: 'ILL subscription check error when looking up ' + url, level:'warn', url: url, error: err}
            error = true
          # sfx: a getFullTxt target containing a target_url element indicates
          # access. Page content differs between sfx language variants, so we
          # match on tag attributes rather than visible text. Can be tested
          # with 10.1016/j.jtbi.2019.01.031 on the instantill page. There is
          # also an sfx xml endpoint that we have found to check.
          if subtype is 'sfx' or url.indexOf('sfx.') isnt -1
            res.error.push 'sfx' if error
            if spg.indexOf('getFullTxt') isnt -1 and spg.indexOf('<target_url>') isnt -1
              try
                # this will get the first target that has a getFullTxt type and has a target_url element with a value in it, or will error
                res.url = spg.split('getFullTxt')[1].split('</target>')[0].split('<target_url>')[1].split('</target_url>')[0].trim()
                res.findings.sfx = res.url
                if res.url?
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'sfx'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    # getitnow links are pay-per-view, not subscription access
                    res.url = undefined
                    res.findings.sfx = undefined
            else
              if spg.indexOf('<a title="navigate to target in new window') isnt -1 and spg.split('<a title="navigate to target in new window')[1].split('">')[0].indexOf('basic1') isnt -1
                # tried to get the next link after the click through, but was not worth putting more time into it. For now, seems like this will have to do
                res.url = url
                res.findings.sfx = res.url
                if res.url?
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'sfx'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    res.url = undefined
                    res.findings.sfx = undefined
          # eds: requires a login, but our IP range is supposed to be
          # registered with the library as internal; a plain request from our
          # IP still fails, hence puppeteer above. Links sometimes arrive via
          # an oclc login wrapper (stripped earlier). Can be tested on the
          # instantill page with 10.1016/S1079-2104(98)90029-4
          else if subtype is 'eds' or url.indexOf('ebscohost.') isnt -1
            res.error.push 'eds' if error
            if spg.indexOf('view this ') isnt -1 and pg.indexOf('<a data-auto="menu-link" href="') isnt -1
              # resolve the relative menu link against the scheme+host of the lookup url
              res.url = url.replace('://','______').split('/')[0].replace('______','://') + pg.split('<a data-auto="menu-link" href="')[1].split('" title="')[0]
              res.findings.eds = res.url
              if res.url?
                if res.url.indexOf('getitnow') is -1
                  res.found = 'eds'
                  API.http.cache(sig, 'oab_ill_subs', res)
                  return res
                else
                  res.url = undefined
          # serials solutions: the No Results page (and only that page)
          # contains a span with class SS_NoResults (confirmed by
          # serialssolutions); no proxy or password needed. Pages with access
          # may hide the content link behind a ShowSupressedLinks click-through
          # page, handled in the else branch below. There is also an xml
          # alternative - see https://journal.code4lib.org/articles/108
          else if subtype is 'serialssolutions' or url.indexOf('serialssolutions.') isnt -1
            res.error.push 'serialssolutions' if error
            if spg.indexOf('<ssopenurl:url type="article">') isnt -1
              fnd = spg.split('<ssopenurl:url type="article">')[1].split('</ssopenurl:url>')[0].trim() # this gets us something that has an empty accountid param - do we need that for it to work?
              if fnd.length
                res.url = fnd
                res.findings.serials = res.url
                if res.url?
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'serials'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    res.url = undefined
                    res.findings.serials = undefined
            # disable journal matching for now until we have time to get it more accurate - some things get journal links but are not subscribed
            #else if spg.indexOf('<ssopenurl:result format="journal">') isnt -1
            #  # we assume if there is a journal result but not a URL that it means the institution has a journal subscription but we don't have a link
            #  res.journal = true
            #  res.found = 'serials'
            #  API.http.cache(sig, 'oab_ill_subs', res)
            #  return res
            else
              if spg.indexOf('ss_noresults') is -1
                try
                  # follow the suppressed-links page to find the article link
                  surl = url.split('?')[0] + '?ShowSupressedLinks' + pg.split('?ShowSupressedLinks')[1].split('">')[0]
                  #npg = API.http.puppeteer surl #, undefined, API.settings.proxy
                  API.log 'Using OAB subscription unsuppress for ' + surl
                  npg = HTTP.call('GET', surl, {timeout: 15000, npmRequestOptions:{proxy:API.settings.proxy}}).content
                  if npg.indexOf('ArticleCL') isnt -1 and npg.split('DatabaseCL')[0].indexOf('href="./log') isnt -1
                    res.url = surl.split('?')[0] + npg.split('ArticleCL')[1].split('DatabaseCL')[0].split('href="')[1].split('">')[0]
                    res.findings.serials = res.url
                    if res.url?
                      if res.url.indexOf('getitnow') is -1
                        res.found = 'serials'
                        API.http.cache(sig, 'oab_ill_subs', res)
                        return res
                      else
                        res.url = undefined
                        res.findings.serials = undefined
                catch
                  res.error.push 'serialssolutions' if error
          else if subtype is 'exlibris' or url.indexOf('.exlibris') isnt -1
            res.error.push 'exlibris' if error
            if spg.indexOf('full_text_indicator') isnt -1 and spg.split('full_text_indicator')[1].replace('">', '').indexOf('true') is 0 and spg.indexOf('resolution_url') isnt -1
              # NOTE(review): this replace looks like it was meant to unescape
              # XML ampersand entities in the resolution url - confirm pattern
              res.url = spg.split('<resolution_url>')[1].split('</resolution_url>')[0].replace(/&/g, '&')
              res.findings.exlibris = res.url
              res.found = 'exlibris'
              API.http.cache(sig, 'oab_ill_subs', res)
              return res
    # cache whatever was found for this user even when no direct url resulted
    API.http.cache(sig, 'oab_ill_subs', res) if res.uid and not _.isEmpty res.findings
    # return cached or empty result if nothing else found
  else
    res.cache = true
  return res
API.service.oab.ill.start = (opts={}) ->
# opts should include a key called metadata at this point containing all metadata known about the object
# but if not, and if needed for the below stages, it is looked up again
opts.metadata ?= {}
meta = API.service.oab.metadata opts
for m of meta
opts.metadata[m] ?= meta[m]
opts.pilot = Date.now() if opts.pilot is true
opts.live = Date.now() if opts.live is true
if opts.library is 'imperial'
# TODO for now we are just going to send an email when a user creates an ILL
# until we have a script endpoint at the library to hit
# library POST URL: https://www.imperial.ac.uk/library/dynamic/oabutton/oabutton3.php
if not opts.forwarded and not opts.resolved
API.mail.send {
service: 'openaccessbutton',
from: '<EMAIL>',
to: ['<EMAIL>','<EMAIL>'],
subject: 'EXAMPLE ILL TRIGGER',
text: JSON.stringify(opts,undefined,2)
}
API.service.oab.mail({template:{filename:'imperial_confirmation_example.txt'},to:opts.id})
HTTP.call('POST','https://www.imperial.ac.uk/library/dynamic/oabutton/oabutton3.php',{data:opts})
return oab_ill.insert opts
else if opts.from? or opts.config?
user = API.accounts.retrieve(opts.from) if opts.from isnt 'anonymous'
if user? or opts.config?
config = opts.config ? user?.service?.openaccessbutton?.ill?.config ? {}
if config.requests
config.requests_off ?= config.requests
delete opts.config if opts.config?
vars = {}
vars.name = user?.profile?.firstname ? 'librarian'
vars.details = ''
ordered = ['title','author','volume','issue','date','pages']
for o of opts
if o is 'metadata'
for m of opts[o]
if m isnt 'email'
opts[m] = opts[o][m]
ordered.push(m) if m not in ordered
delete opts.metadata
else
ordered.push(o) if o not in ordered
for r in ordered
if opts[r]
vars[r] = opts[r]
if r is 'author'
authors = '<p>Authors:<br>'
first = true
ats = []
for a in opts[r]
if a.family
if first
first = false
else
authors += ', '
atidy = a.family + (if a.given then ' ' + a.given else '')
authors += atidy
ats.push atidy
vars.details += authors + '</p>'
vars[r] = ats
else if ['started','ended','took'].indexOf(r) is -1
vars.details += '<p>' + r + ':<br>' + opts[r] + '</p>'
#vars.details += '<p>' + o + ':<br>' + opts[o] + '</p>'
opts.requests_off = true if config.requests_off
delete opts.author if opts.author? # remove author metadata due to messy provisions causing save issues
delete opts.metadata.author if opts.metadata?.author?
vars.illid = oab_ill.insert opts
vars.details += '<p>Open access button ILL ID:<br>' + vars.illid + '</p>';
eml = if config.email and config.email.length then config.email else if user?.email then user?.email else if user?.emails? and user.emails.length then user.emails[0].address else false
# such as https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in=in%3A&si0qs=0021-9231&si1in=au%3A&si1op=AND&si2in=kw%3A&si2op=AND&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch
# could be provided as: (unless other params are mandatory)
# https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?si0qs=0021-9231
if config.search and config.search.length and (opts.issn or opts.journal)
if config.search.indexOf('worldcat') isnt -1
su = config.search.split('?')[0] + '?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in='
su += if opts.issn? then 'in%3A' else 'ti%3A'
su += '&si0qs=' + (opts.issn ? opts.journal)
su += '&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch'
else
su = config.search
su += if opts.issn then opts.issn else opts.journal
vars.details += '<p>Search URL:<br><a href="' + su + '">' + su + '</a></p>'
vars.worldcatsearchurl = su
if not opts.forwarded and not opts.resolved and eml
API.service.oab.mail({vars: vars, template: {filename:'instantill_create.html'}, to: eml, from: "InstantILL <<EMAIL>>", subject: "ILL request " + vars.illid})
# send msg to mark and jo<NAME> for testing (can be removed later)
txt = vars.details
delete vars.details
txt += '<br><br>' + JSON.stringify(vars,undefined,2)
API.mail.send {
service: 'openaccessbutton',
from: 'InstantILL <<EMAIL>>',
to: ['<EMAIL>','<EMAIL>'],
subject: 'ILL CREATED',
html: txt,
text: txt
}
return vars.illid
else
return 401
else
return 404
API.service.oab.ill.config = (user, config) ->
  # Get or set the ILL (inter-library loan) config for a user.
  # user: a user ID string, a user object, or a URL string (containing a '.')
  #   identifying the page an InstantILL embed was called from.
  # config: when present (and user resolves to an object), the new config to
  #   store on the user record; when absent, the stored config is returned.
  # Returns the (possibly normalised) config object, or {} on any failure.
  # need to set a config on live for the IUPUI user ajrfnwswdr4my8kgd
  # the URL params they need are like
  # https://ill.ulib.iupui.edu/ILLiad/IUP/illiad.dll?Action=10&Form=30&sid=OABILL&genre=InstantILL&aulast=Sapon-Shevin&aufirst=Mara&issn=10478248&title=Journal+of+Educational+Foundations&atitle=Cooperative+Learning%3A+Liberatory+Praxis+or+Hamburger+Helper&volume=5&part=&issue=3&spage=5&epage=&date=1991-07-01&pmid
  # and their openurl config https://docs.google.com/spreadsheets/d/1wGQp7MofLh40JJK32Rp9di7pEkbwOpQ0ioigbqsufU0/edit#gid=806496802
  # tested it and set values as below defaults, but also noted that it has year and month boxes, but these do not correspond to year and month params, or date params
  if typeof user is 'string' and user.indexOf('.') isnt -1 # user is actually url where an embed has been called from
    # look up the config of whichever instantill plugin was embedded at that URL
    try
      res = oab_find.search 'plugin.exact:instantill AND config:* AND embedded:"' + user.split('?')[0].split('#')[0] + '"'
      return JSON.parse res.hits.hits[0]._source.config
    catch
      return {}
  else
    user = Users.get(user) if typeof user is 'string'
    if typeof user is 'object' and config?
      # legacy param name: ill_redirect_base_url overrides ill_form
      if config.ill_redirect_base_url
        config.ill_form = config.ill_redirect_base_url
      # ['institution','ill_form','ill_added_params','method','sid','title','doi','pmid','pmcid','author','journal','issn','volume','issue','page','published','year','notes','terms','book','other','cost','time','email','problem','account','subscription','subscription_type','val','search','autorun_off','autorunparams','intro_off','requests_off','ill_info','ill_if_oa_off','ill_if_sub_off','say_paper','pilot','live','advanced_ill_form']
      # pilot/live flags become timestamps when first switched on
      config.pilot = Date.now() if config.pilot is true
      config.live = Date.now() if config.live is true
      # normalise known ILL form URLs (ILLiad, Relais) to a usable openurl base
      if typeof config.ill_form is 'string'
        if config.ill_form.indexOf('illiad.dll') isnt -1 and config.ill_form.toLowerCase().indexOf('action=') is -1
          config.ill_form = config.ill_form.split('?')[0]
          if config.ill_form.indexOf('/openurl') is -1
            config.ill_form = config.ill_form.split('#')[0] + '/openurl'
            config.ill_form += if config.ill_form.indexOf('#') is -1 then '' else '#' + config.ill_form.split('#')[1].split('?')[0]
          config.ill_form += '?genre=article'
        else if config.ill_form.indexOf('relais') isnt -1 and config.ill_form.toLowerCase().indexOf('genre=') is -1
          config.ill_form = config.ill_form.split('?')[0]
          config.ill_form += '?genre=article'
      # crude XSS guard: refuse to persist configs containing script tags
      if JSON.stringify(config).indexOf('<script') is -1
        if not user.service?
          Users.update user._id, {service: {openaccessbutton: {ill: {config: config, had_old: false}}}}
        else if not user.service.openaccessbutton?
          Users.update user._id, {'service.openaccessbutton': {ill: {config: config, had_old: false}}}
        else if not user.service.openaccessbutton.ill?
          Users.update user._id, {'service.openaccessbutton.ill': {config: config, had_old: false}}
        else
          upd = {'service.openaccessbutton.ill.config': config}
          # keep a one-time backup of the previous config the first time it is overwritten
          if user.service.openaccessbutton.ill.config? and not user.service.openaccessbutton.ill.old_config? and user.service.openaccessbutton.ill.had_old isnt false
            upd['service.openaccessbutton.ill.old_config'] = user.service.openaccessbutton.ill.config
          Users.update user._id, upd
    try
      config ?= user.service.openaccessbutton.ill?.config ? {}
      try config.owner ?= user.email ? user.emails[0].address
      return config
    catch
      return {}
API.service.oab.ill.resolver = (user, resolve, config) ->
  # Placeholder for per-link-resolver settings. The idea: mirror the user's
  # ILL config but allow different params for each resolver URL the user
  # registers, defaulting back to the ILL config. Since settings would apply
  # per resolver address rather than per user, this should not really be a
  # user-level setting at all. Not implemented yet.
  return false
API.service.oab.ill.openurl = (uid, meta={}) ->
  # Build an OpenURL query string for an ILL form from article metadata.
  # uid: a user ID (config is looked up) or a config object passed directly.
  # meta: article metadata (doi, title, author, journal, issn, ...).
  # Each metadata field is mapped to a query param name taken from the user's
  # config, falling back to the IUPUI/OpenURL defaults below. Returns the
  # query string only (no base URL), values URI-encoded.
  config = if typeof uid is 'object' then uid else API.service.oab.ill.config uid
  config ?= {}
  if config.ill_redirect_base_url
    config.ill_form ?= config.ill_redirect_base_url
  if config.ill_redirect_params
    config.ill_added_params ?= config.ill_redirect_params
  # add iupui / openURL defaults to config
  defaults =
    sid: 'sid'
    title: 'atitle' # this is what iupui needs (title is also acceptable, but would clash with using title for journal title, which we set below, as iupui do that
    doi: 'rft_id' # don't know yet what this should be
    #pmid: 'pmid' # same as iupui ill url format
    pmcid: 'pmcid' # don't know yet what this should be
    #aufirst: 'aufirst' # this is what iupui needs
    #aulast: 'aulast' # this is what iupui needs
    author: 'aulast' # author should actually be au, but aulast works even if contains the whole author, using aufirst just concatenates
    journal: 'title' # this is what iupui needs
    #issn: 'issn' # same as iupui ill url format
    #volume: 'volume' # same as iupui ill url format
    #issue: 'issue' # same as iupui ill url format
    #spage: 'spage' # this is what iupui needs
    #epage: 'epage' # this is what iupui needs
    page: 'pages' # iupui uses the spage and epage for start and end pages, but pages is allowed in openurl, check if this will work for iupui
    published: 'date' # this is what iupui needs, but in format 1991-07-01 - date format may be a problem
    year: 'rft.year' # this is what IUPUI uses
    # IUPUI also has a month field, but there is nothing to match to that
  for d of defaults
    config[d] = defaults[d] if not config[d]
  url = ''
  url += config.ill_added_params.replace('?','') + '&' if config.ill_added_params
  url += config.sid + '=InstantILL&'
  for k of meta
    v = false
    if k is 'author'
      # need to check if config has aufirst and aulast or something similar, then need to use those instead,
      # if we have author name parts
      try
        if typeof meta.author is 'string'
          v = meta.author
        else if _.isArray meta.author
          # join author objects/strings into "family, given" comma list
          v = ''
          for author in meta.author
            v += ', ' if v.length
            if typeof author is 'string'
              v += author
            else if author.family
              v += author.family + if author.given then ', ' + author.given else ''
        else
          if meta.author.family
            v = meta.author.family + if meta.author.given then ', ' + meta.author.given else ''
          else
            v = JSON.stringify meta.author
    else if k in ['doi','pmid','pmc','pmcid','url','journal','title','year','issn','volume','issue','page','crossref_type','publisher','published','notes']
      v = meta[k]
    if v
      url += (if config[k] then config[k] else k) + '=' + encodeURIComponent(v) + '&'
  if meta.usermetadata
    # flag that the user typed metadata by hand, folded into the notes field
    nfield = if config.notes then config.notes else 'notes'
    url = url.replace('usermetadata=true','')
    if url.indexOf(nfield+'=') is -1
      url += '&' + nfield + '=The user provided some metadata.'
    else
      url = url.replace(nfield+'=',nfield+'=The user provided some metadata. ')
  # collapse doubled ampersands left over from the replacements above
  # (bugfix: previously the STRING '/&&/g' was passed to replace, which never
  # matches anything, so duplicate '&' survived in the query string)
  return url.replace(/&&/g, '&')
API.service.oab.ill.url = (uid) ->
  # Find the most recent embed URL from which this uid ran an InstantILL
  # availability check, ignoring our own domains. Returns false if none.
  query = {size: 0, query: {filtered: {query: {bool: {must: [{term: {plugin: "instantill"}},{term: {"from.exact": uid}}]}}}}}
  query.aggregations = {embeds: {terms: {field: "embedded.exact"}}}
  found = oab_find.search query
  for bucket in found.aggregations.embeds.buckets
    candidate = bucket.key.split('?')[0].split('#')[0]
    continue if candidate.indexOf('instantill.org') isnt -1
    continue if candidate.indexOf('openaccessbutton.org') isnt -1
    return candidate
  return false
API.service.oab.ill.terms = (uid) ->
  # Return the ILL terms; uid may be a config object itself or a user ID
  # whose config is looked up first.
  cfg = if typeof uid is 'object' then uid else API.service.oab.ill.config uid
  return cfg.terms
API.service.oab.ill.progress = () ->
  # TODO: once library systems expose a status endpoint, query it here to
  # report how far along an ILL request is. Intentionally a no-op for now.
  return
import crypto from 'crypto'
import { Random } from 'meteor/random'
API.add 'service/oab/subscription',
  get:
    #roleRequired: 'openaccessbutton.user'
    authOptional: true
    action: () ->
      # Resolve whose subscriptions to check: a logged-in user wins,
      # otherwise an explicit uid query param (removed so it is not
      # mistaken for article metadata below).
      if this.user
        uid = this.user._id
      else if this.queryParams.uid
        uid = this.queryParams.uid
        delete this.queryParams.uid
      #if this.queryParams.uid and this.user and API.accounts.auth 'openaccessbutton.admin', this.user
      #  uid = this.queryParams.uid
      #  delete this.queryParams.uid
      # nothing to do without both an account and some metadata params
      return {} if not uid? or _.isEmpty this.queryParams
      out = {metadata: API.service.oab.metadata this.queryParams}
      out.subscription = API.service.oab.ill.subscription uid, out.metadata
      return out
API.add 'service/oab/ill',
  get: () ->
    # simple info response for the ILL service root
    return {data: 'ILL service'}
  post:
    authOptional: true
    action: () ->
      # Merge query params over the posted body, record who sent it when
      # authenticated, sanitise, then hand off to the ILL creation pipeline.
      opts = this.request.body ? {}
      opts[k] = v for k, v of this.queryParams
      opts.from ?= this.userId if this.user
      opts.api = true
      return API.service.oab.ill.start API.tdm.clean opts
API.add 'service/oab/ill/collect/:sid',
  get: () ->
    # Relay collected ILL data to a Google Apps Script endpoint identified by
    # :sid (example AKfycbwPq7xWoTLwnqZHv7gJAwtsHRkreJ1hMJVeeplxDG_MipdIamU6),
    # forwarding each cleaned query param plus a random uuid.
    target = 'https://script.google.com/macros/s/' + this.urlParams.sid + '/exec?'
    for key of this.queryParams
      target += key + '=' + API.tdm.clean(decodeURIComponent(this.queryParams[key])) + '&'
    target += 'uuid=' + Random.id()
    HTTP.call 'GET', target
    return true
API.add 'service/oab/ill/openurl',
  get: () ->
    return 'Will eventually redirect after reading openurl params passed here, somehow. For now a POST of metadata here by a user with an open url registered will build their openurl'
  post:
    authOptional: true
    action: () ->
      # Build a full openurl (base ill_form + query string) from posted metadata.
      # Query params override the body; a legacy 'config' param is treated as a uid.
      opts = this.request.body ? {}
      for o of this.queryParams
        opts[o] = this.queryParams[o]
      if opts.config?
        opts.uid ?= opts.config
        delete opts.config
      # flatten nested metadata into top-level opts without clobbering
      if opts.metadata?
        for m of opts.metadata
          opts[m] ?= opts.metadata[m]
        delete opts.metadata
      if not opts.uid and not this.user?
        return 404
      else
        opts = API.tdm.clean opts
        # NOTE(review): opts.config was deleted above if present, so this
        # `opts.config ?` always falls through to the config lookup — confirm intended
        config = opts.config ? API.service.oab.ill.config(opts.uid ? this.userId)
        return (if config?.ill_form then config.ill_form + '?' else '') + API.service.oab.ill.openurl config ? opts.uid ? this.userId, opts
API.add 'service/oab/ill/url',
  get:
    authOptional: true
    action: () ->
      # most recent embed URL for the given uid, or for the caller's account
      who = this.queryParams.uid ? this.userId
      return API.service.oab.ill.url who
API.add 'service/oab/ill/config',
  get:
    authOptional: true
    action: () ->
      # catch-less try: any lookup error falls through to 404
      try
        return API.service.oab.ill.config this.queryParams.uid ? this.userId ? this.queryParams.url
      return 404
  post:
    authRequired: 'openaccessbutton.user'
    action: () ->
      # Admins may set another user's config by passing uid;
      # everyone else can only update their own.
      opts = this.request.body ? {}
      opts[k] = v for k, v of this.queryParams
      if opts.uid and API.accounts.auth 'openaccessbutton.admin', this.user
        target = Users.get opts.uid
        delete opts.uid
      else
        target = this.user
      return API.service.oab.ill.config target, API.tdm.clean opts
API.add 'service/oab/ills',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      # admins with ?all see every ILL; everyone else is restricted to their own
      restrict = [{term:{from:this.userId}}]
      restrict = [] if API.accounts.auth('openaccessbutton.admin', this.user) and this.queryParams.all
      delete this.queryParams.all if this.queryParams.all?
      return oab_ill.search this.queryParams, {restrict:restrict}
  post:
    roleRequired:'openaccessbutton.user'
    action: () ->
      # same restriction rules as GET, but the query comes from the body
      restrict = [{term:{from:this.userId}}]
      restrict = [] if API.accounts.auth('openaccessbutton.admin', this.user) and this.queryParams.all
      delete this.queryParams.all if this.queryParams.all?
      return oab_ill.search this.bodyParams, {restrict:restrict}
# namespace object for the ILL (inter-library loan) service functions below
API.service.oab.ill = {}
API.service.oab.ill.subscription = (uid, meta={}, refresh=false) ->
  # Check whether a user's institution has subscription access to the article
  # described by meta, by querying each link resolver URL in the user's ILL
  # config (SFX, EDS/EBSCO, Serials Solutions, Ex Libris) and scraping the
  # response for a full-text link.
  # uid: user ID string (result is cached per uid+meta) or a config object.
  # Returns {findings, lookups, error, contents, url?, found?} — `found` names
  # the resolver type that produced `url` when access was detected.
  if typeof uid is 'string'
    sig = uid + JSON.stringify meta
    sig = crypto.createHash('md5').update(sig, 'utf8').digest('base64')
    # NOTE(review): cache is only consulted when refresh is a non-true, non-zero
    # value — presumably a max-age for API.http.cache; confirm its 4th-arg semantics
    res = API.http.cache(sig, 'oab_ill_subs', undefined, refresh) if refresh and refresh isnt true and refresh isnt 0
  if not res?
    res = {findings:{}, lookups:[], error:[], contents: []}
    if typeof uid is 'string'
      res.uid = uid
      user = API.accounts.retrieve uid
      config = user?.service?.openaccessbutton?.ill?.config
    else
      config = uid
    if config?.subscription?
      if config.ill_redirect_params
        config.ill_added_params ?= config.ill_redirect_params
      # need to get their subscriptions link from their config - and need to know how to build the query string for it
      openurl = API.service.oab.ill.openurl config, meta
      openurl = openurl.replace(config.ill_added_params.replace('?',''),'') if config.ill_added_params
      # subscription / subscription_type may be comma-separated strings or lists
      if typeof config.subscription is 'string'
        config.subscription = config.subscription.split(',')
      if typeof config.subscription_type is 'string'
        config.subscription_type = config.subscription_type.split(',')
      config.subscription_type ?= []
      # try each configured resolver URL in turn, returning on first hit
      for s of config.subscription
        sub = config.subscription[s]
        if typeof sub is 'object'
          subtype = sub.type
          sub = sub.url
        else
          subtype = config.subscription_type[s] ? 'unknown'
        sub = sub.trim()
        if sub
          # rewrite known resolver URLs onto their machine-readable (XML) endpoints
          if subtype is 'serialssolutions' or sub.indexOf('serialssolutions') isnt -1 # and sub.indexOf('.xml.') is -1
            tid = sub.split('.search')[0]
            tid = tid.split('//')[1] if tid.indexOf('//') isnt -1
            #bs = if sub.indexOf('://') isnt -1 then sub.split('://')[0] else 'http' # always use htto because https on the xml endpoint fails
            sub = 'http://' + tid + '.openurl.xml.serialssolutions.com/openurlxml?version=1.0&genre=article&'
          else if (subtype is 'sfx' or sub.indexOf('sfx.') isnt -1) and sub.indexOf('sfx.response_type=simplexml') is -1
            sub += (if sub.indexOf('?') is -1 then '?' else '&') + 'sfx.response_type=simplexml'
          else if (subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1) and sub.indexOf('response_type') is -1
            # https://github.com/OAButton/discussion/issues/1793
            #sub = 'https://trails-msu.userservices.exlibrisgroup.com/view/uresolver/01TRAILS_MSU/openurl?svc_dat=CTO&response_type=xml&sid=InstantILL&'
            sub = sub.split('?')[0] + '?svc_dat=CTO&response_type=xml&sid=InstantILL&'
            #ID=doi:10.1108%2FNFS-09-2019-0293&genre=article&atitle=Impact%20of%20processing%20and%20packaging%20on%20the%20quality%20of%20murici%20jelly%20%5BByrsonima%20crassifolia%20(L.)%20rich%5D%20during%20storage.&title=Nutrition%20&%20Food%20Science&issn=00346659&volume=50&issue=5&date=20200901&au=Da%20Cunha,%20Mariana%20Crivelari&spage=871&pages=871-883
          url = sub + (if sub.indexOf('?') is -1 then '?' else '&') + openurl
          url = url.split('snc.idm.oclc.org/login?url=')[1] if url.indexOf('snc.idm.oclc.org/login?url=') isnt -1
          url = url.replace 'cache=true', ''
          if subtype is 'sfx' or sub.indexOf('sfx.') isnt -1 and url.indexOf('=10.') isnt -1
            url = url.replace '=10.', '=doi:10.'
          if subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1 and url.indexOf('doi=10.') isnt -1
            url = url.replace 'doi=10.', 'ID=doi:10.'
          # need to use the proxy as some subscriptions endpoints need a registered IP address, and ours is registered for some of them already
          # but having a problem passing proxy details through, so ignore for now
          # BUT AGAIN eds definitely does NOT work without puppeteer so going to have to use that again for now and figure out the proxy problem later
          #pg = API.http.puppeteer url #, undefined, API.settings.proxy
          # then get that link
          # then in that link find various content, depending on what kind of service it is
          # try doing without puppeteer and see how that goes
          API.log 'Using OAB subscription check for ' + url
          pg = ''
          spg = ''
          error = false
          res.lookups.push url
          try
            #pg = HTTP.call('GET', url, {timeout:15000, npmRequestOptions:{proxy:API.settings.proxy}}).content
            pg = if url.indexOf('.xml.serialssolutions') isnt -1 or url.indexOf('sfx.response_type=simplexml') isnt -1 or url.indexOf('response_type=xml') isnt -1 then HTTP.call('GET',url).content else API.http.puppeteer url #, undefined, API.settings.proxy
            spg = if pg.indexOf('<body') isnt -1 then pg.toLowerCase().split('<body')[1].split('</body')[0] else pg
            res.contents.push spg
          catch err
            console.log(err) if API.settings.log?.level is 'debug'
            API.log {msg: 'ILL subscription check error when looking up ' + url, level:'warn', url: url, error: err}
            error = true
          #res.u ?= []
          #res.u.push url
          #res.pg = pg
          # sfx
          # with access:
          # https://cricksfx.hosted.exlibrisgroup.com/crick?sid=Elsevier:Scopus&_service_type=getFullTxt&issn=00225193&isbn=&volume=467&issue=&spage=7&epage=14&pages=7-14&artnum=&date=2019&id=doi:10.1016%2fj.jtbi.2019.01.031&title=Journal+of+Theoretical+Biology&atitle=Potential+relations+between+post-spliced+introns+and+mature+mRNAs+in+the+Caenorhabditis+elegans+genome&aufirst=S.&auinit=S.&auinit1=S&aulast=Bo
          # which will contain a link like:
          # <A title="Navigate to target in new window" HREF="javascript:openSFXMenuLink(this, 'basic1', undefined, '_blank');">Go to Journal website at</A>
          # but the content can be different on different sfx language pages, so need to find this link via the tag attributes, then trigger it, then get the page it opens
          # can test this with 10.1016/j.jtbi.2019.01.031 on instantill page
          # note there is also now an sfx xml endpoint that we have found to check
          if subtype is 'sfx' or url.indexOf('sfx.') isnt -1
            res.error.push 'sfx' if error
            if spg.indexOf('getFullTxt') isnt -1 and spg.indexOf('<target_url>') isnt -1
              try
                # this will get the first target that has a getFullTxt type and has a target_url element with a value in it, or will error
                res.url = spg.split('getFullTxt')[1].split('</target>')[0].split('<target_url>')[1].split('</target_url>')[0].trim()
                res.findings.sfx = res.url
                if res.url?
                  # getitnow links are pay-per-view, not subscription access
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'sfx'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    res.url = undefined
                    res.findings.sfx = undefined
            else
              if spg.indexOf('<a title="navigate to target in new window') isnt -1 and spg.split('<a title="navigate to target in new window')[1].split('">')[0].indexOf('basic1') isnt -1
                # tried to get the next link after the click through, but was not worth putting more time into it. For now, seems like this will have to do
                res.url = url
                res.findings.sfx = res.url
                if res.url?
                  if res.url.indexOf('getitnow') is -1
                    res.found = 'sfx'
                    API.http.cache(sig, 'oab_ill_subs', res)
                    return res
                  else
                    res.url = undefined
                    res.findings.sfx = undefined
          # eds
          # note eds does need a login, but IP address range is supposed to get round that
          # our IP is supposed to be registered with the library as being one of their internal ones so should not need login
          # however a curl from our IP to it still does not seem to work - will try with puppeteer to see if it is blocking in other ways
          # not sure why the links here are via an oclc login - tested, and we will use without it
          # with access:
          # https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=RE&aulast=Marx&atitle=Platelet-rich+plasma:+growth+factor+enhancement+for+bone+grafts&id=doi:10.1016/S1079-2104(98)90029-4&title=Oral+Surgery,+Oral+Medicine,+Oral+Pathology,+Oral+Radiology,+and+Endodontology&volume=85&issue=6&date=1998&spage=638&issn=1079-2104
          # can be tested on instantill page with 10.1016/S1079-2104(98)90029-4
          # without:
          # https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=MP&aulast=Newton&atitle=Librarian+roles+in+institutional+repository+data+set+collecting:+outcomes+of+a+research+library+task+force&id=doi:10.1080/01462679.2011.530546
          else if subtype is 'eds' or url.indexOf('ebscohost.') isnt -1
            res.error.push 'eds' if error
            if spg.indexOf('view this ') isnt -1 and pg.indexOf('<a data-auto="menu-link" href="') isnt -1
              # prepend the scheme+host of the resolver to the relative menu link
              res.url = url.replace('://','______').split('/')[0].replace('______','://') + pg.split('<a data-auto="menu-link" href="')[1].split('" title="')[0]
              res.findings.eds = res.url
              if res.url?
                if res.url.indexOf('getitnow') is -1
                  res.found = 'eds'
                  API.http.cache(sig, 'oab_ill_subs', res)
                  return res
                else
                  res.url = undefined
          # serials solutions
          # the HTML source code for the No Results page includes a span element with the class SS_NoResults. This class is only found on the No Results page (confirmed by serialssolutions)
          # does not appear to need proxy or password
          # with:
          # https://rx8kl6yf4x.search.serialssolutions.com/?genre=article&issn=14085348&title=Annales%3A%20Series%20Historia%20et%20Sociologia&volume=28&issue=1&date=20180101&atitle=HOW%20TO%20UNDERSTAND%20THE%20WAR%20IN%20SYRIA.&spage=13&PAGES=13-28&AUTHOR=%C5%A0TERBENC%2C%20Primo%C5%BE&&aufirst=&aulast=&sid=EBSCO:aph&pid=
          # can test this on instantill page with How to understand the war in Syria - Annales Series Historia et Sociologia 2018
          # but the with link has a suppressed link that has to be clicked to get the actual page with the content on it
          # <a href="?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko&paramdict=en-US&SS_PostParamDict=disableOneClick">Click here</a>
          # which is the only link with the showsuppressedlinks param and the clickhere content
          # then the page with the content link is like:
          # https://rx8kl6yf4x.search.serialssolutions.com/?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko&paramdict=en-US&SS_PostParamDict=disableOneClick
          # and the content is found in a link like this:
          # <div id="ArticleCL" class="cl">
          # <a target="_blank" href="./log?L=RX8KL6YF4X&D=EAP&J=TC0000940997&P=Link&PT=EZProxy&A=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&H=c7306f7121&U=http%3A%2F%2Fwww.ulib.iupui.edu%2Fcgi-bin%2Fproxy.pl%3Furl%3Dhttp%3A%2F%2Fopenurl.ebscohost.com%2Flinksvc%2Flinking.aspx%3Fgenre%3Darticle%26issn%3D1408-5348%26title%3DAnnales%2BSeries%2Bhistoria%2Bet%2Bsociologia%26date%3D2018%26volume%3D28%26issue%3D1%26spage%3D13%26atitle%3DHOW%2BTO%2BUNDERSTAND%2BTHE%2BWAR%2BIN%2BSYRIA.%26aulast%3D%25C5%25A0TERBENC%26aufirst%3DPrimo%C5%BE">Article</a>
          # </div>
          # without:
          # https://rx8kl6yf4x.search.serialssolutions.com/directLink?&atitle=Writing+at+the+Speed+of+Sound%3A+Music+Stenography+and+Recording+beyond+the+Phonograph&author=PI:NAME:<NAME>END_PI%2C+PI:NAME:<NAME>END_PI&issn=01482076&title=Nineteenth+Century+Music&volume=41&issue=2&date=2017-10-01&spage=121&id=doi:&sid=ProQ_ss&genre=article
          # we also have an xml alternative for serials solutions
          # see https://journal.code4lib.org/articles/108
          else if subtype is 'serialssolutions' or url.indexOf('serialssolutions.') isnt -1
            res.error.push 'serialssolutions' if error
            if spg.indexOf('<ssopenurl:url type="article">') isnt -1
              fnd = spg.split('<ssopenurl:url type="article">')[1].split('</ssopenurl:url>')[0].trim() # this gets us something that has an empty accountid param - do we need that for it to work?
              if fnd.length
                res.url = fnd
                res.findings.serials = res.url
              if res.url?
                if res.url.indexOf('getitnow') is -1
                  res.found = 'serials'
                  API.http.cache(sig, 'oab_ill_subs', res)
                  return res
                else
                  res.url = undefined
                  res.findings.serials = undefined
            # disable journal matching for now until we have time to get it more accurate - some things get journal links but are not subscribed
            #else if spg.indexOf('<ssopenurl:result format="journal">') isnt -1
            #  # we assume if there is a journal result but not a URL that it means the institution has a journal subscription but we don't have a link
            #  res.journal = true
            #  res.found = 'serials'
            #  API.http.cache(sig, 'oab_ill_subs', res)
            #  return res
            else
              if spg.indexOf('ss_noresults') is -1
                try
                  # follow the "ShowSupressedLinks" click-through page to find the article link
                  surl = url.split('?')[0] + '?ShowSupressedLinks' + pg.split('?ShowSupressedLinks')[1].split('">')[0]
                  #npg = API.http.puppeteer surl #, undefined, API.settings.proxy
                  API.log 'Using OAB subscription unsuppress for ' + surl
                  npg = HTTP.call('GET', surl, {timeout: 15000, npmRequestOptions:{proxy:API.settings.proxy}}).content
                  if npg.indexOf('ArticleCL') isnt -1 and npg.split('DatabaseCL')[0].indexOf('href="./log') isnt -1
                    res.url = surl.split('?')[0] + npg.split('ArticleCL')[1].split('DatabaseCL')[0].split('href="')[1].split('">')[0]
                    res.findings.serials = res.url
                    if res.url?
                      if res.url.indexOf('getitnow') is -1
                        res.found = 'serials'
                        API.http.cache(sig, 'oab_ill_subs', res)
                        return res
                      else
                        res.url = undefined
                        res.findings.serials = undefined
                catch
                  res.error.push 'serialssolutions' if error
          else if subtype is 'exlibris' or url.indexOf('.exlibris') isnt -1
            res.error.push 'exlibris' if error
            if spg.indexOf('full_text_indicator') isnt -1 and spg.split('full_text_indicator')[1].replace('">', '').indexOf('true') is 0 and spg.indexOf('resolution_url') isnt -1
              # NOTE(review): this replace looks like a no-op ('&' -> '&'); it was
              # presumably meant to decode '&amp;' entities — confirm against history
              res.url = spg.split('<resolution_url>')[1].split('</resolution_url>')[0].replace(/&/g, '&')
              res.findings.exlibris = res.url
              res.found = 'exlibris'
              API.http.cache(sig, 'oab_ill_subs', res)
              return res
    # cache whatever was found for this uid even when no access link was confirmed
    API.http.cache(sig, 'oab_ill_subs', res) if res.uid and not _.isEmpty res.findings
  # return cached or empty result if nothing else found
  else
    res.cache = true
  return res
API.service.oab.ill.start = (opts={}) ->
# opts should include a key called metadata at this point containing all metadata known about the object
# but if not, and if needed for the below stages, it is looked up again
opts.metadata ?= {}
meta = API.service.oab.metadata opts
for m of meta
opts.metadata[m] ?= meta[m]
opts.pilot = Date.now() if opts.pilot is true
opts.live = Date.now() if opts.live is true
if opts.library is 'imperial'
# TODO for now we are just going to send an email when a user creates an ILL
# until we have a script endpoint at the library to hit
# library POST URL: https://www.imperial.ac.uk/library/dynamic/oabutton/oabutton3.php
if not opts.forwarded and not opts.resolved
API.mail.send {
service: 'openaccessbutton',
from: 'PI:EMAIL:<EMAIL>END_PI',
to: ['PI:EMAIL:<EMAIL>END_PI','PI:EMAIL:<EMAIL>END_PI'],
subject: 'EXAMPLE ILL TRIGGER',
text: JSON.stringify(opts,undefined,2)
}
API.service.oab.mail({template:{filename:'imperial_confirmation_example.txt'},to:opts.id})
HTTP.call('POST','https://www.imperial.ac.uk/library/dynamic/oabutton/oabutton3.php',{data:opts})
return oab_ill.insert opts
else if opts.from? or opts.config?
user = API.accounts.retrieve(opts.from) if opts.from isnt 'anonymous'
if user? or opts.config?
config = opts.config ? user?.service?.openaccessbutton?.ill?.config ? {}
if config.requests
config.requests_off ?= config.requests
delete opts.config if opts.config?
vars = {}
vars.name = user?.profile?.firstname ? 'librarian'
vars.details = ''
ordered = ['title','author','volume','issue','date','pages']
for o of opts
if o is 'metadata'
for m of opts[o]
if m isnt 'email'
opts[m] = opts[o][m]
ordered.push(m) if m not in ordered
delete opts.metadata
else
ordered.push(o) if o not in ordered
for r in ordered
if opts[r]
vars[r] = opts[r]
if r is 'author'
authors = '<p>Authors:<br>'
first = true
ats = []
for a in opts[r]
if a.family
if first
first = false
else
authors += ', '
atidy = a.family + (if a.given then ' ' + a.given else '')
authors += atidy
ats.push atidy
vars.details += authors + '</p>'
vars[r] = ats
else if ['started','ended','took'].indexOf(r) is -1
vars.details += '<p>' + r + ':<br>' + opts[r] + '</p>'
#vars.details += '<p>' + o + ':<br>' + opts[o] + '</p>'
opts.requests_off = true if config.requests_off
delete opts.author if opts.author? # remove author metadata due to messy provisions causing save issues
delete opts.metadata.author if opts.metadata?.author?
vars.illid = oab_ill.insert opts
vars.details += '<p>Open access button ILL ID:<br>' + vars.illid + '</p>';
eml = if config.email and config.email.length then config.email else if user?.email then user?.email else if user?.emails? and user.emails.length then user.emails[0].address else false
# such as https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in=in%3A&si0qs=0021-9231&si1in=au%3A&si1op=AND&si2in=kw%3A&si2op=AND&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch
# could be provided as: (unless other params are mandatory)
# https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?si0qs=0021-9231
if config.search and config.search.length and (opts.issn or opts.journal)
if config.search.indexOf('worldcat') isnt -1
su = config.search.split('?')[0] + '?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in='
su += if opts.issn? then 'in%3A' else 'ti%3A'
su += '&si0qs=' + (opts.issn ? opts.journal)
su += '&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch'
else
su = config.search
su += if opts.issn then opts.issn else opts.journal
vars.details += '<p>Search URL:<br><a href="' + su + '">' + su + '</a></p>'
vars.worldcatsearchurl = su
if not opts.forwarded and not opts.resolved and eml
API.service.oab.mail({vars: vars, template: {filename:'instantill_create.html'}, to: eml, from: "InstantILL <PI:EMAIL:<EMAIL>END_PI>", subject: "ILL request " + vars.illid})
# send msg to mark and joPI:NAME:<NAME>END_PI for testing (can be removed later)
txt = vars.details
delete vars.details
txt += '<br><br>' + JSON.stringify(vars,undefined,2)
API.mail.send {
service: 'openaccessbutton',
from: 'InstantILL <PI:EMAIL:<EMAIL>END_PI>',
to: ['PI:EMAIL:<EMAIL>END_PI','PI:EMAIL:<EMAIL>END_PI'],
subject: 'ILL CREATED',
html: txt,
text: txt
}
return vars.illid
else
return 401
else
return 404
API.service.oab.ill.config = (user, config) ->
# need to set a config on live for the IUPUI user ajrfnwswdr4my8kgd
# the URL params they need are like
# https://ill.ulib.iupui.edu/ILLiad/IUP/illiad.dll?Action=10&Form=30&sid=OABILL&genre=InstantILL&aulast=Sapon-Shevin&aufirst=Mara&issn=10478248&title=Journal+of+Educational+Foundations&atitle=Cooperative+Learning%3A+Liberatory+Praxis+or+Hamburger+Helper&volume=5&part=&issue=3&spage=5&epage=&date=1991-07-01&pmid
# and their openurl config https://docs.google.com/spreadsheets/d/1wGQp7MofLh40JJK32Rp9di7pEkbwOpQ0ioigbqsufU0/edit#gid=806496802
# tested it and set values as below defaults, but also noted that it has year and month boxes, but these do not correspond to year and month params, or date params
if typeof user is 'string' and user.indexOf('.') isnt -1 # user is actually url where an embed has been called from
try
res = oab_find.search 'plugin.exact:instantill AND config:* AND embedded:"' + user.split('?')[0].split('#')[0] + '"'
return JSON.parse res.hits.hits[0]._source.config
catch
return {}
else
user = Users.get(user) if typeof user is 'string'
if typeof user is 'object' and config?
if config.ill_redirect_base_url
config.ill_form = config.ill_redirect_base_url
# ['institution','ill_form','ill_added_params','method','sid','title','doi','pmid','pmcid','author','journal','issn','volume','issue','page','published','year','notes','terms','book','other','cost','time','email','problem','account','subscription','subscription_type','val','search','autorun_off','autorunparams','intro_off','requests_off','ill_info','ill_if_oa_off','ill_if_sub_off','say_paper','pilot','live','advanced_ill_form']
config.pilot = Date.now() if config.pilot is true
config.live = Date.now() if config.live is true
if typeof config.ill_form is 'string'
if config.ill_form.indexOf('illiad.dll') isnt -1 and config.ill_form.toLowerCase().indexOf('action=') is -1
config.ill_form = config.ill_form.split('?')[0]
if config.ill_form.indexOf('/openurl') is -1
config.ill_form = config.ill_form.split('#')[0] + '/openurl'
config.ill_form += if config.ill_form.indexOf('#') is -1 then '' else '#' + config.ill_form.split('#')[1].split('?')[0]
config.ill_form += '?genre=article'
else if config.ill_form.indexOf('relais') isnt -1 and config.ill_form.toLowerCase().indexOf('genre=') is -1
config.ill_form = config.ill_form.split('?')[0]
config.ill_form += '?genre=article'
if JSON.stringify(config).indexOf('<script') is -1
if not user.service?
Users.update user._id, {service: {openaccessbutton: {ill: {config: config, had_old: false}}}}
else if not user.service.openaccessbutton?
Users.update user._id, {'service.openaccessbutton': {ill: {config: config, had_old: false}}}
else if not user.service.openaccessbutton.ill?
Users.update user._id, {'service.openaccessbutton.ill': {config: config, had_old: false}}
else
upd = {'service.openaccessbutton.ill.config': config}
if user.service.openaccessbutton.ill.config? and not user.service.openaccessbutton.ill.old_config? and user.service.openaccessbutton.ill.had_old isnt false
upd['service.openaccessbutton.ill.old_config'] = user.service.openaccessbutton.ill.config
Users.update user._id, upd
try
config ?= user.service.openaccessbutton.ill?.config ? {}
try config.owner ?= user.email ? user.emails[0].address
return config
catch
return {}
API.service.oab.ill.resolver = (user, resolve, config) ->
# should configure and return link resolver settings for the given user
# should be like the users config but can have different params set for it
# and has to default to the ill one anyway
# and has to apply per resolver url that the user gives us
# this shouldn't actually be a user setting - it should be settings for a given link resolver address
return false
API.service.oab.ill.openurl = (uid, meta={}) ->
config = if typeof uid is 'object' then uid else API.service.oab.ill.config uid
config ?= {}
if config.ill_redirect_base_url
config.ill_form ?= config.ill_redirect_base_url
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# add iupui / openURL defaults to config
defaults =
sid: 'sid'
title: 'atitle' # this is what iupui needs (title is also acceptable, but would clash with using title for journal title, which we set below, as iupui do that
doi: 'rft_id' # don't know yet what this should be
#pmid: 'pmid' # same as iupui ill url format
pmcid: 'pmcid' # don't know yet what this should be
#aufirst: 'aufirst' # this is what iupui needs
#aulast: 'aulast' # this is what iupui needs
author: 'aulast' # author should actually be au, but aulast works even if contains the whole author, using aufirst just concatenates
journal: 'title' # this is what iupui needs
#issn: 'issn' # same as iupui ill url format
#volume: 'volume' # same as iupui ill url format
#issue: 'issue' # same as iupui ill url format
#spage: 'spage' # this is what iupui needs
#epage: 'epage' # this is what iupui needs
page: 'pages' # iupui uses the spage and epage for start and end pages, but pages is allowed in openurl, check if this will work for iupui
published: 'date' # this is what iupui needs, but in format 1991-07-01 - date format may be a problem
year: 'rft.year' # this is what IUPUI uses
# IUPUI also has a month field, but there is nothing to match to that
for d of defaults
config[d] = defaults[d] if not config[d]
url = ''
url += config.ill_added_params.replace('?','') + '&' if config.ill_added_params
url += config.sid + '=InstantILL&'
for k of meta
v = false
if k is 'author'
# need to check if config has aufirst and aulast or something similar, then need to use those instead,
# if we have author name parts
try
if typeof meta.author is 'string'
v = meta.author
else if _.isArray meta.author
v = ''
for author in meta.author
v += ', ' if v.length
if typeof author is 'string'
v += author
else if author.family
v += author.family + if author.given then ', ' + author.given else ''
else
if meta.author.family
v = meta.author.family + if meta.author.given then ', ' + meta.author.given else ''
else
v = JSON.stringify meta.author
else if k in ['doi','pmid','pmc','pmcid','url','journal','title','year','issn','volume','issue','page','crossref_type','publisher','published','notes']
v = meta[k]
if v
url += (if config[k] then config[k] else k) + '=' + encodeURIComponent(v) + '&'
if meta.usermetadata
nfield = if config.notes then config.notes else 'notes'
url = url.replace('usermetadata=true','')
if url.indexOf(nfield+'=') is -1
url += '&' + nfield + '=The user provided some metadata.'
else
url = url.replace(nfield+'=',nfield+'=The user provided some metadata. ')
return url.replace('/&&/g','&')
API.service.oab.ill.url = (uid) ->
# given a uid, find the most recent URL that this users uid submitted an availability check with an ILL for
q = {size: 0, query: {filtered: {query: {bool: {must: [{term: {plugin: "instantill"}},{term: {"from.exact": uid}}]}}}}}
q.aggregations = {embeds: {terms: {field: "embedded.exact"}}}
res = oab_find.search q
for eu in res.aggregations.embeds.buckets
eur = eu.key.split('?')[0].split('#')[0]
if eur.indexOf('instantill.org') is -1 and eur.indexOf('openaccessbutton.org') is -1
return eur
return false
API.service.oab.ill.terms = (uid) ->
if typeof uid is 'object'
return uid.terms
else
return API.service.oab.ill.config(uid).terms
API.service.oab.ill.progress = () ->
# TODO need a function that can lookup ILL progress from the library systems some how
return |
[
{
"context": "or + bleacon.minor\n\n scan: (bleacon) =>\n key = @hashKey(bleacon)\n @beacons[key] = new Beacon(bl",
"end": 291,
"score": 0.5569745302200317,
"start": 290,
"tag": "KEY",
"value": "@"
}
] | app/ble_scan.coffee | c0ze/node-ble-beacon-scanner | 1 | Bleacon = require('bleacon')
Beacon = require('./beacon').Beacon
Report = require('./report').Report
class BleScan
constructor: () ->
@beacons = {}
@reporter = new Report()
hashKey: (bleacon) =>
bleacon.uuid + bleacon.major + bleacon.minor
scan: (bleacon) =>
key = @hashKey(bleacon)
@beacons[key] = new Beacon(bleacon) unless @beacons[key]
@beacons[key].readProximity(bleacon)
@beacons[key].updateCounter()
# beacon.updateBatteryLevel()
report: () =>
console.log "reporting..."
@reporter.post @beacons, (error, body) ->
console.log "reporting of beacons failed."
# do something with the beacons
console.log error
console.log body
@beacons = {}
exports.BLE = BleScan
| 205560 | Bleacon = require('bleacon')
Beacon = require('./beacon').Beacon
Report = require('./report').Report
class BleScan
constructor: () ->
@beacons = {}
@reporter = new Report()
hashKey: (bleacon) =>
bleacon.uuid + bleacon.major + bleacon.minor
scan: (bleacon) =>
key = <KEY>hashKey(bleacon)
@beacons[key] = new Beacon(bleacon) unless @beacons[key]
@beacons[key].readProximity(bleacon)
@beacons[key].updateCounter()
# beacon.updateBatteryLevel()
report: () =>
console.log "reporting..."
@reporter.post @beacons, (error, body) ->
console.log "reporting of beacons failed."
# do something with the beacons
console.log error
console.log body
@beacons = {}
exports.BLE = BleScan
| true | Bleacon = require('bleacon')
Beacon = require('./beacon').Beacon
Report = require('./report').Report
class BleScan
constructor: () ->
@beacons = {}
@reporter = new Report()
hashKey: (bleacon) =>
bleacon.uuid + bleacon.major + bleacon.minor
scan: (bleacon) =>
key = PI:KEY:<KEY>END_PIhashKey(bleacon)
@beacons[key] = new Beacon(bleacon) unless @beacons[key]
@beacons[key].readProximity(bleacon)
@beacons[key].updateCounter()
# beacon.updateBatteryLevel()
report: () =>
console.log "reporting..."
@reporter.post @beacons, (error, body) ->
console.log "reporting of beacons failed."
# do something with the beacons
console.log error
console.log body
@beacons = {}
exports.BLE = BleScan
|
[
{
"context": "= do -> \n # Public variables \n firstName = \"Baptiste\"\n lastName = \"Vannesson\"\n\n # Private variab",
"end": 75,
"score": 0.9997687339782715,
"start": 67,
"tag": "NAME",
"value": "Baptiste"
},
{
"context": "ables \n firstName = \"Baptiste\"\n lastName = \"Vannesson\"\n\n # Private variable \n secretNickname = \"B",
"end": 102,
"score": 0.9995924830436707,
"start": 93,
"tag": "NAME",
"value": "Vannesson"
},
{
"context": "n\"\n\n # Private variable \n secretNickname = \"Bada\"\n\n # Public functions\n sayHello = -> \"He",
"end": 152,
"score": 0.738930881023407,
"start": 151,
"tag": "NAME",
"value": "B"
}
] | misc/RevealingModule/CoffeeScript/index.coffee | irynaO/JavaScript-Design-Patterns | 293 | 'use strict'
me = do ->
# Public variables
firstName = "Baptiste"
lastName = "Vannesson"
# Private variable
secretNickname = "Bada"
# Public functions
sayHello = -> "Hello, #{firstName} #{lastName}!"
getSecretNickname = -> secretNickname
# Revealed module
firstName: firstName
lastName: lastName
sayHello: sayHello
getSecretNickname: getSecretNickname
console.log me.firstName, me.lastName # OK
console.log me.sayHello() # OK
console.log me.secretNickname # Oops! Undefined!
console.log me.getSecretNickname() # OK
| 71280 | 'use strict'
me = do ->
# Public variables
firstName = "<NAME>"
lastName = "<NAME>"
# Private variable
secretNickname = "<NAME>ada"
# Public functions
sayHello = -> "Hello, #{firstName} #{lastName}!"
getSecretNickname = -> secretNickname
# Revealed module
firstName: firstName
lastName: lastName
sayHello: sayHello
getSecretNickname: getSecretNickname
console.log me.firstName, me.lastName # OK
console.log me.sayHello() # OK
console.log me.secretNickname # Oops! Undefined!
console.log me.getSecretNickname() # OK
| true | 'use strict'
me = do ->
# Public variables
firstName = "PI:NAME:<NAME>END_PI"
lastName = "PI:NAME:<NAME>END_PI"
# Private variable
secretNickname = "PI:NAME:<NAME>END_PIada"
# Public functions
sayHello = -> "Hello, #{firstName} #{lastName}!"
getSecretNickname = -> secretNickname
# Revealed module
firstName: firstName
lastName: lastName
sayHello: sayHello
getSecretNickname: getSecretNickname
console.log me.firstName, me.lastName # OK
console.log me.sayHello() # OK
console.log me.secretNickname # Oops! Undefined!
console.log me.getSecretNickname() # OK
|
[
{
"context": "ions.New\n constructor: ->\n Iugu.setAccountID 'a63f657b-a787-4ac0-8e39-1b06e869dea5'\n\n initializeElements: ->\n @form = $('.simple",
"end": 105,
"score": 0.9469227194786072,
"start": 69,
"tag": "KEY",
"value": "a63f657b-a787-4ac0-8e39-1b06e869dea5"
}
] | app/assets/javascripts/modules/registrations.coffee | mailkiq/mailkiq | 12 | class App.Registrations.New
constructor: ->
Iugu.setAccountID 'a63f657b-a787-4ac0-8e39-1b06e869dea5'
initializeElements: ->
@form = $('.simple_form:has(.usable-creditcard-form)')
initializeEvents: =>
$('.credit_card_number').keyup(@onKeyUp)
$('.credit_card_number').formatter
pattern: '{{9999}} {{9999}} {{9999}} {{9999}}'
persistent: false
$('.credit_card_expiration').formatter
pattern: '{{99}}/{{99}}',
persistent: false
$('.credit_card_cvv').formatter pattern: '{{9999}}'
@form.submit @onSubmit
onSubmit: (ev) ->
form = $(this).get(0)
submitButton = $(this).find(':submit')
submitButton.prop('disabled', true)
if not $('.usable-creditcard-form').length
return form.submit()
Iugu.createPaymentToken this, (response) ->
console.log(response) if console
if response.errors
alert('Erro na Cobrança. Verifique os dados do cartão de crédito.')
submitButton.prop('disabled', false)
else
$('#account_credit_card_token').val(response.id)
form.submit()
return false
onKeyUp: (ev) =>
number = $(ev.target).val()
number = number.replace(/\ /g, '')
number = number.replace(/\-/g, '')
brand = Iugu.utils.getBrandByCreditCardNumber(number)
@form
.removeClass('visa')
.removeClass('mastercard')
.removeClass('amex')
.removeClass('diners')
if brand
@form.addClass(brand)
if brand == 'amex'
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{9999999}} {{99999}}')
else if brand == 'diners'
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{999999}} {{9999}}')
else
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{9999}} {{9999}} {{9999}}')
return true
render: ->
@initializeElements()
@initializeEvents()
class App.Registrations.Edit extends App.Registrations.New
| 117399 | class App.Registrations.New
constructor: ->
Iugu.setAccountID '<KEY>'
initializeElements: ->
@form = $('.simple_form:has(.usable-creditcard-form)')
initializeEvents: =>
$('.credit_card_number').keyup(@onKeyUp)
$('.credit_card_number').formatter
pattern: '{{9999}} {{9999}} {{9999}} {{9999}}'
persistent: false
$('.credit_card_expiration').formatter
pattern: '{{99}}/{{99}}',
persistent: false
$('.credit_card_cvv').formatter pattern: '{{9999}}'
@form.submit @onSubmit
onSubmit: (ev) ->
form = $(this).get(0)
submitButton = $(this).find(':submit')
submitButton.prop('disabled', true)
if not $('.usable-creditcard-form').length
return form.submit()
Iugu.createPaymentToken this, (response) ->
console.log(response) if console
if response.errors
alert('Erro na Cobrança. Verifique os dados do cartão de crédito.')
submitButton.prop('disabled', false)
else
$('#account_credit_card_token').val(response.id)
form.submit()
return false
onKeyUp: (ev) =>
number = $(ev.target).val()
number = number.replace(/\ /g, '')
number = number.replace(/\-/g, '')
brand = Iugu.utils.getBrandByCreditCardNumber(number)
@form
.removeClass('visa')
.removeClass('mastercard')
.removeClass('amex')
.removeClass('diners')
if brand
@form.addClass(brand)
if brand == 'amex'
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{9999999}} {{99999}}')
else if brand == 'diners'
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{999999}} {{9999}}')
else
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{9999}} {{9999}} {{9999}}')
return true
render: ->
@initializeElements()
@initializeEvents()
class App.Registrations.Edit extends App.Registrations.New
| true | class App.Registrations.New
constructor: ->
Iugu.setAccountID 'PI:KEY:<KEY>END_PI'
initializeElements: ->
@form = $('.simple_form:has(.usable-creditcard-form)')
initializeEvents: =>
$('.credit_card_number').keyup(@onKeyUp)
$('.credit_card_number').formatter
pattern: '{{9999}} {{9999}} {{9999}} {{9999}}'
persistent: false
$('.credit_card_expiration').formatter
pattern: '{{99}}/{{99}}',
persistent: false
$('.credit_card_cvv').formatter pattern: '{{9999}}'
@form.submit @onSubmit
onSubmit: (ev) ->
form = $(this).get(0)
submitButton = $(this).find(':submit')
submitButton.prop('disabled', true)
if not $('.usable-creditcard-form').length
return form.submit()
Iugu.createPaymentToken this, (response) ->
console.log(response) if console
if response.errors
alert('Erro na Cobrança. Verifique os dados do cartão de crédito.')
submitButton.prop('disabled', false)
else
$('#account_credit_card_token').val(response.id)
form.submit()
return false
onKeyUp: (ev) =>
number = $(ev.target).val()
number = number.replace(/\ /g, '')
number = number.replace(/\-/g, '')
brand = Iugu.utils.getBrandByCreditCardNumber(number)
@form
.removeClass('visa')
.removeClass('mastercard')
.removeClass('amex')
.removeClass('diners')
if brand
@form.addClass(brand)
if brand == 'amex'
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{9999999}} {{99999}}')
else if brand == 'diners'
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{999999}} {{9999}}')
else
@form.find('.credit_card_number').formatter()
.resetPattern('{{9999}} {{9999}} {{9999}} {{9999}}')
return true
render: ->
@initializeElements()
@initializeEvents()
class App.Registrations.Edit extends App.Registrations.New
|
[
{
"context": "s: true\n shadowsocks:\n serverIp: '106.186.30.188'\n serverPort: 3118\n localPort: ",
"end": 216,
"score": 0.9996692538261414,
"start": 202,
"tag": "IP_ADDRESS",
"value": "106.186.30.188"
},
{
"context": "hod: 'aes-256-cfb'\n password: 'MagicaSocks'\n timeout: 600000\n useHttpProxy: ",
"end": 342,
"score": 0.9991357922554016,
"start": 331,
"tag": "PASSWORD",
"value": "MagicaSocks"
},
{
"context": " false\n httpProxy:\n httpProxyIp: '127.0.0.1'\n httpProxyPort: 8099\n useSocksProxy: ",
"end": 450,
"score": 0.9997836351394653,
"start": 441,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " false\n socksProxy:\n socksProxyIp: '127.0.0.1'\n socksProxyPort: 8099\n poi:\n listenPo",
"end": 559,
"score": 0.9997737407684326,
"start": 550,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
}
] | modules/config.coffee | Magimagi/poi | 3 | fs = require('fs')
configPath = "#{global.appDataPath}/poi-config.json"
exports.config = defaultConfig =
version: '0.0.3'
proxy:
useShadowsocks: true
shadowsocks:
serverIp: '106.186.30.188'
serverPort: 3118
localPort: 8788
method: 'aes-256-cfb'
password: 'MagicaSocks'
timeout: 600000
useHttpProxy: false
httpProxy:
httpProxyIp: '127.0.0.1'
httpProxyPort: 8099
useSocksProxy: false
socksProxy:
socksProxyIp: '127.0.0.1'
socksProxyPort: 8099
poi:
listenPort: 8787
cache:
useStorage: true
useRadical: true
useCache: false
antiCat:
retryDelay: 10000
retryTime: 500
saveDefaultConfig = ->
fs.writeFileSync configPath, JSON.stringify defaultConfig, null, 2
saveConfig = ->
fs.writeFileSync configPath, JSON.stringify exports.config, null, 2
exports.loadConfig = ->
try
exports.config = JSON.parse fs.readFileSync configPath
if exports.config.version != defaultConfig.version
throw { err: "version error" }
catch err
exports.config = defaultConfig
saveDefaultConfig()
exports.updateConfig = (conf) ->
exports.config = conf
try
saveConfig()
catch
return false
return true
| 200391 | fs = require('fs')
configPath = "#{global.appDataPath}/poi-config.json"
exports.config = defaultConfig =
version: '0.0.3'
proxy:
useShadowsocks: true
shadowsocks:
serverIp: '172.16.31.10'
serverPort: 3118
localPort: 8788
method: 'aes-256-cfb'
password: '<PASSWORD>'
timeout: 600000
useHttpProxy: false
httpProxy:
httpProxyIp: '127.0.0.1'
httpProxyPort: 8099
useSocksProxy: false
socksProxy:
socksProxyIp: '127.0.0.1'
socksProxyPort: 8099
poi:
listenPort: 8787
cache:
useStorage: true
useRadical: true
useCache: false
antiCat:
retryDelay: 10000
retryTime: 500
saveDefaultConfig = ->
fs.writeFileSync configPath, JSON.stringify defaultConfig, null, 2
saveConfig = ->
fs.writeFileSync configPath, JSON.stringify exports.config, null, 2
exports.loadConfig = ->
try
exports.config = JSON.parse fs.readFileSync configPath
if exports.config.version != defaultConfig.version
throw { err: "version error" }
catch err
exports.config = defaultConfig
saveDefaultConfig()
exports.updateConfig = (conf) ->
exports.config = conf
try
saveConfig()
catch
return false
return true
| true | fs = require('fs')
configPath = "#{global.appDataPath}/poi-config.json"
exports.config = defaultConfig =
version: '0.0.3'
proxy:
useShadowsocks: true
shadowsocks:
serverIp: 'PI:IP_ADDRESS:172.16.31.10END_PI'
serverPort: 3118
localPort: 8788
method: 'aes-256-cfb'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
timeout: 600000
useHttpProxy: false
httpProxy:
httpProxyIp: '127.0.0.1'
httpProxyPort: 8099
useSocksProxy: false
socksProxy:
socksProxyIp: '127.0.0.1'
socksProxyPort: 8099
poi:
listenPort: 8787
cache:
useStorage: true
useRadical: true
useCache: false
antiCat:
retryDelay: 10000
retryTime: 500
saveDefaultConfig = ->
fs.writeFileSync configPath, JSON.stringify defaultConfig, null, 2
saveConfig = ->
fs.writeFileSync configPath, JSON.stringify exports.config, null, 2
exports.loadConfig = ->
try
exports.config = JSON.parse fs.readFileSync configPath
if exports.config.version != defaultConfig.version
throw { err: "version error" }
catch err
exports.config = defaultConfig
saveDefaultConfig()
exports.updateConfig = (conf) ->
exports.config = conf
try
saveConfig()
catch
return false
return true
|
[
{
"context": "mfabrik GmbH\n * MIT Licence\n * https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org\n#",
"end": 166,
"score": 0.9914006590843201,
"start": 152,
"tag": "USERNAME",
"value": "programmfabrik"
},
{
"context": ", pos, defaults, limitRect\n\n\t\tfor key in [\n\t\t\t\"min_w\"\n\t\t\t\"max_w\"\n\t\t\t\"min_h\"\n\t\t\t\"max_h\"\n\t\t\t\"min_x\"\n\t\t\t\"",
"end": 16356,
"score": 0.8622862100601196,
"start": 16355,
"tag": "KEY",
"value": "w"
},
{
"context": "ing(4)\n\t\t\tmkey = key.substring(0,3)\n\t\t\tif key == \"max_x\"\n\t\t\t\tvalue -= pos.w\n\t\t\tif key == \"max_y\"\n\t\t\t\tvalu",
"end": 16745,
"score": 0.9959501624107361,
"start": 16740,
"tag": "KEY",
"value": "max_x"
},
{
"context": "f key == \"max_x\"\n\t\t\t\tvalue -= pos.w\n\t\t\tif key == \"max_y\"\n\t\t\t\tvalue -= pos.h\n\n\t\t\tdiff = pos[skey] - value\n",
"end": 16785,
"score": 0.9947320818901062,
"start": 16780,
"tag": "KEY",
"value": "max_y"
}
] | src/base/DragDropSelect/Draggable.coffee | programmfabrik/coffeescript-ui | 10 | ###
* coffeescript-ui - Coffeescript User Interface System (CUI)
* Copyright (c) 2013 - 2016 Programmfabrik GmbH
* MIT Licence
* https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org
###
class CUI.Draggable extends CUI.DragDropSelect
@cls = "draggable"
initOpts: ->
super()
@addOpts
dragClass:
default: "cui-dragging"
check: String
helper:
default: "clone"
check: (v) ->
v == "clone" or CUI.util.isElement(v) or CUI.util.isFunction(v) or null
helper_contain_element:
check: (v) ->
CUI.util.isElement(v)
helper_set_pos:
check: Function
get_cursor:
check: Function
support_touch:
check: Boolean
dragend:
check: Function
dragstop:
check: Function
dragstart:
check: Function
dragging:
check: Function
create:
default: -> true
check: Function
axis:
check: ["x", "y"]
# helper_remove_always:
# check: Boolean
# helper_parent:
# default: "document"
# check: ["document", "parent"]
threshold:
default: 2
check: (v) ->
v >= 0
# ms:
# default: 0
# check: (v) ->
# # must be multiple of MouseIsDownListener.interval_ms or 0
# v % CUI.MouseIsDownListener.interval_ms == 0
selector:
check: (v) =>
CUI.util.isString(v) or CUI.util.isFunction(v)
readOpts: ->
super()
@__autoRepeatTimeout = null
if @supportTouch()
@__event_types =
start: ["mousedown", "touchstart"]
end: ["mouseup", "touchend"]
move: ["mousemove", "touchmove"]
else
@__event_types =
start: ["mousedown"]
end: ["mouseup"]
move: ["mousemove"]
@
getClass: ->
if not @_selector
"cui-draggable "+super()
else
super()
supportTouch: ->
!!@_support_touch
__killTimeout: ->
if @__autoRepeatTimeout
CUI.clearTimeout(@__autoRepeatTimeout)
@__autoRepeatTimeout = null
@
__cleanup: ->
@__killTimeout()
if @__ref
CUI.Events.ignore(instance: @__ref)
@__ref = null
if CUI.globalDrag?.instance == @
CUI.globalDrag = null
return
destroy: ->
super()
CUI.dom.remove(CUI.globalDrag?.helperNode)
@__cleanup()
@
init: ->
# console.debug "Draggable", @options.selector
CUI.util.assert(not @_helper_contain_element or CUI.dom.closest(@_element, @_helper_contain_element), "new CUI.sDraggable", "opts.helper_contain_element needs to be parent of opts.element", opts: @opts)
CUI.Events.listen
type: @__event_types.start
node: @element
# capture: true
instance: @
selector: @_selector
call: (ev) =>
if ev.getButton() > 0 and ev.getType() == "mousedown"
# ignore if not the main button
return
if CUI.globalDrag
# ignore if dragging is in progress
return
# console.debug CUI.util.getObjectClass(@), "[mousedown]", ev.getUniqueId(), @element
# hint possible click event listeners like Sidebar to
# not execute the click anymore...
#
position = CUI.util.elementGetPosition(CUI.util.getCoordinatesFromEvent(ev), ev.getTarget())
dim = CUI.dom.getDimensions(ev.getTarget())
if dim.clientWidthScaled > 0 and position.left - dim.scrollLeftScaled > dim.clientWidthScaled
console.warn("Mousedown on a vertical scrollbar, not starting drag.")
return
if dim.clientHeightScaled > 0 and position.top - dim.scrollTopScaled > dim.clientHeightScaled
console.warn("Mousedown on a horizontal scrollbar, not starting drag.")
return
target = ev.getCurrentTarget()
target_dim = CUI.dom.getDimensions(target)
if not CUI.dom.isInDOM(target) or target_dim.clientWidth == 0 or target_dim.clientHeight == 0
return
if CUI.dom.closest(ev.getTarget(), "input,textarea,select")
return
$target = target
# console.debug "attempting to start drag", ev, $target
@init_drag(ev, $target)
return
init_drag: (ev, $target) ->
if not $target
# if subclasses screw with $target, this can happen
return
CUI.globalDrag = @_create?(ev, $target)
# ev.getMousedownEvent?().preventDefault()
if CUI.globalDrag == false
# console.debug("not creating drag handle, opts.create returned 'false'.", ev, @)
return
# ev.preventDefault()
if CUI.util.isNull(CUI.globalDrag) or CUI.globalDrag == true
CUI.globalDrag = {}
CUI.util.assert(CUI.util.isPlainObject(CUI.globalDrag), "CUI.Draggable.init_drag", "returned data must be a plain object", data: CUI.globalDrag)
point = CUI.util.getCoordinatesFromEvent(ev)
position = CUI.util.elementGetPosition(point, $target)
init =
$source: $target
startEvent: ev
startCoordinates: point
instance: @
startScroll:
top: $target.scrollTop
left: $target.scrollLeft
start: position # offset to the $target
threshold: @_threshold
for k, v of init
CUI.globalDrag[k] = v
ev.stopPropagation()
# ev.preventDefault()
@before_drag(ev, $target)
@__ref = new CUI.Dummy() # instance to easily remove events
dragover_count = 0
moveEvent = null
dragover_scroll = =>
# during a dragover scroll, the original target
# might be not available any more, we need to recalculate it
pointTarget = moveEvent.getPointTarget() or moveEvent.getTarget()
CUI.Events.trigger
type: "dragover-scroll"
node: pointTarget
count: dragover_count
originalEvent: moveEvent
dragover_count = dragover_count + 1
@__killTimeout()
@__autoRepeatTimeout = CUI.setTimeout
ms: 100
track: false
call: dragover_scroll
CUI.Events.listen
node: document
type: @__event_types.move
instance: @__ref
call: (ev) =>
if not CUI.globalDrag
return
# this prevents chrome from focussing element while
# we drag
ev.preventDefault()
$target = ev.getTarget()
if not $target
return
if CUI.globalDrag.ended
return
coordinates = CUI.util.getCoordinatesFromEvent(ev)
diff =
x: coordinates.pageX - CUI.globalDrag.startCoordinates.pageX
y: coordinates.pageY - CUI.globalDrag.startCoordinates.pageY
eventPoint: coordinates
switch @get_axis()
when "x"
diff.y = 0
when "y"
diff.x = 0
diff.bare_x = diff.x
diff.bare_y = diff.y
diff.x += CUI.globalDrag.$source.scrollLeft - CUI.globalDrag.startScroll.left
diff.y += CUI.globalDrag.$source.scrollTop - CUI.globalDrag.startScroll.top
if Math.abs(diff.x) >= CUI.globalDrag.threshold or
Math.abs(diff.y) >= CUI.globalDrag.threshold or
CUI.globalDrag.dragStarted
CUI.globalDrag.dragDiff = diff
if not CUI.globalDrag.dragStarted
CUI.globalDrag.startEvent.preventDefault()
@__startDrag(ev, $target, diff)
if @_get_cursor
document.body.setAttribute("data-cursor", @_get_cursor(CUI.globalDrag))
else
document.body.setAttribute("data-cursor", @getCursor())
moveEvent = ev
dragover_scroll()
@do_drag(ev, $target, diff)
@_dragging?(ev, CUI.globalDrag, diff)
return
# Stop is used by ESC button to stop the dragging.
end_drag = (ev, stop = false) =>
start_target = CUI.globalDrag.$source
start_target_parents = CUI.dom.parents(start_target)
CUI.globalDrag.ended = true
document.body.removeAttribute("data-cursor")
if stop
CUI.globalDrag.stopped = true
@stop_drag(ev)
@_dragstop?(ev, CUI.globalDrag, @)
else
@end_drag(ev)
@_dragend?(ev, CUI.globalDrag, @)
if @isDestroyed()
# this can happen if any of the
# callbacks cleanup / reload
return
noClickKill = CUI.globalDrag.noClickKill
@__cleanup()
if noClickKill
return
has_same_parents = =>
parents_now = CUI.dom.parents(start_target)
for p, idx in start_target_parents
if parents_now[idx] != p
return false
return true
if not has_same_parents or not CUI.dom.isInDOM(ev.getTarget())
return
CUI.Events.listen
type: "click"
capture: true
only_once: true
node: window
call: (ev) ->
# console.error "Killing click after drag", ev.getTarget()
return ev.stop()
return
CUI.Events.listen
node: document
type: ["keyup"]
capture: true
instance: @__ref
call: (ev) =>
if not CUI.globalDrag.dragStarted
@__cleanup()
return
if ev.keyCode() == 27
# console.error "stopped.."
end_drag(ev, true)
return ev.stop()
return
CUI.Events.listen
node: document
type: @__event_types.end
capture: true
instance: @__ref
call: (ev) =>
# console.debug "event received: ", ev.getType()
# console.debug "draggable", ev.type
if not CUI.globalDrag
return
if not CUI.globalDrag.dragStarted
@__cleanup()
return
end_drag(ev)
return ev.stop()
# console.debug "mouseup, resetting drag stuff"
#
return
# Cursor keyword written to <body data-cursor> while a drag is active,
# unless opts.get_cursor overrides it (see the move handler in init_drag).
getCursor: ->
	"grabbing"

# Internal: runs exactly once per drag, when the pointer first exceeds
# the threshold (or dragStarted is already set). Fires the user's
# dragstart callback, builds the helper node, tags the source element
# with the drag class and flags the global drag as started.
__startDrag: (ev, $target, diff) ->
	# It's ok to stop the events here, the "mouseup" and "keyup"
	# we need to end the drag are initialized before in init drag,
	# so they are executed before
	# console.debug "start drag", diff
	@_dragstart?(ev, CUI.globalDrag)
	@init_helper(ev, $target, diff)
	CUI.dom.addClass(CUI.globalDrag.$source, @_dragClass)
	@start_drag(ev, $target, diff)
	CUI.globalDrag.dragStarted = true

# call after first mousedown
# Subclass hook, intentionally empty here.
before_drag: ->

# Subclass hook: called once from __startDrag when dragging begins.
start_drag: (ev, $target, diff) ->
# do drag
# first call
# Called on every pointer move while the drag is active: positions the
# helper node, then synthesizes cui-dragleave / cui-dragenter /
# cui-dragover events on the element currently under the pointer.
do_drag: (ev, $target, diff) ->
	# position helper
	@position_helper(ev, $target, diff)
	# pointer moved onto a different element: leave the old one first
	if CUI.globalDrag.dragoverTarget and CUI.globalDrag.dragoverTarget != $target
		CUI.Events.trigger
			type: "cui-dragleave"
			node: CUI.globalDrag.dragoverTarget
			info:
				globalDrag: CUI.globalDrag
				originalEvent: ev
		CUI.globalDrag.dragoverTarget = null
	if not CUI.globalDrag.dragoverTarget
		CUI.globalDrag.dragoverTarget = $target
		# console.debug "target:", $target
		CUI.Events.trigger
			type: "cui-dragenter"
			node: CUI.globalDrag.dragoverTarget
			info:
				globalDrag: CUI.globalDrag
				originalEvent: ev
	# trigger our own dragover event on the correct target
	CUI.Events.trigger
		node: CUI.globalDrag.dragoverTarget
		type: "cui-dragover"
		info:
			globalDrag: CUI.globalDrag
			originalEvent: ev
	return
# Removes the drag styling class from the source element and the helper
# node from the DOM. No-op when this instance was destroyed meanwhile.
cleanup_drag: (ev) ->
	if @isDestroyed()
		return
	CUI.dom.removeClass(CUI.globalDrag.$source, @_dragClass)
	CUI.dom.remove(CUI.globalDrag.helperNode)

# Drag aborted (e.g. via ESC): finish the event sequence, then clean up.
stop_drag: (ev) ->
	@__finish_drag(ev)
	@cleanup_drag(ev)

# Internal: closing event sequence on the element currently hovered:
# cui-dragleave, then — unless the drag was stopped — cui-drop, and
# finally cui-dragend. Clears dragoverTarget afterwards.
__finish_drag: (ev) ->
	if not CUI.globalDrag.dragoverTarget
		return
	# console.debug "sending pf_dragleave", CUI.globalDrag.dragoverTarget
	# console.debug "pf_dragleave.event", CUI.globalDrag.dragoverTarget
	CUI.Events.trigger
		node: CUI.globalDrag.dragoverTarget
		type: "cui-dragleave"
		info:
			globalDrag: CUI.globalDrag
			originalEvent: ev
	if not CUI.globalDrag.stopped
		# console.error "cui-drop", ev
		CUI.Events.trigger
			type: "cui-drop"
			node: CUI.globalDrag.dragoverTarget
			info:
				globalDrag: CUI.globalDrag
				originalEvent: ev
	CUI.Events.trigger
		node: CUI.globalDrag.dragoverTarget
		type: "cui-dragend"
		info:
			globalDrag: CUI.globalDrag
			originalEvent: ev
	CUI.globalDrag.dragoverTarget = null
	@

# Drag finished normally (pointer released): finish + cleanup.
end_drag: (ev) ->
	# console.debug CUI.globalDrag.dragoverTarget, ev.getType(), ev
	if @isDestroyed()
		return
	@__finish_drag(ev)
	@cleanup_drag(ev)
	@
# Default helper geometry for this move: the helper's recorded start
# rect shifted by the current drag delta.
get_helper_pos: (ev, gd, diff) ->
	top: CUI.globalDrag.helperNodeStart.top + diff.y
	left: CUI.globalDrag.helperNodeStart.left + diff.x
	width: CUI.globalDrag.helperNodeStart.width
	height: CUI.globalDrag.helperNodeStart.height

# Element the helper is confined to (opts.helper_contain_element), if any.
get_helper_contain_element: ->
	@_helper_contain_element

# Moves the helper node to follow the pointer, clamped (via limitRect)
# either to the containing element or to the cached body dimensions.
position_helper: (ev, $target, diff) ->
	# console.debug "position helper", CUI.globalDrag.helperNodeStart, ev, $target, diff
	if not CUI.globalDrag.helperNode
		return
	helper_pos = @get_helper_pos(ev, CUI.globalDrag, diff)
	pos =
		x: helper_pos.left
		y: helper_pos.top
		w: helper_pos.width
		h: helper_pos.height
	helper_contain_element = @get_helper_contain_element(ev, $target, diff)
	if helper_contain_element
		dim_contain = CUI.dom.getDimensions(helper_contain_element)
		if dim_contain.clientWidth == 0 or dim_contain.clientHeight == 0
			console.warn('Draggable[position_helper]: Containing element has no dimensions.', helper_contain_element);
		# pos is changed in place
		CUI.Draggable.limitRect pos,
			min_x: dim_contain.viewportLeft + dim_contain.borderLeftWidth
			max_x: dim_contain.viewportRight - dim_contain.borderRightWidth - CUI.globalDrag.helperNodeStart.marginHorizontal
			min_y: dim_contain.viewportTop + dim_contain.borderTopWidth
			max_y: dim_contain.viewportBottom - dim_contain.borderBottomWidth - CUI.globalDrag.helperNodeStart.marginVertical
	else
		# fall back to the body dimensions cached in init_helper
		dim_contain = CUI.globalDrag.helperNodeStart.body_dim
		CUI.Draggable.limitRect pos,
			min_x: dim_contain.borderLeftWidth
			max_x: dim_contain.scrollWidth - dim_contain.borderRightWidth - CUI.globalDrag.helperNodeStart.marginHorizontal
			min_y: dim_contain.borderTopWidth
			max_y: dim_contain.scrollHeight - dim_contain.borderBottomWidth - CUI.globalDrag.helperNodeStart.marginVertical
	# console.debug "limitRect", CUI.util.dump(pos), dim_contain
	helper_pos.top = pos.y
	helper_pos.left = pos.x
	helper_pos.dragDiff =
		x: helper_pos.left - CUI.globalDrag.helperNodeStart.left
		y: helper_pos.top - CUI.globalDrag.helperNodeStart.top
	# only touch the dimensions when they actually changed
	if helper_pos.width != CUI.globalDrag.helperNodeStart.width
		new_width = helper_pos.width
	if helper_pos.height != CUI.globalDrag.helperNodeStart.height
		new_height = helper_pos.height
	# move via transform translate on top of the helper's original transform
	CUI.dom.setStyle CUI.globalDrag.helperNode,
		transform: CUI.globalDrag.helperNodeStart.transform+" translateX("+helper_pos.dragDiff.x+"px) translateY("+helper_pos.dragDiff.y+"px)"
	CUI.dom.setDimensions CUI.globalDrag.helperNode,
		borderBoxWidth: new_width
		borderBoxHeight: new_height
	CUI.globalDrag.helperPos = helper_pos
	return

# Source element to clone when opts.helper == "clone".
getCloneSourceForHelper: ->
	CUI.globalDrag.$source

# Configured drag axis ("x" or "y") or undefined for free movement.
get_axis: ->
	@_axis

# Configured helper: "clone", an element, a factory function, or null.
get_helper: (ev, gd, diff) ->
	@_helper

# Initial absolute position for the helper node: the drag start point
# shifted back by the given offset.
get_init_helper_pos: (node, gd, offset = top: 0, left: 0) ->
	top: gd.startCoordinates.pageY - offset.top
	left: gd.startCoordinates.pageX - offset.left
# Creates and mounts the drag helper node, if opts.helper is set:
# clones the source element, calls the factory function, or uses the
# given element directly. Records the helper's start geometry in
# CUI.globalDrag.helperNodeStart for position_helper.
init_helper: (ev, $target, diff) ->
	helper = @get_helper(ev, CUI.globalDrag, diff)
	if not helper
		return
	if helper == "clone"
		clone_source = @getCloneSourceForHelper()
		hn = clone_source.cloneNode(true)
		hn.classList.remove("cui-selected")
		# offset the layer to the click
		offset =
			top: CUI.globalDrag.start.top
			left: CUI.globalDrag.start.left
	else if CUI.util.isFunction(helper)
		hn = CUI.globalDrag.helperNode = helper(CUI.globalDrag)
		set_dim = null
	else
		hn = CUI.globalDrag.helperNode = helper
	if not hn
		return
	CUI.globalDrag.helperNode = hn
	CUI.dom.addClass(hn, "cui-drag-drop-select-helper")
	document.body.appendChild(hn)
	# `offset` is only set for "clone"; otherwise get_init_helper_pos
	# falls back to its {top: 0, left: 0} default
	start = @get_init_helper_pos(hn, CUI.globalDrag, offset)
	CUI.dom.setStyle(hn, start)
	if helper == "clone"
		# set width & height
		set_dim = CUI.dom.getDimensions(clone_source)
		# console.error "measureing clone", set_dim.marginBoxWidth, CUI.globalDrag.$source, dim
		CUI.dom.setDimensions hn,
			marginBoxWidth: set_dim.marginBoxWidth
			marginBoxHeight: set_dim.marginBoxHeight
	dim = CUI.dom.getDimensions(hn)
	start.width = dim.borderBoxWidth
	start.height = dim.borderBoxHeight
	start.marginTop = dim.marginTop
	start.marginLeft = dim.marginLeft
	start.marginVertical = dim.marginVertical
	start.marginHorizontal = dim.marginHorizontal
	start.transform = dim.computedStyle.transform
	if start.transform == 'none'
		start.transform = ''
	# body dims cached so position_helper can clamp without re-measuring
	start.body_dim = CUI.dom.getDimensions(document.body)
	CUI.globalDrag.helperNodeStart = start
# keep pos inside certain constraints
# pos.fix is an Array containing any of "n","w","e","s"
# limitRect: min_w, min_h, max_w, max_h, min_x, max_x, min_y, max_y
# !!! The order of the parameters is how we want them, in Movable it
# is different for compability reasons
# Clamps pos {x, y, w, h} in place and returns pos. `defaults` fills
# missing pos keys first. Edges listed in pos.fix are held in place by
# compensating the opposite dimension instead.
@limitRect: (pos, limitRect, defaults={}) ->
	pos.fix = pos.fix or []
	for k, v of defaults
		if CUI.util.isUndef(pos[k])
			pos[k] = v
	# console.debug "limitRect", pos, defaults, limitRect
	for key in [
		"min_w"
		"max_w"
		"min_h"
		"max_h"
		"min_x"
		"max_x"
		"min_y"
		"max_y"
	]
		value = limitRect[key]
		if CUI.util.isUndef(value)
			continue
		CUI.util.assert(not isNaN(value), "#{CUI.util.getObjectClass(@)}.limitRect", "key #{key} in pos isNaN", pos: pos, defaults: defaults, limitRect: limitRect)
		skey = key.substring(4) # "w" / "h" / "x" / "y"
		mkey = key.substring(0,3) # "min" / "max"
		# max_x / max_y limit the far edge, so subtract the size first
		if key == "max_x"
			value -= pos.w
		if key == "max_y"
			value -= pos.h
		diff = pos[skey] - value
		if mkey == "min"
			if diff >= 0
				continue
		if mkey == "max"
			if diff <= 0
				continue
		# fixed north/west edge: absorb the correction in the size instead
		if skey == "y" and "n" in pos.fix
			pos.h -= diff
			continue
		if skey == "x" and "w" in pos.fix
			pos.w -= diff
			continue
		# console.debug "correcting #{skey} by #{diff} from #{pos[skey]}"
		pos[skey]-=diff
		if skey == "h" and "s" in pos.fix
			# console.debug "FIX y"
			pos.y += diff
		if skey == "w" and "e" in pos.fix
			# console.debug "FIX x"
			pos.x += diff
		if skey == "x" and "e" in pos.fix
			# console.debug "FIX w"
			pos.w += diff
		if skey == "y" and "s" in pos.fix
			# console.debug "FIX h"
			pos.h += diff
	# console.debug "limitRect AFTER", pos, diff
	return pos
###
* coffeescript-ui - Coffeescript User Interface System (CUI)
* Copyright (c) 2013 - 2016 Programmfabrik GmbH
* MIT Licence
* https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org
###
class CUI.Draggable extends CUI.DragDropSelect
@cls = "draggable"
initOpts: ->
super()
@addOpts
dragClass:
default: "cui-dragging"
check: String
helper:
default: "clone"
check: (v) ->
v == "clone" or CUI.util.isElement(v) or CUI.util.isFunction(v) or null
helper_contain_element:
check: (v) ->
CUI.util.isElement(v)
helper_set_pos:
check: Function
get_cursor:
check: Function
support_touch:
check: Boolean
dragend:
check: Function
dragstop:
check: Function
dragstart:
check: Function
dragging:
check: Function
create:
default: -> true
check: Function
axis:
check: ["x", "y"]
# helper_remove_always:
# check: Boolean
# helper_parent:
# default: "document"
# check: ["document", "parent"]
threshold:
default: 2
check: (v) ->
v >= 0
# ms:
# default: 0
# check: (v) ->
# # must be multiple of MouseIsDownListener.interval_ms or 0
# v % CUI.MouseIsDownListener.interval_ms == 0
selector:
check: (v) =>
CUI.util.isString(v) or CUI.util.isFunction(v)
readOpts: ->
super()
@__autoRepeatTimeout = null
if @supportTouch()
@__event_types =
start: ["mousedown", "touchstart"]
end: ["mouseup", "touchend"]
move: ["mousemove", "touchmove"]
else
@__event_types =
start: ["mousedown"]
end: ["mouseup"]
move: ["mousemove"]
@
getClass: ->
if not @_selector
"cui-draggable "+super()
else
super()
supportTouch: ->
!!@_support_touch
__killTimeout: ->
if @__autoRepeatTimeout
CUI.clearTimeout(@__autoRepeatTimeout)
@__autoRepeatTimeout = null
@
__cleanup: ->
@__killTimeout()
if @__ref
CUI.Events.ignore(instance: @__ref)
@__ref = null
if CUI.globalDrag?.instance == @
CUI.globalDrag = null
return
destroy: ->
super()
CUI.dom.remove(CUI.globalDrag?.helperNode)
@__cleanup()
@
init: ->
# console.debug "Draggable", @options.selector
CUI.util.assert(not @_helper_contain_element or CUI.dom.closest(@_element, @_helper_contain_element), "new CUI.sDraggable", "opts.helper_contain_element needs to be parent of opts.element", opts: @opts)
CUI.Events.listen
type: @__event_types.start
node: @element
# capture: true
instance: @
selector: @_selector
call: (ev) =>
if ev.getButton() > 0 and ev.getType() == "mousedown"
# ignore if not the main button
return
if CUI.globalDrag
# ignore if dragging is in progress
return
# console.debug CUI.util.getObjectClass(@), "[mousedown]", ev.getUniqueId(), @element
# hint possible click event listeners like Sidebar to
# not execute the click anymore...
#
position = CUI.util.elementGetPosition(CUI.util.getCoordinatesFromEvent(ev), ev.getTarget())
dim = CUI.dom.getDimensions(ev.getTarget())
if dim.clientWidthScaled > 0 and position.left - dim.scrollLeftScaled > dim.clientWidthScaled
console.warn("Mousedown on a vertical scrollbar, not starting drag.")
return
if dim.clientHeightScaled > 0 and position.top - dim.scrollTopScaled > dim.clientHeightScaled
console.warn("Mousedown on a horizontal scrollbar, not starting drag.")
return
target = ev.getCurrentTarget()
target_dim = CUI.dom.getDimensions(target)
if not CUI.dom.isInDOM(target) or target_dim.clientWidth == 0 or target_dim.clientHeight == 0
return
if CUI.dom.closest(ev.getTarget(), "input,textarea,select")
return
$target = target
# console.debug "attempting to start drag", ev, $target
@init_drag(ev, $target)
return
# Sets up CUI.globalDrag for a fresh drag attempt and installs the
# document-level move / keyup / end listeners (bound to @__ref so
# __cleanup can remove them in one call). The drag only really starts
# (__startDrag) once the pointer moves further than the threshold.
init_drag: (ev, $target) ->
	if not $target
		# if subclasses screw with $target, this can happen
		return

	CUI.globalDrag = @_create?(ev, $target)
	# ev.getMousedownEvent?().preventDefault()

	if CUI.globalDrag == false
		# console.debug("not creating drag handle, opts.create returned 'false'.", ev, @)
		return

	# ev.preventDefault()

	if CUI.util.isNull(CUI.globalDrag) or CUI.globalDrag == true
		CUI.globalDrag = {}

	CUI.util.assert(CUI.util.isPlainObject(CUI.globalDrag), "CUI.Draggable.init_drag", "returned data must be a plain object", data: CUI.globalDrag)

	point = CUI.util.getCoordinatesFromEvent(ev)
	position = CUI.util.elementGetPosition(point, $target)

	init =
		$source: $target
		startEvent: ev
		startCoordinates: point
		instance: @
		startScroll:
			top: $target.scrollTop
			left: $target.scrollLeft
		start: position # offset to the $target
		threshold: @_threshold

	for k, v of init
		CUI.globalDrag[k] = v

	ev.stopPropagation()
	# ev.preventDefault()

	@before_drag(ev, $target)

	@__ref = new CUI.Dummy() # instance to easily remove events

	dragover_count = 0
	moveEvent = null

	dragover_scroll = =>
		# during a dragover scroll, the original target
		# might be not available any more, we need to recalculate it
		pointTarget = moveEvent.getPointTarget() or moveEvent.getTarget()
		CUI.Events.trigger
			type: "dragover-scroll"
			node: pointTarget
			count: dragover_count
			originalEvent: moveEvent
		dragover_count = dragover_count + 1
		@__killTimeout()
		# re-arm: keeps firing every 100ms while the pointer rests
		@__autoRepeatTimeout = CUI.setTimeout
			ms: 100
			track: false
			call: dragover_scroll

	CUI.Events.listen
		node: document
		type: @__event_types.move
		instance: @__ref
		call: (ev) =>
			if not CUI.globalDrag
				return

			# this prevents chrome from focussing element while
			# we drag
			ev.preventDefault()

			$target = ev.getTarget()
			if not $target
				return

			if CUI.globalDrag.ended
				return

			coordinates = CUI.util.getCoordinatesFromEvent(ev)

			diff =
				x: coordinates.pageX - CUI.globalDrag.startCoordinates.pageX
				y: coordinates.pageY - CUI.globalDrag.startCoordinates.pageY
				eventPoint: coordinates

			# restrict movement to the configured axis, if any
			switch @get_axis()
				when "x"
					diff.y = 0
				when "y"
					diff.x = 0

			diff.bare_x = diff.x
			diff.bare_y = diff.y

			# compensate scrolling of the source since drag start
			diff.x += CUI.globalDrag.$source.scrollLeft - CUI.globalDrag.startScroll.left
			diff.y += CUI.globalDrag.$source.scrollTop - CUI.globalDrag.startScroll.top

			if Math.abs(diff.x) >= CUI.globalDrag.threshold or
				Math.abs(diff.y) >= CUI.globalDrag.threshold or
				CUI.globalDrag.dragStarted

				CUI.globalDrag.dragDiff = diff

				if not CUI.globalDrag.dragStarted
					CUI.globalDrag.startEvent.preventDefault()
					@__startDrag(ev, $target, diff)
					if @_get_cursor
						document.body.setAttribute("data-cursor", @_get_cursor(CUI.globalDrag))
					else
						document.body.setAttribute("data-cursor", @getCursor())

				moveEvent = ev
				dragover_scroll()

				@do_drag(ev, $target, diff)
				@_dragging?(ev, CUI.globalDrag, diff)
			return

	# Stop is used by ESC button to stop the dragging.
	end_drag = (ev, stop = false) =>
		start_target = CUI.globalDrag.$source
		start_target_parents = CUI.dom.parents(start_target)
		CUI.globalDrag.ended = true
		document.body.removeAttribute("data-cursor")
		if stop
			CUI.globalDrag.stopped = true
			@stop_drag(ev)
			@_dragstop?(ev, CUI.globalDrag, @)
		else
			@end_drag(ev)
			@_dragend?(ev, CUI.globalDrag, @)

		if @isDestroyed()
			# this can happen if any of the
			# callbacks cleanup / reload
			return

		noClickKill = CUI.globalDrag.noClickKill
		@__cleanup()

		if noClickKill
			return

		has_same_parents = =>
			parents_now = CUI.dom.parents(start_target)
			for p, idx in start_target_parents
				if parents_now[idx] != p
					return false
			return true

		# FIX: has_same_parents must be *called* — a bare function
		# reference is always truthy, so "not has_same_parents" was
		# always false and the DOM-unchanged check never ran.
		if not has_same_parents() or not CUI.dom.isInDOM(ev.getTarget())
			return

		# swallow the click that follows the mouseup ending the drag
		CUI.Events.listen
			type: "click"
			capture: true
			only_once: true
			node: window
			call: (ev) ->
				# console.error "Killing click after drag", ev.getTarget()
				return ev.stop()
		return

	CUI.Events.listen
		node: document
		type: ["keyup"]
		capture: true
		instance: @__ref
		call: (ev) =>
			# FIX: guard against a drag already torn down elsewhere,
			# same as the "end" listener below does
			if not CUI.globalDrag
				return
			if not CUI.globalDrag.dragStarted
				@__cleanup()
				return
			if ev.keyCode() == 27
				# console.error "stopped.."
				end_drag(ev, true)
				return ev.stop()
			return

	CUI.Events.listen
		node: document
		type: @__event_types.end
		capture: true
		instance: @__ref
		call: (ev) =>
			# console.debug "event received: ", ev.getType()
			# console.debug "draggable", ev.type
			if not CUI.globalDrag
				return
			if not CUI.globalDrag.dragStarted
				@__cleanup()
				return
			end_drag(ev)
			return ev.stop()
			# console.debug "mouseup, resetting drag stuff"
			#
	return
getCursor: ->
"grabbing"
__startDrag: (ev, $target, diff) ->
# It's ok to stop the events here, the "mouseup" and "keyup"
# we need to end the drag are initialized before in init drag,
# so they are executed before
# console.debug "start drag", diff
@_dragstart?(ev, CUI.globalDrag)
@init_helper(ev, $target, diff)
CUI.dom.addClass(CUI.globalDrag.$source, @_dragClass)
@start_drag(ev, $target, diff)
CUI.globalDrag.dragStarted = true
# call after first mousedown
before_drag: ->
start_drag: (ev, $target, diff) ->
# do drag
# first call
do_drag: (ev, $target, diff) ->
# position helper
@position_helper(ev, $target, diff)
if CUI.globalDrag.dragoverTarget and CUI.globalDrag.dragoverTarget != $target
CUI.Events.trigger
type: "cui-dragleave"
node: CUI.globalDrag.dragoverTarget
info:
globalDrag: CUI.globalDrag
originalEvent: ev
CUI.globalDrag.dragoverTarget = null
if not CUI.globalDrag.dragoverTarget
CUI.globalDrag.dragoverTarget = $target
# console.debug "target:", $target
CUI.Events.trigger
type: "cui-dragenter"
node: CUI.globalDrag.dragoverTarget
info:
globalDrag: CUI.globalDrag
originalEvent: ev
# trigger our own dragover event on the correct target
CUI.Events.trigger
node: CUI.globalDrag.dragoverTarget
type: "cui-dragover"
info:
globalDrag: CUI.globalDrag
originalEvent: ev
return
cleanup_drag: (ev) ->
if @isDestroyed()
return
CUI.dom.removeClass(CUI.globalDrag.$source, @_dragClass)
CUI.dom.remove(CUI.globalDrag.helperNode)
stop_drag: (ev) ->
@__finish_drag(ev)
@cleanup_drag(ev)
__finish_drag: (ev) ->
if not CUI.globalDrag.dragoverTarget
return
# console.debug "sending pf_dragleave", CUI.globalDrag.dragoverTarget
# console.debug "pf_dragleave.event", CUI.globalDrag.dragoverTarget
CUI.Events.trigger
node: CUI.globalDrag.dragoverTarget
type: "cui-dragleave"
info:
globalDrag: CUI.globalDrag
originalEvent: ev
if not CUI.globalDrag.stopped
# console.error "cui-drop", ev
CUI.Events.trigger
type: "cui-drop"
node: CUI.globalDrag.dragoverTarget
info:
globalDrag: CUI.globalDrag
originalEvent: ev
CUI.Events.trigger
node: CUI.globalDrag.dragoverTarget
type: "cui-dragend"
info:
globalDrag: CUI.globalDrag
originalEvent: ev
CUI.globalDrag.dragoverTarget = null
@
end_drag: (ev) ->
# console.debug CUI.globalDrag.dragoverTarget, ev.getType(), ev
if @isDestroyed()
return
@__finish_drag(ev)
@cleanup_drag(ev)
@
get_helper_pos: (ev, gd, diff) ->
top: CUI.globalDrag.helperNodeStart.top + diff.y
left: CUI.globalDrag.helperNodeStart.left + diff.x
width: CUI.globalDrag.helperNodeStart.width
height: CUI.globalDrag.helperNodeStart.height
get_helper_contain_element: ->
@_helper_contain_element
position_helper: (ev, $target, diff) ->
# console.debug "position helper", CUI.globalDrag.helperNodeStart, ev, $target, diff
if not CUI.globalDrag.helperNode
return
helper_pos = @get_helper_pos(ev, CUI.globalDrag, diff)
pos =
x: helper_pos.left
y: helper_pos.top
w: helper_pos.width
h: helper_pos.height
helper_contain_element = @get_helper_contain_element(ev, $target, diff)
if helper_contain_element
dim_contain = CUI.dom.getDimensions(helper_contain_element)
if dim_contain.clientWidth == 0 or dim_contain.clientHeight == 0
console.warn('Draggable[position_helper]: Containing element has no dimensions.', helper_contain_element);
# pos is changed in place
CUI.Draggable.limitRect pos,
min_x: dim_contain.viewportLeft + dim_contain.borderLeftWidth
max_x: dim_contain.viewportRight - dim_contain.borderRightWidth - CUI.globalDrag.helperNodeStart.marginHorizontal
min_y: dim_contain.viewportTop + dim_contain.borderTopWidth
max_y: dim_contain.viewportBottom - dim_contain.borderBottomWidth - CUI.globalDrag.helperNodeStart.marginVertical
else
dim_contain = CUI.globalDrag.helperNodeStart.body_dim
CUI.Draggable.limitRect pos,
min_x: dim_contain.borderLeftWidth
max_x: dim_contain.scrollWidth - dim_contain.borderRightWidth - CUI.globalDrag.helperNodeStart.marginHorizontal
min_y: dim_contain.borderTopWidth
max_y: dim_contain.scrollHeight - dim_contain.borderBottomWidth - CUI.globalDrag.helperNodeStart.marginVertical
# console.debug "limitRect", CUI.util.dump(pos), dim_contain
helper_pos.top = pos.y
helper_pos.left = pos.x
helper_pos.dragDiff =
x: helper_pos.left - CUI.globalDrag.helperNodeStart.left
y: helper_pos.top - CUI.globalDrag.helperNodeStart.top
if helper_pos.width != CUI.globalDrag.helperNodeStart.width
new_width = helper_pos.width
if helper_pos.height != CUI.globalDrag.helperNodeStart.height
new_height = helper_pos.height
CUI.dom.setStyle CUI.globalDrag.helperNode,
transform: CUI.globalDrag.helperNodeStart.transform+" translateX("+helper_pos.dragDiff.x+"px) translateY("+helper_pos.dragDiff.y+"px)"
CUI.dom.setDimensions CUI.globalDrag.helperNode,
borderBoxWidth: new_width
borderBoxHeight: new_height
CUI.globalDrag.helperPos = helper_pos
return
getCloneSourceForHelper: ->
CUI.globalDrag.$source
get_axis: ->
@_axis
get_helper: (ev, gd, diff) ->
@_helper
get_init_helper_pos: (node, gd, offset = top: 0, left: 0) ->
top: gd.startCoordinates.pageY - offset.top
left: gd.startCoordinates.pageX - offset.left
init_helper: (ev, $target, diff) ->
helper = @get_helper(ev, CUI.globalDrag, diff)
if not helper
return
if helper == "clone"
clone_source = @getCloneSourceForHelper()
hn = clone_source.cloneNode(true)
hn.classList.remove("cui-selected")
# offset the layer to the click
offset =
top: CUI.globalDrag.start.top
left: CUI.globalDrag.start.left
else if CUI.util.isFunction(helper)
hn = CUI.globalDrag.helperNode = helper(CUI.globalDrag)
set_dim = null
else
hn = CUI.globalDrag.helperNode = helper
if not hn
return
CUI.globalDrag.helperNode = hn
CUI.dom.addClass(hn, "cui-drag-drop-select-helper")
document.body.appendChild(hn)
start = @get_init_helper_pos(hn, CUI.globalDrag, offset)
CUI.dom.setStyle(hn, start)
if helper == "clone"
# set width & height
set_dim = CUI.dom.getDimensions(clone_source)
# console.error "measureing clone", set_dim.marginBoxWidth, CUI.globalDrag.$source, dim
CUI.dom.setDimensions hn,
marginBoxWidth: set_dim.marginBoxWidth
marginBoxHeight: set_dim.marginBoxHeight
dim = CUI.dom.getDimensions(hn)
start.width = dim.borderBoxWidth
start.height = dim.borderBoxHeight
start.marginTop = dim.marginTop
start.marginLeft = dim.marginLeft
start.marginVertical = dim.marginVertical
start.marginHorizontal = dim.marginHorizontal
start.transform = dim.computedStyle.transform
if start.transform == 'none'
start.transform = ''
start.body_dim = CUI.dom.getDimensions(document.body)
CUI.globalDrag.helperNodeStart = start
# keep pos inside certain constraints
# pos.fix is an Array containing any of "n","w","e","s"
# limitRect: min_w, min_h, max_w, max_h, min_x, max_x, min_y, max_y
# !!! The order of the parameters is how we want them, in Movable it
# is different for compability reasons
# Clamps pos {x, y, w, h} in place and returns pos. `defaults` fills
# missing pos keys first. Edges listed in pos.fix ("n","w","e","s")
# are held in place by compensating the opposite dimension instead.
@limitRect: (pos, limitRect, defaults={}) ->
	pos.fix = pos.fix or []
	for k, v of defaults
		if CUI.util.isUndef(pos[k])
			pos[k] = v
	# console.debug "limitRect", pos, defaults, limitRect
	# FIX: restore the corrupted key literals — "min_w", "max_x" and
	# "max_y" had been replaced by "<KEY>" placeholders, which broke the
	# width minimum and both far-edge clamps.
	for key in [
		"min_w"
		"max_w"
		"min_h"
		"max_h"
		"min_x"
		"max_x"
		"min_y"
		"max_y"
	]
		value = limitRect[key]
		if CUI.util.isUndef(value)
			continue
		CUI.util.assert(not isNaN(value), "#{CUI.util.getObjectClass(@)}.limitRect", "key #{key} in pos isNaN", pos: pos, defaults: defaults, limitRect: limitRect)
		skey = key.substring(4) # "w" / "h" / "x" / "y"
		mkey = key.substring(0,3) # "min" / "max"
		# max_x / max_y limit the far edge, so subtract the size first
		if key == "max_x"
			value -= pos.w
		if key == "max_y"
			value -= pos.h
		diff = pos[skey] - value
		if mkey == "min"
			if diff >= 0
				continue
		if mkey == "max"
			if diff <= 0
				continue
		# fixed north/west edge: absorb the correction in the size instead
		if skey == "y" and "n" in pos.fix
			pos.h -= diff
			continue
		if skey == "x" and "w" in pos.fix
			pos.w -= diff
			continue
		# console.debug "correcting #{skey} by #{diff} from #{pos[skey]}"
		pos[skey] -= diff
		if skey == "h" and "s" in pos.fix
			# console.debug "FIX y"
			pos.y += diff
		if skey == "w" and "e" in pos.fix
			# console.debug "FIX x"
			pos.x += diff
		if skey == "x" and "e" in pos.fix
			# console.debug "FIX w"
			pos.w += diff
		if skey == "y" and "s" in pos.fix
			# console.debug "FIX h"
			pos.h += diff
	# console.debug "limitRect AFTER", pos, diff
	return pos
###
* coffeescript-ui - Coffeescript User Interface System (CUI)
* Copyright (c) 2013 - 2016 Programmfabrik GmbH
* MIT Licence
* https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org
###
class CUI.Draggable extends CUI.DragDropSelect
@cls = "draggable"
initOpts: ->
super()
@addOpts
dragClass:
default: "cui-dragging"
check: String
helper:
default: "clone"
check: (v) ->
v == "clone" or CUI.util.isElement(v) or CUI.util.isFunction(v) or null
helper_contain_element:
check: (v) ->
CUI.util.isElement(v)
helper_set_pos:
check: Function
get_cursor:
check: Function
support_touch:
check: Boolean
dragend:
check: Function
dragstop:
check: Function
dragstart:
check: Function
dragging:
check: Function
create:
default: -> true
check: Function
axis:
check: ["x", "y"]
# helper_remove_always:
# check: Boolean
# helper_parent:
# default: "document"
# check: ["document", "parent"]
threshold:
default: 2
check: (v) ->
v >= 0
# ms:
# default: 0
# check: (v) ->
# # must be multiple of MouseIsDownListener.interval_ms or 0
# v % CUI.MouseIsDownListener.interval_ms == 0
selector:
check: (v) =>
CUI.util.isString(v) or CUI.util.isFunction(v)
readOpts: ->
super()
@__autoRepeatTimeout = null
if @supportTouch()
@__event_types =
start: ["mousedown", "touchstart"]
end: ["mouseup", "touchend"]
move: ["mousemove", "touchmove"]
else
@__event_types =
start: ["mousedown"]
end: ["mouseup"]
move: ["mousemove"]
@
getClass: ->
if not @_selector
"cui-draggable "+super()
else
super()
supportTouch: ->
!!@_support_touch
__killTimeout: ->
if @__autoRepeatTimeout
CUI.clearTimeout(@__autoRepeatTimeout)
@__autoRepeatTimeout = null
@
__cleanup: ->
@__killTimeout()
if @__ref
CUI.Events.ignore(instance: @__ref)
@__ref = null
if CUI.globalDrag?.instance == @
CUI.globalDrag = null
return
destroy: ->
super()
CUI.dom.remove(CUI.globalDrag?.helperNode)
@__cleanup()
@
init: ->
# console.debug "Draggable", @options.selector
CUI.util.assert(not @_helper_contain_element or CUI.dom.closest(@_element, @_helper_contain_element), "new CUI.sDraggable", "opts.helper_contain_element needs to be parent of opts.element", opts: @opts)
CUI.Events.listen
type: @__event_types.start
node: @element
# capture: true
instance: @
selector: @_selector
call: (ev) =>
if ev.getButton() > 0 and ev.getType() == "mousedown"
# ignore if not the main button
return
if CUI.globalDrag
# ignore if dragging is in progress
return
# console.debug CUI.util.getObjectClass(@), "[mousedown]", ev.getUniqueId(), @element
# hint possible click event listeners like Sidebar to
# not execute the click anymore...
#
position = CUI.util.elementGetPosition(CUI.util.getCoordinatesFromEvent(ev), ev.getTarget())
dim = CUI.dom.getDimensions(ev.getTarget())
if dim.clientWidthScaled > 0 and position.left - dim.scrollLeftScaled > dim.clientWidthScaled
console.warn("Mousedown on a vertical scrollbar, not starting drag.")
return
if dim.clientHeightScaled > 0 and position.top - dim.scrollTopScaled > dim.clientHeightScaled
console.warn("Mousedown on a horizontal scrollbar, not starting drag.")
return
target = ev.getCurrentTarget()
target_dim = CUI.dom.getDimensions(target)
if not CUI.dom.isInDOM(target) or target_dim.clientWidth == 0 or target_dim.clientHeight == 0
return
if CUI.dom.closest(ev.getTarget(), "input,textarea,select")
return
$target = target
# console.debug "attempting to start drag", ev, $target
@init_drag(ev, $target)
return
init_drag: (ev, $target) ->
if not $target
# if subclasses screw with $target, this can happen
return
CUI.globalDrag = @_create?(ev, $target)
# ev.getMousedownEvent?().preventDefault()
if CUI.globalDrag == false
# console.debug("not creating drag handle, opts.create returned 'false'.", ev, @)
return
# ev.preventDefault()
if CUI.util.isNull(CUI.globalDrag) or CUI.globalDrag == true
CUI.globalDrag = {}
CUI.util.assert(CUI.util.isPlainObject(CUI.globalDrag), "CUI.Draggable.init_drag", "returned data must be a plain object", data: CUI.globalDrag)
point = CUI.util.getCoordinatesFromEvent(ev)
position = CUI.util.elementGetPosition(point, $target)
init =
$source: $target
startEvent: ev
startCoordinates: point
instance: @
startScroll:
top: $target.scrollTop
left: $target.scrollLeft
start: position # offset to the $target
threshold: @_threshold
for k, v of init
CUI.globalDrag[k] = v
ev.stopPropagation()
# ev.preventDefault()
@before_drag(ev, $target)
@__ref = new CUI.Dummy() # instance to easily remove events
dragover_count = 0
moveEvent = null
dragover_scroll = =>
# during a dragover scroll, the original target
# might be not available any more, we need to recalculate it
pointTarget = moveEvent.getPointTarget() or moveEvent.getTarget()
CUI.Events.trigger
type: "dragover-scroll"
node: pointTarget
count: dragover_count
originalEvent: moveEvent
dragover_count = dragover_count + 1
@__killTimeout()
@__autoRepeatTimeout = CUI.setTimeout
ms: 100
track: false
call: dragover_scroll
CUI.Events.listen
node: document
type: @__event_types.move
instance: @__ref
call: (ev) =>
if not CUI.globalDrag
return
# this prevents chrome from focussing element while
# we drag
ev.preventDefault()
$target = ev.getTarget()
if not $target
return
if CUI.globalDrag.ended
return
coordinates = CUI.util.getCoordinatesFromEvent(ev)
diff =
x: coordinates.pageX - CUI.globalDrag.startCoordinates.pageX
y: coordinates.pageY - CUI.globalDrag.startCoordinates.pageY
eventPoint: coordinates
switch @get_axis()
when "x"
diff.y = 0
when "y"
diff.x = 0
diff.bare_x = diff.x
diff.bare_y = diff.y
diff.x += CUI.globalDrag.$source.scrollLeft - CUI.globalDrag.startScroll.left
diff.y += CUI.globalDrag.$source.scrollTop - CUI.globalDrag.startScroll.top
if Math.abs(diff.x) >= CUI.globalDrag.threshold or
Math.abs(diff.y) >= CUI.globalDrag.threshold or
CUI.globalDrag.dragStarted
CUI.globalDrag.dragDiff = diff
if not CUI.globalDrag.dragStarted
CUI.globalDrag.startEvent.preventDefault()
@__startDrag(ev, $target, diff)
if @_get_cursor
document.body.setAttribute("data-cursor", @_get_cursor(CUI.globalDrag))
else
document.body.setAttribute("data-cursor", @getCursor())
moveEvent = ev
dragover_scroll()
@do_drag(ev, $target, diff)
@_dragging?(ev, CUI.globalDrag, diff)
return
# Stop is used by ESC button to stop the dragging.
end_drag = (ev, stop = false) =>
start_target = CUI.globalDrag.$source
start_target_parents = CUI.dom.parents(start_target)
CUI.globalDrag.ended = true
document.body.removeAttribute("data-cursor")
if stop
CUI.globalDrag.stopped = true
@stop_drag(ev)
@_dragstop?(ev, CUI.globalDrag, @)
else
@end_drag(ev)
@_dragend?(ev, CUI.globalDrag, @)
if @isDestroyed()
# this can happen if any of the
# callbacks cleanup / reload
return
noClickKill = CUI.globalDrag.noClickKill
@__cleanup()
if noClickKill
return
has_same_parents = =>
parents_now = CUI.dom.parents(start_target)
for p, idx in start_target_parents
if parents_now[idx] != p
return false
return true
if not has_same_parents or not CUI.dom.isInDOM(ev.getTarget())
return
CUI.Events.listen
type: "click"
capture: true
only_once: true
node: window
call: (ev) ->
# console.error "Killing click after drag", ev.getTarget()
return ev.stop()
return
CUI.Events.listen
node: document
type: ["keyup"]
capture: true
instance: @__ref
call: (ev) =>
if not CUI.globalDrag.dragStarted
@__cleanup()
return
if ev.keyCode() == 27
# console.error "stopped.."
end_drag(ev, true)
return ev.stop()
return
CUI.Events.listen
node: document
type: @__event_types.end
capture: true
instance: @__ref
call: (ev) =>
# console.debug "event received: ", ev.getType()
# console.debug "draggable", ev.type
if not CUI.globalDrag
return
if not CUI.globalDrag.dragStarted
@__cleanup()
return
end_drag(ev)
return ev.stop()
# console.debug "mouseup, resetting drag stuff"
#
return
getCursor: ->
"grabbing"
# Internal: transition from "pointer down" to an actual drag. Notifies
# the _dragstart callback, creates the helper node, marks the source
# element with the drag CSS class and flags the global drag as started.
__startDrag: (ev, $target, diff) ->
	# It's ok to stop the events here, the "mouseup" and "keyup"
	# we need to end the drag are initialized before in init drag,
	# so they are executed before
	@_dragstart?(ev, CUI.globalDrag)
	@init_helper(ev, $target, diff)
	# mark the dragged source so CSS can style it while dragging
	CUI.dom.addClass(CUI.globalDrag.$source, @_dragClass)
	@start_drag(ev, $target, diff)
	CUI.globalDrag.dragStarted = true
# Hook: called after the first mousedown, before the drag threshold is
# reached. No-op by default; subclasses may override.
before_drag: ->
# Hook: called exactly once when the drag actually starts. No-op by
# default; subclasses may override.
start_drag: (ev, $target, diff) ->
# Called on every pointer move while dragging: repositions the helper
# node and keeps the synthetic cui-dragleave / cui-dragenter /
# cui-dragover events targeted at the element under the pointer.
do_drag: (ev, $target, diff) ->
	# position helper
	@position_helper(ev, $target, diff)
	# pointer moved to a different element: leave the previous target
	if CUI.globalDrag.dragoverTarget and CUI.globalDrag.dragoverTarget != $target
		CUI.Events.trigger
			type: "cui-dragleave"
			node: CUI.globalDrag.dragoverTarget
			info:
				globalDrag: CUI.globalDrag
				originalEvent: ev
		CUI.globalDrag.dragoverTarget = null
	# (re-)enter the element currently under the pointer
	if not CUI.globalDrag.dragoverTarget
		CUI.globalDrag.dragoverTarget = $target
		CUI.Events.trigger
			type: "cui-dragenter"
			node: CUI.globalDrag.dragoverTarget
			info:
				globalDrag: CUI.globalDrag
				originalEvent: ev
	# trigger our own dragover event on the correct target
	CUI.Events.trigger
		node: CUI.globalDrag.dragoverTarget
		type: "cui-dragover"
		info:
			globalDrag: CUI.globalDrag
			originalEvent: ev
	return
# Remove the visual drag artifacts: the drag CSS class on the source
# element and the helper node (if one was created).
cleanup_drag: (ev) ->
	if @isDestroyed()
		# instance was torn down by a callback; nothing left to clean up
		return
	CUI.dom.removeClass(CUI.globalDrag.$source, @_dragClass)
	CUI.dom.remove(CUI.globalDrag.helperNode)
# Called when the drag is aborted (e.g. via the ESC key): fires the
# closing cui-drag* events and removes the visual artifacts.
stop_drag: (ev) ->
	@__finish_drag(ev)
	@cleanup_drag(ev)
# Internal: fire the closing event sequence on the current dragover
# target — cui-dragleave, then cui-drop (only if the drag was not
# stopped via ESC), then cui-dragend — and clear the target reference.
__finish_drag: (ev) ->
	if not CUI.globalDrag.dragoverTarget
		# pointer never entered a drop target, nothing to notify
		return
	CUI.Events.trigger
		node: CUI.globalDrag.dragoverTarget
		type: "cui-dragleave"
		info:
			globalDrag: CUI.globalDrag
			originalEvent: ev
	if not CUI.globalDrag.stopped
		# only a regular drag end (not an ESC abort) counts as a drop
		CUI.Events.trigger
			type: "cui-drop"
			node: CUI.globalDrag.dragoverTarget
			info:
				globalDrag: CUI.globalDrag
				originalEvent: ev
	CUI.Events.trigger
		node: CUI.globalDrag.dragoverTarget
		type: "cui-dragend"
		info:
			globalDrag: CUI.globalDrag
			originalEvent: ev
	CUI.globalDrag.dragoverTarget = null
	@
# Called when the drag finishes normally (pointer released): fires the
# closing cui-drag* events and removes the visual artifacts.
end_drag: (ev) ->
	if @isDestroyed()
		# a callback destroyed this instance in the meantime
		return
	@__finish_drag(ev)
	@cleanup_drag(ev)
	@
# Absolute position of the helper for the current drag delta: the start
# geometry shifted by diff; width/height stay at their start values.
get_helper_pos: (ev, gd, diff) ->
	start = CUI.globalDrag.helperNodeStart
	top: start.top + diff.y
	left: start.left + diff.x
	width: start.width
	height: start.height
# Element that constrains the helper's movement (from the
# helper_contain_element option); may be unset.
get_helper_contain_element: ->
	return @_helper_contain_element
# Move (and possibly resize) the helper node to follow the pointer,
# clamping its position to the containing element if one is configured,
# otherwise to the document body's dimensions captured at drag start.
position_helper: (ev, $target, diff) ->
	if not CUI.globalDrag.helperNode
		# no helper configured, nothing to position
		return
	helper_pos = @get_helper_pos(ev, CUI.globalDrag, diff)
	pos =
		x: helper_pos.left
		y: helper_pos.top
		w: helper_pos.width
		h: helper_pos.height
	helper_contain_element = @get_helper_contain_element(ev, $target, diff)
	if helper_contain_element
		dim_contain = CUI.dom.getDimensions(helper_contain_element)
		if dim_contain.clientWidth == 0 or dim_contain.clientHeight == 0
			console.warn('Draggable[position_helper]: Containing element has no dimensions.', helper_contain_element);
		# pos is changed in place
		CUI.Draggable.limitRect pos,
			min_x: dim_contain.viewportLeft + dim_contain.borderLeftWidth
			max_x: dim_contain.viewportRight - dim_contain.borderRightWidth - CUI.globalDrag.helperNodeStart.marginHorizontal
			min_y: dim_contain.viewportTop + dim_contain.borderTopWidth
			max_y: dim_contain.viewportBottom - dim_contain.borderBottomWidth - CUI.globalDrag.helperNodeStart.marginVertical
	else
		# no explicit container: clamp against the body geometry that
		# was snapshotted in init_helper
		dim_contain = CUI.globalDrag.helperNodeStart.body_dim
		CUI.Draggable.limitRect pos,
			min_x: dim_contain.borderLeftWidth
			max_x: dim_contain.scrollWidth - dim_contain.borderRightWidth - CUI.globalDrag.helperNodeStart.marginHorizontal
			min_y: dim_contain.borderTopWidth
			max_y: dim_contain.scrollHeight - dim_contain.borderBottomWidth - CUI.globalDrag.helperNodeStart.marginVertical
	helper_pos.top = pos.y
	helper_pos.left = pos.x
	# how far the helper actually moved from its start position (after
	# clamping); used to build the CSS transform below
	helper_pos.dragDiff =
		x: helper_pos.left - CUI.globalDrag.helperNodeStart.left
		y: helper_pos.top - CUI.globalDrag.helperNodeStart.top
	if helper_pos.width != CUI.globalDrag.helperNodeStart.width
		new_width = helper_pos.width
	if helper_pos.height != CUI.globalDrag.helperNodeStart.height
		new_height = helper_pos.height
	# new_width/new_height stay undefined when unchanged, so
	# setDimensions only touches dimensions that actually changed
	CUI.dom.setStyle CUI.globalDrag.helperNode,
		transform: CUI.globalDrag.helperNodeStart.transform+" translateX("+helper_pos.dragDiff.x+"px) translateY("+helper_pos.dragDiff.y+"px)"
	CUI.dom.setDimensions CUI.globalDrag.helperNode,
		borderBoxWidth: new_width
		borderBoxHeight: new_height
	CUI.globalDrag.helperPos = helper_pos
	return
# Node to clone when the "clone" helper mode is used; by default the
# element the drag started on.
getCloneSourceForHelper: ->
	return CUI.globalDrag.$source
# Configured drag axis restriction (from the axis option); may be unset.
get_axis: ->
	return @_axis
# Configured helper (from the helper option): "clone", a factory
# function, a node, or unset for no helper.
get_helper: (ev, gd, diff) ->
	return @_helper
# Initial CSS position for the helper node: the drag's starting page
# coordinates shifted by the given offset (defaults to no offset).
get_init_helper_pos: (node, gd, offset = top: 0, left: 0) ->
	coords = gd.startCoordinates
	top: coords.pageY - offset.top
	left: coords.pageX - offset.left
# Create, attach and position the drag helper node, if a helper is
# configured. Supported helper values: "clone" (deep clone of the
# source element), a factory function, or a DOM node. Also snapshots
# the helper's start geometry into CUI.globalDrag.helperNodeStart for
# position_helper().
init_helper: (ev, $target, diff) ->
	helper = @get_helper(ev, CUI.globalDrag, diff)
	if not helper
		return
	if helper == "clone"
		clone_source = @getCloneSourceForHelper()
		hn = clone_source.cloneNode(true)
		hn.classList.remove("cui-selected")
		# offset the layer to the click
		offset =
			top: CUI.globalDrag.start.top
			left: CUI.globalDrag.start.left
	else if CUI.util.isFunction(helper)
		hn = CUI.globalDrag.helperNode = helper(CUI.globalDrag)
		set_dim = null
	else
		hn = CUI.globalDrag.helperNode = helper
	if not hn
		return
	CUI.globalDrag.helperNode = hn
	CUI.dom.addClass(hn, "cui-drag-drop-select-helper")
	document.body.appendChild(hn)
	# NOTE(review): offset is only assigned in the "clone" branch; for
	# the other branches it is undefined here and get_init_helper_pos
	# falls back to its default {top: 0, left: 0}.
	start = @get_init_helper_pos(hn, CUI.globalDrag, offset)
	CUI.dom.setStyle(hn, start)
	if helper == "clone"
		# set width & height: size the clone exactly like the source
		set_dim = CUI.dom.getDimensions(clone_source)
		CUI.dom.setDimensions hn,
			marginBoxWidth: set_dim.marginBoxWidth
			marginBoxHeight: set_dim.marginBoxHeight
	# snapshot the helper's start geometry for later clamping/moving
	dim = CUI.dom.getDimensions(hn)
	start.width = dim.borderBoxWidth
	start.height = dim.borderBoxHeight
	start.marginTop = dim.marginTop
	start.marginLeft = dim.marginLeft
	start.marginVertical = dim.marginVertical
	start.marginHorizontal = dim.marginHorizontal
	start.transform = dim.computedStyle.transform
	if start.transform == 'none'
		start.transform = ''
	start.body_dim = CUI.dom.getDimensions(document.body)
	CUI.globalDrag.helperNodeStart = start
# keep pos inside certain constraints
# pos.fix is an Array containing any of "n","w","e","s"
# limitRect: min_w, min_h, max_w, max_h, min_x, max_x, min_y, max_y
# !!! The order of the parameters is how we want them, in Movable it
# is different for compability reasons
#
# pos is modified in place and also returned.
@limitRect: (pos, limitRect, defaults={}) ->
	pos.fix = pos.fix or []
	# fill missing pos entries from the defaults
	for k, v of defaults
		if CUI.util.isUndef(pos[k])
			pos[k] = v
	# BUGFIX: the three redaction-corrupted string literals are
	# restored: the first list entry is "min_w" (min/max pairs for
	# w, h, x, y), and the two edge adjustments below apply to
	# "max_x" (subtracts the width) and "max_y" (subtracts the height).
	for key in [
		"min_w"
		"max_w"
		"min_h"
		"max_h"
		"min_x"
		"max_x"
		"min_y"
		"max_y"
	]
		value = limitRect[key]
		if CUI.util.isUndef(value)
			continue
		CUI.util.assert(not isNaN(value), "#{CUI.util.getObjectClass(@)}.limitRect", "key #{key} in pos isNaN", pos: pos, defaults: defaults, limitRect: limitRect)
		skey = key.substring(4)
		mkey = key.substring(0,3)
		# the right/bottom limits constrain the far edge: shift them by
		# the rect's size so they can be compared against x/y directly
		if key == "max_x"
			value -= pos.w
		if key == "max_y"
			value -= pos.h
		diff = pos[skey] - value
		if mkey == "min"
			if diff >= 0
				continue
		if mkey == "max"
			if diff <= 0
				continue
		# a fixed north/west edge absorbs the correction into the size
		if skey == "y" and "n" in pos.fix
			pos.h -= diff
			continue
		if skey == "x" and "w" in pos.fix
			pos.w -= diff
			continue
		# otherwise shift the offending coordinate/size back in range
		pos[skey]-=diff
		# fixed south/east edges compensate so the far edge stays put
		if skey == "h" and "s" in pos.fix
			pos.y += diff
		if skey == "w" and "e" in pos.fix
			pos.x += diff
		if skey == "x" and "e" in pos.fix
			pos.w += diff
		if skey == "y" and "s" in pos.fix
			pos.h += diff
	return pos
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.999911904335022,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/lib/play-detail-list.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { PlayDetail } from 'play-detail'
import { createElement as el, PureComponent } from 'react'
import * as React from 'react'
import { div } from 'react-dom-factories'
import { activeKeyDidChange, ContainerContext, KeyContext } from 'stateful-activation-context'
osu = window.osu
export class PlayDetailList extends PureComponent
constructor: (props) ->
super props
@activeKeyDidChange = activeKeyDidChange.bind(@)
@state = {}
render: =>
classMods = ['menu-active'] if @state.activeKey?
el ContainerContext.Provider,
value:
activeKeyDidChange: @activeKeyDidChange
div
className: osu.classWithModifiers('play-detail-list', classMods)
@props.scores.map (score, key) =>
activated = @state.activeKey == key
el KeyContext.Provider,
key: key
value: key
el PlayDetail,
{ activated, score }
| 21198 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { PlayDetail } from 'play-detail'
import { createElement as el, PureComponent } from 'react'
import * as React from 'react'
import { div } from 'react-dom-factories'
import { activeKeyDidChange, ContainerContext, KeyContext } from 'stateful-activation-context'
osu = window.osu
export class PlayDetailList extends PureComponent
constructor: (props) ->
super props
@activeKeyDidChange = activeKeyDidChange.bind(@)
@state = {}
render: =>
classMods = ['menu-active'] if @state.activeKey?
el ContainerContext.Provider,
value:
activeKeyDidChange: @activeKeyDidChange
div
className: osu.classWithModifiers('play-detail-list', classMods)
@props.scores.map (score, key) =>
activated = @state.activeKey == key
el KeyContext.Provider,
key: key
value: key
el PlayDetail,
{ activated, score }
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { PlayDetail } from 'play-detail'
import { createElement as el, PureComponent } from 'react'
import * as React from 'react'
import { div } from 'react-dom-factories'
import { activeKeyDidChange, ContainerContext, KeyContext } from 'stateful-activation-context'
osu = window.osu
export class PlayDetailList extends PureComponent
constructor: (props) ->
super props
@activeKeyDidChange = activeKeyDidChange.bind(@)
@state = {}
render: =>
classMods = ['menu-active'] if @state.activeKey?
el ContainerContext.Provider,
value:
activeKeyDidChange: @activeKeyDidChange
div
className: osu.classWithModifiers('play-detail-list', classMods)
@props.scores.map (score, key) =>
activated = @state.activeKey == key
el KeyContext.Provider,
key: key
value: key
el PlayDetail,
{ activated, score }
|
[
{
"context": "xtends LayerInfo\n @shouldParse: (key) -> key is 'TySh'\n\n TRANSFORM_VALUE = ['xx', 'xy', 'yx', 'yy', 't",
"end": 246,
"score": 0.9982836246490479,
"start": 242,
"tag": "KEY",
"value": "TySh"
}
] | lib/psd/layer_info/typetool.coffee | PUGE/psd.js | 0 | _ = require 'lodash'
parseEngineData = require 'parse-engine-data'
LayerInfo = require '../layer_info.coffee'
Descriptor = require '../descriptor.coffee'
module.exports = class TextElements extends LayerInfo
@shouldParse: (key) -> key is 'TySh'
TRANSFORM_VALUE = ['xx', 'xy', 'yx', 'yy', 'tx', 'ty']
COORDS_VALUE = ['left', 'top', 'right', 'bottom']
constructor: (layer, length) ->
super(layer, length)
@version = null
@transform = {}
@textVersion = null
@descriptorVersion = null
@textData = null
@engineData = null
@textValue = null
@warpVersion = null
@descriptorVersion = null
@warpData = null
@coords = {}
parse: ->
@version = @file.readShort()
@parseTransformInfo()
@textVersion = @file.readShort()
@descriptorVersion = @file.readInt()
@textData = new Descriptor(@file).parse()
@textValue = @textData['Txt ']
@engineData = parseEngineData(@textData.EngineData)
@warpVersion = @file.readShort()
@descriptorVersion = @file.readInt()
@warpData = new Descriptor(@file).parse()
for name, index in COORDS_VALUE
@coords[name] = @file.readInt()
parseTransformInfo: ->
for name, index in TRANSFORM_VALUE
@transform[name] = @file.readDouble()
fonts: ->
return [] unless @engineData?
@engineData.ResourceDict.FontSet.map (f) -> f.Name
lengthArray: ->
arr = @engineData.EngineDict.StyleRun.RunLengthArray
sum = _.reduce(arr, (m, o) -> m + o)
arr[arr.length - 1] = arr[arr.length - 1] - 1 if sum - @textValue.length == 1
return arr
fontStyles: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.FauxItalic
style = 'italic'
else
style = 'normal'
return style
fontWeights: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.FauxBold
weight = 'bold'
else
weight = 'normal'
return weight
textDecoration: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.Underline
decoration = 'underline'
else
decoration = 'none'
return decoration
leading: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.Leading
leading = f.Leading
else
leading = 'auto'
return leading
tracking: ->
lal = @styles()
return [] if not @styles().Tracking?
@styles().Tracking
sizes: ->
return [] if not @engineData? and not @styles().FontSize?
@styles().FontSize
alignment: ->
return [] unless @engineData?
alignments = ['left', 'right', 'center', 'justify']
@engineData.EngineDict.ParagraphRun.RunArray.map (s) ->
alignments[Math.min(parseInt(s.ParagraphSheet.Properties.Justification, 10), 3)]
# Return all colors used for text in this layer. The colors are returned in RGBA
# format as an array of arrays.
colors: ->
# If the color is opaque black, this field is sometimes omitted.
return [[0, 0, 0, 255]] if not @engineData? or not @styles().FillColor?
@styles().FillColor.map (s) ->
values = s.Values.map (v) -> Math.round(v * 255)
values.push values.shift() # Change ARGB -> RGBA for consistency
values
styles: ->
return {} unless @engineData?
return @_styles if @_styles?
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
@_styles = _.reduce(data, (m, o) ->
for own k, v of o
m[k] or= []
m[k].push v
m
, {})
# Creates the CSS string and returns it. Each property is newline separated
# and not all properties may be present depending on the document.
#
# Colors are returned in rgba() format and fonts may include some internal
# Photoshop fonts.
toCSS: ->
definition =
'font-family': @fonts().join(', ')
'font-size': "#{@sizes()[0]}pt"
'color': "rgba(#{@colors()[0].join(', ')})"
'text-align': @alignment()[0]
css = []
for k, v of definition
continue unless v?
css.push "#{k}: #{v};"
css.join("\n")
export: ->
value: @textValue
font:
lengthArray: @lengthArray()
styles: @fontStyles()
weights: @fontWeights()
names: @fonts()
sizes: @sizes()
tracking: @tracking()
colors: @colors()
alignment: @alignment()
textDecoration: @textDecoration()
leading: @leading()
left: @coords.left
top: @coords.top
right: @coords.right
bottom: @coords.bottom
transform: @transform
| 91535 | _ = require 'lodash'
parseEngineData = require 'parse-engine-data'
LayerInfo = require '../layer_info.coffee'
Descriptor = require '../descriptor.coffee'
module.exports = class TextElements extends LayerInfo
@shouldParse: (key) -> key is '<KEY>'
TRANSFORM_VALUE = ['xx', 'xy', 'yx', 'yy', 'tx', 'ty']
COORDS_VALUE = ['left', 'top', 'right', 'bottom']
constructor: (layer, length) ->
super(layer, length)
@version = null
@transform = {}
@textVersion = null
@descriptorVersion = null
@textData = null
@engineData = null
@textValue = null
@warpVersion = null
@descriptorVersion = null
@warpData = null
@coords = {}
parse: ->
@version = @file.readShort()
@parseTransformInfo()
@textVersion = @file.readShort()
@descriptorVersion = @file.readInt()
@textData = new Descriptor(@file).parse()
@textValue = @textData['Txt ']
@engineData = parseEngineData(@textData.EngineData)
@warpVersion = @file.readShort()
@descriptorVersion = @file.readInt()
@warpData = new Descriptor(@file).parse()
for name, index in COORDS_VALUE
@coords[name] = @file.readInt()
parseTransformInfo: ->
for name, index in TRANSFORM_VALUE
@transform[name] = @file.readDouble()
fonts: ->
return [] unless @engineData?
@engineData.ResourceDict.FontSet.map (f) -> f.Name
lengthArray: ->
arr = @engineData.EngineDict.StyleRun.RunLengthArray
sum = _.reduce(arr, (m, o) -> m + o)
arr[arr.length - 1] = arr[arr.length - 1] - 1 if sum - @textValue.length == 1
return arr
fontStyles: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.FauxItalic
style = 'italic'
else
style = 'normal'
return style
fontWeights: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.FauxBold
weight = 'bold'
else
weight = 'normal'
return weight
textDecoration: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.Underline
decoration = 'underline'
else
decoration = 'none'
return decoration
leading: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.Leading
leading = f.Leading
else
leading = 'auto'
return leading
tracking: ->
lal = @styles()
return [] if not @styles().Tracking?
@styles().Tracking
sizes: ->
return [] if not @engineData? and not @styles().FontSize?
@styles().FontSize
alignment: ->
return [] unless @engineData?
alignments = ['left', 'right', 'center', 'justify']
@engineData.EngineDict.ParagraphRun.RunArray.map (s) ->
alignments[Math.min(parseInt(s.ParagraphSheet.Properties.Justification, 10), 3)]
# Return all colors used for text in this layer. The colors are returned in RGBA
# format as an array of arrays.
colors: ->
# If the color is opaque black, this field is sometimes omitted.
return [[0, 0, 0, 255]] if not @engineData? or not @styles().FillColor?
@styles().FillColor.map (s) ->
values = s.Values.map (v) -> Math.round(v * 255)
values.push values.shift() # Change ARGB -> RGBA for consistency
values
styles: ->
return {} unless @engineData?
return @_styles if @_styles?
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
@_styles = _.reduce(data, (m, o) ->
for own k, v of o
m[k] or= []
m[k].push v
m
, {})
# Creates the CSS string and returns it. Each property is newline separated
# and not all properties may be present depending on the document.
#
# Colors are returned in rgba() format and fonts may include some internal
# Photoshop fonts.
toCSS: ->
definition =
'font-family': @fonts().join(', ')
'font-size': "#{@sizes()[0]}pt"
'color': "rgba(#{@colors()[0].join(', ')})"
'text-align': @alignment()[0]
css = []
for k, v of definition
continue unless v?
css.push "#{k}: #{v};"
css.join("\n")
export: ->
value: @textValue
font:
lengthArray: @lengthArray()
styles: @fontStyles()
weights: @fontWeights()
names: @fonts()
sizes: @sizes()
tracking: @tracking()
colors: @colors()
alignment: @alignment()
textDecoration: @textDecoration()
leading: @leading()
left: @coords.left
top: @coords.top
right: @coords.right
bottom: @coords.bottom
transform: @transform
| true | _ = require 'lodash'
parseEngineData = require 'parse-engine-data'
LayerInfo = require '../layer_info.coffee'
Descriptor = require '../descriptor.coffee'
module.exports = class TextElements extends LayerInfo
@shouldParse: (key) -> key is 'PI:KEY:<KEY>END_PI'
TRANSFORM_VALUE = ['xx', 'xy', 'yx', 'yy', 'tx', 'ty']
COORDS_VALUE = ['left', 'top', 'right', 'bottom']
constructor: (layer, length) ->
super(layer, length)
@version = null
@transform = {}
@textVersion = null
@descriptorVersion = null
@textData = null
@engineData = null
@textValue = null
@warpVersion = null
@descriptorVersion = null
@warpData = null
@coords = {}
parse: ->
@version = @file.readShort()
@parseTransformInfo()
@textVersion = @file.readShort()
@descriptorVersion = @file.readInt()
@textData = new Descriptor(@file).parse()
@textValue = @textData['Txt ']
@engineData = parseEngineData(@textData.EngineData)
@warpVersion = @file.readShort()
@descriptorVersion = @file.readInt()
@warpData = new Descriptor(@file).parse()
for name, index in COORDS_VALUE
@coords[name] = @file.readInt()
parseTransformInfo: ->
for name, index in TRANSFORM_VALUE
@transform[name] = @file.readDouble()
fonts: ->
return [] unless @engineData?
@engineData.ResourceDict.FontSet.map (f) -> f.Name
lengthArray: ->
arr = @engineData.EngineDict.StyleRun.RunLengthArray
sum = _.reduce(arr, (m, o) -> m + o)
arr[arr.length - 1] = arr[arr.length - 1] - 1 if sum - @textValue.length == 1
return arr
fontStyles: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.FauxItalic
style = 'italic'
else
style = 'normal'
return style
fontWeights: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.FauxBold
weight = 'bold'
else
weight = 'normal'
return weight
textDecoration: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.Underline
decoration = 'underline'
else
decoration = 'none'
return decoration
leading: ->
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
data.map (f) ->
if f.Leading
leading = f.Leading
else
leading = 'auto'
return leading
tracking: ->
lal = @styles()
return [] if not @styles().Tracking?
@styles().Tracking
sizes: ->
return [] if not @engineData? and not @styles().FontSize?
@styles().FontSize
alignment: ->
return [] unless @engineData?
alignments = ['left', 'right', 'center', 'justify']
@engineData.EngineDict.ParagraphRun.RunArray.map (s) ->
alignments[Math.min(parseInt(s.ParagraphSheet.Properties.Justification, 10), 3)]
# Return all colors used for text in this layer. The colors are returned in RGBA
# format as an array of arrays.
colors: ->
# If the color is opaque black, this field is sometimes omitted.
return [[0, 0, 0, 255]] if not @engineData? or not @styles().FillColor?
@styles().FillColor.map (s) ->
values = s.Values.map (v) -> Math.round(v * 255)
values.push values.shift() # Change ARGB -> RGBA for consistency
values
styles: ->
return {} unless @engineData?
return @_styles if @_styles?
data = @engineData.EngineDict.StyleRun.RunArray.map (r) ->
r.StyleSheet.StyleSheetData
@_styles = _.reduce(data, (m, o) ->
for own k, v of o
m[k] or= []
m[k].push v
m
, {})
# Creates the CSS string and returns it. Each property is newline separated
# and not all properties may be present depending on the document.
#
# Colors are returned in rgba() format and fonts may include some internal
# Photoshop fonts.
toCSS: ->
definition =
'font-family': @fonts().join(', ')
'font-size': "#{@sizes()[0]}pt"
'color': "rgba(#{@colors()[0].join(', ')})"
'text-align': @alignment()[0]
css = []
for k, v of definition
continue unless v?
css.push "#{k}: #{v};"
css.join("\n")
export: ->
value: @textValue
font:
lengthArray: @lengthArray()
styles: @fontStyles()
weights: @fontWeights()
names: @fonts()
sizes: @sizes()
tracking: @tracking()
colors: @colors()
alignment: @alignment()
textDecoration: @textDecoration()
leading: @leading()
left: @coords.left
top: @coords.top
right: @coords.right
bottom: @coords.bottom
transform: @transform
|
[
{
"context": "\n#\n# Bot Builder SDK Github:\n# https://github.com/Microsoft/BotBuilder\n\n{ Robot, TextMessage, Message, User }",
"end": 193,
"score": 0.7812799215316772,
"start": 184,
"tag": "USERNAME",
"value": "Microsoft"
},
{
"context": "...\"\n @appId = appId\n @appPassword = appPassword\n\n toReceivable: (activity) ->\n ",
"end": 493,
"score": 0.9984325766563416,
"start": 490,
"tag": "PASSWORD",
"value": "app"
}
] | src/adapter-middleware.coffee | stephanepiel/BotFramework-Hubot | 0 | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license.
#
# Microsoft Bot Framework: http://botframework.com
#
# Bot Builder SDK Github:
# https://github.com/Microsoft/BotBuilder
{ Robot, TextMessage, Message, User } = require 'hubot'
LogPrefix = "hubot-botframework-middleware:"
class BaseMiddleware
constructor: (@robot, appId, appPassword) ->
@robot.logger.info "#{LogPrefix} creating middleware..."
@appId = appId
@appPassword = appPassword
toReceivable: (activity) ->
throw new Error('toReceivable not implemented')
toSendable: (context, message) ->
throw new Error('toSendable not implemented')
class TextMiddleware extends BaseMiddleware
# TextMiddleware doesn't use invokes currently, so just return null
handleInvoke: (invokeEvent, connector) ->
return null
toReceivable: (activity) ->
@robot.logger.info "#{LogPrefix} TextMiddleware toReceivable"
address = activity.address
user = @robot.brain.userForId address.user.id, name: address.user.name, room: address.conversation.id
user.activity = activity
if activity.type == 'message'
return new TextMessage(user, activity.text, activity.sourceEvent?.clientActivityId || '')
return new Message(user)
toSendable: (context, message) ->
@robot.logger.info "#{LogPrefix} TextMiddleware toSendable"
if typeof message is 'string'
return {
type: 'message'
text: message
address: context.user.activity.address
}
return message
# Constructs a text message response to indicate an error to the user in the
# message channel they are using
constructErrorResponse: (activity, text) ->
payload =
type: 'message'
text: "#{text}"
address: activity?.address
return payload
# Sends an error message back to the user if authorization isn't supported for the
# channel or prepares and sends the message to hubot for reception
maybeReceive: (activity, connector, authEnabled) ->
# Return an error to the user if the message channel doesn't support authorization
# and authorization is enabled
if authEnabled
@robot.logger.info "#{LogPrefix} Authorization isn\'t supported
for the channel error"
text = "Authorization isn't supported for this channel"
payload = @constructErrorResponse(activity, text)
@send(connector, payload)
else
event = @toReceivable activity
if event?
@robot.receive event
# Sends the payload to the bot framework messaging channel
send: (connector, payload) ->
robot.logger.info "#{LogPrefix} payload = #{JSON.stringify(payload)}"
if !Array.isArray(payload)
payload = [payload]
connector.send payload, (err, _) ->
if err
@robot.logger.error "#{LogPrefix} err = #{JSON.stringify(err)}"
throw err
Middleware = {
'*': TextMiddleware
}
module.exports = {
registerMiddleware: (name, middleware) ->
Middleware[name] = middleware
middlewareFor: (name) ->
Middleware[name] || Middleware['*']
BaseMiddleware
TextMiddleware
}
| 35500 | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license.
#
# Microsoft Bot Framework: http://botframework.com
#
# Bot Builder SDK Github:
# https://github.com/Microsoft/BotBuilder
{ Robot, TextMessage, Message, User } = require 'hubot'
LogPrefix = "hubot-botframework-middleware:"
class BaseMiddleware
constructor: (@robot, appId, appPassword) ->
@robot.logger.info "#{LogPrefix} creating middleware..."
@appId = appId
@appPassword = <PASSWORD>Password
toReceivable: (activity) ->
throw new Error('toReceivable not implemented')
toSendable: (context, message) ->
throw new Error('toSendable not implemented')
class TextMiddleware extends BaseMiddleware
# TextMiddleware doesn't use invokes currently, so just return null
handleInvoke: (invokeEvent, connector) ->
return null
toReceivable: (activity) ->
@robot.logger.info "#{LogPrefix} TextMiddleware toReceivable"
address = activity.address
user = @robot.brain.userForId address.user.id, name: address.user.name, room: address.conversation.id
user.activity = activity
if activity.type == 'message'
return new TextMessage(user, activity.text, activity.sourceEvent?.clientActivityId || '')
return new Message(user)
toSendable: (context, message) ->
@robot.logger.info "#{LogPrefix} TextMiddleware toSendable"
if typeof message is 'string'
return {
type: 'message'
text: message
address: context.user.activity.address
}
return message
# Constructs a text message response to indicate an error to the user in the
# message channel they are using
constructErrorResponse: (activity, text) ->
payload =
type: 'message'
text: "#{text}"
address: activity?.address
return payload
# Sends an error message back to the user if authorization isn't supported for the
# channel or prepares and sends the message to hubot for reception
maybeReceive: (activity, connector, authEnabled) ->
# Return an error to the user if the message channel doesn't support authorization
# and authorization is enabled
if authEnabled
@robot.logger.info "#{LogPrefix} Authorization isn\'t supported
for the channel error"
text = "Authorization isn't supported for this channel"
payload = @constructErrorResponse(activity, text)
@send(connector, payload)
else
event = @toReceivable activity
if event?
@robot.receive event
# Sends the payload to the bot framework messaging channel
send: (connector, payload) ->
robot.logger.info "#{LogPrefix} payload = #{JSON.stringify(payload)}"
if !Array.isArray(payload)
payload = [payload]
connector.send payload, (err, _) ->
if err
@robot.logger.error "#{LogPrefix} err = #{JSON.stringify(err)}"
throw err
Middleware = {
'*': TextMiddleware
}
module.exports = {
registerMiddleware: (name, middleware) ->
Middleware[name] = middleware
middlewareFor: (name) ->
Middleware[name] || Middleware['*']
BaseMiddleware
TextMiddleware
}
| true | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license.
#
# Microsoft Bot Framework: http://botframework.com
#
# Bot Builder SDK Github:
# https://github.com/Microsoft/BotBuilder
{ Robot, TextMessage, Message, User } = require 'hubot'
LogPrefix = "hubot-botframework-middleware:"
class BaseMiddleware
constructor: (@robot, appId, appPassword) ->
@robot.logger.info "#{LogPrefix} creating middleware..."
@appId = appId
@appPassword = PI:PASSWORD:<PASSWORD>END_PIPassword
toReceivable: (activity) ->
throw new Error('toReceivable not implemented')
toSendable: (context, message) ->
throw new Error('toSendable not implemented')
class TextMiddleware extends BaseMiddleware
    # TextMiddleware doesn't use invokes currently, so just return null
    handleInvoke: (invokeEvent, connector) ->
        return null

    # Convert a Bot Framework activity into a hubot message. 'message'
    # activities become TextMessages; anything else becomes a generic Message.
    toReceivable: (activity) ->
        @robot.logger.info "#{LogPrefix} TextMiddleware toReceivable"
        address = activity.address
        user = @robot.brain.userForId address.user.id, name: address.user.name, room: address.conversation.id
        # Stash the raw activity on the user so toSendable can address replies.
        user.activity = activity
        if activity.type == 'message'
            return new TextMessage(user, activity.text, activity.sourceEvent?.clientActivityId || '')
        return new Message(user)

    # Convert a hubot response into a Bot Framework payload. Plain strings are
    # wrapped in a minimal 'message' activity addressed back to the sender;
    # anything else is assumed to already be a sendable payload.
    toSendable: (context, message) ->
        @robot.logger.info "#{LogPrefix} TextMiddleware toSendable"
        if typeof message is 'string'
            return {
                type: 'message'
                text: message
                address: context.user.activity.address
            }
        return message

    # Constructs a text message response to indicate an error to the user in the
    # message channel they are using
    constructErrorResponse: (activity, text) ->
        payload =
            type: 'message'
            text: "#{text}"
            address: activity?.address
        return payload

    # Sends an error message back to the user if authorization isn't supported for the
    # channel or prepares and sends the message to hubot for reception
    maybeReceive: (activity, connector, authEnabled) ->
        # Return an error to the user if the message channel doesn't support authorization
        # and authorization is enabled
        if authEnabled
            @robot.logger.info "#{LogPrefix} Authorization isn't supported for the channel error"
            text = "Authorization isn't supported for this channel"
            payload = @constructErrorResponse(activity, text)
            @send(connector, payload)
        else
            event = @toReceivable activity
            if event?
                @robot.receive event

    # Sends the payload to the bot framework messaging channel
    send: (connector, payload) ->
        # Fixed: was bare `robot.logger` (undefined global); use the instance's @robot.
        @robot.logger.info "#{LogPrefix} payload = #{JSON.stringify(payload)}"
        if !Array.isArray(payload)
            payload = [payload]
        # Fixed: fat arrow keeps @robot bound inside the async callback.
        connector.send payload, (err, _) =>
            if err
                @robot.logger.error "#{LogPrefix} err = #{JSON.stringify(err)}"
                throw err
# Registry mapping a channel name to its middleware class.
# '*' is the wildcard fallback used when no channel-specific entry exists.
Middleware = {
    '*': TextMiddleware
}

module.exports = {
    # Register a middleware class for a named channel (overwrites any existing entry).
    registerMiddleware: (name, middleware) ->
        Middleware[name] = middleware

    # Look up the middleware class for a channel, falling back to the '*' default.
    middlewareFor: (name) ->
        Middleware[name] || Middleware['*']

    BaseMiddleware
    TextMiddleware
}
|
[
{
"context": " cb? _c[key]\n _key = (key) ->\n \"cache.#{_name}.#{key}\"\n class InnerCache\n ",
"end": 391,
"score": 0.9371328353881836,
"start": 383,
"tag": "KEY",
"value": "cache.#{"
},
{
"context": "ey]\n _key = (key) ->\n \"cache.#{_name}.#{key}\"\n class InnerCache\n constructor:",
"end": 405,
"score": 0.8168401718139648,
"start": 392,
"tag": "KEY",
"value": "name}.#{key}\""
}
] | src/js/cache.coffee | GW2Treasures/gw2treasures-assets | 12 | define 'cache', ['storage'], (storage) ->
# Factory producing a namespaced, persistent key/value cache backed by the
# injected `storage` service. Cached values expire after a per-entry TTL.
# NOTE(review): _name, _storage and _c live in the factory's closure, so every
# InnerCache created by this factory shares the same state — confirm whether
# they were meant to be per-instance.
Cache = (name, storage) ->
  _name = 'globalCache'
  # NOTE(review): `_storage =` with `_c = {}` on the next line looks like a
  # dangling assignment — confirm the intended value of _storage here.
  _storage =
  _c = {}
  # Load the CacheObject for `key` into the in-memory map on demand, then
  # invoke cb (if given) with it.
  _raw = (key, cb) ->
    if _c[key] == undefined
      storage.get (_key key), (x) =>
        cb? _c[key] = new CacheObject x
    else
      cb? _c[key]
  # Fully-qualified storage key: "cache.<namespace>.<key>".
  _key = (key) ->
    "cache.#{_name}.#{key}"
  class InnerCache
    constructor: (name, storage) ->
      _name = name || 'globalCache'
      _storage = storage || window['storage']
      _c = {}
      #console.log "new cache: #{_name}"
      # Restore the set of known keys for this namespace; values are loaded
      # lazily by _raw.
      _storage.get "cache.#{_name}", (x) =>
        _c[y] = undefined for y in x if x
    # Store `value` under `key` with a TTL in seconds (default 360) and
    # persist both the entry and the key index.
    'put': (key, value, seconds, cb) ->
      _c[key] = new CacheObject key, value, Math.ceil new Date()/1000 + (seconds || 360)
      _storage.put "cache.#{_name}", Object.keys _c
      _storage.put (_key key), _c[key]
      cb? value
    # Fetch `key`, evicting and yielding undefined when the entry has expired
    # or is unknown.
    'get': (key, cb) ->
      if @has key
        _raw key, (raw) =>
          if raw.seconds < new Date()/1000
            @remove key
            cb? undefined
          else
            cb? raw.value
      else
        cb? undefined
    # Delete `key` from memory and persistent storage; cb receives whether
    # anything was removed.
    'remove': (key, cb) ->
      if @has key
        _storage.remove _key key
        delete _c[key]
        _storage.put "cache.#{_name}", Object.keys _c
        cb? true
      else
        cb? false
    # Return the cached value for `key`, or compute it via valuecb() and
    # cache it with the given TTL.
    'remember': (key, seconds, valuecb, cb) ->
      @get key, (value) =>
        if value == undefined
          value = valuecb()
          @put key, value, seconds
        cb? value
    # True when `key` is known to this cache (it may still be expired).
    'has': (key) ->
      key of _c
    # Remove every entry in the namespace.
    'clear': ->
      #console.log "clear cache: #{_name}"
      for key in Object.keys _c
        @remove key
      return
    # Touch every entry via get, evicting any that have expired.
    'clean': ->
      #console.log "clean cache: #{_name}"
      for key in Object.keys _c
        @get key
      return
  # Wrapper for a cached entry: either rehydrated from a stored object `o`,
  # or built from explicit (key, value, expiry-seconds) arguments.
  class CacheObject
    constructor: (o, value, seconds) ->
      return null if o == null
      if o && !value && !seconds
        @value = o.value
        @seconds = o.seconds
        @key = o.key
      else
        @key = o
        @value = value
        @seconds = seconds
  new InnerCache name, storage
| 52924 | define 'cache', ['storage'], (storage) ->
Cache = (name, storage) ->
_name = 'globalCache'
_storage =
_c = {}
_raw = (key, cb) ->
if _c[key] == undefined
storage.get (_key key), (x) =>
cb? _c[key] = new CacheObject x
else
cb? _c[key]
_key = (key) ->
"<KEY>_<KEY>
class InnerCache
constructor: (name, storage) ->
_name = name || 'globalCache'
_storage = storage || window['storage']
_c = {}
#console.log "new cache: #{_name}"
_storage.get "cache.#{_name}", (x) =>
_c[y] = undefined for y in x if x
'put': (key, value, seconds, cb) ->
_c[key] = new CacheObject key, value, Math.ceil new Date()/1000 + (seconds || 360)
_storage.put "cache.#{_name}", Object.keys _c
_storage.put (_key key), _c[key]
cb? value
'get': (key, cb) ->
if @has key
_raw key, (raw) =>
if raw.seconds < new Date()/1000
@remove key
cb? undefined
else
cb? raw.value
else
cb? undefined
'remove': (key, cb) ->
if @has key
_storage.remove _key key
delete _c[key]
_storage.put "cache.#{_name}", Object.keys _c
cb? true
else
cb? false
'remember': (key, seconds, valuecb, cb) ->
@get key, (value) =>
if value == undefined
value = valuecb()
@put key, value, seconds
cb? value
'has': (key) ->
key of _c
'clear': ->
#console.log "clear cache: #{_name}"
for key in Object.keys _c
@remove key
return
'clean': ->
#console.log "clean cache: #{_name}"
for key in Object.keys _c
@get key
return
class CacheObject
constructor: (o, value, seconds) ->
return null if o == null
if o && !value && !seconds
@value = o.value
@seconds = o.seconds
@key = o.key
else
@key = o
@value = value
@seconds = seconds
new InnerCache name, storage
| true | define 'cache', ['storage'], (storage) ->
Cache = (name, storage) ->
_name = 'globalCache'
_storage =
_c = {}
_raw = (key, cb) ->
if _c[key] == undefined
storage.get (_key key), (x) =>
cb? _c[key] = new CacheObject x
else
cb? _c[key]
_key = (key) ->
"PI:KEY:<KEY>END_PI_PI:KEY:<KEY>END_PI
class InnerCache
constructor: (name, storage) ->
_name = name || 'globalCache'
_storage = storage || window['storage']
_c = {}
#console.log "new cache: #{_name}"
_storage.get "cache.#{_name}", (x) =>
_c[y] = undefined for y in x if x
'put': (key, value, seconds, cb) ->
_c[key] = new CacheObject key, value, Math.ceil new Date()/1000 + (seconds || 360)
_storage.put "cache.#{_name}", Object.keys _c
_storage.put (_key key), _c[key]
cb? value
'get': (key, cb) ->
if @has key
_raw key, (raw) =>
if raw.seconds < new Date()/1000
@remove key
cb? undefined
else
cb? raw.value
else
cb? undefined
'remove': (key, cb) ->
if @has key
_storage.remove _key key
delete _c[key]
_storage.put "cache.#{_name}", Object.keys _c
cb? true
else
cb? false
'remember': (key, seconds, valuecb, cb) ->
@get key, (value) =>
if value == undefined
value = valuecb()
@put key, value, seconds
cb? value
'has': (key) ->
key of _c
'clear': ->
#console.log "clear cache: #{_name}"
for key in Object.keys _c
@remove key
return
'clean': ->
#console.log "clean cache: #{_name}"
for key in Object.keys _c
@get key
return
class CacheObject
constructor: (o, value, seconds) ->
return null if o == null
if o && !value && !seconds
@value = o.value
@seconds = o.seconds
@key = o.key
else
@key = o
@value = value
@seconds = seconds
new InnerCache name, storage
|
[
{
"context": " 'key', {\n links: [{ bucket: 'test', key: 'doc%2$@', tag: 'next' }]\n fire: true\n overr",
"end": 1711,
"score": 0.9897633790969849,
"start": 1704,
"tag": "KEY",
"value": "doc%2$@"
},
{
"context": "meta.bucket, 'test'\n assert.equal meta.key, 'bzPygTesROPtGGVUKfyvp2RR49'\n assert.equal meta.statusCode, 201\n \n ",
"end": 2768,
"score": 0.9997221231460571,
"start": 2742,
"tag": "KEY",
"value": "bzPygTesROPtGGVUKfyvp2RR49"
},
{
"context": "ta {\n bucket: 'spåce bucket'\n key: 'çøµπléx–key'\n encodeUri: true\n }\n \n 'shou",
"end": 4250,
"score": 0.9971930384635925,
"start": 4239,
"tag": "KEY",
"value": "çøµπléx–key"
}
] | spec/test_http_meta.coffee | geeklist/riak-js-geeklist | 1 | vows = require 'vows'
assert = require 'assert'
Meta = require '../src/http_meta'
# NOTE(review): `full` appears unused in this suite.
full = {}
# Vows suite for the HTTP transport's Meta object: path construction,
# header serialization/parsing, and URI encoding.
vows.describe('Meta for HTTP').addBatch(
  'a meta with a key':
    topic: ->
      new Meta 'bucket', 'key'
    'gives back its HTTP path': (keyed) ->
      assert.equal "/riak/bucket/key", keyed.path
  # Parsing a full Riak GET response (headers + status) into Meta fields.
  'a meta loaded with a Riak response':
    topic: ->
      riakResponse =
        httpVersion: '1.1'
        headers:
          vary: 'Accept-Encoding'
          server: 'MochiWeb/1.1 WebMachine/1.7.1 (participate in the frantic)'
          'x-riak-vclock': 'a85hYGBgzGDKBVIsbLvm1WYwJTLmsTLcjeE5ypcFAA=='
          'x-riak-meta-acl': 'users:r,administrators:f'
          link: '</riak/test>; rel="up", </riak/test/doc%252%24%40>; riaktag="next"'
          'last-modified': 'Wed, 10 Mar 2010 18:11:41 GMT'
          etag: '6dQBm9oYA1mxRSH0e96l5W'
          date: 'Wed, 10 Mar 2010 18:11:52 GMT'
          'content-type': 'text/rtf'
          'content-length': '2946'
        statusCode: 200
      meta = new Meta { bucket: 'bucket', key: 'key' }
      meta.loadResponse riakResponse
      meta
    'parses correctly from HTTP headers': (meta) ->
      assert.deepEqual meta.usermeta, { acl: 'users:r,administrators:f' }
      assert.equal meta.statusCode, 200
      assert.equal meta.date, undefined
      assert.equal new Date(meta.lastMod).getTime(), 1268244701000
      assert.deepEqual meta.links, [{ bucket: 'test', key: 'doc%2$@', tag: 'next' }]
      assert.equal meta.contentType, 'text/rtf'
      assert.equal meta.path, '/riak/bucket/key'
  # Serializing Meta properties (links, usermeta, extra headers) to headers.
  'a meta with some properties and headers':
    topic: ->
      meta = new Meta 'bucket', 'key', {
        links: [{ bucket: 'test', key: 'doc%2$@', tag: 'next' }]
        fire: true
        overridable: true
        headers: { Authorization: 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==', 'X-Riak-Meta-overridable': 'yes!' }
      }
      meta.toHeaders()
    'parses them correctly': (headers) ->
      assert.notEqual headers.statusCode?
      assert.equal headers['X-Riak-Meta-fire'], 'true'
      assert.equal headers['Link'], '</riak/test/doc%252%24%40>; riaktag="next"'
    'overrides them correctly': (headers) ->
      assert.equal headers['X-Riak-Meta-overridable'], 'yes!'
      assert.equal headers['Authorization'], 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
  # POST responses only carry a Location header; bucket/key come from there.
  'a meta partly loaded with POST response headers':
    topic: ->
      riakPostResponse =
        headers:
          location: '/riak/test/bzPygTesROPtGGVUKfyvp2RR49'
        statusCode: 201
      meta = new Meta
      meta.loadResponse riakPostResponse
      meta
    'returns its location and status code': (meta) ->
      assert.equal meta.bucket, 'test'
      assert.equal meta.key, 'bzPygTesROPtGGVUKfyvp2RR49'
      assert.equal meta.statusCode, 201
  'a meta with JSON data':
    topic: ->
      meta = new Meta 'bucket', 'json-data'
      meta.data = { test: true }
      meta.toHeaders()
    'guesses its content-type': (headers) ->
      assert.equal headers['Content-Type'], 'application/json'
      assert.equal headers['Link'], undefined
  'a meta without a vclock':
    topic: ->
      meta = new Meta 'bucket', 'test'
      meta.toHeaders()
    'does not send a clientId header': (headers) ->
      assert.isUndefined headers['X-Riak-ClientId']
  'a meta with responseEncoding=binary':
    topic: ->
      new Meta 'bucket', 'binary-data', {
        data: new Buffer('binary-data')
        responseEncoding: 'binary'
      }
    'recognizes it as a first-class property': (meta) ->
      assert.equal meta.responseEncoding, 'binary'
  # Query-style properties should all be rendered into the query string.
  'a meta with query properties':
    topic: ->
      new Meta {
        bucket: 'bucket'
        key: 'key'
        r: 1
        w: 2
        dw: 2
        rw: 2
        keys: true
        props: false
        vtag: 'asweetvtag'
        returnbody: true
        chunked: true
      }
    'knows how to create its HTTP path': (meta) ->
      assert.equal "/riak/bucket/key?r=1&w=2&dw=2&rw=2&keys=true&props=false&vtag=asweetvtag&returnbody=true&chunked=true", meta.path
  # Non-ASCII bucket/key names must be percent-encoded when encodeUri is set.
  'a Meta that encodes its URI components':
    topic: ->
      new Meta {
        bucket: 'spåce bucket'
        key: 'çøµπléx–key'
        encodeUri: true
      }
    'should have a URI encoded path': (meta) ->
      assert.equal "/riak/sp%C3%A5ce%20bucket/%C3%A7%C3%B8%C2%B5%CF%80l%C3%A9x%E2%80%93key", meta.path
).export module | 90440 | vows = require 'vows'
assert = require 'assert'
Meta = require '../src/http_meta'
full = {}
vows.describe('Meta for HTTP').addBatch(
'a meta with a key':
topic: ->
new Meta 'bucket', 'key'
'gives back its HTTP path': (keyed) ->
assert.equal "/riak/bucket/key", keyed.path
'a meta loaded with a Riak response':
topic: ->
riakResponse =
httpVersion: '1.1'
headers:
vary: 'Accept-Encoding'
server: 'MochiWeb/1.1 WebMachine/1.7.1 (participate in the frantic)'
'x-riak-vclock': 'a85hYGBgzGDKBVIsbLvm1WYwJTLmsTLcjeE5ypcFAA=='
'x-riak-meta-acl': 'users:r,administrators:f'
link: '</riak/test>; rel="up", </riak/test/doc%252%24%40>; riaktag="next"'
'last-modified': 'Wed, 10 Mar 2010 18:11:41 GMT'
etag: '6dQBm9oYA1mxRSH0e96l5W'
date: 'Wed, 10 Mar 2010 18:11:52 GMT'
'content-type': 'text/rtf'
'content-length': '2946'
statusCode: 200
meta = new Meta { bucket: 'bucket', key: 'key' }
meta.loadResponse riakResponse
meta
'parses correctly from HTTP headers': (meta) ->
assert.deepEqual meta.usermeta, { acl: 'users:r,administrators:f' }
assert.equal meta.statusCode, 200
assert.equal meta.date, undefined
assert.equal new Date(meta.lastMod).getTime(), 1268244701000
assert.deepEqual meta.links, [{ bucket: 'test', key: 'doc%2$@', tag: 'next' }]
assert.equal meta.contentType, 'text/rtf'
assert.equal meta.path, '/riak/bucket/key'
'a meta with some properties and headers':
topic: ->
meta = new Meta 'bucket', 'key', {
links: [{ bucket: 'test', key: '<KEY>', tag: 'next' }]
fire: true
overridable: true
headers: { Authorization: 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==', 'X-Riak-Meta-overridable': 'yes!' }
}
meta.toHeaders()
'parses them correctly': (headers) ->
assert.notEqual headers.statusCode?
assert.equal headers['X-Riak-Meta-fire'], 'true'
assert.equal headers['Link'], '</riak/test/doc%252%24%40>; riaktag="next"'
'overrides them correctly': (headers) ->
assert.equal headers['X-Riak-Meta-overridable'], 'yes!'
assert.equal headers['Authorization'], 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
'a meta partly loaded with POST response headers':
topic: ->
riakPostResponse =
headers:
location: '/riak/test/bzPygTesROPtGGVUKfyvp2RR49'
statusCode: 201
meta = new Meta
meta.loadResponse riakPostResponse
meta
'returns its location and status code': (meta) ->
assert.equal meta.bucket, 'test'
assert.equal meta.key, '<KEY>'
assert.equal meta.statusCode, 201
'a meta with JSON data':
topic: ->
meta = new Meta 'bucket', 'json-data'
meta.data = { test: true }
meta.toHeaders()
'guesses its content-type': (headers) ->
assert.equal headers['Content-Type'], 'application/json'
assert.equal headers['Link'], undefined
'a meta without a vclock':
topic: ->
meta = new Meta 'bucket', 'test'
meta.toHeaders()
'does not send a clientId header': (headers) ->
assert.isUndefined headers['X-Riak-ClientId']
'a meta with responseEncoding=binary':
topic: ->
new Meta 'bucket', 'binary-data', {
data: new Buffer('binary-data')
responseEncoding: 'binary'
}
'recognizes it as a first-class property': (meta) ->
assert.equal meta.responseEncoding, 'binary'
'a meta with query properties':
topic: ->
new Meta {
bucket: 'bucket'
key: 'key'
r: 1
w: 2
dw: 2
rw: 2
keys: true
props: false
vtag: 'asweetvtag'
returnbody: true
chunked: true
}
'knows how to create its HTTP path': (meta) ->
assert.equal "/riak/bucket/key?r=1&w=2&dw=2&rw=2&keys=true&props=false&vtag=asweetvtag&returnbody=true&chunked=true", meta.path
'a Meta that encodes its URI components':
topic: ->
new Meta {
bucket: 'spåce bucket'
key: '<KEY>'
encodeUri: true
}
'should have a URI encoded path': (meta) ->
assert.equal "/riak/sp%C3%A5ce%20bucket/%C3%A7%C3%B8%C2%B5%CF%80l%C3%A9x%E2%80%93key", meta.path
).export module | true | vows = require 'vows'
assert = require 'assert'
Meta = require '../src/http_meta'
full = {}
vows.describe('Meta for HTTP').addBatch(
'a meta with a key':
topic: ->
new Meta 'bucket', 'key'
'gives back its HTTP path': (keyed) ->
assert.equal "/riak/bucket/key", keyed.path
'a meta loaded with a Riak response':
topic: ->
riakResponse =
httpVersion: '1.1'
headers:
vary: 'Accept-Encoding'
server: 'MochiWeb/1.1 WebMachine/1.7.1 (participate in the frantic)'
'x-riak-vclock': 'a85hYGBgzGDKBVIsbLvm1WYwJTLmsTLcjeE5ypcFAA=='
'x-riak-meta-acl': 'users:r,administrators:f'
link: '</riak/test>; rel="up", </riak/test/doc%252%24%40>; riaktag="next"'
'last-modified': 'Wed, 10 Mar 2010 18:11:41 GMT'
etag: '6dQBm9oYA1mxRSH0e96l5W'
date: 'Wed, 10 Mar 2010 18:11:52 GMT'
'content-type': 'text/rtf'
'content-length': '2946'
statusCode: 200
meta = new Meta { bucket: 'bucket', key: 'key' }
meta.loadResponse riakResponse
meta
'parses correctly from HTTP headers': (meta) ->
assert.deepEqual meta.usermeta, { acl: 'users:r,administrators:f' }
assert.equal meta.statusCode, 200
assert.equal meta.date, undefined
assert.equal new Date(meta.lastMod).getTime(), 1268244701000
assert.deepEqual meta.links, [{ bucket: 'test', key: 'doc%2$@', tag: 'next' }]
assert.equal meta.contentType, 'text/rtf'
assert.equal meta.path, '/riak/bucket/key'
'a meta with some properties and headers':
topic: ->
meta = new Meta 'bucket', 'key', {
links: [{ bucket: 'test', key: 'PI:KEY:<KEY>END_PI', tag: 'next' }]
fire: true
overridable: true
headers: { Authorization: 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==', 'X-Riak-Meta-overridable': 'yes!' }
}
meta.toHeaders()
'parses them correctly': (headers) ->
assert.notEqual headers.statusCode?
assert.equal headers['X-Riak-Meta-fire'], 'true'
assert.equal headers['Link'], '</riak/test/doc%252%24%40>; riaktag="next"'
'overrides them correctly': (headers) ->
assert.equal headers['X-Riak-Meta-overridable'], 'yes!'
assert.equal headers['Authorization'], 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
'a meta partly loaded with POST response headers':
topic: ->
riakPostResponse =
headers:
location: '/riak/test/bzPygTesROPtGGVUKfyvp2RR49'
statusCode: 201
meta = new Meta
meta.loadResponse riakPostResponse
meta
'returns its location and status code': (meta) ->
assert.equal meta.bucket, 'test'
assert.equal meta.key, 'PI:KEY:<KEY>END_PI'
assert.equal meta.statusCode, 201
'a meta with JSON data':
topic: ->
meta = new Meta 'bucket', 'json-data'
meta.data = { test: true }
meta.toHeaders()
'guesses its content-type': (headers) ->
assert.equal headers['Content-Type'], 'application/json'
assert.equal headers['Link'], undefined
'a meta without a vclock':
topic: ->
meta = new Meta 'bucket', 'test'
meta.toHeaders()
'does not send a clientId header': (headers) ->
assert.isUndefined headers['X-Riak-ClientId']
'a meta with responseEncoding=binary':
topic: ->
new Meta 'bucket', 'binary-data', {
data: new Buffer('binary-data')
responseEncoding: 'binary'
}
'recognizes it as a first-class property': (meta) ->
assert.equal meta.responseEncoding, 'binary'
'a meta with query properties':
topic: ->
new Meta {
bucket: 'bucket'
key: 'key'
r: 1
w: 2
dw: 2
rw: 2
keys: true
props: false
vtag: 'asweetvtag'
returnbody: true
chunked: true
}
'knows how to create its HTTP path': (meta) ->
assert.equal "/riak/bucket/key?r=1&w=2&dw=2&rw=2&keys=true&props=false&vtag=asweetvtag&returnbody=true&chunked=true", meta.path
'a Meta that encodes its URI components':
topic: ->
new Meta {
bucket: 'spåce bucket'
key: 'PI:KEY:<KEY>END_PI'
encodeUri: true
}
'should have a URI encoded path': (meta) ->
assert.equal "/riak/sp%C3%A5ce%20bucket/%C3%A7%C3%B8%C2%B5%CF%80l%C3%A9x%E2%80%93key", meta.path
).export module |
[
{
"context": " null)\n requestParams =\n password: password\n confirm_password: confirmPassword\n ",
"end": 637,
"score": 0.9991713166236877,
"start": 629,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " password: password\n confirm_password: confirmPassword\n reset_password_token: resetPasswordToke",
"end": 681,
"score": 0.9967879056930542,
"start": 666,
"tag": "PASSWORD",
"value": "confirmPassword"
},
{
"context": "oginParams = {email: res.users[0].email, password: password}\n Session.create(loginParams, redirect",
"end": 881,
"score": 0.9989318251609802,
"start": 873,
"tag": "PASSWORD",
"value": "password"
}
] | ui/app/components/reset-password-page/reset-password-page.coffee | Metaburn/cobudget | 1 | module.exports =
  # Route the user lands on from the reset-password email link; the token
  # arrives as a query parameter.
  url: '/reset_password?reset_password_token'
  template: require('./reset-password-page.html')
  reloadOnSearch: false
  controller: (Dialog, LoadBar, $location, Records, $scope, Session, $stateParams, Toast) ->
    $scope.formData = {}
    # Capture the token once so it survives clearing it from the URL below.
    resetPasswordToken = $stateParams.reset_password_token
    # Form handler: validate the two password fields match, then submit the
    # reset and log the user in with the new password.
    $scope.resetPassword = ->
      LoadBar.start()
      password = $scope.formData.password
      confirmPassword = $scope.formData.confirmPassword
      $scope.formData = {}
      if password == confirmPassword
        # Drop the token from the URL so it isn't left visible/in history.
        $location.search('reset_password_token', null)
        requestParams =
          password: password
          confirm_password: confirmPassword
          reset_password_token: resetPasswordToken
        Records.users.resetPassword(requestParams)
        .then (res) ->
          # On success, sign the user straight in with the new credentials.
          loginParams = {email: res.users[0].email, password: password}
          Session.create(loginParams, redirectTo: 'group')
        .catch (err) ->
          # Any failure is treated as an expired/invalid token.
          Toast.show('Your reset password token has expired, please request another')
          $location.path('/forgot_password')
        # NOTE(review): this runs right after the request is issued, not after
        # it resolves — confirm the load bar is meant to stop this early.
        LoadBar.stop()
      else
        LoadBar.stop()
        Dialog.alert(title: 'Error!', content: 'Passwords must match.')
| 199108 | module.exports =
url: '/reset_password?reset_password_token'
template: require('./reset-password-page.html')
reloadOnSearch: false
controller: (Dialog, LoadBar, $location, Records, $scope, Session, $stateParams, Toast) ->
$scope.formData = {}
resetPasswordToken = $stateParams.reset_password_token
$scope.resetPassword = ->
LoadBar.start()
password = $scope.formData.password
confirmPassword = $scope.formData.confirmPassword
$scope.formData = {}
if password == confirmPassword
$location.search('reset_password_token', null)
requestParams =
password: <PASSWORD>
confirm_password: <PASSWORD>
reset_password_token: resetPasswordToken
Records.users.resetPassword(requestParams)
.then (res) ->
loginParams = {email: res.users[0].email, password: <PASSWORD>}
Session.create(loginParams, redirectTo: 'group')
.catch (err) ->
Toast.show('Your reset password token has expired, please request another')
$location.path('/forgot_password')
LoadBar.stop()
else
LoadBar.stop()
Dialog.alert(title: 'Error!', content: 'Passwords must match.')
| true | module.exports =
url: '/reset_password?reset_password_token'
template: require('./reset-password-page.html')
reloadOnSearch: false
controller: (Dialog, LoadBar, $location, Records, $scope, Session, $stateParams, Toast) ->
$scope.formData = {}
resetPasswordToken = $stateParams.reset_password_token
$scope.resetPassword = ->
LoadBar.start()
password = $scope.formData.password
confirmPassword = $scope.formData.confirmPassword
$scope.formData = {}
if password == confirmPassword
$location.search('reset_password_token', null)
requestParams =
password: PI:PASSWORD:<PASSWORD>END_PI
confirm_password: PI:PASSWORD:<PASSWORD>END_PI
reset_password_token: resetPasswordToken
Records.users.resetPassword(requestParams)
.then (res) ->
loginParams = {email: res.users[0].email, password: PI:PASSWORD:<PASSWORD>END_PI}
Session.create(loginParams, redirectTo: 'group')
.catch (err) ->
Toast.show('Your reset password token has expired, please request another')
$location.path('/forgot_password')
LoadBar.stop()
else
LoadBar.stop()
Dialog.alert(title: 'Error!', content: 'Passwords must match.')
|
[
{
"context": "ndicatorDefinition:\n fields:[\n name: 'theDate'\n type: 'date'\n ]\n )\n\n assert.s",
"end": 885,
"score": 0.6209664940834045,
"start": 882,
"tag": "NAME",
"value": "the"
}
] | client/test/src/models/indicator_model.coffee | unepwcmc/NRT | 0 | suite('Indicator Model')
# Nested 'page' attributes should be wrapped in a Page model on init.
test('when initialised with page attributes, it creates a page model with those attributes', ->
  indicator = Factory.indicator(page: {})
  assert.strictEqual indicator.get('page').constructor.name, 'Page'
)
# Nested 'owner' attributes should likewise be wrapped in a User model.
test('when initialised with owner attributes, it creates an user model with
  those attributes', ->
  indicator = Factory.indicator(owner: {})
  assert.strictEqual indicator.get('owner').constructor.name, 'User'
)
# Serialization should flatten the owner down to just its id.
test('.toJSON when model has owner attributes only includes the owner id', ->
  owner = Factory.user()
  indicator = Factory.indicator(owner: owner)
  json = indicator.toJSON()
  assert.strictEqual json.owner, owner.get(Backbone.Models.User::idAttribute)
)
# Field types come from the indicatorDefinition's fields list.
test('.getFieldType returns the type of a field from the indicator definition', ->
  indicator = Factory.indicator(
    indicatorDefinition:
      fields:[
        name: 'theDate'
        type: 'date'
      ]
  )
  assert.strictEqual indicator.getFieldType('theDate'), 'date'
)
test('.getFieldType on an indicator with no field definition returns "Unknown"', ->
  indicator = Factory.indicator()
  assert.strictEqual indicator.getFieldType('someField'), 'Unknown'
)
test('.getFieldType on an indicator with no fields in the field definitions
  returns "Unknown"', ->
  indicator = Factory.indicator(
    indicatorDefinition: {}
  )
  assert.strictEqual indicator.getFieldType('someField'), 'Unknown'
)
| 137157 | suite('Indicator Model')
test('when initialised with page attributes, it creates a page model with those attributes', ->
indicator = Factory.indicator(page: {})
assert.strictEqual indicator.get('page').constructor.name, 'Page'
)
test('when initialised with owner attributes, it creates an user model with
those attributes', ->
indicator = Factory.indicator(owner: {})
assert.strictEqual indicator.get('owner').constructor.name, 'User'
)
test('.toJSON when model has owner attributes only includes the owner id', ->
owner = Factory.user()
indicator = Factory.indicator(owner: owner)
json = indicator.toJSON()
assert.strictEqual json.owner, owner.get(Backbone.Models.User::idAttribute)
)
test('.getFieldType returns the type of a field from the indicator definition', ->
indicator = Factory.indicator(
indicatorDefinition:
fields:[
name: '<NAME>Date'
type: 'date'
]
)
assert.strictEqual indicator.getFieldType('theDate'), 'date'
)
test('.getFieldType on an indicator with no field definition returns "Unknown"', ->
indicator = Factory.indicator()
assert.strictEqual indicator.getFieldType('someField'), 'Unknown'
)
test('.getFieldType on an indicator with no fields in the field definitions
returns "Unknown"', ->
indicator = Factory.indicator(
indicatorDefinition: {}
)
assert.strictEqual indicator.getFieldType('someField'), 'Unknown'
)
| true | suite('Indicator Model')
test('when initialised with page attributes, it creates a page model with those attributes', ->
indicator = Factory.indicator(page: {})
assert.strictEqual indicator.get('page').constructor.name, 'Page'
)
test('when initialised with owner attributes, it creates an user model with
those attributes', ->
indicator = Factory.indicator(owner: {})
assert.strictEqual indicator.get('owner').constructor.name, 'User'
)
test('.toJSON when model has owner attributes only includes the owner id', ->
owner = Factory.user()
indicator = Factory.indicator(owner: owner)
json = indicator.toJSON()
assert.strictEqual json.owner, owner.get(Backbone.Models.User::idAttribute)
)
test('.getFieldType returns the type of a field from the indicator definition', ->
indicator = Factory.indicator(
indicatorDefinition:
fields:[
name: 'PI:NAME:<NAME>END_PIDate'
type: 'date'
]
)
assert.strictEqual indicator.getFieldType('theDate'), 'date'
)
test('.getFieldType on an indicator with no field definition returns "Unknown"', ->
indicator = Factory.indicator()
assert.strictEqual indicator.getFieldType('someField'), 'Unknown'
)
test('.getFieldType on an indicator with no fields in the field definitions
returns "Unknown"', ->
indicator = Factory.indicator(
indicatorDefinition: {}
)
assert.strictEqual indicator.getFieldType('someField'), 'Unknown'
)
|
[
{
"context": "ber\n @num: 1\n\n constructor: () ->\n @name = \"Robber\"\n\n # setAssignedMemberName: (@assignedMemberName",
"end": 65,
"score": 0.9707460403442383,
"start": 59,
"tag": "NAME",
"value": "Robber"
},
{
"context": " @name = \"Robber\"\n\n # setAssignedMemberName: (@assignedMemberName) ->\n # setMemberManager: (@memberManager) ->\n\n ",
"end": 115,
"score": 0.8008260130882263,
"start": 97,
"tag": "USERNAME",
"value": "assignedMemberName"
},
{
"context": " assignedMember = @memberManager.getMemberByName(@assignedMemberName)\n stolenMember =\n @memberManager.getMembe",
"end": 330,
"score": 0.8942782878875732,
"start": 312,
"tag": "USERNAME",
"value": "assignedMemberName"
},
{
"context": " # assignedMember.role.setAssignedMemberName( @assignedMemberName )\n # assignedMember.stolenMember = s",
"end": 899,
"score": 0.5605494379997253,
"start": 891,
"tag": "USERNAME",
"value": "assigned"
}
] | scripts/robber.coffee | mpppk/hubot-onenight-werewolf | 1 | class Robber
  # Number of Robber cards in play.
  @num: 1

  constructor: () ->
    @name = "Robber"

  # setAssignedMemberName: (@assignedMemberName) ->
  # setMemberManager: (@memberManager) ->

  # Message to show this player at night.
  getMessageAtNight: () ->
    @messageAtNight

  # Night action: swap this player's role with a randomly chosen other player.
  # NOTE(review): relies on @assignedMemberName / @memberManager being set by
  # makeMessageAtNight (or externally) before this runs.
  workAtNight: () ->
    assignedMember = @memberManager.getMemberByName(@assignedMemberName)
    stolenMember =
      @memberManager.getMemberByRandomWithout(@assignedMemberName)
    unless stolenMember?
      assignedMember.role.getMessageAfterNight = () ->
        "申し訳ありません。エラーが発生しました。ゲームをやり直してください。" +
        "(Robber couldn't find other player)"
      return
    # Remember the role the target had before it was stolen.
    stolenMember.role.beforeRole = assignedMember.role
    # Swap roles with the targeted member.
    tempRole = assignedMember.role
    assignedMember.role = stolenMember.role
    stolenMember.role = tempRole
    # assignedMember.role.setAssignedMemberName( @assignedMemberName )
    # assignedMember.stolenMember = stolenMember
    # assignedMember.role.getMessageAfterNight = () ->
    #   roleName = assignedMember.role.name
    #   "#{stolenMember.name}の「#{roleName}」とあなたの「怪盗」を交換しました。"
    # stolenMember.role = new Robber
    # stolenMember.role.setAssignedMemberName( stolenMember.name )
    # stolenMember.role.setMemberManager( @memberManager )

  # Records the assigned player's name (consumed later by workAtNight) and
  # prepares the message announcing this player's role for the night phase.
  makeMessageAtNight: (@assignedMemberName, memberManager, messageManager) ->
    @messageAtNight = messageManager.youAre(messageManager.robberName)

exports.Robber = Robber
| 51605 | class Robber
@num: 1
constructor: () ->
@name = "<NAME>"
# setAssignedMemberName: (@assignedMemberName) ->
# setMemberManager: (@memberManager) ->
# 夜に表示するメッセージ
getMessageAtNight: () ->
@messageAtNight
# 夜に行う行動
workAtNight: () ->
assignedMember = @memberManager.getMemberByName(@assignedMemberName)
stolenMember =
@memberManager.getMemberByRandomWithout(@assignedMemberName)
unless stolenMember?
assignedMember.role.getMessageAfterNight = () ->
"申し訳ありません。エラーが発生しました。ゲームをやり直してください。" +
"(Robber couldn't find other player)"
return
# 盗まれる前の役職を覚えておく
stolenMember.role.beforeRole = assignedMember.role
# 対象のメンバーと役職を入れ替える.
tempRole = assignedMember.role
assignedMember.role = stolenMember.role
stolenMember.role = tempRole
# assignedMember.role.setAssignedMemberName( @assignedMemberName )
# assignedMember.stolenMember = stolenMember
# assignedMember.role.getMessageAfterNight = () ->
# roleName = assignedMember.role.name
# "#{stolenMember.name}の「#{roleName}」とあなたの「怪盗」を交換しました。"
# stolenMember.role = new Robber
# stolenMember.role.setAssignedMemberName( stolenMember.name )
# stolenMember.role.setMemberManager( @memberManager )
makeMessageAtNight: (@assignedMemberName, memberManager, messageManager) ->
@messageAtNight = messageManager.youAre(messageManager.robberName)
exports.Robber = Robber
| true | class Robber
@num: 1
constructor: () ->
@name = "PI:NAME:<NAME>END_PI"
# setAssignedMemberName: (@assignedMemberName) ->
# setMemberManager: (@memberManager) ->
# 夜に表示するメッセージ
getMessageAtNight: () ->
@messageAtNight
# 夜に行う行動
workAtNight: () ->
assignedMember = @memberManager.getMemberByName(@assignedMemberName)
stolenMember =
@memberManager.getMemberByRandomWithout(@assignedMemberName)
unless stolenMember?
assignedMember.role.getMessageAfterNight = () ->
"申し訳ありません。エラーが発生しました。ゲームをやり直してください。" +
"(Robber couldn't find other player)"
return
# 盗まれる前の役職を覚えておく
stolenMember.role.beforeRole = assignedMember.role
# 対象のメンバーと役職を入れ替える.
tempRole = assignedMember.role
assignedMember.role = stolenMember.role
stolenMember.role = tempRole
# assignedMember.role.setAssignedMemberName( @assignedMemberName )
# assignedMember.stolenMember = stolenMember
# assignedMember.role.getMessageAfterNight = () ->
# roleName = assignedMember.role.name
# "#{stolenMember.name}の「#{roleName}」とあなたの「怪盗」を交換しました。"
# stolenMember.role = new Robber
# stolenMember.role.setAssignedMemberName( stolenMember.name )
# stolenMember.role.setMemberManager( @memberManager )
makeMessageAtNight: (@assignedMemberName, memberManager, messageManager) ->
@messageAtNight = messageManager.youAre(messageManager.robberName)
exports.Robber = Robber
|
[
{
"context": "ia.org/wiki/Error_function\n# - https://github.com/ghewgill/picomath\n#\nMath.erf = (x) ->\n # Constants\n a1 =",
"end": 796,
"score": 0.9982037544250488,
"start": 788,
"tag": "USERNAME",
"value": "ghewgill"
},
{
"context": "\n x = Math.abs(x)\n\n # Formula 7.1.26 from:\n # - Abramowitz, M. and Stegun, I. A. (Eds.). \"Error Function and Fre",
"end": 1073,
"score": 0.9997408390045166,
"start": 1060,
"tag": "NAME",
"value": "Abramowitz, M"
},
{
"context": "\n # Formula 7.1.26 from:\n # - Abramowitz, M. and Stegun, I. A. (Eds.). \"Error Function and Fresnel Integrals.\" ",
"end": 1091,
"score": 0.9995872974395752,
"start": 1079,
"tag": "NAME",
"value": "Stegun, I. A"
}
] | src/math-extras.coffee | msmiley/math-extras | 0 |
# #### Useful mathematical functions which should be part of the JavaScript Math object.
#
# Compute conversion from degrees to radians.
#
Math.radians = (degrees) ->
degrees * Math.PI / 180
#
# Compute conversion from radians to degrees.
#
Math.degrees = (radians) ->
radians * 180 / Math.PI
#
# Obtain the sign of the given number.
#
Math.sign = (num) ->
if num
if num < 0
return -1
else
return 1
else
return 0
#
# Compute conversion from degrees to radians.
#
Math.factorial = (x) ->
if x < 2
return 1
else
return Math.factorial(x-1)*x
#
# Compute the base-10 logarithm of x.
#
Math.log10 = (x) ->
Math.log(x) / Math.LN10
#
# Compute the error function of x.
# - http://en.wikipedia.org/wiki/Error_function
# - https://github.com/ghewgill/picomath
#
Math.erf = (x) ->
# Constants
a1 = 0.254829592
a2 = -0.284496736
a3 = 1.421413741
a4 = -1.453152027
a5 = 1.061405429
p = 0.3275911
# Save the sign of x
sign = 1
sign = -1 if x < 0
x = Math.abs(x)
# Formula 7.1.26 from:
# - Abramowitz, M. and Stegun, I. A. (Eds.). "Error Function and Fresnel Integrals." Ch. 7 in Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables, 9th printing. New York: Dover, pp. 297-309, 1972.
t = 1.0 / (1.0 + p * x)
y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x)
sign * y
| 155928 |
# #### Useful mathematical functions which should be part of the JavaScript Math object.
#
# Compute conversion from degrees to radians.
#
Math.radians = (degrees) ->
degrees * Math.PI / 180
#
# Compute conversion from radians to degrees.
#
Math.degrees = (radians) ->
radians * 180 / Math.PI
#
# Obtain the sign of the given number.
#
Math.sign = (num) ->
if num
if num < 0
return -1
else
return 1
else
return 0
#
# Compute conversion from degrees to radians.
#
Math.factorial = (x) ->
if x < 2
return 1
else
return Math.factorial(x-1)*x
#
# Compute the base-10 logarithm of x.
#
Math.log10 = (x) ->
Math.log(x) / Math.LN10
#
# Compute the error function of x.
# - http://en.wikipedia.org/wiki/Error_function
# - https://github.com/ghewgill/picomath
#
Math.erf = (x) ->
# Constants
a1 = 0.254829592
a2 = -0.284496736
a3 = 1.421413741
a4 = -1.453152027
a5 = 1.061405429
p = 0.3275911
# Save the sign of x
sign = 1
sign = -1 if x < 0
x = Math.abs(x)
# Formula 7.1.26 from:
# - <NAME>. and <NAME>. (Eds.). "Error Function and Fresnel Integrals." Ch. 7 in Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables, 9th printing. New York: Dover, pp. 297-309, 1972.
t = 1.0 / (1.0 + p * x)
y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x)
sign * y
| true |
# #### Useful mathematical functions which should be part of the JavaScript Math object.
#
# Compute conversion from degrees to radians.
#
Math.radians = (degrees) ->
degrees * Math.PI / 180
#
# Compute conversion from radians to degrees.
#
Math.degrees = (radians) ->
radians * 180 / Math.PI
#
# Obtain the sign of the given number.
#
Math.sign = (num) ->
if num
if num < 0
return -1
else
return 1
else
return 0
#
# Compute conversion from degrees to radians.
#
Math.factorial = (x) ->
if x < 2
return 1
else
return Math.factorial(x-1)*x
#
# Compute the base-10 logarithm of x.
#
Math.log10 = (x) ->
Math.log(x) / Math.LN10
#
# Compute the error function of x.
# - http://en.wikipedia.org/wiki/Error_function
# - https://github.com/ghewgill/picomath
#
Math.erf = (x) ->
# Constants
a1 = 0.254829592
a2 = -0.284496736
a3 = 1.421413741
a4 = -1.453152027
a5 = 1.061405429
p = 0.3275911
# Save the sign of x
sign = 1
sign = -1 if x < 0
x = Math.abs(x)
# Formula 7.1.26 from:
# - PI:NAME:<NAME>END_PI. and PI:NAME:<NAME>END_PI. (Eds.). "Error Function and Fresnel Integrals." Ch. 7 in Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables, 9th printing. New York: Dover, pp. 297-309, 1972.
t = 1.0 / (1.0 + p * x)
y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x)
sign * y
|
[
{
"context": "vice')\n\nclass NPMDetailService\n constructor: ({ @npmUsername, @npmPassword, @npmEmail })->\n @NPM_REGISTRY_A",
"end": 216,
"score": 0.8740590810775757,
"start": 205,
"tag": "USERNAME",
"value": "npmUsername"
},
{
"context": "nt = new NPMClient {\n auth:\n username: @npmUsername\n password: @npmPassword\n email: @np",
"end": 379,
"score": 0.9919773936271667,
"start": 367,
"tag": "USERNAME",
"value": "@npmUsername"
},
{
"context": ":\n username: @npmUsername\n password: @npmPassword\n email: @npmEmail\n alwaysAuth: true",
"end": 410,
"score": 0.9989498257637024,
"start": 398,
"tag": "PASSWORD",
"value": "@npmPassword"
}
] | src/services/npm-detail-service.coffee | octoblu/connector-detail-service | 0 | _ = require 'lodash'
NPMClient = require 'npm-registry-client'
debug = require('debug')('connector-detail-service:npm-detail-service')
class NPMDetailService
constructor: ({ @npmUsername, @npmPassword, @npmEmail })->
@NPM_REGISTRY_API_URL = 'https://registry.npmjs.org'
@npmClient = new NPMClient {
auth:
username: @npmUsername
password: @npmPassword
email: @npmEmail
alwaysAuth: true
}
getDependenciesForPackage: (packageName, callback) =>
@getPackage packageName, (error, body) =>
return callback error if error?
latestVersion = body?["dist-tags"]?.latest
platformDependencies = _.get(body, "versions['#{latestVersion}'].platformDependencies")
callback null, platformDependencies
getPackage: (packageName, callback) =>
uri = "#{@NPM_REGISTRY_API_URL}/#{packageName}"
@npmClient.get uri, {}, (error, response) =>
return callback @_createError error.code, error.message if error?
callback null, response
_createError: (code, message) =>
error = new Error message
error.code = code if code?
return error
module.exports = NPMDetailService
| 202785 | _ = require 'lodash'
NPMClient = require 'npm-registry-client'
debug = require('debug')('connector-detail-service:npm-detail-service')
class NPMDetailService
constructor: ({ @npmUsername, @npmPassword, @npmEmail })->
@NPM_REGISTRY_API_URL = 'https://registry.npmjs.org'
@npmClient = new NPMClient {
auth:
username: @npmUsername
password: <PASSWORD>
email: @npmEmail
alwaysAuth: true
}
getDependenciesForPackage: (packageName, callback) =>
@getPackage packageName, (error, body) =>
return callback error if error?
latestVersion = body?["dist-tags"]?.latest
platformDependencies = _.get(body, "versions['#{latestVersion}'].platformDependencies")
callback null, platformDependencies
getPackage: (packageName, callback) =>
uri = "#{@NPM_REGISTRY_API_URL}/#{packageName}"
@npmClient.get uri, {}, (error, response) =>
return callback @_createError error.code, error.message if error?
callback null, response
_createError: (code, message) =>
error = new Error message
error.code = code if code?
return error
module.exports = NPMDetailService
| true | _ = require 'lodash'
NPMClient = require 'npm-registry-client'
debug = require('debug')('connector-detail-service:npm-detail-service')
class NPMDetailService
constructor: ({ @npmUsername, @npmPassword, @npmEmail })->
@NPM_REGISTRY_API_URL = 'https://registry.npmjs.org'
@npmClient = new NPMClient {
auth:
username: @npmUsername
password: PI:PASSWORD:<PASSWORD>END_PI
email: @npmEmail
alwaysAuth: true
}
getDependenciesForPackage: (packageName, callback) =>
@getPackage packageName, (error, body) =>
return callback error if error?
latestVersion = body?["dist-tags"]?.latest
platformDependencies = _.get(body, "versions['#{latestVersion}'].platformDependencies")
callback null, platformDependencies
getPackage: (packageName, callback) =>
uri = "#{@NPM_REGISTRY_API_URL}/#{packageName}"
@npmClient.get uri, {}, (error, response) =>
return callback @_createError error.code, error.message if error?
callback null, response
_createError: (code, message) =>
error = new Error message
error.code = code if code?
return error
module.exports = NPMDetailService
|
[
{
"context": "###\n#Authentication Actions\n*__Author__: Panjie SW <panjie@panjiesw.com>*\n*__Project__: ah-auth-plug",
"end": 50,
"score": 0.9998830556869507,
"start": 41,
"tag": "NAME",
"value": "Panjie SW"
},
{
"context": "#\n#Authentication Actions\n*__Author__: Panjie SW <panjie@panjiesw.com>*\n*__Project__: ah-auth-plugin*\n*__Company__: Pan",
"end": 71,
"score": 0.9999309778213501,
"start": 52,
"tag": "EMAIL",
"value": "panjie@panjiesw.com"
}
] | .src/actions/auth.coffee | manjunathkg/ah-auth-plugin | 0 | ###
#Authentication Actions
*__Author__: Panjie SW <panjie@panjiesw.com>*
*__Project__: ah-auth-plugin*
*__Company__: PanjieSW*
Defines actions related to authentication process
*********************************************
###
authenticateAction =
name: "authenticate"
description: "Authenticate a user"
inputs:
required: ['login', 'password']
optional: []
blockedConnectionTypes: []
outputExample:
token: 'The user payload encoded with JSON Web Token'
run: (api, connection, next) ->
api.Auth.authenticate(
connection.params.login, connection.params.password)
.then (token) ->
connection.response.token = token
.catch (err) ->
connection.error = err
if err.status
connection.rawConnection.responseHttpCode = err.status
.finally ->
next connection, yes
return
signupAction =
name: "signup"
description: "Sign a new user up"
inputs:
required: ['data']
optional: []
blockedConnectionTypes: []
outputExample: {}
run: (api, connection, next) ->
api.Auth.signUp(connection.params.data, 'password', yes)
.then (response) ->
if response
connection.rawConnection.responseHttpCode = 201
.catch (err) ->
connection.error = err
if err.status
connection.rawConnection.responseHttpCode = err.status
.finally ->
next connection, yes
return
exports.authenticate = authenticateAction
exports.signup = signupAction
| 98762 | ###
#Authentication Actions
*__Author__: <NAME> <<EMAIL>>*
*__Project__: ah-auth-plugin*
*__Company__: PanjieSW*
Defines actions related to authentication process
*********************************************
###
authenticateAction =
name: "authenticate"
description: "Authenticate a user"
inputs:
required: ['login', 'password']
optional: []
blockedConnectionTypes: []
outputExample:
token: 'The user payload encoded with JSON Web Token'
run: (api, connection, next) ->
api.Auth.authenticate(
connection.params.login, connection.params.password)
.then (token) ->
connection.response.token = token
.catch (err) ->
connection.error = err
if err.status
connection.rawConnection.responseHttpCode = err.status
.finally ->
next connection, yes
return
signupAction =
name: "signup"
description: "Sign a new user up"
inputs:
required: ['data']
optional: []
blockedConnectionTypes: []
outputExample: {}
run: (api, connection, next) ->
api.Auth.signUp(connection.params.data, 'password', yes)
.then (response) ->
if response
connection.rawConnection.responseHttpCode = 201
.catch (err) ->
connection.error = err
if err.status
connection.rawConnection.responseHttpCode = err.status
.finally ->
next connection, yes
return
exports.authenticate = authenticateAction
exports.signup = signupAction
| true | ###
#Authentication Actions
*__Author__: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>*
*__Project__: ah-auth-plugin*
*__Company__: PanjieSW*
Defines actions related to authentication process
*********************************************
###
authenticateAction =
name: "authenticate"
description: "Authenticate a user"
inputs:
required: ['login', 'password']
optional: []
blockedConnectionTypes: []
outputExample:
token: 'The user payload encoded with JSON Web Token'
run: (api, connection, next) ->
api.Auth.authenticate(
connection.params.login, connection.params.password)
.then (token) ->
connection.response.token = token
.catch (err) ->
connection.error = err
if err.status
connection.rawConnection.responseHttpCode = err.status
.finally ->
next connection, yes
return
signupAction =
name: "signup"
description: "Sign a new user up"
inputs:
required: ['data']
optional: []
blockedConnectionTypes: []
outputExample: {}
run: (api, connection, next) ->
api.Auth.signUp(connection.params.data, 'password', yes)
.then (response) ->
if response
connection.rawConnection.responseHttpCode = 201
.catch (err) ->
connection.error = err
if err.status
connection.rawConnection.responseHttpCode = err.status
.finally ->
next connection, yes
return
exports.authenticate = authenticateAction
exports.signup = signupAction
|
[
{
"context": "Map\", ->\n\tmap = null\n\tsorter = null\n\tkey1 = name:\"Alice\", age:42\n\tkey2 = name:\"Bob\", age:43\n\tkey3 =",
"end": 169,
"score": 0.9997026920318604,
"start": 164,
"tag": "NAME",
"value": "Alice"
},
{
"context": "null\n\tkey1 = name:\"Alice\", age:42\n\tkey2 = name:\"Bob\", age:43\n\tkey3 = name:\"Charlie\", age:44\n\tval1",
"end": 198,
"score": 0.9996871948242188,
"start": 195,
"tag": "NAME",
"value": "Bob"
},
{
"context": "e:42\n\tkey2 = name:\"Bob\", age:43\n\tkey3 = name:\"Charlie\", age:44\n\tval1 = floor:1, salary:1000\n\tval2 = flo",
"end": 233,
"score": 0.9990495443344116,
"start": 226,
"tag": "NAME",
"value": "Charlie"
}
] | spec/SortedMapSpec.coffee | kennethjor/discrete | 0 | _ = require "underscore"
sinon = require "sinon"
{SortedMap, Map, Model} = require "../discrete"
describe "SortedMap", ->
map = null
sorter = null
key1 = name:"Alice", age:42
key2 = name:"Bob", age:43
key3 = name:"Charlie", age:44
val1 = floor:1, salary:1000
val2 = floor:2, salary:2000
val3 = floor:3, salary:3000
beforeEach ->
sorter = sinon.spy (a, b) ->
return a.key.age - b.key.age
map = new SortedMap sorter
it "should be empty when first created", ->
expect(map.size()).toBe 0
it "should use sorter to evaluate order", ->
expect(sorter.callCount).toBe 0
map.put key1, val1
map.put key2, val2
expect(sorter.callCount).toBe 1
expect(sorter.args[0][0].key).toBe key1
expect(sorter.args[0][0].value).toBe val1
expect(sorter.args[0][1].key).toBe key2
expect(sorter.args[0][1].value).toBe val2
it "should return the first and the last element", ->
# Empty.
expect(map.firstKey()).toBe null
expect(map.lastKey()).toBe null
# Add three entries.
map.put key1, val1
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key1
map.put key2, val2
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key2
map.put key3, val3
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key3
# Remove two entries.
map.remove key3
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key2
map.remove key1
expect(map.firstKey()).toBe key2
expect(map.lastKey()).toBe key2
it "should sort initially", ->
oldMap = new Map
oldMap.put key2, val2
oldMap.put key3, val3
oldMap.put key1, val1
map = new SortedMap oldMap, sorter
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key3
| 10177 | _ = require "underscore"
sinon = require "sinon"
{SortedMap, Map, Model} = require "../discrete"
describe "SortedMap", ->
map = null
sorter = null
key1 = name:"<NAME>", age:42
key2 = name:"<NAME>", age:43
key3 = name:"<NAME>", age:44
val1 = floor:1, salary:1000
val2 = floor:2, salary:2000
val3 = floor:3, salary:3000
beforeEach ->
sorter = sinon.spy (a, b) ->
return a.key.age - b.key.age
map = new SortedMap sorter
it "should be empty when first created", ->
expect(map.size()).toBe 0
it "should use sorter to evaluate order", ->
expect(sorter.callCount).toBe 0
map.put key1, val1
map.put key2, val2
expect(sorter.callCount).toBe 1
expect(sorter.args[0][0].key).toBe key1
expect(sorter.args[0][0].value).toBe val1
expect(sorter.args[0][1].key).toBe key2
expect(sorter.args[0][1].value).toBe val2
it "should return the first and the last element", ->
# Empty.
expect(map.firstKey()).toBe null
expect(map.lastKey()).toBe null
# Add three entries.
map.put key1, val1
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key1
map.put key2, val2
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key2
map.put key3, val3
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key3
# Remove two entries.
map.remove key3
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key2
map.remove key1
expect(map.firstKey()).toBe key2
expect(map.lastKey()).toBe key2
it "should sort initially", ->
oldMap = new Map
oldMap.put key2, val2
oldMap.put key3, val3
oldMap.put key1, val1
map = new SortedMap oldMap, sorter
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key3
| true | _ = require "underscore"
sinon = require "sinon"
{SortedMap, Map, Model} = require "../discrete"
describe "SortedMap", ->
map = null
sorter = null
key1 = name:"PI:NAME:<NAME>END_PI", age:42
key2 = name:"PI:NAME:<NAME>END_PI", age:43
key3 = name:"PI:NAME:<NAME>END_PI", age:44
val1 = floor:1, salary:1000
val2 = floor:2, salary:2000
val3 = floor:3, salary:3000
beforeEach ->
sorter = sinon.spy (a, b) ->
return a.key.age - b.key.age
map = new SortedMap sorter
it "should be empty when first created", ->
expect(map.size()).toBe 0
it "should use sorter to evaluate order", ->
expect(sorter.callCount).toBe 0
map.put key1, val1
map.put key2, val2
expect(sorter.callCount).toBe 1
expect(sorter.args[0][0].key).toBe key1
expect(sorter.args[0][0].value).toBe val1
expect(sorter.args[0][1].key).toBe key2
expect(sorter.args[0][1].value).toBe val2
it "should return the first and the last element", ->
# Empty.
expect(map.firstKey()).toBe null
expect(map.lastKey()).toBe null
# Add three entries.
map.put key1, val1
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key1
map.put key2, val2
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key2
map.put key3, val3
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key3
# Remove two entries.
map.remove key3
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key2
map.remove key1
expect(map.firstKey()).toBe key2
expect(map.lastKey()).toBe key2
it "should sort initially", ->
oldMap = new Map
oldMap.put key2, val2
oldMap.put key3, val3
oldMap.put key1, val1
map = new SortedMap oldMap, sorter
expect(map.firstKey()).toBe key1
expect(map.lastKey()).toBe key3
|
[
{
"context": " email = generateRandomEmail()\n username = generateRandomUsername()\n\n queue = [\n\n (next) ->\n # regis",
"end": 1962,
"score": 0.9773983955383301,
"start": 1940,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": " email : email\n username : username\n\n request.post registerRequestParams, (err",
"end": 2175,
"score": 0.9991796612739563,
"start": 2167,
"tag": "USERNAME",
"value": "username"
},
{
"context": "rams\n body :\n username : username\n password : ''\n\n request.post l",
"end": 2526,
"score": 0.9991934895515442,
"start": 2518,
"tag": "USERNAME",
"value": "username"
},
{
"context": "tatus is \"needs reset\"', (done) ->\n\n username = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n ",
"end": 2920,
"score": 0.9924516677856445,
"start": 2898,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 2947,
"score": 0.9994215965270996,
"start": 2939,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 3130,
"score": 0.9991639852523804,
"start": 3122,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 3162,
"score": 0.9994142055511475,
"start": 3154,
"tag": "PASSWORD",
"value": "password"
},
{
"context": ":\n email : ''\n username : username\n password : password\n\n expected",
"end": 3799,
"score": 0.9993032217025757,
"start": 3791,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n expectedBody = 'You should reset your pa",
"end": 3831,
"score": 0.9994076490402222,
"start": 3823,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "a code is not provided', (done) ->\n\n username = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n ",
"end": 4286,
"score": 0.9959304928779602,
"start": 4264,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 4313,
"score": 0.9994454383850098,
"start": 4305,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 4496,
"score": 0.9993100166320801,
"start": 4488,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 4528,
"score": 0.9989074468612671,
"start": 4520,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "'\n tfcode : ''\n username : username\n password : password\n\n request.",
"end": 5167,
"score": 0.9991229176521301,
"start": 5159,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 5199,
"score": 0.9989181756973267,
"start": 5191,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 5610,
"score": 0.9991732239723206,
"start": 5602,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 5793,
"score": 0.9991181492805481,
"start": 5785,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 5825,
"score": 0.9991534352302551,
"start": 5817,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "fcode : 'someInvalidCode'\n username : username\n password : password\n\n request.",
"end": 6481,
"score": 0.9989390969276428,
"start": 6473,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 6513,
"score": 0.9988433122634888,
"start": 6505,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "me = generateRandomUsername()\n password = 'testpass'\n validtfKey = null\n\n queue = [\n\n (nex",
"end": 6921,
"score": 0.9992457628250122,
"start": 6913,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 7126,
"score": 0.9947691559791565,
"start": 7118,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 7158,
"score": 0.999115526676178,
"start": 7150,
"tag": "PASSWORD",
"value": "password"
},
{
"context": ":\n tfcode : ''\n username : username\n password : password\n\n request.",
"end": 7751,
"score": 0.9987187385559082,
"start": 7743,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 7783,
"score": 0.9991356730461121,
"start": 7775,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "fcode : 'someInvalidCode'\n username : username\n password : password\n\n request.",
"end": 8221,
"score": 0.9977027773857117,
"start": 8213,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 8253,
"score": 0.9992004036903381,
"start": 8245,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "tfcode : verificationCode\n username : username\n password : password\n\n request.",
"end": 9158,
"score": 0.9986488223075867,
"start": 9150,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 9190,
"score": 0.9994708299636841,
"start": 9182,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "ation token is invalid', (done) ->\n\n username = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n ",
"end": 9539,
"score": 0.9468839764595032,
"start": 9517,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 9566,
"score": 0.9994220733642578,
"start": 9558,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 9749,
"score": 0.9978883862495422,
"start": 9741,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 9781,
"score": 0.9994204044342041,
"start": 9773,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "ken : 'someInvalidToken'\n username : username\n password : password\n\n request.",
"end": 10218,
"score": 0.9984318017959595,
"start": 10210,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 10250,
"score": 0.9995183944702148,
"start": 10242,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " groupname is invalid', (done) ->\n\n username = generateRandomUsername()\n password = 'testpass'\n groupName = gene",
"end": 10617,
"score": 0.9771015644073486,
"start": 10595,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "name = generateRandomUsername()\n password = 'testpass'\n groupName = generateRandomString()\n\n queu",
"end": 10645,
"score": 0.9994266033172607,
"start": 10637,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 10867,
"score": 0.9977204203605652,
"start": 10859,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 10899,
"score": 0.9994449019432068,
"start": 10891,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " email : ''\n username : username\n password : password\n grou",
"end": 11290,
"score": 0.9992974996566772,
"start": 11282,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n groupName : 'someInvalidGroupName'\n\n ",
"end": 11323,
"score": 0.9995405673980713,
"start": 11315,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "tack', (done) ->\n\n queue = []\n username = generateRandomUsername()\n password = 'testpass'\n\n loginRequestPara",
"end": 11755,
"score": 0.9957484602928162,
"start": 11733,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n loginRequestParams = generateLoginRequestPa",
"end": 11782,
"score": 0.9993958473205566,
"start": 11774,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "equestParams\n body :\n username : username\n password : 'someInvalidPassword'\n\n add",
"end": 11883,
"score": 0.9985635876655579,
"start": 11875,
"tag": "USERNAME",
"value": "username"
},
{
"context": ":\n username : username\n password : 'someInvalidPassword'\n\n addRemoveUserLogsToQueue = (queue, username",
"end": 11923,
"score": 0.9993782043457031,
"start": 11904,
"tag": "PASSWORD",
"value": "someInvalidPassword"
},
{
"context": "stParams\n body :\n username : username\n password : password\n\n request.post",
"end": 12759,
"score": 0.9984633922576904,
"start": 12751,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err, ",
"end": 12789,
"score": 0.9994471669197083,
"start": 12781,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " account was not found', (done) ->\n\n username = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n ",
"end": 13390,
"score": 0.9960653781890869,
"start": 13368,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 13417,
"score": 0.9993800520896912,
"start": 13409,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 13600,
"score": 0.9962217807769775,
"start": 13592,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 13632,
"score": 0.9994547963142395,
"start": 13624,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 14171,
"score": 0.9925715327262878,
"start": 14163,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 14203,
"score": 0.9993775486946106,
"start": 14195,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "ed', (done) ->\n\n user = null\n username = generateRandomUsername()\n password = 'testpass'\n\n loginRequestPara",
"end": 14577,
"score": 0.9954030513763428,
"start": 14555,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n loginRequestParams = generateLoginRequestPa",
"end": 14604,
"score": 0.9993860721588135,
"start": 14596,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "equestParams\n body :\n username : username\n password : password\n\n queue = [\n\n ",
"end": 14705,
"score": 0.9931554794311523,
"start": 14697,
"tag": "USERNAME",
"value": "username"
},
{
"context": " :\n username : username\n password : password\n\n queue = [\n\n (next) ->\n # registe",
"end": 14733,
"score": 0.9995008707046509,
"start": 14725,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 14915,
"score": 0.9874700903892517,
"start": 14907,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 14947,
"score": 0.9993385672569275,
"start": 14939,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " email = generateRandomEmail()\n username = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n ",
"end": 16726,
"score": 0.9890914559364319,
"start": 16704,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 16753,
"score": 0.9994006752967834,
"start": 16745,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": " email : email\n username : username\n password : password\n\n request.",
"end": 16965,
"score": 0.995041012763977,
"start": 16957,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 16997,
"score": 0.9982019662857056,
"start": 16989,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 17359,
"score": 0.9952878952026367,
"start": 17351,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 17391,
"score": 0.9974914193153381,
"start": 17383,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " username : email\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 17814,
"score": 0.9982590079307556,
"start": 17806,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "session does not exist', (done) ->\n\n username = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n ",
"end": 18235,
"score": 0.9960641860961914,
"start": 18213,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "ername = generateRandomUsername()\n password = 'testpass'\n\n queue = [\n\n (next) ->\n # regist",
"end": 18262,
"score": 0.9994522929191589,
"start": 18254,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 18445,
"score": 0.9956361651420593,
"start": 18437,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 18477,
"score": 0.9985295534133911,
"start": 18469,
"tag": "PASSWORD",
"value": "password"
},
{
"context": ":\n email : ''\n username : username\n password : password\n\n request.",
"end": 19009,
"score": 0.995970606803894,
"start": 19001,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 19041,
"score": 0.9985876679420471,
"start": 19033,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " = generateRandomEmail()\n username = generateRandomUsername()\n password = 'testpass'\n juserLa",
"end": 19466,
"score": 0.9904454350471497,
"start": 19444,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "enerateRandomUsername()\n password = 'testpass'\n juserLastLoginDate = null\n jsessionLastAc",
"end": 19503,
"score": 0.9994029998779297,
"start": 19495,
"tag": "PASSWORD",
"value": "testpass"
},
{
"context": " email : email\n username : username\n password : password\n\n request.",
"end": 19775,
"score": 0.9972198009490967,
"start": 19767,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post registerRequestParams, (err",
"end": 19807,
"score": 0.9987339377403259,
"start": 19799,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "rams\n body :\n username : username\n password : password\n\n request.",
"end": 20586,
"score": 0.997596800327301,
"start": 20578,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username : username\n password : password\n\n request.post loginRequestParams, (err, r",
"end": 20618,
"score": 0.9980947375297546,
"start": 20610,
"tag": "PASSWORD",
"value": "password"
}
] | servers/lib/server/handlers/login.test.coffee | ezgikaysi/koding | 1 | Speakeasy = require 'speakeasy'
{ async
expect
request
generateRandomEmail
generateRandomString
generateRandomUsername
checkBongoConnectivity } = require '../../../testhelper'
{ testCsrfToken } = require '../../../testhelper/handler'
{ generateLoginRequestParams } = require '../../../testhelper/handler/loginhelper'
{ generateRegisterRequestParams } = require '../../../testhelper/handler/registerhelper'
JLog = require '../../../models/log'
JUser = require '../../../models/user'
JAccount = require '../../../models/account'
JSession = require '../../../models/session'
beforeTests = -> before (done) ->
checkBongoConnectivity done
# here we have actual tests
runTests = -> describe 'server.handlers.login', ->
it 'should send HTTP 404 if request method is not POST', (done) ->
loginRequestParams = generateLoginRequestParams()
queue = []
methods = ['put', 'patch', 'delete']
addRequestToQueue = (queue, method) -> queue.push (next) ->
loginRequestParams.method = method
request loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
next()
for method in methods
addRequestToQueue queue, method
async.series queue, done
it 'should send HTTP 403 if _csrf token is invalid', (done) ->
testCsrfToken generateLoginRequestParams, 'post', done
it 'should send HTTP 403 if username is empty', (done) ->
loginRequestParams = generateLoginRequestParams
body :
username : ''
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Unknown user name'
done()
it 'should send HTTP 403 if username exists but password is empty', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login to fail when password is empty
loginRequestParams = generateLoginRequestParams
body :
username : username
password : ''
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
]
async.series queue, done
it 'should send HTTP 403 if password status is "needs reset"', (done) ->
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# updating user passwordStatus as needs reset
options = { $set: { passwordStatus: 'needs reset' } }
JUser.update { username }, options, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when passwordStatus is 'needs reset'
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : password
expectedBody = 'You should reset your password in order to continue!'
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
]
async.series queue, done
it 'should send HTTP 403 if 2fa is activated but 2fa code is not provided', (done) ->
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when 2fa code is empty
loginRequestParams = generateLoginRequestParams
body :
email : ''
tfcode : ''
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
]
async.series queue, done
it 'should send HTTP 403 if 2fa is activated and 2fa code is invalid', (done) ->
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when 2fa code is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
tfcode : 'someInvalidCode'
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
]
async.series queue, done
it 'should send HTTP 200 if two factor authentication code is correct', (done) ->
username = generateRandomUsername()
password = 'testpass'
validtfKey = null
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# trying to login with empty tf code
loginRequestParams = generateLoginRequestParams
body :
tfcode : ''
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
(next) ->
# trying to login with invalid tfcode
loginRequestParams = generateLoginRequestParams
body :
tfcode : 'someInvalidCode'
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
(next) ->
# generating a 2fa key and saving it in mongo
{ base32 : tfcode } = Speakeasy.generate_key
length : 20
encoding : 'base32'
validtfKey = tfcode
JUser.update { username }, { $set: { twofactorkey: tfcode } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# generating a verificationCode and expecting a successful login
verificationCode = Speakeasy.totp
key : validtfKey
encoding : 'base32'
loginRequestParams = generateLoginRequestParams
body :
tfcode : verificationCode
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 403 if invitation token is invalid', (done) ->
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login attempt to fail when invitation token is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
token : 'someInvalidToken'
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'invitation is not valid'
next()
]
async.series queue, done
it 'should send HTTP 403 if groupname is invalid', (done) ->
username = generateRandomUsername()
password = 'testpass'
groupName = generateRandomString()
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login attempt to fail when groupName is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : password
groupName : 'someInvalidGroupName'
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'group doesnt exist'
next()
]
async.series queue, done
it 'should send HTTP 403 if there is brute force attack', (done) ->
queue = []
username = generateRandomUsername()
password = 'testpass'
loginRequestParams = generateLoginRequestParams
body :
username : username
password : 'someInvalidPassword'
addRemoveUserLogsToQueue = (queue, username) ->
queue.push (next) ->
JLog.remove { username }, (err) ->
expect(err).to.not.exist
next()
addLoginTrialToQueue = (queue, tryCount) ->
queue.push (next) ->
expectedBody = switch
when tryCount < JLog.tryLimit()
'Access denied!'
else
"Your login access is blocked for
#{JLog.timeLimit()} minutes."
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
queue.push (next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
# removing logs for a fresh start
addRemoveUserLogsToQueue queue, username
# this loop adds try_limit + 1 trials to queue
for i in [0..JLog.tryLimit()]
addLoginTrialToQueue queue, i
# removing logs for this username after test passes
addRemoveUserLogsToQueue queue, username
async.series queue, done
it 'should send HTTP 403 if account was not found', (done) ->
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# deleting account of newly registered user
JAccount.remove { 'profile.nickname': username }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login to fail after deleting account
loginRequestParams = generateLoginRequestParams
body :
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'No account found!'
next()
]
async.series queue, done
it 'should send HTTP 403 if user is blocked', (done) ->
user = null
username = generateRandomUsername()
password = 'testpass'
loginRequestParams = generateLoginRequestParams
body :
username : username
password : password
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting successful login
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# fetching user record
JUser.one { username }, (err, user_) ->
expect(err).to.not.exist
user = user_
next()
(next) ->
# blocking user for 1 day
untilDate = new Date(Date.now() + 1000 * 60 * 60 * 24)
user.block untilDate, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail and return blocked message
toDate = user.blockedUntil.toUTCString()
expectedBody = JUser.getBlockedMessage toDate
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
(next) ->
# unblocking user
user.unblock (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting user to be able to login
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 200 and normalizeLoginId if user exists', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting successful login with newly registered username
loginRequestParams = generateLoginRequestParams
body :
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# expecting successful login with newly registered email
loginRequestParams = generateLoginRequestParams
body :
username : email
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
# session is being craeted by jsession.fetchSession if does not exist
it 'should send HTTP 200 even if session does not exist', (done) ->
username = generateRandomUsername()
password = 'testpass'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# removing session
JSession.remove { username }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting successful login even if the session was removed
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 200 if data is valid and update user and session data', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'testpass'
juserLastLoginDate = null
jsessionLastAccess = null
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : password
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# keeping juser last login date
JUser.one { username }, (err, user) ->
expect(err).to.not.exist
juserLastLoginDate = user.lastLoginDate
next()
(next) ->
# keeping jsession last access
JSession.one { username }, (err, session) ->
expect(err).to.not.exist
jsessionLastAccess = session.lastAccess
next()
(next) ->
# expecting successful login with newly registered username
loginRequestParams = generateLoginRequestParams
body :
username : username
password : password
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# expecting juser last login date not to be same after login
JUser.one { username }, (err, user) ->
expect(err).to.not.exist
expect(juserLastLoginDate).not.to.be.equal user.lastLoginDate
next()
(next) ->
# expecting jsession last login date not to be same after login
JSession.one { username }, (err, session) ->
expect(err).to.not.exist
expect(jsessionLastAccess).not.to.be.equal session.lastAccess
next()
]
async.series queue, done
beforeTests()
runTests()
| 54839 | Speakeasy = require 'speakeasy'
{ async
expect
request
generateRandomEmail
generateRandomString
generateRandomUsername
checkBongoConnectivity } = require '../../../testhelper'
{ testCsrfToken } = require '../../../testhelper/handler'
{ generateLoginRequestParams } = require '../../../testhelper/handler/loginhelper'
{ generateRegisterRequestParams } = require '../../../testhelper/handler/registerhelper'
JLog = require '../../../models/log'
JUser = require '../../../models/user'
JAccount = require '../../../models/account'
JSession = require '../../../models/session'
beforeTests = -> before (done) ->
checkBongoConnectivity done
# here we have actual tests
runTests = -> describe 'server.handlers.login', ->
it 'should send HTTP 404 if request method is not POST', (done) ->
loginRequestParams = generateLoginRequestParams()
queue = []
methods = ['put', 'patch', 'delete']
addRequestToQueue = (queue, method) -> queue.push (next) ->
loginRequestParams.method = method
request loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
next()
for method in methods
addRequestToQueue queue, method
async.series queue, done
it 'should send HTTP 403 if _csrf token is invalid', (done) ->
testCsrfToken generateLoginRequestParams, 'post', done
it 'should send HTTP 403 if username is empty', (done) ->
loginRequestParams = generateLoginRequestParams
body :
username : ''
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Unknown user name'
done()
it 'should send HTTP 403 if username exists but password is empty', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login to fail when password is empty
loginRequestParams = generateLoginRequestParams
body :
username : username
password : ''
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
]
async.series queue, done
it 'should send HTTP 403 if password status is "needs reset"', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# updating user passwordStatus as needs reset
options = { $set: { passwordStatus: 'needs reset' } }
JUser.update { username }, options, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when passwordStatus is 'needs reset'
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : <PASSWORD>
expectedBody = 'You should reset your password in order to continue!'
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
]
async.series queue, done
it 'should send HTTP 403 if 2fa is activated but 2fa code is not provided', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when 2fa code is empty
loginRequestParams = generateLoginRequestParams
body :
email : ''
tfcode : ''
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
]
async.series queue, done
it 'should send HTTP 403 if 2fa is activated and 2fa code is invalid', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when 2fa code is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
tfcode : 'someInvalidCode'
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
]
async.series queue, done
it 'should send HTTP 200 if two factor authentication code is correct', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
validtfKey = null
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# trying to login with empty tf code
loginRequestParams = generateLoginRequestParams
body :
tfcode : ''
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
(next) ->
# trying to login with invalid tfcode
loginRequestParams = generateLoginRequestParams
body :
tfcode : 'someInvalidCode'
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
(next) ->
# generating a 2fa key and saving it in mongo
{ base32 : tfcode } = Speakeasy.generate_key
length : 20
encoding : 'base32'
validtfKey = tfcode
JUser.update { username }, { $set: { twofactorkey: tfcode } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# generating a verificationCode and expecting a successful login
verificationCode = Speakeasy.totp
key : validtfKey
encoding : 'base32'
loginRequestParams = generateLoginRequestParams
body :
tfcode : verificationCode
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 403 if invitation token is invalid', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login attempt to fail when invitation token is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
token : 'someInvalidToken'
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'invitation is not valid'
next()
]
async.series queue, done
it 'should send HTTP 403 if groupname is invalid', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
groupName = generateRandomString()
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login attempt to fail when groupName is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : <PASSWORD>
groupName : 'someInvalidGroupName'
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'group doesnt exist'
next()
]
async.series queue, done
it 'should send HTTP 403 if there is brute force attack', (done) ->
queue = []
username = generateRandomUsername()
password = '<PASSWORD>'
loginRequestParams = generateLoginRequestParams
body :
username : username
password : '<PASSWORD>'
addRemoveUserLogsToQueue = (queue, username) ->
queue.push (next) ->
JLog.remove { username }, (err) ->
expect(err).to.not.exist
next()
addLoginTrialToQueue = (queue, tryCount) ->
queue.push (next) ->
expectedBody = switch
when tryCount < JLog.tryLimit()
'Access denied!'
else
"Your login access is blocked for
#{JLog.timeLimit()} minutes."
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
queue.push (next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
# removing logs for a fresh start
addRemoveUserLogsToQueue queue, username
# this loop adds try_limit + 1 trials to queue
for i in [0..JLog.tryLimit()]
addLoginTrialToQueue queue, i
# removing logs for this username after test passes
addRemoveUserLogsToQueue queue, username
async.series queue, done
it 'should send HTTP 403 if account was not found', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# deleting account of newly registered user
JAccount.remove { 'profile.nickname': username }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login to fail after deleting account
loginRequestParams = generateLoginRequestParams
body :
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'No account found!'
next()
]
async.series queue, done
it 'should send HTTP 403 if user is blocked', (done) ->
user = null
username = generateRandomUsername()
password = '<PASSWORD>'
loginRequestParams = generateLoginRequestParams
body :
username : username
password : <PASSWORD>
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting successful login
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# fetching user record
JUser.one { username }, (err, user_) ->
expect(err).to.not.exist
user = user_
next()
(next) ->
# blocking user for 1 day
untilDate = new Date(Date.now() + 1000 * 60 * 60 * 24)
user.block untilDate, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail and return blocked message
toDate = user.blockedUntil.toUTCString()
expectedBody = JUser.getBlockedMessage toDate
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
(next) ->
# unblocking user
user.unblock (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting user to be able to login
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 200 and normalizeLoginId if user exists', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting successful login with newly registered username
loginRequestParams = generateLoginRequestParams
body :
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# expecting successful login with newly registered email
loginRequestParams = generateLoginRequestParams
body :
username : email
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
# session is being craeted by jsession.fetchSession if does not exist
it 'should send HTTP 200 even if session does not exist', (done) ->
username = generateRandomUsername()
password = '<PASSWORD>'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# removing session
JSession.remove { username }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting successful login even if the session was removed
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 200 if data is valid and update user and session data', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = '<PASSWORD>'
juserLastLoginDate = null
jsessionLastAccess = null
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : <PASSWORD>
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# keeping juser last login date
JUser.one { username }, (err, user) ->
expect(err).to.not.exist
juserLastLoginDate = user.lastLoginDate
next()
(next) ->
# keeping jsession last access
JSession.one { username }, (err, session) ->
expect(err).to.not.exist
jsessionLastAccess = session.lastAccess
next()
(next) ->
# expecting successful login with newly registered username
loginRequestParams = generateLoginRequestParams
body :
username : username
password : <PASSWORD>
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# expecting juser last login date not to be same after login
JUser.one { username }, (err, user) ->
expect(err).to.not.exist
expect(juserLastLoginDate).not.to.be.equal user.lastLoginDate
next()
(next) ->
# expecting jsession last login date not to be same after login
JSession.one { username }, (err, session) ->
expect(err).to.not.exist
expect(jsessionLastAccess).not.to.be.equal session.lastAccess
next()
]
async.series queue, done
beforeTests()
runTests()
| true | Speakeasy = require 'speakeasy'
{ async
expect
request
generateRandomEmail
generateRandomString
generateRandomUsername
checkBongoConnectivity } = require '../../../testhelper'
{ testCsrfToken } = require '../../../testhelper/handler'
{ generateLoginRequestParams } = require '../../../testhelper/handler/loginhelper'
{ generateRegisterRequestParams } = require '../../../testhelper/handler/registerhelper'
JLog = require '../../../models/log'
JUser = require '../../../models/user'
JAccount = require '../../../models/account'
JSession = require '../../../models/session'
beforeTests = -> before (done) ->
checkBongoConnectivity done
# here we have actual tests
runTests = -> describe 'server.handlers.login', ->
it 'should send HTTP 404 if request method is not POST', (done) ->
loginRequestParams = generateLoginRequestParams()
queue = []
methods = ['put', 'patch', 'delete']
addRequestToQueue = (queue, method) -> queue.push (next) ->
loginRequestParams.method = method
request loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
next()
for method in methods
addRequestToQueue queue, method
async.series queue, done
it 'should send HTTP 403 if _csrf token is invalid', (done) ->
testCsrfToken generateLoginRequestParams, 'post', done
it 'should send HTTP 403 if username is empty', (done) ->
loginRequestParams = generateLoginRequestParams
body :
username : ''
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Unknown user name'
done()
it 'should send HTTP 403 if username exists but password is empty', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login to fail when password is empty
loginRequestParams = generateLoginRequestParams
body :
username : username
password : ''
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
]
async.series queue, done
it 'should send HTTP 403 if password status is "needs reset"', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# updating user passwordStatus as needs reset
options = { $set: { passwordStatus: 'needs reset' } }
JUser.update { username }, options, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when passwordStatus is 'needs reset'
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
expectedBody = 'You should reset your password in order to continue!'
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
]
async.series queue, done
it 'should send HTTP 403 if 2fa is activated but 2fa code is not provided', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when 2fa code is empty
loginRequestParams = generateLoginRequestParams
body :
email : ''
tfcode : ''
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
]
async.series queue, done
it 'should send HTTP 403 if 2fa is activated and 2fa code is invalid', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail when 2fa code is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
tfcode : 'someInvalidCode'
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
]
async.series queue, done
it 'should send HTTP 200 if two factor authentication code is correct', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
validtfKey = null
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# setting two factor authentication on by adding twofactorkey field
JUser.update { username }, { $set: { twofactorkey: 'somekey' } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# trying to login with empty tf code
loginRequestParams = generateLoginRequestParams
body :
tfcode : ''
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'TwoFactor auth Enabled'
next()
(next) ->
# trying to login with invalid tfcode
loginRequestParams = generateLoginRequestParams
body :
tfcode : 'someInvalidCode'
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'Access denied!'
next()
(next) ->
# generating a 2fa key and saving it in mongo
{ base32 : tfcode } = Speakeasy.generate_key
length : 20
encoding : 'base32'
validtfKey = tfcode
JUser.update { username }, { $set: { twofactorkey: tfcode } }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# generating a verificationCode and expecting a successful login
verificationCode = Speakeasy.totp
key : validtfKey
encoding : 'base32'
loginRequestParams = generateLoginRequestParams
body :
tfcode : verificationCode
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 403 if invitation token is invalid', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login attempt to fail when invitation token is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
token : 'someInvalidToken'
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'invitation is not valid'
next()
]
async.series queue, done
it 'should send HTTP 403 if groupname is invalid', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
groupName = generateRandomString()
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting login attempt to fail when groupName is invalid
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
groupName : 'someInvalidGroupName'
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'group doesnt exist'
next()
]
async.series queue, done
it 'should send HTTP 403 if there is brute force attack', (done) ->
queue = []
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
loginRequestParams = generateLoginRequestParams
body :
username : username
password : 'PI:PASSWORD:<PASSWORD>END_PI'
addRemoveUserLogsToQueue = (queue, username) ->
queue.push (next) ->
JLog.remove { username }, (err) ->
expect(err).to.not.exist
next()
addLoginTrialToQueue = (queue, tryCount) ->
queue.push (next) ->
expectedBody = switch
when tryCount < JLog.tryLimit()
'Access denied!'
else
"Your login access is blocked for
#{JLog.timeLimit()} minutes."
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
queue.push (next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
# removing logs for a fresh start
addRemoveUserLogsToQueue queue, username
# this loop adds try_limit + 1 trials to queue
for i in [0..JLog.tryLimit()]
addLoginTrialToQueue queue, i
# removing logs for this username after test passes
addRemoveUserLogsToQueue queue, username
async.series queue, done
it 'should send HTTP 403 if account was not found', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# deleting account of newly registered user
JAccount.remove { 'profile.nickname': username }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login to fail after deleting account
loginRequestParams = generateLoginRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal 'No account found!'
next()
]
async.series queue, done
it 'should send HTTP 403 if user is blocked', (done) ->
user = null
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
loginRequestParams = generateLoginRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting successful login
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# fetching user record
JUser.one { username }, (err, user_) ->
expect(err).to.not.exist
user = user_
next()
(next) ->
# blocking user for 1 day
untilDate = new Date(Date.now() + 1000 * 60 * 60 * 24)
user.block untilDate, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting login attempt to fail and return blocked message
toDate = user.blockedUntil.toUTCString()
expectedBody = JUser.getBlockedMessage toDate
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 403
expect(body).to.be.equal expectedBody
next()
(next) ->
# unblocking user
user.unblock (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting user to be able to login
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 200 and normalizeLoginId if user exists', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting successful login with newly registered username
loginRequestParams = generateLoginRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# expecting successful login with newly registered email
loginRequestParams = generateLoginRequestParams
body :
username : email
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
# session is being craeted by jsession.fetchSession if does not exist
it 'should send HTTP 200 even if session does not exist', (done) ->
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# removing session
JSession.remove { username }, (err) ->
expect(err).to.not.exist
next()
(next) ->
# expecting successful login even if the session was removed
loginRequestParams = generateLoginRequestParams
body :
email : ''
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
]
async.series queue, done
it 'should send HTTP 200 if data is valid and update user and session data', (done) ->
email = generateRandomEmail()
username = generateRandomUsername()
password = 'PI:PASSWORD:<PASSWORD>END_PI'
juserLastLoginDate = null
jsessionLastAccess = null
queue = [
(next) ->
# registering a new user
registerRequestParams = generateRegisterRequestParams
body :
email : email
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post registerRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# keeping juser last login date
JUser.one { username }, (err, user) ->
expect(err).to.not.exist
juserLastLoginDate = user.lastLoginDate
next()
(next) ->
# keeping jsession last access
JSession.one { username }, (err, session) ->
expect(err).to.not.exist
jsessionLastAccess = session.lastAccess
next()
(next) ->
# expecting successful login with newly registered username
loginRequestParams = generateLoginRequestParams
body :
username : username
password : PI:PASSWORD:<PASSWORD>END_PI
request.post loginRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
expect(body).to.be.equal ''
next()
(next) ->
# expecting juser last login date not to be same after login
JUser.one { username }, (err, user) ->
expect(err).to.not.exist
expect(juserLastLoginDate).not.to.be.equal user.lastLoginDate
next()
(next) ->
# expecting jsession last login date not to be same after login
JSession.one { username }, (err, session) ->
expect(err).to.not.exist
expect(jsessionLastAccess).not.to.be.equal session.lastAccess
next()
]
async.series queue, done
beforeTests()
runTests()
|
[
{
"context": "icense:\n#\n# The Central Randomizer 1.3 (C) 1997 by Paul Houle (paul@honeylocust.com)\n# See: http://www.honeylo",
"end": 149,
"score": 0.9998937249183655,
"start": 139,
"tag": "NAME",
"value": "Paul Houle"
},
{
"context": "he Central Randomizer 1.3 (C) 1997 by Paul Houle (paul@honeylocust.com)\n# See: http://www.honeylocust.com/javascript/ra",
"end": 171,
"score": 0.999931812286377,
"start": 151,
"tag": "EMAIL",
"value": "paul@honeylocust.com"
}
] | src/random/seeds/paul_houle.coffee | abe33/agt | 1 | {Cloneable, Sourcable, Formattable} = require '../../mixins'
# Original Implementation License:
#
# The Central Randomizer 1.3 (C) 1997 by Paul Houle (paul@honeylocust.com)
# See: http://www.honeylocust.com/javascript/randomizer.html
# Public:
module.exports =
class PaulHoule
@include Cloneable('seed')
@include Sourcable('chancejs.PaulHoule','seed')
@include Formattable('PaulHoule','seed')
### Public ###
constructor: (@seed) ->
get: ->
@seed = (@seed * 9301 + 49297) % 233280
@seed / 233280.0
| 144209 | {Cloneable, Sourcable, Formattable} = require '../../mixins'
# Original Implementation License:
#
# The Central Randomizer 1.3 (C) 1997 by <NAME> (<EMAIL>)
# See: http://www.honeylocust.com/javascript/randomizer.html
# Public:
module.exports =
class PaulHoule
@include Cloneable('seed')
@include Sourcable('chancejs.PaulHoule','seed')
@include Formattable('PaulHoule','seed')
### Public ###
constructor: (@seed) ->
get: ->
@seed = (@seed * 9301 + 49297) % 233280
@seed / 233280.0
| true | {Cloneable, Sourcable, Formattable} = require '../../mixins'
# Original Implementation License:
#
# The Central Randomizer 1.3 (C) 1997 by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# See: http://www.honeylocust.com/javascript/randomizer.html
# Public:
module.exports =
class PaulHoule
@include Cloneable('seed')
@include Sourcable('chancejs.PaulHoule','seed')
@include Formattable('PaulHoule','seed')
### Public ###
constructor: (@seed) ->
get: ->
@seed = (@seed * 9301 + 49297) % 233280
@seed / 233280.0
|
[
{
"context": "source.options\n key = index = keys.next()\n value = @state[\"option-#{index}",
"end": 5090,
"score": 0.6878712773323059,
"start": 5086,
"tag": "KEY",
"value": "next"
}
] | clients/widgets/product/list.cjsx | jacob22/accounting | 0 | /*
Copyright 2019 Open End AB
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
# This module expectes product-list.css to be loaded
define(['react', 'reactstrap', 'gettext', 'iter', 'signals', 'utils'], ->
[React, rs, gettext, iter, signals, utils] = arguments
Collapse = rs.Collapse
Card = rs.Card
CardBlock = rs.CardBlock
_ = gettext.gettext
ids = iter.count()
class Option extends React.Component
constructor: (props) ->
super(props)
@state =
id: "option-#{ids.next()}"
onChange: (event) =>
@props.onChange(@props.index, event.target.value)
render: ->
fieldClass = 'form-control col-9'
id = "#{@state.id}-field"
placeholder = ''
if @props.data_source.type == 'personnummer'
placeholder='YYMMDD-NNNN'
else if @props.data_source.mandatory
placeholder = _('This field is mandatory')
props =
id: id
className: fieldClass
onChange: @onChange
placeholder: placeholder
if @props.read_only
field = <input type='text' disabled {...props} />
else if @props.data_source.type == 'text'
field = <input type='text' {...props} />
else if @props.data_source.type == 'textarea'
field = <textarea {...props} />
else if @props.data_source.type == 'select'
keys = iter.count()
options = []
for option in JSON.parse(@props.data_source.typedata).options
options.push(
<option key=keys.next() value=option.name>
{option.name}
</option>
)
field = <select {...props}>
{options}
</select>
else if @props.data_source.type == 'personnummer'
field = <input type='text' {...props} />
labelClass = 'col-2 col-form-label'
if @props.data_source.mandatory
labelClass += ' mandatory'
<div className={"option form-inline #{'has-danger' if @props.invalid}"}>
<label
className=labelClass
htmlFor="#{@state.id}-field">{@props.data_source.label}</label>
{field}
</div>
class Row extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-#{ids.next()}"
open: false
indicator: 'fa-chevron-down'
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
if option.type == 'select'
for o in JSON.parse(option.typedata).options
@state["option-#{index}"] = o.name
break
_set_option: (index, value) =>
@setState("option-#{index}": value)
_add: =>
options = []
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
value = @state["option-#{index}"]
unless value?
value = null
options.push([option.label, value])
item =
id: @props.data_source.id
name: @props.data_source.name
price: @props.data_source.price
options: options
count: 1
@props.cart.add(item)
_toggle_collapse: =>
@setState(open: not @state.open)
_onOpened: =>
@setState(indicator: 'fa-chevron-up')
_onClosed: =>
@setState(indicator: 'fa-chevron-down')
is_valid: ->
if @props.data_source.currentStock == 0
return false
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
data = @state["option-#{index}"]
unless option.is_valid(data)
return false
return true
render: ->
currency = utils.formatCurrency(@props.data_source.price, ':')
options = []
keys = iter.count()
mandatory = false
for option in @props.data_source.options
key = index = keys.next()
value = @state["option-#{index}"]
options.push(
<Option
index=index
key=key
data_source=option
read_only={@props.data_source.currentStock == 0}
invalid={value? and not option.is_valid(value)}
onChange=@_set_option />
)
mandatory = mandatory or option.mandatory
if mandatory
mandatory = <gettext.Message
className='mandatory-explanation'
message={_('Fields marked with
<span class="mandatory"></span>
are mandatory.')} />
else
mandatory = null
if @props.data_source.currentStock?
stock = <div className='stock pull-right'>
{_('Quantity remaining')}: {@props.data_source.currentStock}
</div>
else
stock = null
return <div className='product card'>
<div
onClick=@_toggle_collapse
className='row-header'>
<div className='card-header'>
<span className='price'>{currency}</span>
<i className={'fa mr-3 ' + @state.indicator}
aria-hidden></i>
<span className='name'>{@props.data_source.name}</span>
</div>
</div>
<Collapse isOpen=@state.open id="#{@state.id}-content"
onOpened=@_onOpened onClosed=@_onClosed >
<Card>
<CardBlock className='card-body'>
<p className='description'>
{@props.data_source.description}
</p>
<div className='options'>
{options}
</div>
<div className='pull-right'>
<button
className='btn'
disabled={not @is_valid()}
onClick=@_add>{_('Add')}</button>
</div>
{mandatory}
{stock}
</CardBlock>
</Card>
</Collapse>
</div>
class Section extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-section-#{ids.next()}"
open: false
indicator: 'fa-chevron-down'
top_level: ->
return !@props.data_source.name
_toggle_collapse: =>
@setState(open: not @state.open)
_onOpened: =>
@setState(indicator: 'fa-chevron-up')
_onClosed: =>
@setState(indicator: 'fa-chevron-down')
render: ->
rows = []
for product in @props.data_source.products
rows.push(
<Row
key=product.id
data_source=product
cart=@props.cart
/>
)
product_count = @props.data_source.products.length
if @top_level()
return <div className='section'>
{rows}
</div>
else
return <div id=@state.id className='section card' >
<div onClick=@_toggle_collapse
className='section-name card-header'>
<span className='product-count'>
{product_count + ' ' + _('products')}
</span>
<i className={'fa mr-3 ' + @state.indicator}
aria-hidden></i>
<span>
{@props.data_source.name}
</span>
</div>
<Collapse
isOpen=@state.open
onOpened=@_onOpened
onClosed=@_onClosed>
<Card>
<CardBlock className='card-body'>
{rows}
</CardBlock>
</Card>
</Collapse>
</div>
class List extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-list-#{ids.next()}"
loaded: false
signals.connect(@props.data_source, 'refresh', @_update)
_update: (source) =>
@setState(loaded: true)
render: ->
if @state.loaded
sections = []
for section in @props.data_source.sections
sections.push(
<Section
key=section.name
data_source=section
cart=@props.cart
/>
)
content = sections
else
content = <p className='font-italic'>
{_('Loading...')}
</p>
return <div id=@state.id className='products'>
{content}
</div>
return {
List: List
Option: Option
Row: Row
Section: Section
}
)
| 207774 | /*
Copyright 2019 Open End AB
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
# This module expectes product-list.css to be loaded
define(['react', 'reactstrap', 'gettext', 'iter', 'signals', 'utils'], ->
[React, rs, gettext, iter, signals, utils] = arguments
Collapse = rs.Collapse
Card = rs.Card
CardBlock = rs.CardBlock
_ = gettext.gettext
ids = iter.count()
class Option extends React.Component
constructor: (props) ->
super(props)
@state =
id: "option-#{ids.next()}"
onChange: (event) =>
@props.onChange(@props.index, event.target.value)
render: ->
fieldClass = 'form-control col-9'
id = "#{@state.id}-field"
placeholder = ''
if @props.data_source.type == 'personnummer'
placeholder='YYMMDD-NNNN'
else if @props.data_source.mandatory
placeholder = _('This field is mandatory')
props =
id: id
className: fieldClass
onChange: @onChange
placeholder: placeholder
if @props.read_only
field = <input type='text' disabled {...props} />
else if @props.data_source.type == 'text'
field = <input type='text' {...props} />
else if @props.data_source.type == 'textarea'
field = <textarea {...props} />
else if @props.data_source.type == 'select'
keys = iter.count()
options = []
for option in JSON.parse(@props.data_source.typedata).options
options.push(
<option key=keys.next() value=option.name>
{option.name}
</option>
)
field = <select {...props}>
{options}
</select>
else if @props.data_source.type == 'personnummer'
field = <input type='text' {...props} />
labelClass = 'col-2 col-form-label'
if @props.data_source.mandatory
labelClass += ' mandatory'
<div className={"option form-inline #{'has-danger' if @props.invalid}"}>
<label
className=labelClass
htmlFor="#{@state.id}-field">{@props.data_source.label}</label>
{field}
</div>
class Row extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-#{ids.next()}"
open: false
indicator: 'fa-chevron-down'
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
if option.type == 'select'
for o in JSON.parse(option.typedata).options
@state["option-#{index}"] = o.name
break
_set_option: (index, value) =>
@setState("option-#{index}": value)
_add: =>
options = []
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
value = @state["option-#{index}"]
unless value?
value = null
options.push([option.label, value])
item =
id: @props.data_source.id
name: @props.data_source.name
price: @props.data_source.price
options: options
count: 1
@props.cart.add(item)
_toggle_collapse: =>
@setState(open: not @state.open)
_onOpened: =>
@setState(indicator: 'fa-chevron-up')
_onClosed: =>
@setState(indicator: 'fa-chevron-down')
is_valid: ->
if @props.data_source.currentStock == 0
return false
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
data = @state["option-#{index}"]
unless option.is_valid(data)
return false
return true
render: ->
currency = utils.formatCurrency(@props.data_source.price, ':')
options = []
keys = iter.count()
mandatory = false
for option in @props.data_source.options
key = index = keys.<KEY>()
value = @state["option-#{index}"]
options.push(
<Option
index=index
key=key
data_source=option
read_only={@props.data_source.currentStock == 0}
invalid={value? and not option.is_valid(value)}
onChange=@_set_option />
)
mandatory = mandatory or option.mandatory
if mandatory
mandatory = <gettext.Message
className='mandatory-explanation'
message={_('Fields marked with
<span class="mandatory"></span>
are mandatory.')} />
else
mandatory = null
if @props.data_source.currentStock?
stock = <div className='stock pull-right'>
{_('Quantity remaining')}: {@props.data_source.currentStock}
</div>
else
stock = null
return <div className='product card'>
<div
onClick=@_toggle_collapse
className='row-header'>
<div className='card-header'>
<span className='price'>{currency}</span>
<i className={'fa mr-3 ' + @state.indicator}
aria-hidden></i>
<span className='name'>{@props.data_source.name}</span>
</div>
</div>
<Collapse isOpen=@state.open id="#{@state.id}-content"
onOpened=@_onOpened onClosed=@_onClosed >
<Card>
<CardBlock className='card-body'>
<p className='description'>
{@props.data_source.description}
</p>
<div className='options'>
{options}
</div>
<div className='pull-right'>
<button
className='btn'
disabled={not @is_valid()}
onClick=@_add>{_('Add')}</button>
</div>
{mandatory}
{stock}
</CardBlock>
</Card>
</Collapse>
</div>
class Section extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-section-#{ids.next()}"
open: false
indicator: 'fa-chevron-down'
top_level: ->
return !@props.data_source.name
_toggle_collapse: =>
@setState(open: not @state.open)
_onOpened: =>
@setState(indicator: 'fa-chevron-up')
_onClosed: =>
@setState(indicator: 'fa-chevron-down')
render: ->
rows = []
for product in @props.data_source.products
rows.push(
<Row
key=product.id
data_source=product
cart=@props.cart
/>
)
product_count = @props.data_source.products.length
if @top_level()
return <div className='section'>
{rows}
</div>
else
return <div id=@state.id className='section card' >
<div onClick=@_toggle_collapse
className='section-name card-header'>
<span className='product-count'>
{product_count + ' ' + _('products')}
</span>
<i className={'fa mr-3 ' + @state.indicator}
aria-hidden></i>
<span>
{@props.data_source.name}
</span>
</div>
<Collapse
isOpen=@state.open
onOpened=@_onOpened
onClosed=@_onClosed>
<Card>
<CardBlock className='card-body'>
{rows}
</CardBlock>
</Card>
</Collapse>
</div>
class List extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-list-#{ids.next()}"
loaded: false
signals.connect(@props.data_source, 'refresh', @_update)
_update: (source) =>
@setState(loaded: true)
render: ->
if @state.loaded
sections = []
for section in @props.data_source.sections
sections.push(
<Section
key=section.name
data_source=section
cart=@props.cart
/>
)
content = sections
else
content = <p className='font-italic'>
{_('Loading...')}
</p>
return <div id=@state.id className='products'>
{content}
</div>
return {
List: List
Option: Option
Row: Row
Section: Section
}
)
| true | /*
Copyright 2019 Open End AB
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
# This module expectes product-list.css to be loaded
define(['react', 'reactstrap', 'gettext', 'iter', 'signals', 'utils'], ->
[React, rs, gettext, iter, signals, utils] = arguments
Collapse = rs.Collapse
Card = rs.Card
CardBlock = rs.CardBlock
_ = gettext.gettext
ids = iter.count()
class Option extends React.Component
constructor: (props) ->
super(props)
@state =
id: "option-#{ids.next()}"
onChange: (event) =>
@props.onChange(@props.index, event.target.value)
render: ->
fieldClass = 'form-control col-9'
id = "#{@state.id}-field"
placeholder = ''
if @props.data_source.type == 'personnummer'
placeholder='YYMMDD-NNNN'
else if @props.data_source.mandatory
placeholder = _('This field is mandatory')
props =
id: id
className: fieldClass
onChange: @onChange
placeholder: placeholder
if @props.read_only
field = <input type='text' disabled {...props} />
else if @props.data_source.type == 'text'
field = <input type='text' {...props} />
else if @props.data_source.type == 'textarea'
field = <textarea {...props} />
else if @props.data_source.type == 'select'
keys = iter.count()
options = []
for option in JSON.parse(@props.data_source.typedata).options
options.push(
<option key=keys.next() value=option.name>
{option.name}
</option>
)
field = <select {...props}>
{options}
</select>
else if @props.data_source.type == 'personnummer'
field = <input type='text' {...props} />
labelClass = 'col-2 col-form-label'
if @props.data_source.mandatory
labelClass += ' mandatory'
<div className={"option form-inline #{'has-danger' if @props.invalid}"}>
<label
className=labelClass
htmlFor="#{@state.id}-field">{@props.data_source.label}</label>
{field}
</div>
class Row extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-#{ids.next()}"
open: false
indicator: 'fa-chevron-down'
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
if option.type == 'select'
for o in JSON.parse(option.typedata).options
@state["option-#{index}"] = o.name
break
_set_option: (index, value) =>
@setState("option-#{index}": value)
_add: =>
options = []
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
value = @state["option-#{index}"]
unless value?
value = null
options.push([option.label, value])
item =
id: @props.data_source.id
name: @props.data_source.name
price: @props.data_source.price
options: options
count: 1
@props.cart.add(item)
_toggle_collapse: =>
@setState(open: not @state.open)
_onOpened: =>
@setState(indicator: 'fa-chevron-up')
_onClosed: =>
@setState(indicator: 'fa-chevron-down')
is_valid: ->
if @props.data_source.currentStock == 0
return false
indexes = iter.count()
for option in @props.data_source.options
index = indexes.next()
data = @state["option-#{index}"]
unless option.is_valid(data)
return false
return true
render: ->
currency = utils.formatCurrency(@props.data_source.price, ':')
options = []
keys = iter.count()
mandatory = false
for option in @props.data_source.options
key = index = keys.PI:KEY:<KEY>END_PI()
value = @state["option-#{index}"]
options.push(
<Option
index=index
key=key
data_source=option
read_only={@props.data_source.currentStock == 0}
invalid={value? and not option.is_valid(value)}
onChange=@_set_option />
)
mandatory = mandatory or option.mandatory
if mandatory
mandatory = <gettext.Message
className='mandatory-explanation'
message={_('Fields marked with
<span class="mandatory"></span>
are mandatory.')} />
else
mandatory = null
if @props.data_source.currentStock?
stock = <div className='stock pull-right'>
{_('Quantity remaining')}: {@props.data_source.currentStock}
</div>
else
stock = null
return <div className='product card'>
<div
onClick=@_toggle_collapse
className='row-header'>
<div className='card-header'>
<span className='price'>{currency}</span>
<i className={'fa mr-3 ' + @state.indicator}
aria-hidden></i>
<span className='name'>{@props.data_source.name}</span>
</div>
</div>
<Collapse isOpen=@state.open id="#{@state.id}-content"
onOpened=@_onOpened onClosed=@_onClosed >
<Card>
<CardBlock className='card-body'>
<p className='description'>
{@props.data_source.description}
</p>
<div className='options'>
{options}
</div>
<div className='pull-right'>
<button
className='btn'
disabled={not @is_valid()}
onClick=@_add>{_('Add')}</button>
</div>
{mandatory}
{stock}
</CardBlock>
</Card>
</Collapse>
</div>
class Section extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-section-#{ids.next()}"
open: false
indicator: 'fa-chevron-down'
top_level: ->
return !@props.data_source.name
_toggle_collapse: =>
@setState(open: not @state.open)
_onOpened: =>
@setState(indicator: 'fa-chevron-up')
_onClosed: =>
@setState(indicator: 'fa-chevron-down')
render: ->
rows = []
for product in @props.data_source.products
rows.push(
<Row
key=product.id
data_source=product
cart=@props.cart
/>
)
product_count = @props.data_source.products.length
if @top_level()
return <div className='section'>
{rows}
</div>
else
return <div id=@state.id className='section card' >
<div onClick=@_toggle_collapse
className='section-name card-header'>
<span className='product-count'>
{product_count + ' ' + _('products')}
</span>
<i className={'fa mr-3 ' + @state.indicator}
aria-hidden></i>
<span>
{@props.data_source.name}
</span>
</div>
<Collapse
isOpen=@state.open
onOpened=@_onOpened
onClosed=@_onClosed>
<Card>
<CardBlock className='card-body'>
{rows}
</CardBlock>
</Card>
</Collapse>
</div>
class List extends React.Component
constructor: (props) ->
super(props)
@state =
id: "product-list-#{ids.next()}"
loaded: false
signals.connect(@props.data_source, 'refresh', @_update)
_update: (source) =>
@setState(loaded: true)
render: ->
if @state.loaded
sections = []
for section in @props.data_source.sections
sections.push(
<Section
key=section.name
data_source=section
cart=@props.cart
/>
)
content = sections
else
content = <p className='font-italic'>
{_('Loading...')}
</p>
return <div id=@state.id className='products'>
{content}
</div>
return {
List: List
Option: Option
Row: Row
Section: Section
}
)
|
[
{
"context": "reate CriscoModel and Aux\n instances\n\n # TODO(chris): Might need to curry with\n domainConfig if Cr",
"end": 770,
"score": 0.8009651899337769,
"start": 765,
"tag": "NAME",
"value": "chris"
}
] | src/core/middleware.action/init.coffee | classdojo/crisco | 0 | ###
A collection of the first N
steps of Crisco initialization.
###
###
Let's just define the ordered middleware here. Bind each
anonymous function to this context so it get's access to
the instance variables.
###
Middleware =
###
Step 1:
-Create a namespaced __crisco variable
container on the express req object.
-Call user registered deserializer
if it exists.
###
'1': (crisco, domain) ->
return (req, res, next) =>
req.__crisco = {}
h = (me) =>
if me?
req.__crisco.me = me
next()
m = @__c.getMiddleware "deserialize"
if m?
m.call(m, req, res, @__database, h)
else
next()
###
Step 2:
-Create CriscoModel and Aux
instances
# TODO(chris): Might need to curry with
domainConfig if CriscoModel needs
domain configurable options to bootstrap
itself.
###
'2': (domain, routeInfo) ->
return (req, res, next) =>
extendedRouteInfo = _.extend routeInfo, {req: req, res: res}
ca = @__primitiveFactory.getPrimitive "CriscoAction", domain, extendedRouteInfo
aux = @__primitiveFactory.getPrimitive "CriscoAux", domain, extendedRouteInfo
req.__crisco.action = ca
req.__crisco.aux = aux
next()
###
Class: CriscoActionInit
A collection of middleware
and it's initializer that
initialize Crisco Primitives
###
class CriscoActionInit
constructor: (crisco, database, primitiveFactory) ->
@__c = crisco
@__database = database
@__primitiveFactory = primitiveFactory
init: () ->
for step, route of Middleware
Middleware[step] = route.bind(@)
getExpressMiddleware: (domain, routeInfo) ->
return [
Middleware['1'](@__c, domain),
Middleware['2'](domain, routeInfo)
]
module.exports = CriscoActionInit
| 126369 | ###
A collection of the first N
steps of Crisco initialization.
###
###
Let's just define the ordered middleware here. Bind each
anonymous function to this context so it get's access to
the instance variables.
###
Middleware =
###
Step 1:
-Create a namespaced __crisco variable
container on the express req object.
-Call user registered deserializer
if it exists.
###
'1': (crisco, domain) ->
return (req, res, next) =>
req.__crisco = {}
h = (me) =>
if me?
req.__crisco.me = me
next()
m = @__c.getMiddleware "deserialize"
if m?
m.call(m, req, res, @__database, h)
else
next()
###
Step 2:
-Create CriscoModel and Aux
instances
# TODO(<NAME>): Might need to curry with
domainConfig if CriscoModel needs
domain configurable options to bootstrap
itself.
###
'2': (domain, routeInfo) ->
return (req, res, next) =>
extendedRouteInfo = _.extend routeInfo, {req: req, res: res}
ca = @__primitiveFactory.getPrimitive "CriscoAction", domain, extendedRouteInfo
aux = @__primitiveFactory.getPrimitive "CriscoAux", domain, extendedRouteInfo
req.__crisco.action = ca
req.__crisco.aux = aux
next()
###
Class: CriscoActionInit
A collection of middleware
and it's initializer that
initialize Crisco Primitives
###
class CriscoActionInit
constructor: (crisco, database, primitiveFactory) ->
@__c = crisco
@__database = database
@__primitiveFactory = primitiveFactory
init: () ->
for step, route of Middleware
Middleware[step] = route.bind(@)
getExpressMiddleware: (domain, routeInfo) ->
return [
Middleware['1'](@__c, domain),
Middleware['2'](domain, routeInfo)
]
module.exports = CriscoActionInit
| true | ###
A collection of the first N
steps of Crisco initialization.
###
###
Let's just define the ordered middleware here. Bind each
anonymous function to this context so it get's access to
the instance variables.
###
Middleware =
###
Step 1:
-Create a namespaced __crisco variable
container on the express req object.
-Call user registered deserializer
if it exists.
###
'1': (crisco, domain) ->
return (req, res, next) =>
req.__crisco = {}
h = (me) =>
if me?
req.__crisco.me = me
next()
m = @__c.getMiddleware "deserialize"
if m?
m.call(m, req, res, @__database, h)
else
next()
###
Step 2:
-Create CriscoModel and Aux
instances
# TODO(PI:NAME:<NAME>END_PI): Might need to curry with
domainConfig if CriscoModel needs
domain configurable options to bootstrap
itself.
###
'2': (domain, routeInfo) ->
return (req, res, next) =>
extendedRouteInfo = _.extend routeInfo, {req: req, res: res}
ca = @__primitiveFactory.getPrimitive "CriscoAction", domain, extendedRouteInfo
aux = @__primitiveFactory.getPrimitive "CriscoAux", domain, extendedRouteInfo
req.__crisco.action = ca
req.__crisco.aux = aux
next()
###
Class: CriscoActionInit
A collection of middleware
and it's initializer that
initialize Crisco Primitives
###
class CriscoActionInit
constructor: (crisco, database, primitiveFactory) ->
@__c = crisco
@__database = database
@__primitiveFactory = primitiveFactory
init: () ->
for step, route of Middleware
Middleware[step] = route.bind(@)
getExpressMiddleware: (domain, routeInfo) ->
return [
Middleware['1'](@__c, domain),
Middleware['2'](domain, routeInfo)
]
module.exports = CriscoActionInit
|
[
{
"context": "facts = [\n \"There used to be a street named after Chuck Norris, but it was changed because nobody crosses Chuck ",
"end": 100,
"score": 0.959043562412262,
"start": 88,
"tag": "NAME",
"value": "Chuck Norris"
},
{
"context": "the courage to tell him yet.\",\n \"Chuck Norris and Superman once fought each other on a bet. The loser had",
"end": 371,
"score": 0.8813669085502625,
"start": 366,
"tag": "NAME",
"value": "Super"
},
{
"context": " can imagine a color he's never seen before.\",\n \"Superman tightened the tire lugs nuts on his car, Chuck No",
"end": 731,
"score": 0.9220798015594482,
"start": 723,
"tag": "NAME",
"value": "Superman"
},
{
"context": ", he checks his closet for Chuck Norris.\",\n \"When Alexander Bell invented the telephone he had 3 missed calls from",
"end": 1040,
"score": 0.98825603723526,
"start": 1026,
"tag": "NAME",
"value": "Alexander Bell"
},
{
"context": ".........the rattle snake died\",\n \"When Chuck Norris does a pushup, he isn't lifting himself up, he's ",
"end": 1930,
"score": 0.5573790073394775,
"start": 1927,
"tag": "NAME",
"value": "ris"
}
] | scripts/entertain.coffee | atf-hackathon/nick | 0 | # Description:
# File for facts
#
facts = [
"There used to be a street named after Chuck Norris, but it was changed because nobody crosses Chuck Norris and lives.",
"Chuck Norris has already been to Mars; that's why there are no signs of life.",
"Chuck Norris died 20 years ago, Death just hasn't built up the courage to tell him yet.",
"Chuck Norris and Superman once fought each other on a bet. The loser had to start wearing his underwear on the outside of his pants.",
"Some magicans can walk on water, Chuck Norris can swim through land.",
"Chuck Norris counted to infinity - twice.",
"Chuck Norris is the reason why Waldo is hiding.",
"Chuck Norris can imagine a color he's never seen before.",
"Superman tightened the tire lugs nuts on his car, Chuck Norris came along and gave it a couple more turns.",
"Death once had a near-Chuck Norris experience",
"Chuck Norris can slam a revolving door.",
"When the Boogeyman goes to sleep every night, he checks his closet for Chuck Norris.",
"When Alexander Bell invented the telephone he had 3 missed calls from Chuck Norris",
"Chuck Norris once urinated in a semi truck's gas tank as a joke....that truck is now known as Optimus Prime.",
"Chuck Norris once kicked a horse in the chin. Its decendants are known today as Giraffes.",
"Chuck Norris doesn't flush the toilet, he scares the sh*t out of it",
"Chuck Norris can win a game of Connect Four in only three moves.",
"Chuck Norris will never have a heart attack. His heart isn't nearly foolish enough to attack him.",
"There is no theory of evolution. Just a list of animals Chuck Norris allows to live.",
"Chuck Norris can cut through a hot knife with butter",
"Chuck Norris was in all the Star Wars movies. He was the force.",
"Chuck Norris once got bit by a rattle snake........ After three days of pain and agony ..................the rattle snake died",
"When Chuck Norris does a pushup, he isn't lifting himself up, he's pushing the Earth down.",
"Fear of spiders is aracnaphobia, fear of tight spaces is chlaustraphobia, fear of Chuck Norris is called Logic",
"Chuck Norris doesn’t wear a watch. HE decides what time it is.",
"Chuck Norris can light a fire by rubbing two ice-cubes together.",
"The original title for Alien vs. Predator was Alien and Predator vs Chuck Norris.",
"The film was cancelled shortly after going into preproduction. No one would pay nine dollars to see a movie fourteen seconds long.",
"Chuck Norris doesn't read books. He stares them down until he gets the information he wants.",
"Chuck Norris made a Happy Meal cry.",
"Chuck Norris is the only one who can kick you in the back of the face.",
"Some people wear Superman pajamas. Superman wears Chuck Norris pajamas.",
"Google won't search for Chuck Norris because it knows you don't find Chuck Norris, he finds you.",
"Chuck Norris can strike the same lightning twice.",
"Chuck Norriss' calandar goes from march 31st to april 2nd cause nobody fools Chuck Norris",
"Chuck Norris won the Tour De France on a stationary bike.",
"Chuck Norris can kill two stones with one bird.",
"Chuck Norris makes onions cry.",
"Chuck Norris irons his trousers with them still on.",
"Chuck Norris Talks About himself in the fourth Person.",
"Chuck Norris doesn't have Twitter, because he's already following you.",
"When Chuck Norris plays Monopoly, it affects the actual world economy.",
"Chuck Norris can set ants on fire with a magnifying glass. At night.",
"Chuck Norris is the only man to ever defeat a brick wall in a game of tennis.",
"Chuck Norris does not sleep. He waits.",
"Chuck Norris can divide by zero",
"Chuck Norris CAN believe it's not butter.",
"Oxygen needs Chuck Norris to survive.",
"Chuck Norris' hand is the only hand that can beat a Royal Flush.",
"Chuck Norris doesn't have a reflection in the mirror because it's afraid to look at Chuck Norris."
];
module.exports = (robot) -> robot.hear /\/nick entertain/i, (res) ->
res.send res.random facts
| 48635 | # Description:
# File for facts
#
facts = [
"There used to be a street named after <NAME>, but it was changed because nobody crosses Chuck Norris and lives.",
"Chuck Norris has already been to Mars; that's why there are no signs of life.",
"Chuck Norris died 20 years ago, Death just hasn't built up the courage to tell him yet.",
"Chuck Norris and <NAME>man once fought each other on a bet. The loser had to start wearing his underwear on the outside of his pants.",
"Some magicans can walk on water, Chuck Norris can swim through land.",
"Chuck Norris counted to infinity - twice.",
"Chuck Norris is the reason why Waldo is hiding.",
"Chuck Norris can imagine a color he's never seen before.",
"<NAME> tightened the tire lugs nuts on his car, Chuck Norris came along and gave it a couple more turns.",
"Death once had a near-Chuck Norris experience",
"Chuck Norris can slam a revolving door.",
"When the Boogeyman goes to sleep every night, he checks his closet for Chuck Norris.",
"When <NAME> invented the telephone he had 3 missed calls from Chuck Norris",
"Chuck Norris once urinated in a semi truck's gas tank as a joke....that truck is now known as Optimus Prime.",
"Chuck Norris once kicked a horse in the chin. Its decendants are known today as Giraffes.",
"Chuck Norris doesn't flush the toilet, he scares the sh*t out of it",
"Chuck Norris can win a game of Connect Four in only three moves.",
"Chuck Norris will never have a heart attack. His heart isn't nearly foolish enough to attack him.",
"There is no theory of evolution. Just a list of animals Chuck Norris allows to live.",
"Chuck Norris can cut through a hot knife with butter",
"Chuck Norris was in all the Star Wars movies. He was the force.",
"Chuck Norris once got bit by a rattle snake........ After three days of pain and agony ..................the rattle snake died",
"When Chuck Nor<NAME> does a pushup, he isn't lifting himself up, he's pushing the Earth down.",
"Fear of spiders is aracnaphobia, fear of tight spaces is chlaustraphobia, fear of Chuck Norris is called Logic",
"Chuck Norris doesn’t wear a watch. HE decides what time it is.",
"Chuck Norris can light a fire by rubbing two ice-cubes together.",
"The original title for Alien vs. Predator was Alien and Predator vs Chuck Norris.",
"The film was cancelled shortly after going into preproduction. No one would pay nine dollars to see a movie fourteen seconds long.",
"Chuck Norris doesn't read books. He stares them down until he gets the information he wants.",
"Chuck Norris made a Happy Meal cry.",
"Chuck Norris is the only one who can kick you in the back of the face.",
"Some people wear Superman pajamas. Superman wears Chuck Norris pajamas.",
"Google won't search for Chuck Norris because it knows you don't find Chuck Norris, he finds you.",
"Chuck Norris can strike the same lightning twice.",
"Chuck Norriss' calandar goes from march 31st to april 2nd cause nobody fools Chuck Norris",
"Chuck Norris won the Tour De France on a stationary bike.",
"Chuck Norris can kill two stones with one bird.",
"Chuck Norris makes onions cry.",
"Chuck Norris irons his trousers with them still on.",
"Chuck Norris Talks About himself in the fourth Person.",
"Chuck Norris doesn't have Twitter, because he's already following you.",
"When Chuck Norris plays Monopoly, it affects the actual world economy.",
"Chuck Norris can set ants on fire with a magnifying glass. At night.",
"Chuck Norris is the only man to ever defeat a brick wall in a game of tennis.",
"Chuck Norris does not sleep. He waits.",
"Chuck Norris can divide by zero",
"Chuck Norris CAN believe it's not butter.",
"Oxygen needs Chuck Norris to survive.",
"Chuck Norris' hand is the only hand that can beat a Royal Flush.",
"Chuck Norris doesn't have a reflection in the mirror because it's afraid to look at Chuck Norris."
];
module.exports = (robot) -> robot.hear /\/nick entertain/i, (res) ->
res.send res.random facts
| true | # Description:
# File for facts
#
facts = [
"There used to be a street named after PI:NAME:<NAME>END_PI, but it was changed because nobody crosses Chuck Norris and lives.",
"Chuck Norris has already been to Mars; that's why there are no signs of life.",
"Chuck Norris died 20 years ago, Death just hasn't built up the courage to tell him yet.",
"Chuck Norris and PI:NAME:<NAME>END_PIman once fought each other on a bet. The loser had to start wearing his underwear on the outside of his pants.",
"Some magicans can walk on water, Chuck Norris can swim through land.",
"Chuck Norris counted to infinity - twice.",
"Chuck Norris is the reason why Waldo is hiding.",
"Chuck Norris can imagine a color he's never seen before.",
"PI:NAME:<NAME>END_PI tightened the tire lugs nuts on his car, Chuck Norris came along and gave it a couple more turns.",
"Death once had a near-Chuck Norris experience",
"Chuck Norris can slam a revolving door.",
"When the Boogeyman goes to sleep every night, he checks his closet for Chuck Norris.",
"When PI:NAME:<NAME>END_PI invented the telephone he had 3 missed calls from Chuck Norris",
"Chuck Norris once urinated in a semi truck's gas tank as a joke....that truck is now known as Optimus Prime.",
"Chuck Norris once kicked a horse in the chin. Its decendants are known today as Giraffes.",
"Chuck Norris doesn't flush the toilet, he scares the sh*t out of it",
"Chuck Norris can win a game of Connect Four in only three moves.",
"Chuck Norris will never have a heart attack. His heart isn't nearly foolish enough to attack him.",
"There is no theory of evolution. Just a list of animals Chuck Norris allows to live.",
"Chuck Norris can cut through a hot knife with butter",
"Chuck Norris was in all the Star Wars movies. He was the force.",
"Chuck Norris once got bit by a rattle snake........ After three days of pain and agony ..................the rattle snake died",
"When Chuck NorPI:NAME:<NAME>END_PI does a pushup, he isn't lifting himself up, he's pushing the Earth down.",
"Fear of spiders is aracnaphobia, fear of tight spaces is chlaustraphobia, fear of Chuck Norris is called Logic",
"Chuck Norris doesn’t wear a watch. HE decides what time it is.",
"Chuck Norris can light a fire by rubbing two ice-cubes together.",
"The original title for Alien vs. Predator was Alien and Predator vs Chuck Norris.",
"The film was cancelled shortly after going into preproduction. No one would pay nine dollars to see a movie fourteen seconds long.",
"Chuck Norris doesn't read books. He stares them down until he gets the information he wants.",
"Chuck Norris made a Happy Meal cry.",
"Chuck Norris is the only one who can kick you in the back of the face.",
"Some people wear Superman pajamas. Superman wears Chuck Norris pajamas.",
"Google won't search for Chuck Norris because it knows you don't find Chuck Norris, he finds you.",
"Chuck Norris can strike the same lightning twice.",
"Chuck Norriss' calandar goes from march 31st to april 2nd cause nobody fools Chuck Norris",
"Chuck Norris won the Tour De France on a stationary bike.",
"Chuck Norris can kill two stones with one bird.",
"Chuck Norris makes onions cry.",
"Chuck Norris irons his trousers with them still on.",
"Chuck Norris Talks About himself in the fourth Person.",
"Chuck Norris doesn't have Twitter, because he's already following you.",
"When Chuck Norris plays Monopoly, it affects the actual world economy.",
"Chuck Norris can set ants on fire with a magnifying glass. At night.",
"Chuck Norris is the only man to ever defeat a brick wall in a game of tennis.",
"Chuck Norris does not sleep. He waits.",
"Chuck Norris can divide by zero",
"Chuck Norris CAN believe it's not butter.",
"Oxygen needs Chuck Norris to survive.",
"Chuck Norris' hand is the only hand that can beat a Royal Flush.",
"Chuck Norris doesn't have a reflection in the mirror because it's afraid to look at Chuck Norris."
];
module.exports = (robot) -> robot.hear /\/nick entertain/i, (res) ->
res.send res.random facts
|
[
{
"context": "d'\n username: config.im.client.id\n password: config.im.client.secret\n data = \n grant_type: 'password'\n username",
"end": 282,
"score": 0.9969449043273926,
"start": 259,
"tag": "PASSWORD",
"value": "config.im.client.secret"
},
{
"context": "ord'\n username: config.im.user.id\n password: config.im.user.secret\n scope: process.env.OAUTH2_SCOPE\n http\n .p",
"end": 387,
"score": 0.999323844909668,
"start": 366,
"tag": "PASSWORD",
"value": "config.im.user.secret"
},
{
"context": " user:\n name: process.env.USER\n pass: process.env.PASS\n oauth2:\n url: process.env.TOKENURL\n im:\n ",
"end": 1241,
"score": 0.9846355319023132,
"start": 1225,
"tag": "PASSWORD",
"value": "process.env.PASS"
}
] | test/specs/dp/config.coffee | twhtanghk/wdio.inhse | 0 | fs = require 'fs'
Promise = require 'bluebird'
http = Promise.promisifyAll require 'needle'
token = (client, user) ->
config = module.exports
opts =
'Content-Type': 'application/x-www-form-urlencoded'
username: config.im.client.id
password: config.im.client.secret
data =
grant_type: 'password'
username: config.im.user.id
password: config.im.user.secret
scope: process.env.OAUTH2_SCOPE
http
.postAsync config.oauth2.url, data, opts
.then (res) ->
res.body.access_token
msg = (token, body, file) ->
config = module.exports
data =
to: config.im.to
body: body
file:
buffer: file
filename: 'screenDump.png'
content_type: 'image/png'
opts =
multipart: true
headers:
Authorization: "Bearer #{token}"
http
.postAsync config.im.url, data, opts
.then (res) ->
if res.statusCode != 201
console.log "im: #{res.statusCode} #{res.statusMessage}"
errHandler = (body, screenDump) ->
config = module.exports
token config.im.client, config.im.user
.then (token) ->
msg token, body, screenDump
module.exports =
portal:
url: process.env.URL
user:
name: process.env.USER
pass: process.env.PASS
oauth2:
url: process.env.TOKENURL
im:
url: process.env.IMURL
client:
id: process.env.CLIENT_ID
secret: process.env.CLIENT_PASS
user:
id: process.env.IMUSER
secret: process.env.IMPASS
to: process.env.NOTIFY
errHandler: errHandler
| 82268 | fs = require 'fs'
Promise = require 'bluebird'
http = Promise.promisifyAll require 'needle'
token = (client, user) ->
config = module.exports
opts =
'Content-Type': 'application/x-www-form-urlencoded'
username: config.im.client.id
password: <PASSWORD>
data =
grant_type: 'password'
username: config.im.user.id
password: <PASSWORD>
scope: process.env.OAUTH2_SCOPE
http
.postAsync config.oauth2.url, data, opts
.then (res) ->
res.body.access_token
msg = (token, body, file) ->
config = module.exports
data =
to: config.im.to
body: body
file:
buffer: file
filename: 'screenDump.png'
content_type: 'image/png'
opts =
multipart: true
headers:
Authorization: "Bearer #{token}"
http
.postAsync config.im.url, data, opts
.then (res) ->
if res.statusCode != 201
console.log "im: #{res.statusCode} #{res.statusMessage}"
errHandler = (body, screenDump) ->
config = module.exports
token config.im.client, config.im.user
.then (token) ->
msg token, body, screenDump
module.exports =
portal:
url: process.env.URL
user:
name: process.env.USER
pass: <PASSWORD>
oauth2:
url: process.env.TOKENURL
im:
url: process.env.IMURL
client:
id: process.env.CLIENT_ID
secret: process.env.CLIENT_PASS
user:
id: process.env.IMUSER
secret: process.env.IMPASS
to: process.env.NOTIFY
errHandler: errHandler
| true | fs = require 'fs'
Promise = require 'bluebird'
http = Promise.promisifyAll require 'needle'
token = (client, user) ->
config = module.exports
opts =
'Content-Type': 'application/x-www-form-urlencoded'
username: config.im.client.id
password: PI:PASSWORD:<PASSWORD>END_PI
data =
grant_type: 'password'
username: config.im.user.id
password: PI:PASSWORD:<PASSWORD>END_PI
scope: process.env.OAUTH2_SCOPE
http
.postAsync config.oauth2.url, data, opts
.then (res) ->
res.body.access_token
msg = (token, body, file) ->
config = module.exports
data =
to: config.im.to
body: body
file:
buffer: file
filename: 'screenDump.png'
content_type: 'image/png'
opts =
multipart: true
headers:
Authorization: "Bearer #{token}"
http
.postAsync config.im.url, data, opts
.then (res) ->
if res.statusCode != 201
console.log "im: #{res.statusCode} #{res.statusMessage}"
errHandler = (body, screenDump) ->
config = module.exports
token config.im.client, config.im.user
.then (token) ->
msg token, body, screenDump
module.exports =
portal:
url: process.env.URL
user:
name: process.env.USER
pass: PI:PASSWORD:<PASSWORD>END_PI
oauth2:
url: process.env.TOKENURL
im:
url: process.env.IMURL
client:
id: process.env.CLIENT_ID
secret: process.env.CLIENT_PASS
user:
id: process.env.IMUSER
secret: process.env.IMPASS
to: process.env.NOTIFY
errHandler: errHandler
|
[
{
"context": "\n\ndnschain\nhttp://dnschain.net\n\nCopyright (c) 2013 Greg Slepak\nLicensed under the BSD 3-Clause license.\n\n###\n\n# ",
"end": 65,
"score": 0.9998682737350464,
"start": 54,
"tag": "NAME",
"value": "Greg Slepak"
}
] | src/lib/stacked-scheduler.coffee | wartron/dnschain | 1 | ###
dnschain
http://dnschain.net
Copyright (c) 2013 Greg Slepak
Licensed under the BSD 3-Clause license.
###
# TODO: go through 'TODO's!
module.exports = (dnschain) ->
# expose these into our namespace
for k of dnschain.globals
eval "var #{k} = dnschain.globals.#{k};"
class StackedScheduler
constructor: ({@stackedDelay}) ->
@stackedDelay ?= 2000 # 2 seconds by default
@tasks = {}
@nextRunTime = Date.now()
@taskCounter = 0
cancelAll: (runCallback=false)->
for key, task of @tasks
clearTimeout(task.tid)
task.callback() if runCallback
delete @tasks[key]
schedule: (callback) ->
diffMillis = Date.now() - @nextRunTime
@nextRunTime += diffMillis + @stackedDelay
if @stackedDelay is 0 or diffMillis >= @stackedDelay
process.nextTick callback
else
nonce = @taskCounter++
cbAndCleanup = =>
delete @tasks[nonce]
callback()
@tasks[nonce] =
callback: callback # for 'cancelAll'
tid: setTimeout(cbAndCleanup, diffMillis) | 25237 | ###
dnschain
http://dnschain.net
Copyright (c) 2013 <NAME>
Licensed under the BSD 3-Clause license.
###
# TODO: go through 'TODO's!
module.exports = (dnschain) ->
# expose these into our namespace
for k of dnschain.globals
eval "var #{k} = dnschain.globals.#{k};"
class StackedScheduler
constructor: ({@stackedDelay}) ->
@stackedDelay ?= 2000 # 2 seconds by default
@tasks = {}
@nextRunTime = Date.now()
@taskCounter = 0
cancelAll: (runCallback=false)->
for key, task of @tasks
clearTimeout(task.tid)
task.callback() if runCallback
delete @tasks[key]
schedule: (callback) ->
diffMillis = Date.now() - @nextRunTime
@nextRunTime += diffMillis + @stackedDelay
if @stackedDelay is 0 or diffMillis >= @stackedDelay
process.nextTick callback
else
nonce = @taskCounter++
cbAndCleanup = =>
delete @tasks[nonce]
callback()
@tasks[nonce] =
callback: callback # for 'cancelAll'
tid: setTimeout(cbAndCleanup, diffMillis) | true | ###
dnschain
http://dnschain.net
Copyright (c) 2013 PI:NAME:<NAME>END_PI
Licensed under the BSD 3-Clause license.
###
# TODO: go through 'TODO's!
module.exports = (dnschain) ->
# expose these into our namespace
for k of dnschain.globals
eval "var #{k} = dnschain.globals.#{k};"
class StackedScheduler
constructor: ({@stackedDelay}) ->
@stackedDelay ?= 2000 # 2 seconds by default
@tasks = {}
@nextRunTime = Date.now()
@taskCounter = 0
cancelAll: (runCallback=false)->
for key, task of @tasks
clearTimeout(task.tid)
task.callback() if runCallback
delete @tasks[key]
schedule: (callback) ->
diffMillis = Date.now() - @nextRunTime
@nextRunTime += diffMillis + @stackedDelay
if @stackedDelay is 0 or diffMillis >= @stackedDelay
process.nextTick callback
else
nonce = @taskCounter++
cbAndCleanup = =>
delete @tasks[nonce]
callback()
@tasks[nonce] =
callback: callback # for 'cancelAll'
tid: setTimeout(cbAndCleanup, diffMillis) |
[
{
"context": "ileoverview Tests for no-array-index-key\n# @author Joe Lencioni\n###\n\n'use strict'\n\n# ----------------------------",
"end": 72,
"score": 0.9997994303703308,
"start": 60,
"tag": "NAME",
"value": "Joe Lencioni"
},
{
"context": "(bar, i) => React.createElement('Foo', { key: 'foo-' + i + '-bar' }))\"\n errors: [message: 'Do not u",
"end": 4420,
"score": 0.6579412817955017,
"start": 4420,
"tag": "KEY",
"value": ""
},
{
"context": " => React.createElement('Foo', { key: 'foo-' + i + '-bar' }))\"\n errors: [message: 'Do not use Array ind",
"end": 4434,
"score": 0.762136697769165,
"start": 4429,
"tag": "KEY",
"value": "'-bar"
}
] | src/tests/rules/no-array-index-key.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-array-index-key
# @author Joe Lencioni
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/no-array-index-key'
{RuleTester} = require 'eslint'
path = require 'path'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
### eslint-disable coffee/no-template-curly-in-string ###
ruleTester.run 'no-array-index-key', rule,
valid: [
code: '<Foo key="foo" />'
,
code: '<Foo key={i} />'
,
code: '<Foo key />'
,
code: '<Foo key={"foo-#{i}"} />'
,
code: "<Foo key={'foo-' + i} />"
,
code: 'foo.bar((baz, i) => <Foo key={i} />)'
,
code: 'foo.bar((bar, i) => <Foo key={"foo-#{i}"} />)'
,
code: "foo.bar((bar, i) => <Foo key={'foo-' + i} />)"
,
code: 'foo.map((baz) => <Foo key={baz.id} />)'
,
code: 'foo.map((baz, i) => <Foo key={baz.id} />)'
,
code: "foo.map((baz, i) => <Foo key={'foo' + baz.id} />)"
,
code:
'foo.map((baz, i) => React.cloneElement(someChild, { ...someChild.props }))'
,
code: '''
foo.map (item, i) =>
React.cloneElement someChild,
key: item.id
'''
,
code: 'foo.map((baz, i) => <Foo key />)'
,
code: 'foo.reduce(((a, b) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduce(((a, b, i) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduceRight(((a, b) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduceRight(((a, b, i) => a.concat(<Foo key={b.id} />)), [])'
]
invalid: [
code: 'foo.map (bar, i) -> <Foo key={i} />'
errors: [message: 'Do not use Array index in keys']
,
code: '[{}, {}].map((bar, i) => <Foo key={i} />)'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.map((bar, anything) => <Foo key={anything} />)'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.map((bar, i) => <Foo key={"foo-#{i}"} />)'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => <Foo key={'foo-' + i} />)"
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => <Foo key={'foo-' + i + '-bar'} />)"
errors: [message: 'Do not use Array index in keys']
,
code: '''
foo.map (baz, i) ->
React.cloneElement someChild, {
...someChild.props, key: i
}
'''
errors: [message: 'Do not use Array index in keys']
,
code: '''
foo.map (item, i) =>
React.cloneElement someChild,
key: i
'''
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.forEach((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.filter((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.some((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.every((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.find((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.findIndex((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.reduce(((a, b, i) => a.concat(<Foo key={i} />)), [])'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.reduceRight(((a, b, i) => a.concat(<Foo key={i} />)), [])'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => React.createElement('Foo', { key: i }))"
errors: [message: 'Do not use Array index in keys']
,
code:
'foo.map((bar, i) => React.createElement(\'Foo\', { key: "foo-#{i}" }))'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => React.createElement('Foo', { key: 'foo-' + i }))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.map((bar, i) => React.createElement('Foo', { key: 'foo-' + i + '-bar' }))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.forEach((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.filter((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.some((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.every((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.find((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.findIndex((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
]
| 199015 | ###*
# @fileoverview Tests for no-array-index-key
# @author <NAME>
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/no-array-index-key'
{RuleTester} = require 'eslint'
path = require 'path'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
### eslint-disable coffee/no-template-curly-in-string ###
ruleTester.run 'no-array-index-key', rule,
valid: [
code: '<Foo key="foo" />'
,
code: '<Foo key={i} />'
,
code: '<Foo key />'
,
code: '<Foo key={"foo-#{i}"} />'
,
code: "<Foo key={'foo-' + i} />"
,
code: 'foo.bar((baz, i) => <Foo key={i} />)'
,
code: 'foo.bar((bar, i) => <Foo key={"foo-#{i}"} />)'
,
code: "foo.bar((bar, i) => <Foo key={'foo-' + i} />)"
,
code: 'foo.map((baz) => <Foo key={baz.id} />)'
,
code: 'foo.map((baz, i) => <Foo key={baz.id} />)'
,
code: "foo.map((baz, i) => <Foo key={'foo' + baz.id} />)"
,
code:
'foo.map((baz, i) => React.cloneElement(someChild, { ...someChild.props }))'
,
code: '''
foo.map (item, i) =>
React.cloneElement someChild,
key: item.id
'''
,
code: 'foo.map((baz, i) => <Foo key />)'
,
code: 'foo.reduce(((a, b) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduce(((a, b, i) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduceRight(((a, b) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduceRight(((a, b, i) => a.concat(<Foo key={b.id} />)), [])'
]
invalid: [
code: 'foo.map (bar, i) -> <Foo key={i} />'
errors: [message: 'Do not use Array index in keys']
,
code: '[{}, {}].map((bar, i) => <Foo key={i} />)'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.map((bar, anything) => <Foo key={anything} />)'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.map((bar, i) => <Foo key={"foo-#{i}"} />)'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => <Foo key={'foo-' + i} />)"
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => <Foo key={'foo-' + i + '-bar'} />)"
errors: [message: 'Do not use Array index in keys']
,
code: '''
foo.map (baz, i) ->
React.cloneElement someChild, {
...someChild.props, key: i
}
'''
errors: [message: 'Do not use Array index in keys']
,
code: '''
foo.map (item, i) =>
React.cloneElement someChild,
key: i
'''
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.forEach((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.filter((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.some((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.every((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.find((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.findIndex((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.reduce(((a, b, i) => a.concat(<Foo key={i} />)), [])'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.reduceRight(((a, b, i) => a.concat(<Foo key={i} />)), [])'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => React.createElement('Foo', { key: i }))"
errors: [message: 'Do not use Array index in keys']
,
code:
'foo.map((bar, i) => React.createElement(\'Foo\', { key: "foo-#{i}" }))'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => React.createElement('Foo', { key: 'foo-' + i }))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.map((bar, i) => React.createElement('Foo', { key: 'foo<KEY>-' + i + <KEY>' }))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.forEach((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.filter((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.some((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.every((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.find((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.findIndex((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
]
| true | ###*
# @fileoverview Tests for no-array-index-key
# @author PI:NAME:<NAME>END_PI
###
'use strict'
# -----------------------------------------------------------------------------
# Requirements
# -----------------------------------------------------------------------------
rule = require 'eslint-plugin-react/lib/rules/no-array-index-key'
{RuleTester} = require 'eslint'
path = require 'path'
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
### eslint-disable coffee/no-template-curly-in-string ###
ruleTester.run 'no-array-index-key', rule,
valid: [
code: '<Foo key="foo" />'
,
code: '<Foo key={i} />'
,
code: '<Foo key />'
,
code: '<Foo key={"foo-#{i}"} />'
,
code: "<Foo key={'foo-' + i} />"
,
code: 'foo.bar((baz, i) => <Foo key={i} />)'
,
code: 'foo.bar((bar, i) => <Foo key={"foo-#{i}"} />)'
,
code: "foo.bar((bar, i) => <Foo key={'foo-' + i} />)"
,
code: 'foo.map((baz) => <Foo key={baz.id} />)'
,
code: 'foo.map((baz, i) => <Foo key={baz.id} />)'
,
code: "foo.map((baz, i) => <Foo key={'foo' + baz.id} />)"
,
code:
'foo.map((baz, i) => React.cloneElement(someChild, { ...someChild.props }))'
,
code: '''
foo.map (item, i) =>
React.cloneElement someChild,
key: item.id
'''
,
code: 'foo.map((baz, i) => <Foo key />)'
,
code: 'foo.reduce(((a, b) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduce(((a, b, i) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduceRight(((a, b) => a.concat(<Foo key={b.id} />)), [])'
,
code: 'foo.reduceRight(((a, b, i) => a.concat(<Foo key={b.id} />)), [])'
]
invalid: [
code: 'foo.map (bar, i) -> <Foo key={i} />'
errors: [message: 'Do not use Array index in keys']
,
code: '[{}, {}].map((bar, i) => <Foo key={i} />)'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.map((bar, anything) => <Foo key={anything} />)'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.map((bar, i) => <Foo key={"foo-#{i}"} />)'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => <Foo key={'foo-' + i} />)"
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => <Foo key={'foo-' + i + '-bar'} />)"
errors: [message: 'Do not use Array index in keys']
,
code: '''
foo.map (baz, i) ->
React.cloneElement someChild, {
...someChild.props, key: i
}
'''
errors: [message: 'Do not use Array index in keys']
,
code: '''
foo.map (item, i) =>
React.cloneElement someChild,
key: i
'''
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.forEach((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.filter((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.some((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.every((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.find((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.findIndex((bar, i) => baz.push(<Foo key={i} />))'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.reduce(((a, b, i) => a.concat(<Foo key={i} />)), [])'
errors: [message: 'Do not use Array index in keys']
,
code: 'foo.reduceRight(((a, b, i) => a.concat(<Foo key={i} />)), [])'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => React.createElement('Foo', { key: i }))"
errors: [message: 'Do not use Array index in keys']
,
code:
'foo.map((bar, i) => React.createElement(\'Foo\', { key: "foo-#{i}" }))'
errors: [message: 'Do not use Array index in keys']
,
code: "foo.map((bar, i) => React.createElement('Foo', { key: 'foo-' + i }))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.map((bar, i) => React.createElement('Foo', { key: 'fooPI:KEY:<KEY>END_PI-' + i + PI:KEY:<KEY>END_PI' }))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.forEach((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.filter((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.some((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.every((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.find((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
,
code:
"foo.findIndex((bar, i) => baz.push(React.createElement('Foo', { key: i })))"
errors: [message: 'Do not use Array index in keys']
]
|
[
{
"context": "\"\n data:\n email: email\n passwd: passwd\n success: (response) =>\n # Save the s",
"end": 1984,
"score": 0.8274534344673157,
"start": 1978,
"tag": "PASSWORD",
"value": "passwd"
},
{
"context": " name: name\n email: email\n passwd: passwd\n success: (data) =>\n trackEvent \"User",
"end": 4053,
"score": 0.9816839694976807,
"start": 4047,
"tag": "PASSWORD",
"value": "passwd"
}
] | src/coffee/user/account.coffee | agiza/mondrian | 226 | ###
The logged-in account
strings
email: user's email address
session_token: secret token used to verify their logged-in session
lists
services: which services they have access to
default:
'local'
possibly also:
'dropbox'
(more to come)
active: if they should get full account features
subscribed: if they actually have an active card on file
###
ui.account =
email: ""
session_token: ""
services: ['local']
valueOf: -> @email or "anon"
uiAnonymous: ->
# Hide and disable things not available to anonymous users.
services.dropbox.tease().disable()
ui.menu.items.shareAsLink.enable()
ui.menu.items.downloadSVG.enable()
ui.menu.menus.login.show()
ui.menu.menus.register.show()
uiLoggedIn: ->
services.dropbox.tease().enable()
ui.menu.items.shareAsLink.enable()
ui.menu.items.downloadSVG.enable()
ui.menu.menus.login.groupHide()
ui.menu.menus.account.text(@email).groupShow()
checkSession: ->
# See if the user is logged in. If so, set up the UI to reflect that.
@session_token = localStorage.getItem("session_token")
# TODO Hackish. Why is this here?
if @session_token
$.ajax(
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/persist-session"
type: "POST"
dataType: "json"
data:
session_token: @session_token
success: (response) =>
if response.anon?
@uiAnonymous()
else
@processLogin response
trackEvent "User", "Persist session"
error: =>
@uiAnonymous()
)
else
@uiAnonymous()
login: (email, passwd) ->
$("#login-mg input").each ->
$(@).disable()
$.ajax(
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/login"
type: "POST"
dataType: "json"
data:
email: email
passwd: passwd
success: (response) =>
# Save the session_token for later <3
@processLogin response
if response.trial_remaining? > 0
ui.menu.menus.account.openDropdown()
$("#login-mg input").each ->
$(@).enable()
# Track to GA
trackEvent "User", "Login"
error: (data) =>
data = JSON.parse(data.responseText)
$("#submit-login").error(data.error)
trackEvent "User", "Login error", data.error
complete: ->
$("#login-mg input").each ->
$(@).enable()
)
processLogin: (response) ->
$.extend(@, response)
# Store the session token locally
localStorage.setItem("session_token", @session_token)
ui.menu.menus.login.groupHide()
ui.menu.menus.register.groupHide()
ui.menu.menus.account.show().text(@email)
ui.menu.closeAllDropdowns()
@uiLoggedIn()
#ui.file.getNewestVersion()
if response.services?
for s in response.services
services[s].activate()
else
# Advertise all the non-default services.
# For now it's just Dropbox.
services.dropbox.tease()
logout: ->
$.ajax
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/logout"
type: "POST"
dataType: "json"
data:
session_token: @session_token
success: (response) =>
@session_token = undefined
localStorage.removeItem("session_token")
# Track to GA
trackEvent "User", "Logout"
@uiAnonymous()
ui.menu.menus.account.groupHide()
ui.menu.menus.login.groupShow()
ui.menu.menus.register.groupShow()
checkServices: ->
$.getJSON "#{SETTINGS.MEOWSET.ENDPOINT}/user/check-services",
{ session_token: @session_token },
(data) ->
if data.dropbox
services.dropbox.activate()
create: (name, email, passwd) ->
$.ajax
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/register"
type: "POST"
dataType: "json"
data:
name: name
email: email
passwd: passwd
success: (data) =>
trackEvent "User", "Create", "(#{name} , #{email})"
@login email, passwd
ui.menu.closeAllDropdowns()
error: (data) =>
data = JSON.parse(data.responseText)
$("#submit-registration").error(data.error)
setup.push ->
ui.account.checkSession()
ui.refreshUtilities() # Hackish spot for this
| 144025 | ###
The logged-in account
strings
email: user's email address
session_token: secret token used to verify their logged-in session
lists
services: which services they have access to
default:
'local'
possibly also:
'dropbox'
(more to come)
active: if they should get full account features
subscribed: if they actually have an active card on file
###
ui.account =
email: ""
session_token: ""
services: ['local']
valueOf: -> @email or "anon"
uiAnonymous: ->
# Hide and disable things not available to anonymous users.
services.dropbox.tease().disable()
ui.menu.items.shareAsLink.enable()
ui.menu.items.downloadSVG.enable()
ui.menu.menus.login.show()
ui.menu.menus.register.show()
uiLoggedIn: ->
services.dropbox.tease().enable()
ui.menu.items.shareAsLink.enable()
ui.menu.items.downloadSVG.enable()
ui.menu.menus.login.groupHide()
ui.menu.menus.account.text(@email).groupShow()
checkSession: ->
# See if the user is logged in. If so, set up the UI to reflect that.
@session_token = localStorage.getItem("session_token")
# TODO Hackish. Why is this here?
if @session_token
$.ajax(
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/persist-session"
type: "POST"
dataType: "json"
data:
session_token: @session_token
success: (response) =>
if response.anon?
@uiAnonymous()
else
@processLogin response
trackEvent "User", "Persist session"
error: =>
@uiAnonymous()
)
else
@uiAnonymous()
login: (email, passwd) ->
$("#login-mg input").each ->
$(@).disable()
$.ajax(
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/login"
type: "POST"
dataType: "json"
data:
email: email
passwd: <PASSWORD>
success: (response) =>
# Save the session_token for later <3
@processLogin response
if response.trial_remaining? > 0
ui.menu.menus.account.openDropdown()
$("#login-mg input").each ->
$(@).enable()
# Track to GA
trackEvent "User", "Login"
error: (data) =>
data = JSON.parse(data.responseText)
$("#submit-login").error(data.error)
trackEvent "User", "Login error", data.error
complete: ->
$("#login-mg input").each ->
$(@).enable()
)
processLogin: (response) ->
$.extend(@, response)
# Store the session token locally
localStorage.setItem("session_token", @session_token)
ui.menu.menus.login.groupHide()
ui.menu.menus.register.groupHide()
ui.menu.menus.account.show().text(@email)
ui.menu.closeAllDropdowns()
@uiLoggedIn()
#ui.file.getNewestVersion()
if response.services?
for s in response.services
services[s].activate()
else
# Advertise all the non-default services.
# For now it's just Dropbox.
services.dropbox.tease()
logout: ->
$.ajax
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/logout"
type: "POST"
dataType: "json"
data:
session_token: @session_token
success: (response) =>
@session_token = undefined
localStorage.removeItem("session_token")
# Track to GA
trackEvent "User", "Logout"
@uiAnonymous()
ui.menu.menus.account.groupHide()
ui.menu.menus.login.groupShow()
ui.menu.menus.register.groupShow()
checkServices: ->
$.getJSON "#{SETTINGS.MEOWSET.ENDPOINT}/user/check-services",
{ session_token: @session_token },
(data) ->
if data.dropbox
services.dropbox.activate()
create: (name, email, passwd) ->
$.ajax
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/register"
type: "POST"
dataType: "json"
data:
name: name
email: email
passwd: <PASSWORD>
success: (data) =>
trackEvent "User", "Create", "(#{name} , #{email})"
@login email, passwd
ui.menu.closeAllDropdowns()
error: (data) =>
data = JSON.parse(data.responseText)
$("#submit-registration").error(data.error)
setup.push ->
ui.account.checkSession()
ui.refreshUtilities() # Hackish spot for this
| true | ###
The logged-in account
strings
email: user's email address
session_token: secret token used to verify their logged-in session
lists
services: which services they have access to
default:
'local'
possibly also:
'dropbox'
(more to come)
active: if they should get full account features
subscribed: if they actually have an active card on file
###
ui.account =
email: ""
session_token: ""
services: ['local']
valueOf: -> @email or "anon"
uiAnonymous: ->
# Hide and disable things not available to anonymous users.
services.dropbox.tease().disable()
ui.menu.items.shareAsLink.enable()
ui.menu.items.downloadSVG.enable()
ui.menu.menus.login.show()
ui.menu.menus.register.show()
uiLoggedIn: ->
services.dropbox.tease().enable()
ui.menu.items.shareAsLink.enable()
ui.menu.items.downloadSVG.enable()
ui.menu.menus.login.groupHide()
ui.menu.menus.account.text(@email).groupShow()
checkSession: ->
# See if the user is logged in. If so, set up the UI to reflect that.
@session_token = localStorage.getItem("session_token")
# TODO Hackish. Why is this here?
if @session_token
$.ajax(
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/persist-session"
type: "POST"
dataType: "json"
data:
session_token: @session_token
success: (response) =>
if response.anon?
@uiAnonymous()
else
@processLogin response
trackEvent "User", "Persist session"
error: =>
@uiAnonymous()
)
else
@uiAnonymous()
login: (email, passwd) ->
$("#login-mg input").each ->
$(@).disable()
$.ajax(
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/login"
type: "POST"
dataType: "json"
data:
email: email
passwd: PI:PASSWORD:<PASSWORD>END_PI
success: (response) =>
# Save the session_token for later <3
@processLogin response
if response.trial_remaining? > 0
ui.menu.menus.account.openDropdown()
$("#login-mg input").each ->
$(@).enable()
# Track to GA
trackEvent "User", "Login"
error: (data) =>
data = JSON.parse(data.responseText)
$("#submit-login").error(data.error)
trackEvent "User", "Login error", data.error
complete: ->
$("#login-mg input").each ->
$(@).enable()
)
processLogin: (response) ->
$.extend(@, response)
# Store the session token locally
localStorage.setItem("session_token", @session_token)
ui.menu.menus.login.groupHide()
ui.menu.menus.register.groupHide()
ui.menu.menus.account.show().text(@email)
ui.menu.closeAllDropdowns()
@uiLoggedIn()
#ui.file.getNewestVersion()
if response.services?
for s in response.services
services[s].activate()
else
# Advertise all the non-default services.
# For now it's just Dropbox.
services.dropbox.tease()
logout: ->
$.ajax
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/logout"
type: "POST"
dataType: "json"
data:
session_token: @session_token
success: (response) =>
@session_token = undefined
localStorage.removeItem("session_token")
# Track to GA
trackEvent "User", "Logout"
@uiAnonymous()
ui.menu.menus.account.groupHide()
ui.menu.menus.login.groupShow()
ui.menu.menus.register.groupShow()
checkServices: ->
$.getJSON "#{SETTINGS.MEOWSET.ENDPOINT}/user/check-services",
{ session_token: @session_token },
(data) ->
if data.dropbox
services.dropbox.activate()
create: (name, email, passwd) ->
$.ajax
url: "#{SETTINGS.MEOWSET.ENDPOINT}/user/register"
type: "POST"
dataType: "json"
data:
name: name
email: email
passwd: PI:PASSWORD:<PASSWORD>END_PI
success: (data) =>
trackEvent "User", "Create", "(#{name} , #{email})"
@login email, passwd
ui.menu.closeAllDropdowns()
error: (data) =>
data = JSON.parse(data.responseText)
$("#submit-registration").error(data.error)
setup.push ->
ui.account.checkSession()
ui.refreshUtilities() # Hackish spot for this
|
[
{
"context": "###\nCopyright (c) 2014, Groupon\nAll rights reserved.\n\nRedistribution and use in s",
"end": 31,
"score": 0.9859025478363037,
"start": 24,
"tag": "NAME",
"value": "Groupon"
}
] | src/client/gscreen.coffee | Mefiso/greenscreen | 729 | ###
Copyright (c) 2014, Groupon
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.###
window.GScreen = angular.module "GScreen", ["ng", "ngResource", "ngRoute"]
require "./routes"
require "./controllers/alert-form"
require "./controllers/main"
require "./controllers/channels"
require "./controllers/channel-form"
require "./controllers/chromecasts"
require "./controllers/chromecast-form"
require "./controllers/receiver"
require "./controllers/screen"
require "./controllers/takeover-form"
require "./directives/flash-container"
require "./directives/real-link"
require "./resources/alert"
require "./resources/channel"
require "./resources/chromecast"
require "./resources/takeover"
require "./services/cast-away"
require "./services/flash"
require "./services/local-device"
require "./services/sockets"
| 55230 | ###
Copyright (c) 2014, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.###
window.GScreen = angular.module "GScreen", ["ng", "ngResource", "ngRoute"]
require "./routes"
require "./controllers/alert-form"
require "./controllers/main"
require "./controllers/channels"
require "./controllers/channel-form"
require "./controllers/chromecasts"
require "./controllers/chromecast-form"
require "./controllers/receiver"
require "./controllers/screen"
require "./controllers/takeover-form"
require "./directives/flash-container"
require "./directives/real-link"
require "./resources/alert"
require "./resources/channel"
require "./resources/chromecast"
require "./resources/takeover"
require "./services/cast-away"
require "./services/flash"
require "./services/local-device"
require "./services/sockets"
| true | ###
Copyright (c) 2014, PI:NAME:<NAME>END_PI
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.###
window.GScreen = angular.module "GScreen", ["ng", "ngResource", "ngRoute"]
require "./routes"
require "./controllers/alert-form"
require "./controllers/main"
require "./controllers/channels"
require "./controllers/channel-form"
require "./controllers/chromecasts"
require "./controllers/chromecast-form"
require "./controllers/receiver"
require "./controllers/screen"
require "./controllers/takeover-form"
require "./directives/flash-container"
require "./directives/real-link"
require "./resources/alert"
require "./resources/channel"
require "./resources/chromecast"
require "./resources/takeover"
require "./services/cast-away"
require "./services/flash"
require "./services/local-device"
require "./services/sockets"
|
[
{
"context": "sages = []\n\ntemplate.users = []\n\ndefaultNames = [\"Killer Whale\", \"Giraffe\", \"Rabbit\", \"Polar Bear\", \"Cheetah\", \"",
"end": 281,
"score": 0.9995099902153015,
"start": 269,
"tag": "NAME",
"value": "Killer Whale"
},
{
"context": "late.users = []\n\ndefaultNames = [\"Killer Whale\", \"Giraffe\", \"Rabbit\", \"Polar Bear\", \"Cheetah\", \"Snow Leopar",
"end": 292,
"score": 0.9775581359863281,
"start": 285,
"tag": "NAME",
"value": "Giraffe"
},
{
"context": " \"Wolf\", \"Dolphin\", \"Tiger\", \"Cat\", \"Shinigami\", \"Korra\", \"Aang\", \"Izumi\", \"Katara\"]\n\ntemplate.userName =",
"end": 501,
"score": 0.8462864756584167,
"start": 496,
"tag": "NAME",
"value": "Korra"
},
{
"context": "\", \"Cat\", \"Shinigami\", \"Korra\", \"Aang\", \"Izumi\", \"Katara\"]\n\ntemplate.userName = prompt \"Enter a nick:",
"end": 523,
"score": 0.6551021933555603,
"start": 522,
"tag": "NAME",
"value": "K"
}
] | public/polymer.coffee | sopu/kai-jinora | 0 | template = document.querySelector('#template')
socket = io.connect document.location.origin,
reconnectionDelay: 200
reconnectionDelayMax: 1000
template.announcement = ""
template.status = 'connected'
template.messages = []
template.users = []
defaultNames = ["Killer Whale", "Giraffe", "Rabbit", "Polar Bear", "Cheetah", "Snow Leopard", "Eagle", "Fox", "Panda", "Salamander", "Jackal", "Elephant ", "Lion", "Horse", "Monkey", "Penguin ", "Wolf", "Dolphin", "Tiger", "Cat", "Shinigami", "Korra", "Aang", "Izumi", "Katara"]
template.userName = prompt "Enter a nick:"
template.avatar = "http://eightbitavatar.herokuapp.com/?id=" + escape(template.userName) + "&s=male&size=80"
sendMessage = (msg)->
socket.emit 'chat:msg',
message: msg
nick: template.userName
avatar: template.avatar
showMessage = (msg)->
template.messages.push msg
template.async ()->
chatDiv = document.querySelector('.chat-list');
chatDiv.scrollTop = chatDiv.scrollHeight;
template.sendMyMessage = () ->
$input = $("#input")
if socket.socket.connected == false
alert 'Please wait while we reconnect'
else if $input.val().trim() != ''
sendMessage $input.val()
$input.val ''
template.checkKey = (e) ->
if e.which == 13
template.sendMyMessage()
e.preventDefault()
socket.on 'disconnect', ->
template.status = 'disconnected'
socket.on 'reconnect', ->
template.status = 'connected'
socket.on 'connect', ->
template.status = 'connected'
socket.emit 'chat:demand'
socket.emit 'announcement:demand'
socket.emit 'presence:demand'
socket.on 'chat:msg', (msg)->
defaultName = defaultNames[(Math.random() * defaultNames.length) >>> 0]
if msg.invalidNick
setTimeout () ->
msg.nick = template.userName = prompt('Sorry! You can\'t have this username.\nPlease enter another username', defaultName) or defaultName
sendMessage msg.message
, 1
else
showMessage msg
socket.on 'announcement:data', (data)->
if data['text'].length > 2
$("#announcement-text")[0].innerHTML = data['text']
$("#announcement-area")[0].style.display = "block"
else
$("#announcement-area")[0].style.display = "none"
$("#chat-heading")[0].innerHTML = data['heading']
document.title = data['pageTitle']
socket.on 'chat:log', (log)->
log.map showMessage
socket.on 'presence:list', (list)->
template.users = list
# Set focus on the input element. Doesn't seem to work without using setTimeout.
setTimeout ->
$("#input").focus()
,1
| 24301 | template = document.querySelector('#template')
socket = io.connect document.location.origin,
reconnectionDelay: 200
reconnectionDelayMax: 1000
template.announcement = ""
template.status = 'connected'
template.messages = []
template.users = []
defaultNames = ["<NAME>", "<NAME>", "Rabbit", "Polar Bear", "Cheetah", "Snow Leopard", "Eagle", "Fox", "Panda", "Salamander", "Jackal", "Elephant ", "Lion", "Horse", "Monkey", "Penguin ", "Wolf", "Dolphin", "Tiger", "Cat", "Shinigami", "<NAME>", "Aang", "Izumi", "<NAME>atara"]
template.userName = prompt "Enter a nick:"
template.avatar = "http://eightbitavatar.herokuapp.com/?id=" + escape(template.userName) + "&s=male&size=80"
sendMessage = (msg)->
socket.emit 'chat:msg',
message: msg
nick: template.userName
avatar: template.avatar
showMessage = (msg)->
template.messages.push msg
template.async ()->
chatDiv = document.querySelector('.chat-list');
chatDiv.scrollTop = chatDiv.scrollHeight;
template.sendMyMessage = () ->
$input = $("#input")
if socket.socket.connected == false
alert 'Please wait while we reconnect'
else if $input.val().trim() != ''
sendMessage $input.val()
$input.val ''
template.checkKey = (e) ->
if e.which == 13
template.sendMyMessage()
e.preventDefault()
socket.on 'disconnect', ->
template.status = 'disconnected'
socket.on 'reconnect', ->
template.status = 'connected'
socket.on 'connect', ->
template.status = 'connected'
socket.emit 'chat:demand'
socket.emit 'announcement:demand'
socket.emit 'presence:demand'
socket.on 'chat:msg', (msg)->
defaultName = defaultNames[(Math.random() * defaultNames.length) >>> 0]
if msg.invalidNick
setTimeout () ->
msg.nick = template.userName = prompt('Sorry! You can\'t have this username.\nPlease enter another username', defaultName) or defaultName
sendMessage msg.message
, 1
else
showMessage msg
socket.on 'announcement:data', (data)->
if data['text'].length > 2
$("#announcement-text")[0].innerHTML = data['text']
$("#announcement-area")[0].style.display = "block"
else
$("#announcement-area")[0].style.display = "none"
$("#chat-heading")[0].innerHTML = data['heading']
document.title = data['pageTitle']
socket.on 'chat:log', (log)->
log.map showMessage
socket.on 'presence:list', (list)->
template.users = list
# Set focus on the input element. Doesn't seem to work without using setTimeout.
setTimeout ->
$("#input").focus()
,1
| true | template = document.querySelector('#template')
socket = io.connect document.location.origin,
reconnectionDelay: 200
reconnectionDelayMax: 1000
template.announcement = ""
template.status = 'connected'
template.messages = []
template.users = []
defaultNames = ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "Rabbit", "Polar Bear", "Cheetah", "Snow Leopard", "Eagle", "Fox", "Panda", "Salamander", "Jackal", "Elephant ", "Lion", "Horse", "Monkey", "Penguin ", "Wolf", "Dolphin", "Tiger", "Cat", "Shinigami", "PI:NAME:<NAME>END_PI", "Aang", "Izumi", "PI:NAME:<NAME>END_PIatara"]
template.userName = prompt "Enter a nick:"
template.avatar = "http://eightbitavatar.herokuapp.com/?id=" + escape(template.userName) + "&s=male&size=80"
sendMessage = (msg)->
socket.emit 'chat:msg',
message: msg
nick: template.userName
avatar: template.avatar
showMessage = (msg)->
template.messages.push msg
template.async ()->
chatDiv = document.querySelector('.chat-list');
chatDiv.scrollTop = chatDiv.scrollHeight;
template.sendMyMessage = () ->
$input = $("#input")
if socket.socket.connected == false
alert 'Please wait while we reconnect'
else if $input.val().trim() != ''
sendMessage $input.val()
$input.val ''
template.checkKey = (e) ->
if e.which == 13
template.sendMyMessage()
e.preventDefault()
socket.on 'disconnect', ->
template.status = 'disconnected'
socket.on 'reconnect', ->
template.status = 'connected'
socket.on 'connect', ->
template.status = 'connected'
socket.emit 'chat:demand'
socket.emit 'announcement:demand'
socket.emit 'presence:demand'
socket.on 'chat:msg', (msg)->
defaultName = defaultNames[(Math.random() * defaultNames.length) >>> 0]
if msg.invalidNick
setTimeout () ->
msg.nick = template.userName = prompt('Sorry! You can\'t have this username.\nPlease enter another username', defaultName) or defaultName
sendMessage msg.message
, 1
else
showMessage msg
socket.on 'announcement:data', (data)->
if data['text'].length > 2
$("#announcement-text")[0].innerHTML = data['text']
$("#announcement-area")[0].style.display = "block"
else
$("#announcement-area")[0].style.display = "none"
$("#chat-heading")[0].innerHTML = data['heading']
document.title = data['pageTitle']
socket.on 'chat:log', (log)->
log.map showMessage
socket.on 'presence:list', (list)->
template.users = list
# Set focus on the input element. Doesn't seem to work without using setTimeout.
setTimeout ->
$("#input").focus()
,1
|
[
{
"context": "api key for replygif.net, defaults to public key \"39YAprx5Yi\"\n#\n# Commands:\n# hubot replygif <tag> - Embed a",
"end": 212,
"score": 0.9994430541992188,
"start": 202,
"tag": "KEY",
"value": "39YAprx5Yi"
},
{
"context": "orthand for the 'replygif' command\n#\n# Author:\n# altschuler (previous non-api version by sumeetjain, meatball",
"end": 583,
"score": 0.9997187256813049,
"start": 573,
"tag": "USERNAME",
"value": "altschuler"
},
{
"context": "uthor:\n# altschuler (previous non-api version by sumeetjain, meatballhat)\n\napiKey = process.env.HUBOT_REPLYGI",
"end": 623,
"score": 0.9996685981750488,
"start": 613,
"tag": "USERNAME",
"value": "sumeetjain"
},
{
"context": "ltschuler (previous non-api version by sumeetjain, meatballhat)\n\napiKey = process.env.HUBOT_REPLYGIF_API_KEY or ",
"end": 636,
"score": 0.9976286888122559,
"start": 625,
"tag": "USERNAME",
"value": "meatballhat"
},
{
"context": "\n\napiKey = process.env.HUBOT_REPLYGIF_API_KEY or \"39YAprx5Yi\"\n\napiUrl = \"http://replygif.net/api/gifs?api-key=",
"end": 697,
"score": 0.9994155168533325,
"start": 687,
"tag": "KEY",
"value": "39YAprx5Yi"
}
] | src/scripts/replygif.coffee | contolini/hubot-scripts | 1,450 | # Description:
# Show ReplyGifs based on tags. See http://replygif.net.
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_REPLYGIF_API_KEY: the api key for replygif.net, defaults to public key "39YAprx5Yi"
#
# Commands:
# hubot replygif <tag> - Embed a random ReplyGif with the given tag.
# hubot replygif me <tag> - Same as `hubot replygif <tag>`.
# hubot replygif id <id> - Embed the ReplyGif with the given id
# hubot replygif me id <id> - Same as `hubot replygif id <id>`.
#
# Notes:
# Use 'rg' as shorthand for the 'replygif' command
#
# Author:
# altschuler (previous non-api version by sumeetjain, meatballhat)
apiKey = process.env.HUBOT_REPLYGIF_API_KEY or "39YAprx5Yi"
apiUrl = "http://replygif.net/api/gifs?api-key=#{apiKey}"
module.exports = (robot) ->
apiCall = (msg, failMsg, query) ->
robot.http(apiUrl + query).get() (err, res, body) ->
try
gifs = JSON.parse body
if not gifs? or not gifs.length
msg.send failMsg
else
msg.send (msg.random gifs).file
robot.hear /.*replygif\.net\/(i\/)?(\d+)(?!.*\.gif).*/i, (msg) ->
id = msg.match[2]
msg.send "http://replygif.net/i/#{id}.gif"
robot.respond /(replygif|rg)( me)? ([\w|\ ]+)/i, (msg) ->
tag = msg.match[3]
if tag is "id" then return # hubot's looking for an id
apiCall msg, "I don't know that reaction", "&tag=#{tag}"
robot.respond /(replygif|rg)( me)? id (\d+)/i, (msg) ->
id = msg.match[3]
apiCall msg, "I don't any gifs with that id", "&id=#{id}"
| 150548 | # Description:
# Show ReplyGifs based on tags. See http://replygif.net.
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_REPLYGIF_API_KEY: the api key for replygif.net, defaults to public key "<KEY>"
#
# Commands:
# hubot replygif <tag> - Embed a random ReplyGif with the given tag.
# hubot replygif me <tag> - Same as `hubot replygif <tag>`.
# hubot replygif id <id> - Embed the ReplyGif with the given id
# hubot replygif me id <id> - Same as `hubot replygif id <id>`.
#
# Notes:
# Use 'rg' as shorthand for the 'replygif' command
#
# Author:
# altschuler (previous non-api version by sumeetjain, meatballhat)
apiKey = process.env.HUBOT_REPLYGIF_API_KEY or "<KEY>"
apiUrl = "http://replygif.net/api/gifs?api-key=#{apiKey}"
module.exports = (robot) ->
apiCall = (msg, failMsg, query) ->
robot.http(apiUrl + query).get() (err, res, body) ->
try
gifs = JSON.parse body
if not gifs? or not gifs.length
msg.send failMsg
else
msg.send (msg.random gifs).file
robot.hear /.*replygif\.net\/(i\/)?(\d+)(?!.*\.gif).*/i, (msg) ->
id = msg.match[2]
msg.send "http://replygif.net/i/#{id}.gif"
robot.respond /(replygif|rg)( me)? ([\w|\ ]+)/i, (msg) ->
tag = msg.match[3]
if tag is "id" then return # hubot's looking for an id
apiCall msg, "I don't know that reaction", "&tag=#{tag}"
robot.respond /(replygif|rg)( me)? id (\d+)/i, (msg) ->
id = msg.match[3]
apiCall msg, "I don't any gifs with that id", "&id=#{id}"
| true | # Description:
# Show ReplyGifs based on tags. See http://replygif.net.
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_REPLYGIF_API_KEY: the api key for replygif.net, defaults to public key "PI:KEY:<KEY>END_PI"
#
# Commands:
# hubot replygif <tag> - Embed a random ReplyGif with the given tag.
# hubot replygif me <tag> - Same as `hubot replygif <tag>`.
# hubot replygif id <id> - Embed the ReplyGif with the given id
# hubot replygif me id <id> - Same as `hubot replygif id <id>`.
#
# Notes:
# Use 'rg' as shorthand for the 'replygif' command
#
# Author:
# altschuler (previous non-api version by sumeetjain, meatballhat)
apiKey = process.env.HUBOT_REPLYGIF_API_KEY or "PI:KEY:<KEY>END_PI"
apiUrl = "http://replygif.net/api/gifs?api-key=#{apiKey}"
module.exports = (robot) ->
apiCall = (msg, failMsg, query) ->
robot.http(apiUrl + query).get() (err, res, body) ->
try
gifs = JSON.parse body
if not gifs? or not gifs.length
msg.send failMsg
else
msg.send (msg.random gifs).file
robot.hear /.*replygif\.net\/(i\/)?(\d+)(?!.*\.gif).*/i, (msg) ->
id = msg.match[2]
msg.send "http://replygif.net/i/#{id}.gif"
robot.respond /(replygif|rg)( me)? ([\w|\ ]+)/i, (msg) ->
tag = msg.match[3]
if tag is "id" then return # hubot's looking for an id
apiCall msg, "I don't know that reaction", "&tag=#{tag}"
robot.respond /(replygif|rg)( me)? id (\d+)/i, (msg) ->
id = msg.match[3]
apiCall msg, "I don't any gifs with that id", "&id=#{id}"
|
[
{
"context": "io.com\n\nCopyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>\n\nLicensed under the Apache License, Version 2.0 ",
"end": 194,
"score": 0.9999221563339233,
"start": 178,
"tag": "EMAIL",
"value": "info@chaibio.com"
}
] | frontend/javascripts/app/services/test_in_progress_helper.coffee | MakerButt/chaipcr | 1 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.service 'TestInProgressHelper', [
'AmplificationChartHelper'
'$rootScope'
'Experiment'
'$q'
'Status'
(AmplificationChartHelper, $rootScope, Experiment, $q, Status) ->
directivesCount = 0
status = null
experiment = null
holding = false
experimentQues = {}
isFetchingExp = false
$rootScope.$on 'status:data:updated', (e, data) =>
status = data
@set_holding status, experiment
@is_holding = -> holding
@set_holding = (data, experiment) ->
return false if !experiment
return false if !experiment.protocol
return false if !experiment.protocol.stages
return false if !data
return false if !data.experiment_controller
stages = experiment.protocol.stages
steps = stages[stages.length-1].stage.steps
# max_cycle = parseInt(AmplificationChartHelper.getMaxExperimentCycle(experiment))
duration = parseInt(steps[steps.length-1].step.delta_duration_s)
# current_stage = parseInt(data.experiment_controller.experiment.stage.number)
# current_step = parseInt(data.experiment_controller.experiment.step.number)
# current_cycle = parseInt(data.experiment_controller.experiment.stage.cycle)
state = data.experiment_controller.machine.state
holding = state is 'complete' and duration is 0
# console.log holding
holding
@timeRemaining = (data) ->
return 0 if !data
return 0 if !data.experiment_controller
if data.experiment_controller.machine.state is 'running'
exp = data.experiment_controller.experiment
time = (exp.estimated_duration*1+exp.paused_duration*1)-exp.run_duration*1
if time < 0 then time = 0
time
else
0
@timePercentage = (data) ->
return 0 if !data
return 0 if !data.experiment_controller
return 0 if data.experiment_controller.machine.state is 'idle'
timeRemaining = @timeRemaining data
exp = data.experiment_controller.experiment
time = exp.run_duration/(exp.estimated_duration*1+exp.paused_duration*1)
if time < 0 then time = 0
if time > 1 then time = 1
return time*100
return
] | 108515 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.service 'TestInProgressHelper', [
'AmplificationChartHelper'
'$rootScope'
'Experiment'
'$q'
'Status'
(AmplificationChartHelper, $rootScope, Experiment, $q, Status) ->
directivesCount = 0
status = null
experiment = null
holding = false
experimentQues = {}
isFetchingExp = false
$rootScope.$on 'status:data:updated', (e, data) =>
status = data
@set_holding status, experiment
@is_holding = -> holding
@set_holding = (data, experiment) ->
return false if !experiment
return false if !experiment.protocol
return false if !experiment.protocol.stages
return false if !data
return false if !data.experiment_controller
stages = experiment.protocol.stages
steps = stages[stages.length-1].stage.steps
# max_cycle = parseInt(AmplificationChartHelper.getMaxExperimentCycle(experiment))
duration = parseInt(steps[steps.length-1].step.delta_duration_s)
# current_stage = parseInt(data.experiment_controller.experiment.stage.number)
# current_step = parseInt(data.experiment_controller.experiment.step.number)
# current_cycle = parseInt(data.experiment_controller.experiment.stage.cycle)
state = data.experiment_controller.machine.state
holding = state is 'complete' and duration is 0
# console.log holding
holding
@timeRemaining = (data) ->
return 0 if !data
return 0 if !data.experiment_controller
if data.experiment_controller.machine.state is 'running'
exp = data.experiment_controller.experiment
time = (exp.estimated_duration*1+exp.paused_duration*1)-exp.run_duration*1
if time < 0 then time = 0
time
else
0
@timePercentage = (data) ->
return 0 if !data
return 0 if !data.experiment_controller
return 0 if data.experiment_controller.machine.state is 'idle'
timeRemaining = @timeRemaining data
exp = data.experiment_controller.experiment
time = exp.run_duration/(exp.estimated_duration*1+exp.paused_duration*1)
if time < 0 then time = 0
if time > 1 then time = 1
return time*100
return
] | true | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.service 'TestInProgressHelper', [
'AmplificationChartHelper'
'$rootScope'
'Experiment'
'$q'
'Status'
(AmplificationChartHelper, $rootScope, Experiment, $q, Status) ->
directivesCount = 0
status = null
experiment = null
holding = false
experimentQues = {}
isFetchingExp = false
$rootScope.$on 'status:data:updated', (e, data) =>
status = data
@set_holding status, experiment
@is_holding = -> holding
@set_holding = (data, experiment) ->
return false if !experiment
return false if !experiment.protocol
return false if !experiment.protocol.stages
return false if !data
return false if !data.experiment_controller
stages = experiment.protocol.stages
steps = stages[stages.length-1].stage.steps
# max_cycle = parseInt(AmplificationChartHelper.getMaxExperimentCycle(experiment))
duration = parseInt(steps[steps.length-1].step.delta_duration_s)
# current_stage = parseInt(data.experiment_controller.experiment.stage.number)
# current_step = parseInt(data.experiment_controller.experiment.step.number)
# current_cycle = parseInt(data.experiment_controller.experiment.stage.cycle)
state = data.experiment_controller.machine.state
holding = state is 'complete' and duration is 0
# console.log holding
holding
@timeRemaining = (data) ->
return 0 if !data
return 0 if !data.experiment_controller
if data.experiment_controller.machine.state is 'running'
exp = data.experiment_controller.experiment
time = (exp.estimated_duration*1+exp.paused_duration*1)-exp.run_duration*1
if time < 0 then time = 0
time
else
0
@timePercentage = (data) ->
return 0 if !data
return 0 if !data.experiment_controller
return 0 if data.experiment_controller.machine.state is 'idle'
timeRemaining = @timeRemaining data
exp = data.experiment_controller.experiment
time = exp.run_duration/(exp.estimated_duration*1+exp.paused_duration*1)
if time < 0 then time = 0
if time > 1 then time = 1
return time*100
return
] |
[
{
"context": " and: \"và\"\n back: \"trở lại\",\n changePassword: \"Đổi mật khẩu\"\n choosePassword: \"Chọn một mật khẩu\"\n clickAgr",
"end": 127,
"score": 0.9991682767868042,
"start": 115,
"tag": "PASSWORD",
"value": "Đổi mật khẩu"
},
{
"context": "changePassword: \"Đổi mật khẩu\"\n choosePassword: \"Chọn một mật khẩu\"\n clickAgree: \"Bằng cách nhấn vào Đăng ký, bạn đ",
"end": 165,
"score": 0.9992122650146484,
"start": 148,
"tag": "PASSWORD",
"value": "Chọn một mật khẩu"
},
{
"context": "reateAccount: \"Tạo Tài khoản\"\n currentPassword: \"Mật khẩu hiện tại\"\n dontHaveAnAccount: \"Chưa có tài khoản?\"\n emai",
"end": 323,
"score": 0.9991183876991272,
"start": 306,
"tag": "PASSWORD",
"value": "Mật khẩu hiện tại"
},
{
"context": "Email\"\n emailResetLink: \"Gửi\"\n forgotPassword: \"Quên mật khẩu?\"\n ifYouAlreadyHaveAnAccount: \"Nếu bạn đã có tài",
"end": 472,
"score": 0.9946049451828003,
"start": 459,
"tag": "PASSWORD",
"value": "Quên mật khẩu"
},
{
"context": "ccount: \"Nếu bạn đã có tài khoản\"\n newPassword: \"Mật khẩu mới\"\n newPasswordAgain: \"Mật khẩu mới (nhập lại)\"\n ",
"end": 558,
"score": 0.9989482760429382,
"start": 546,
"tag": "PASSWORD",
"value": "Mật khẩu mới"
},
{
"context": " newPassword: \"Mật khẩu mới\"\n newPasswordAgain: \"Mật khẩu mới (nhập lại)\"\n optional: \"Tùy chọn\"\n OR: \"Hoặc\"\n ",
"end": 593,
"score": 0.9843587875366211,
"start": 581,
"tag": "PASSWORD",
"value": "Mật khẩu mới"
},
{
"context": "\n optional: \"Tùy chọn\"\n OR: \"Hoặc\"\n password: \"Mật khẩu\"\n passwordAgain: \"Mật khẩu (nhập lại)\"\n privacy",
"end": 663,
"score": 0.9991956949234009,
"start": 655,
"tag": "PASSWORD",
"value": "Mật khẩu"
},
{
"context": ": \"Hoặc\"\n password: \"Mật khẩu\"\n passwordAgain: \"Mật khẩu (nhập lại)\"\n privacyPolicy: \"Chính sách bảo mật\"",
"end": 691,
"score": 0.9982075691223145,
"start": 683,
"tag": "PASSWORD",
"value": "Mật khẩu"
},
{
"context": "ch bảo mật\"\n remove: \"xóa\"\n resetYourPassword: \"Lấy lại mật khẩu\"\n setPassword: \"Thiết lập mật khẩu\"\n sign: \"Ký\"",
"end": 796,
"score": 0.9988712072372437,
"start": 780,
"tag": "PASSWORD",
"value": "Lấy lại mật khẩu"
},
{
"context": "tYourPassword: \"Lấy lại mật khẩu\"\n setPassword: \"Thiết lập mật khẩu\"\n sign: \"Ký\"\n signIn: \"Đăng nhập\"\n signin: \"đă",
"end": 832,
"score": 0.9992629885673523,
"start": 814,
"tag": "PASSWORD",
"value": "Thiết lập mật khẩu"
},
{
"context": "erms: \"Điều khoản sử dụng\"\n updateYourPassword: \"Cập nhật mật khẩu\"\n username: \"Tên đăng nhập\"\n usernameOrEmail: \"",
"end": 1089,
"score": 0.9991215467453003,
"start": 1072,
"tag": "PASSWORD",
"value": "Cập nhật mật khẩu"
},
{
"context": "ateYourPassword: \"Cập nhật mật khẩu\"\n username: \"Tên đăng nhập\"\n usernameOrEmail: \"Tên đăng nhập hoặc email\"\n ",
"end": 1117,
"score": 0.9817906022071838,
"start": 1104,
"tag": "USERNAME",
"value": "Tên đăng nhập"
},
{
"context": "d: \"Email đã được xác minh\"\n passwordChanged: \"Đã đổi mật khẩu\"\n passwordReset: \"Lất lại mật khẩu\"\n\n\n error:",
"end": 1309,
"score": 0.9987775087356567,
"start": 1294,
"tag": "PASSWORD",
"value": "Đã đổi mật khẩu"
},
{
"context": "ordChanged: \"Đã đổi mật khẩu\"\n passwordReset: \"Lất lại mật khẩu\"\n\n\n error:\n emailRequired: \"Email phải có.\"\n ",
"end": 1347,
"score": 0.9986979365348816,
"start": 1331,
"tag": "PASSWORD",
"value": "Lất lại mật khẩu"
},
{
"context": "il không phù hợp.\"\n \"Invalid login token\": \"Mã đăng nhập không đúng\"\n \"Login forbidden\": \"Đăng nhập bị cấm\"\n ",
"end": 2246,
"score": 0.7726976871490479,
"start": 2224,
"tag": "PASSWORD",
"value": "ã đăng nhập không đúng"
},
{
"context": "-- accounts-password\n \"Incorrect password\": \"Mật khẩu sai\"\n \"Invalid email\": \"Email sai\"\n \"Must b",
"end": 3287,
"score": 0.9962241649627686,
"start": 3275,
"tag": "PASSWORD",
"value": "Mật khẩu sai"
},
{
"context": "ng nhập hoặc email\"\n \"old password format\": \"định dạng mật khẩu cũ\"\n \"Password may not be empty\": \"mật khẩu khô",
"end": 3497,
"score": 0.9987480044364929,
"start": 3476,
"tag": "PASSWORD",
"value": "định dạng mật khẩu cũ"
},
{
"context": " \"User has no password set\": \"Người dùng chưa có mật khẩu\"\n \"User not found\": \"Không tìm thấy người ",
"end": 3799,
"score": 0.5447746515274048,
"start": 3794,
"tag": "PASSWORD",
"value": "ật kh"
}
] | t9n/vi.coffee | tnedich/meteor-accounts-t9n | 0 | #Language: Vietnamese
#Translators: olragon
vi =
add: "thêm"
and: "và"
back: "trở lại",
changePassword: "Đổi mật khẩu"
choosePassword: "Chọn một mật khẩu"
clickAgree: "Bằng cách nhấn vào Đăng ký, bạn đã đồng ý với"
configure: "Cấu hình"
createAccount: "Tạo Tài khoản"
currentPassword: "Mật khẩu hiện tại"
dontHaveAnAccount: "Chưa có tài khoản?"
email: "Email"
emailAddress: "Địa chỉ Email"
emailResetLink: "Gửi"
forgotPassword: "Quên mật khẩu?"
ifYouAlreadyHaveAnAccount: "Nếu bạn đã có tài khoản"
newPassword: "Mật khẩu mới"
newPasswordAgain: "Mật khẩu mới (nhập lại)"
optional: "Tùy chọn"
OR: "Hoặc"
password: "Mật khẩu"
passwordAgain: "Mật khẩu (nhập lại)"
privacyPolicy: "Chính sách bảo mật"
remove: "xóa"
resetYourPassword: "Lấy lại mật khẩu"
setPassword: "Thiết lập mật khẩu"
sign: "Ký"
signIn: "Đăng nhập"
signin: "đăng nhập"
signOut: "Đăng xuất"
signUp: "Đăng ký"
signupCode: "Mã đăng ký"
signUpWithYourEmailAddress: "Đăng ký với email của bạn"
terms: "Điều khoản sử dụng"
updateYourPassword: "Cập nhật mật khẩu"
username: "Tên đăng nhập"
usernameOrEmail: "Tên đăng nhập hoặc email"
with: "với"
info:
emailSent: "Email đã được gửi đi!"
emailVerified: "Email đã được xác minh"
passwordChanged: "Đã đổi mật khẩu"
passwordReset: "Lất lại mật khẩu"
error:
emailRequired: "Email phải có."
minChar: "Mật khẩu phải có ít nhất 7 ký tự."
pwdsDontMatch: "Mật khẩu không giống nhau"
pwOneDigit: "Mật khẩu phải có ít nhất 1 chữ số."
pwOneLetter: "Mật khẩu phải có 1 ký tự chữ."
signInRequired: "Phải đăng nhập."
signupCodeIncorrect: "Mã số đăng ký sai."
signupCodeRequired: "Phải có mã số đăng ký."
usernameIsEmail: "Tên đăng nhập không thể là địa chỉ email."
usernameRequired: "Phải có tên đăng nhập."
accounts:
#---- accounts-base
#@" + domain + " email required": "Bắt buộc dùng email @" + domain
"A login handler should return a result or undefined": "Bộ xử lý đăng nhập phải trả về một kết quả hoặc undefined"
"Email already exists.": "Email đã tồn tại."
"Email doesn't match the criteria.": "Email không phù hợp."
"Invalid login token": "Mã đăng nhập không đúng"
"Login forbidden": "Đăng nhập bị cấm"
#"Service " + options.service + " already configured": "Dịch vụ " + options.service + " đã được cấu hình"
"Service unknown": "Chưa biết Dịch vụ"
"Unrecognized options for login request": "Tùy chọn không được công nhận đối với yêu cầu đăng nhập"
"User validation failed": "Xác nhận người dùng thất bại"
"Username already exists.": "Tên đăng nhập đã tồn tại."
"You are not logged in.": "Bạn chưa đăng nhập."
"You've been logged out by the server. Please log in again.": "Bạn đã bị đăng xuất bởi máy chủ. Vui lòng đăng nhập lại."
"Your session has expired. Please log in again.": "Thời gian đăng nhập đã hết. Vui lòng đăng nhập lại."
#---- accounts-oauth
"No matching login attempt found": "Không tìm thấy đăng nhập phù hợp"
#---- accounts-password-client
"Password is old. Please reset your password.": "Mật khẩu đã cũ. Vui lòng lấy lại mật khẩu."
#---- accounts-password
"Incorrect password": "Mật khẩu sai"
"Invalid email": "Email sai"
"Must be logged in": "Phải đăng nhập"
"Need to set a username or email": "Phải điền tên đăng nhập hoặc email"
"old password format": "định dạng mật khẩu cũ"
"Password may not be empty": "mật khẩu không được để trống"
"Signups forbidden": "Đăng ký đã bị cấm"
"Token expired": "Hết phiên đăng nhập"
"Token has invalid email address": "Phiên đăng nhập chứa địa chỉ email sai"
"User has no password set": "Người dùng chưa có mật khẩu"
"User not found": "Không tìm thấy người dùng"
"Verify email link expired": "Đường dẫn xác nhận email đã hết hạn"
"Verify email link is for unknown address": "Đường dẫn xác nhận email là cho địa chỉ chưa xác định"
#---- match
"Match failed": "Không đúng"
#---- Misc...
"Unknown error": "Lỗi chưa được biết"
T9n.map "vi", vi | 16239 | #Language: Vietnamese
#Translators: olragon
vi =
add: "thêm"
and: "và"
back: "trở lại",
changePassword: "<PASSWORD>"
choosePassword: "<PASSWORD>"
clickAgree: "Bằng cách nhấn vào Đăng ký, bạn đã đồng ý với"
configure: "Cấu hình"
createAccount: "Tạo Tài khoản"
currentPassword: "<PASSWORD>"
dontHaveAnAccount: "Chưa có tài khoản?"
email: "Email"
emailAddress: "Địa chỉ Email"
emailResetLink: "Gửi"
forgotPassword: "<PASSWORD>?"
ifYouAlreadyHaveAnAccount: "Nếu bạn đã có tài khoản"
newPassword: "<PASSWORD>"
newPasswordAgain: "<PASSWORD> (nhập lại)"
optional: "Tùy chọn"
OR: "Hoặc"
password: "<PASSWORD>"
passwordAgain: "<PASSWORD> (nhập lại)"
privacyPolicy: "Chính sách bảo mật"
remove: "xóa"
resetYourPassword: "<PASSWORD>"
setPassword: "<PASSWORD>"
sign: "Ký"
signIn: "Đăng nhập"
signin: "đăng nhập"
signOut: "Đăng xuất"
signUp: "Đăng ký"
signupCode: "Mã đăng ký"
signUpWithYourEmailAddress: "Đăng ký với email của bạn"
terms: "Điều khoản sử dụng"
updateYourPassword: "<PASSWORD>"
username: "Tên đăng nhập"
usernameOrEmail: "Tên đăng nhập hoặc email"
with: "với"
info:
emailSent: "Email đã được gửi đi!"
emailVerified: "Email đã được xác minh"
passwordChanged: "<PASSWORD>"
passwordReset: "<PASSWORD>"
error:
emailRequired: "Email phải có."
minChar: "Mật khẩu phải có ít nhất 7 ký tự."
pwdsDontMatch: "Mật khẩu không giống nhau"
pwOneDigit: "Mật khẩu phải có ít nhất 1 chữ số."
pwOneLetter: "Mật khẩu phải có 1 ký tự chữ."
signInRequired: "Phải đăng nhập."
signupCodeIncorrect: "Mã số đăng ký sai."
signupCodeRequired: "Phải có mã số đăng ký."
usernameIsEmail: "Tên đăng nhập không thể là địa chỉ email."
usernameRequired: "Phải có tên đăng nhập."
accounts:
#---- accounts-base
#@" + domain + " email required": "Bắt buộc dùng email @" + domain
"A login handler should return a result or undefined": "Bộ xử lý đăng nhập phải trả về một kết quả hoặc undefined"
"Email already exists.": "Email đã tồn tại."
"Email doesn't match the criteria.": "Email không phù hợp."
"Invalid login token": "M<PASSWORD>"
"Login forbidden": "Đăng nhập bị cấm"
#"Service " + options.service + " already configured": "Dịch vụ " + options.service + " đã được cấu hình"
"Service unknown": "Chưa biết Dịch vụ"
"Unrecognized options for login request": "Tùy chọn không được công nhận đối với yêu cầu đăng nhập"
"User validation failed": "Xác nhận người dùng thất bại"
"Username already exists.": "Tên đăng nhập đã tồn tại."
"You are not logged in.": "Bạn chưa đăng nhập."
"You've been logged out by the server. Please log in again.": "Bạn đã bị đăng xuất bởi máy chủ. Vui lòng đăng nhập lại."
"Your session has expired. Please log in again.": "Thời gian đăng nhập đã hết. Vui lòng đăng nhập lại."
#---- accounts-oauth
"No matching login attempt found": "Không tìm thấy đăng nhập phù hợp"
#---- accounts-password-client
"Password is old. Please reset your password.": "Mật khẩu đã cũ. Vui lòng lấy lại mật khẩu."
#---- accounts-password
"Incorrect password": "<PASSWORD>"
"Invalid email": "Email sai"
"Must be logged in": "Phải đăng nhập"
"Need to set a username or email": "Phải điền tên đăng nhập hoặc email"
"old password format": "<PASSWORD>"
"Password may not be empty": "mật khẩu không được để trống"
"Signups forbidden": "Đăng ký đã bị cấm"
"Token expired": "Hết phiên đăng nhập"
"Token has invalid email address": "Phiên đăng nhập chứa địa chỉ email sai"
"User has no password set": "Người dùng chưa có m<PASSWORD>ẩu"
"User not found": "Không tìm thấy người dùng"
"Verify email link expired": "Đường dẫn xác nhận email đã hết hạn"
"Verify email link is for unknown address": "Đường dẫn xác nhận email là cho địa chỉ chưa xác định"
#---- match
"Match failed": "Không đúng"
#---- Misc...
"Unknown error": "Lỗi chưa được biết"
T9n.map "vi", vi | true | #Language: Vietnamese
#Translators: olragon
vi =
add: "thêm"
and: "và"
back: "trở lại",
changePassword: "PI:PASSWORD:<PASSWORD>END_PI"
choosePassword: "PI:PASSWORD:<PASSWORD>END_PI"
clickAgree: "Bằng cách nhấn vào Đăng ký, bạn đã đồng ý với"
configure: "Cấu hình"
createAccount: "Tạo Tài khoản"
currentPassword: "PI:PASSWORD:<PASSWORD>END_PI"
dontHaveAnAccount: "Chưa có tài khoản?"
email: "Email"
emailAddress: "Địa chỉ Email"
emailResetLink: "Gửi"
forgotPassword: "PI:PASSWORD:<PASSWORD>END_PI?"
ifYouAlreadyHaveAnAccount: "Nếu bạn đã có tài khoản"
newPassword: "PI:PASSWORD:<PASSWORD>END_PI"
newPasswordAgain: "PI:PASSWORD:<PASSWORD>END_PI (nhập lại)"
optional: "Tùy chọn"
OR: "Hoặc"
password: "PI:PASSWORD:<PASSWORD>END_PI"
passwordAgain: "PI:PASSWORD:<PASSWORD>END_PI (nhập lại)"
privacyPolicy: "Chính sách bảo mật"
remove: "xóa"
resetYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
setPassword: "PI:PASSWORD:<PASSWORD>END_PI"
sign: "Ký"
signIn: "Đăng nhập"
signin: "đăng nhập"
signOut: "Đăng xuất"
signUp: "Đăng ký"
signupCode: "Mã đăng ký"
signUpWithYourEmailAddress: "Đăng ký với email của bạn"
terms: "Điều khoản sử dụng"
updateYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
username: "Tên đăng nhập"
usernameOrEmail: "Tên đăng nhập hoặc email"
with: "với"
info:
emailSent: "Email đã được gửi đi!"
emailVerified: "Email đã được xác minh"
passwordChanged: "PI:PASSWORD:<PASSWORD>END_PI"
passwordReset: "PI:PASSWORD:<PASSWORD>END_PI"
error:
emailRequired: "Email phải có."
minChar: "Mật khẩu phải có ít nhất 7 ký tự."
pwdsDontMatch: "Mật khẩu không giống nhau"
pwOneDigit: "Mật khẩu phải có ít nhất 1 chữ số."
pwOneLetter: "Mật khẩu phải có 1 ký tự chữ."
signInRequired: "Phải đăng nhập."
signupCodeIncorrect: "Mã số đăng ký sai."
signupCodeRequired: "Phải có mã số đăng ký."
usernameIsEmail: "Tên đăng nhập không thể là địa chỉ email."
usernameRequired: "Phải có tên đăng nhập."
accounts:
#---- accounts-base
#@" + domain + " email required": "Bắt buộc dùng email @" + domain
"A login handler should return a result or undefined": "Bộ xử lý đăng nhập phải trả về một kết quả hoặc undefined"
"Email already exists.": "Email đã tồn tại."
"Email doesn't match the criteria.": "Email không phù hợp."
"Invalid login token": "MPI:PASSWORD:<PASSWORD>END_PI"
"Login forbidden": "Đăng nhập bị cấm"
#"Service " + options.service + " already configured": "Dịch vụ " + options.service + " đã được cấu hình"
"Service unknown": "Chưa biết Dịch vụ"
"Unrecognized options for login request": "Tùy chọn không được công nhận đối với yêu cầu đăng nhập"
"User validation failed": "Xác nhận người dùng thất bại"
"Username already exists.": "Tên đăng nhập đã tồn tại."
"You are not logged in.": "Bạn chưa đăng nhập."
"You've been logged out by the server. Please log in again.": "Bạn đã bị đăng xuất bởi máy chủ. Vui lòng đăng nhập lại."
"Your session has expired. Please log in again.": "Thời gian đăng nhập đã hết. Vui lòng đăng nhập lại."
#---- accounts-oauth
"No matching login attempt found": "Không tìm thấy đăng nhập phù hợp"
#---- accounts-password-client
"Password is old. Please reset your password.": "Mật khẩu đã cũ. Vui lòng lấy lại mật khẩu."
#---- accounts-password
"Incorrect password": "PI:PASSWORD:<PASSWORD>END_PI"
"Invalid email": "Email sai"
"Must be logged in": "Phải đăng nhập"
"Need to set a username or email": "Phải điền tên đăng nhập hoặc email"
"old password format": "PI:PASSWORD:<PASSWORD>END_PI"
"Password may not be empty": "mật khẩu không được để trống"
"Signups forbidden": "Đăng ký đã bị cấm"
"Token expired": "Hết phiên đăng nhập"
"Token has invalid email address": "Phiên đăng nhập chứa địa chỉ email sai"
"User has no password set": "Người dùng chưa có mPI:PASSWORD:<PASSWORD>END_PIẩu"
"User not found": "Không tìm thấy người dùng"
"Verify email link expired": "Đường dẫn xác nhận email đã hết hạn"
"Verify email link is for unknown address": "Đường dẫn xác nhận email là cho địa chỉ chưa xác định"
#---- match
"Match failed": "Không đúng"
#---- Misc...
"Unknown error": "Lỗi chưa được biết"
T9n.map "vi", vi |
[
{
"context": "h\n nr = 0\n R = []\n keys = [ '^word', '^fun', '^text', '^something', ]\n last_idx = ",
"end": 3909,
"score": 0.9268066883087158,
"start": 3903,
"tag": "KEY",
"value": "'^word"
},
{
"context": " = 0\n R = []\n keys = [ '^word', '^fun', '^text', '^something', ]\n last_idx = keys.lengt",
"end": 3919,
"score": 0.7752907872200012,
"start": 3912,
"tag": "KEY",
"value": "'^fun',"
},
{
"context": "\n R = []\n keys = [ '^word', '^fun', '^text', '^something', ]\n last_idx = keys.length - 1\n ",
"end": 3926,
"score": 0.7921303510665894,
"start": 3920,
"tag": "KEY",
"value": "'^text"
},
{
"context": " = []\n keys = [ '^word', '^fun', '^text', '^something', ]\n last_idx = keys.length - 1\n for word in w",
"end": 3940,
"score": 0.7452158331871033,
"start": 3929,
"tag": "KEY",
"value": "'^something"
}
] | src/data-providers.coffee | loveencounterflow/hengist | 0 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'BENCHMARKS'
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
info = CND.get_logger 'info', badge
urge = CND.get_logger 'urge', badge
help = CND.get_logger 'help', badge
whisper = CND.get_logger 'whisper', badge
echo = CND.echo.bind CND
#...........................................................................................................
{ jr } = CND
assign = Object.assign
after = ( time_s, f ) -> setTimeout f, time_s * 1000
#...........................................................................................................
nf = require 'number-format.js'
#...........................................................................................................
# H = require '../helpers'
# DATAMILL = require '../..'
@types = require './types'
{ isa
validate
declare
first_of
last_of
size_of
type_of } = @types
# VNR = require '../vnr'
# $fresh = true
# first = Symbol 'first'
# last = Symbol 'last'
PATH = require 'path'
FS = require 'fs'
#-----------------------------------------------------------------------------------------------------------
@_cache = {}
@_get_key = ( name ) -> name + ' ' + jr [ arguments..., ][ 1 .. ]
#-----------------------------------------------------------------------------------------------------------
@get_integer_numbers = ( n = 10 ) ->
cachekey = @_get_key 'get_integer_numbers', arguments...
return R if ( R = @_cache[ cachekey ] )?
validate.cardinal n
return @_cache[ cachekey ] = [ 1 .. n ]
#-----------------------------------------------------------------------------------------------------------
@get_random_words = ( n = 10, path = null, fresh = false ) ->
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_words', arguments...
delete @_cache[ cachekey ] if fresh
return R if ( R = @_cache[ cachekey ] )?
validate.cardinal n
CP = require 'child_process'
R = ( ( CP.execSync "shuf -n #{n} #{path}" ).toString 'utf-8' ).split '\n'
R = ( word.replace /'s$/g, '' for word in R )
R = ( word for word in R when word isnt '' )
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_random_text = ( n = 10, path = null ) ->
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_text', arguments...
return R if ( R = @_cache[ cachekey ] )?
R = @get_random_words n, path
R = ( ( if Math.random() > 0.7 then '' else word ) for word in R )
R = R.join '\n'
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_random_datoms = ( n = 10, path = null ) ->
HOLLERITH = require 'hollerith-codec'
@as_hollerith = ( x ) => HOLLERITH.encode x
@from_hollerith = ( x ) => HOLLERITH.decode x
PD = require 'pipedreams11'
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_datoms', arguments...
return R if ( R = @_cache[ cachekey ] )?
words = @get_random_words n, path
nr = 0
R = []
keys = [ '^word', '^fun', '^text', '^something', ]
last_idx = keys.length - 1
for word in words
nr++
$vnr = [ nr, ]
vnr_blob = @as_hollerith $vnr
$vnr_hex = vnr_blob.toString 'hex'
key = keys[ CND.random_integer 0, last_idx ]
if Math.random() > 0.75 then R.push PD.new_datom key, word, { $vnr, $vnr_hex, $stamped: true, }
else R.push PD.new_datom key, word, { $vnr, $vnr_hex, }
CND.shuffle R
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_svg_pathdata = ->
return ( FS.readFileSync ( PATH.join __dirname, '../src/tests/svgttf-test-data.txt' ), 'utf-8' ).split /\n/
#-----------------------------------------------------------------------------------------------------------
@get_random_nested_objects = ( n = 10, path = null, fresh = false ) ->
cachekey = @_get_key 'get_random_datoms', arguments...
delete @_cache[ cachekey ] if fresh
return R if ( R = @_cache[ cachekey ] )?
fresh = true
words = @get_random_words word_count, null, fresh
R = []
for _ in [ 1 .. n ]
CND.shuffle words
word_count = CND.random_integer 3, 7
subset_of_words = words[ 1 .. word_count ]
entry = {}
for word in subset_of_words
entry[ word ] = 42
R.push entry
return @_cache[ cachekey ] = R
| 56069 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'BENCHMARKS'
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
info = CND.get_logger 'info', badge
urge = CND.get_logger 'urge', badge
help = CND.get_logger 'help', badge
whisper = CND.get_logger 'whisper', badge
echo = CND.echo.bind CND
#...........................................................................................................
{ jr } = CND
assign = Object.assign
after = ( time_s, f ) -> setTimeout f, time_s * 1000
#...........................................................................................................
nf = require 'number-format.js'
#...........................................................................................................
# H = require '../helpers'
# DATAMILL = require '../..'
@types = require './types'
{ isa
validate
declare
first_of
last_of
size_of
type_of } = @types
# VNR = require '../vnr'
# $fresh = true
# first = Symbol 'first'
# last = Symbol 'last'
PATH = require 'path'
FS = require 'fs'
#-----------------------------------------------------------------------------------------------------------
@_cache = {}
@_get_key = ( name ) -> name + ' ' + jr [ arguments..., ][ 1 .. ]
#-----------------------------------------------------------------------------------------------------------
@get_integer_numbers = ( n = 10 ) ->
cachekey = @_get_key 'get_integer_numbers', arguments...
return R if ( R = @_cache[ cachekey ] )?
validate.cardinal n
return @_cache[ cachekey ] = [ 1 .. n ]
#-----------------------------------------------------------------------------------------------------------
@get_random_words = ( n = 10, path = null, fresh = false ) ->
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_words', arguments...
delete @_cache[ cachekey ] if fresh
return R if ( R = @_cache[ cachekey ] )?
validate.cardinal n
CP = require 'child_process'
R = ( ( CP.execSync "shuf -n #{n} #{path}" ).toString 'utf-8' ).split '\n'
R = ( word.replace /'s$/g, '' for word in R )
R = ( word for word in R when word isnt '' )
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_random_text = ( n = 10, path = null ) ->
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_text', arguments...
return R if ( R = @_cache[ cachekey ] )?
R = @get_random_words n, path
R = ( ( if Math.random() > 0.7 then '' else word ) for word in R )
R = R.join '\n'
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_random_datoms = ( n = 10, path = null ) ->
HOLLERITH = require 'hollerith-codec'
@as_hollerith = ( x ) => HOLLERITH.encode x
@from_hollerith = ( x ) => HOLLERITH.decode x
PD = require 'pipedreams11'
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_datoms', arguments...
return R if ( R = @_cache[ cachekey ] )?
words = @get_random_words n, path
nr = 0
R = []
keys = [ <KEY>', <KEY> <KEY>', <KEY>', ]
last_idx = keys.length - 1
for word in words
nr++
$vnr = [ nr, ]
vnr_blob = @as_hollerith $vnr
$vnr_hex = vnr_blob.toString 'hex'
key = keys[ CND.random_integer 0, last_idx ]
if Math.random() > 0.75 then R.push PD.new_datom key, word, { $vnr, $vnr_hex, $stamped: true, }
else R.push PD.new_datom key, word, { $vnr, $vnr_hex, }
CND.shuffle R
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_svg_pathdata = ->
return ( FS.readFileSync ( PATH.join __dirname, '../src/tests/svgttf-test-data.txt' ), 'utf-8' ).split /\n/
#-----------------------------------------------------------------------------------------------------------
@get_random_nested_objects = ( n = 10, path = null, fresh = false ) ->
cachekey = @_get_key 'get_random_datoms', arguments...
delete @_cache[ cachekey ] if fresh
return R if ( R = @_cache[ cachekey ] )?
fresh = true
words = @get_random_words word_count, null, fresh
R = []
for _ in [ 1 .. n ]
CND.shuffle words
word_count = CND.random_integer 3, 7
subset_of_words = words[ 1 .. word_count ]
entry = {}
for word in subset_of_words
entry[ word ] = 42
R.push entry
return @_cache[ cachekey ] = R
| true |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'BENCHMARKS'
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
info = CND.get_logger 'info', badge
urge = CND.get_logger 'urge', badge
help = CND.get_logger 'help', badge
whisper = CND.get_logger 'whisper', badge
echo = CND.echo.bind CND
#...........................................................................................................
{ jr } = CND
assign = Object.assign
after = ( time_s, f ) -> setTimeout f, time_s * 1000
#...........................................................................................................
nf = require 'number-format.js'
#...........................................................................................................
# H = require '../helpers'
# DATAMILL = require '../..'
@types = require './types'
{ isa
validate
declare
first_of
last_of
size_of
type_of } = @types
# VNR = require '../vnr'
# $fresh = true
# first = Symbol 'first'
# last = Symbol 'last'
PATH = require 'path'
FS = require 'fs'
#-----------------------------------------------------------------------------------------------------------
@_cache = {}
@_get_key = ( name ) -> name + ' ' + jr [ arguments..., ][ 1 .. ]
#-----------------------------------------------------------------------------------------------------------
@get_integer_numbers = ( n = 10 ) ->
cachekey = @_get_key 'get_integer_numbers', arguments...
return R if ( R = @_cache[ cachekey ] )?
validate.cardinal n
return @_cache[ cachekey ] = [ 1 .. n ]
#-----------------------------------------------------------------------------------------------------------
@get_random_words = ( n = 10, path = null, fresh = false ) ->
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_words', arguments...
delete @_cache[ cachekey ] if fresh
return R if ( R = @_cache[ cachekey ] )?
validate.cardinal n
CP = require 'child_process'
R = ( ( CP.execSync "shuf -n #{n} #{path}" ).toString 'utf-8' ).split '\n'
R = ( word.replace /'s$/g, '' for word in R )
R = ( word for word in R when word isnt '' )
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_random_text = ( n = 10, path = null ) ->
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_text', arguments...
return R if ( R = @_cache[ cachekey ] )?
R = @get_random_words n, path
R = ( ( if Math.random() > 0.7 then '' else word ) for word in R )
R = R.join '\n'
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_random_datoms = ( n = 10, path = null ) ->
HOLLERITH = require 'hollerith-codec'
@as_hollerith = ( x ) => HOLLERITH.encode x
@from_hollerith = ( x ) => HOLLERITH.decode x
PD = require 'pipedreams11'
path ?= '/usr/share/dict/portuguese'
cachekey = @_get_key 'get_random_datoms', arguments...
return R if ( R = @_cache[ cachekey ] )?
words = @get_random_words n, path
nr = 0
R = []
keys = [ PI:KEY:<KEY>END_PI', PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI', PI:KEY:<KEY>END_PI', ]
last_idx = keys.length - 1
for word in words
nr++
$vnr = [ nr, ]
vnr_blob = @as_hollerith $vnr
$vnr_hex = vnr_blob.toString 'hex'
key = keys[ CND.random_integer 0, last_idx ]
if Math.random() > 0.75 then R.push PD.new_datom key, word, { $vnr, $vnr_hex, $stamped: true, }
else R.push PD.new_datom key, word, { $vnr, $vnr_hex, }
CND.shuffle R
return @_cache[ cachekey ] = R
#-----------------------------------------------------------------------------------------------------------
@get_svg_pathdata = ->
return ( FS.readFileSync ( PATH.join __dirname, '../src/tests/svgttf-test-data.txt' ), 'utf-8' ).split /\n/
#-----------------------------------------------------------------------------------------------------------
@get_random_nested_objects = ( n = 10, path = null, fresh = false ) ->
cachekey = @_get_key 'get_random_datoms', arguments...
delete @_cache[ cachekey ] if fresh
return R if ( R = @_cache[ cachekey ] )?
fresh = true
words = @get_random_words word_count, null, fresh
R = []
for _ in [ 1 .. n ]
CND.shuffle words
word_count = CND.random_integer 3, 7
subset_of_words = words[ 1 .. word_count ]
entry = {}
for word in subset_of_words
entry[ word ] = 42
R.push entry
return @_cache[ cachekey ] = R
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.