entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": " \"8fea081c\"\n actor: { displayName: \"Assaf\" }\n verb: \"posted\"\n published: new ",
"end": 531,
"score": 0.9479221105575562,
"start": 526,
"tag": "NAME",
"value": "Assaf"
},
{
"context": " assert.equal activity.actor.displayName, ... | server/test/api_activity.coffee | assaf/vanity.js | 2 | Helper = require("./helper") # must be at top
assert = require("assert")
Async = require("async")
{ EventEmitter } = require("events")
request = require("request")
Activity = require("../models/activity")
EventSource = require("./event_source")
describe "API activity", ->
before Helper.once
# -- Creating an activity --
describe "post", ->
statusCode = body = headers = null
params =
id: "8fea081c"
actor: { displayName: "Assaf" }
verb: "posted"
published: new Date(1332348384734).toISOString()
before Helper.newIndex
describe "valid", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", json: params, (_, response)->
{ statusCode, headers, body } = response
done()
it "should create activity", (done)->
Activity.get "8fea081c", (error, activity)->
assert activity
assert.equal activity.actor.displayName, "Assaf"
done()
it "should return 201", ->
assert.equal statusCode, 201
it "should return location of new activity", ->
assert.equal headers["location"], "/v1/activity/8fea081c"
it "should return empty document", ->
assert.equal body, " "
describe "not valid", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", json: { }, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 400", ->
assert.equal statusCode, 400
it "should return error message", ->
assert.equal body, "Activity requires verb"
describe "no body", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 400", ->
assert.equal statusCode, 400
it "should return error message", ->
assert.equal body, "Activity requires verb"
describe "no token", ->
before (done)->
request.post "http://localhost:3003/v1/activity", json: params, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Getting an activity --
describe "get activity", ->
before Helper.newIndex
before (done)->
params =
id: "fe936972"
actor: { displayName: "Assaf" }
verb: "posted"
labels: ["image", "funny"]
Activity.create params, done
describe "", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "application/json" }
request.get "http://localhost:3003/v1/activity/fe936972?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 200", ->
assert.equal statusCode, 200
it "should return a JSON document", ->
assert /application\/json/.test(headers['content-type'])
it "should return the activity", ->
activity = JSON.parse(body)
assert.equal activity.id, "fe936972"
assert.equal activity.actor.displayName, "Assaf"
it "should include content", ->
activity = JSON.parse(body)
assert.equal activity.content, "Assaf posted."
it "should include HTML representation", ->
activity = JSON.parse(body)
assert /<div/.test(activity.html)
it "should include activity view URL", ->
activity = JSON.parse(body)
assert.equal activity.url, "/activity/fe936972"
it "should include title", ->
activity = JSON.parse(body)
assert.equal activity.title, "Assaf posted."
it "should include labels", ->
activity = JSON.parse(body)
assert activity.labels.include("image")
assert activity.labels.include("funny")
describe "no such activity", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "*/*" }
request.get "http://localhost:3003/v1/activity/f0000002?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 404", ->
assert.equal statusCode, 404
it "should return an error message", ->
assert.equal body, "Not Found"
describe "no token", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "*/*" }
request.get "http://localhost:3003/v1/activity/f0000002", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Listing all activities --
describe "list activities", ->
statusCode = body = headers = null
before Helper.newIndex
before (done)->
file = require("fs").readFileSync("#{__dirname}/fixtures/activities.json")
Async.forEach JSON.parse(file), (activity, done)->
Activity.create activity, done
, ->
Activity.index().refresh done
describe "", ->
before (done)->
headers = { "Accept": "application/json" }
request.get "http://localhost:3003/v1/activity?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 200", ->
assert.equal statusCode, 200
it "should return a JSON document", ->
assert /application\/json/.test(headers['content-type'])
it "should return results count", ->
{ totalItems } = JSON.parse(body)
assert.equal totalItems, 3
it "should return activities", ->
{ items } = JSON.parse(body)
for activity in items
assert activity.actor?.displayName
it "should return most recent activity first", ->
{ items } = JSON.parse(body)
names = items.map("actor").map("displayName")
assert.deepEqual names, ["David", "Jerome", "Assaf"]
it "should include HTML representation", ->
{ items } = JSON.parse(body)
for activity in items
assert /^<div/.test(activity.html)
it "should include activity view URL", ->
{ items } = JSON.parse(body)
for activity in items
assert /^\/activity\/[0-9a-f]{8}$/.test(activity.url)
it "should include title", ->
{ items } = JSON.parse(body)
for activity in items
assert /(Assaf|David|Jerome) (started|continued|completed)\./.test(activity.title)
it "should return JSON url to full collection", ->
{ url } = JSON.parse(body)
assert.equal url, "/v1/activity"
describe "query", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&query=NOT+assaf"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only matching activities", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "David"
assert.equal items[1].actor.displayName, "Jerome"
it "should not return link to next result set", ->
{ next } = JSON.parse(body)
assert !next
it "should not return link to previous result set", ->
{ prev } = JSON.parse(body)
assert !prev
it "should return JSON url to full collection", ->
{ url } = JSON.parse(body)
assert.equal url, "/v1/activity"
describe "limit", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&limit=2"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only N most recent activities", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "David"
assert.equal items[1].actor.displayName, "Jerome"
it "should return link to next result set", ->
{ next } = JSON.parse(body)
assert.equal next, "/v1/activity?limit=2&offset=2"
it "should not return link to previous result set", ->
{ prev } = JSON.parse(body)
assert !prev
describe "offset", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&offset=1&limit=1"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only N most recent activities, from offset", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "Jerome"
it "should return link to next result set", ->
{ next } = JSON.parse(body)
assert.equal next, "/v1/activity?limit=1&offset=2"
it "should return link to previous result set", ->
{ prev } = JSON.parse(body)
assert.equal prev, "/v1/activity?limit=1&offset=0"
describe "start", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&start=2011-03-18T18:51:00Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published at/after start time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "David"
assert.equal items[1].actor.displayName, "Jerome"
describe "end", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&end=2011-03-18T18:51:00Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published before start time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "Assaf"
describe "start/end", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&start=2011-03-18T18:50:30Z&end=2011-03-18T18:51:30Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published between start/end time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "Jerome"
describe "no token", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?start=2011-03-18T18:50:30Z&end=2011-03-18T18:51:30Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Activity stream --
describe "activities stream", ->
# Collect events sent to event source.
events = []
before Helper.newIndex
before (done)->
# Fire up the event source, we need to be connected to receive anything.
event_source = new EventSource("http://localhost:3003/v1/activity/stream?access_token=secret")
# Wait until we're connected, then create activities and have then sent to event source.
event_source.onopen = ->
file = require("fs").readFileSync("#{__dirname}/fixtures/activities.json")
Async.forEach JSON.parse(file), (activity, done)->
Activity.create activity, done
# Process activities as they come in.
event_source.addEventListener "activity", (event)->
events.push event
# We only wait for the first three events
if events.length == 3
event_source.close()
done()
it "should receive all three events", ->
assert.equal events.length, 3
# Can't guarantee order of events, must sort
names = events.map((event)-> JSON.parse(event.data).actor.displayName).sort()
assert.deepEqual names, ["Assaf", "David", "Jerome"]
it "events should include url, title and content", ->
for event in events
activity = JSON.parse(event.data)
assert /\/activity\//.test(activity.url)
assert /(Assaf|David|Jerome) (started|continued|completed)\./.test(activity.title)
assert /<div/.test(activity.html)
# -- Deleting an activity --
describe "delete", ->
statusCode = null
before Helper.newIndex
before (done)->
activities = [
{ id: "015f13c4", actor: { displayName: "Assaf" }, verb: "posted" },
{ id: "75b12975", actor: { displayName: "Assaf" }, verb: "tested" }
]
Async.forEach activities, (activity, done)->
Activity.create activity, done
, done
it "should delete activity", (done)->
request.del "http://localhost:3003/v1/activity/015f13c4?access_token=secret", ->
Activity.get "015f13c4", (error, doc)->
assert !error && !doc
done()
it "should return 204", (done)->
request.del "http://localhost:3003/v1/activity/015f13c4?access_token=secret", (_, response)->
assert.equal response.statusCode, 204
done()
it "should not fail if no such activity", (done)->
request.del "http://localhost:3003/v1/activity/nosuch?access_token=secret", (error, response)->
assert.equal response.statusCode, 204
done()
it "should not delete unrelated activity", (done)->
Activity.get "75b12975", (error, doc)->
assert doc && doc.actor
done()
describe "no token", ->
before (done)->
request.del "http://localhost:3003/v1/activity/015f13c4", (_, response)->
{ statusCode } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
| 37620 | Helper = require("./helper") # must be at top
assert = require("assert")
Async = require("async")
{ EventEmitter } = require("events")
request = require("request")
Activity = require("../models/activity")
EventSource = require("./event_source")
describe "API activity", ->
before Helper.once
# -- Creating an activity --
describe "post", ->
statusCode = body = headers = null
params =
id: "8fea081c"
actor: { displayName: "<NAME>" }
verb: "posted"
published: new Date(1332348384734).toISOString()
before Helper.newIndex
describe "valid", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", json: params, (_, response)->
{ statusCode, headers, body } = response
done()
it "should create activity", (done)->
Activity.get "8fea081c", (error, activity)->
assert activity
assert.equal activity.actor.displayName, "<NAME>"
done()
it "should return 201", ->
assert.equal statusCode, 201
it "should return location of new activity", ->
assert.equal headers["location"], "/v1/activity/8fea081c"
it "should return empty document", ->
assert.equal body, " "
describe "not valid", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", json: { }, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 400", ->
assert.equal statusCode, 400
it "should return error message", ->
assert.equal body, "Activity requires verb"
describe "no body", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 400", ->
assert.equal statusCode, 400
it "should return error message", ->
assert.equal body, "Activity requires verb"
describe "no token", ->
before (done)->
request.post "http://localhost:3003/v1/activity", json: params, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Getting an activity --
describe "get activity", ->
before Helper.newIndex
before (done)->
params =
id: "fe936972"
actor: { displayName: "<NAME>" }
verb: "posted"
labels: ["image", "funny"]
Activity.create params, done
describe "", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "application/json" }
request.get "http://localhost:3003/v1/activity/fe936972?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 200", ->
assert.equal statusCode, 200
it "should return a JSON document", ->
assert /application\/json/.test(headers['content-type'])
it "should return the activity", ->
activity = JSON.parse(body)
assert.equal activity.id, "fe936972"
assert.equal activity.actor.displayName, "<NAME>"
it "should include content", ->
activity = JSON.parse(body)
assert.equal activity.content, "Assaf posted."
it "should include HTML representation", ->
activity = JSON.parse(body)
assert /<div/.test(activity.html)
it "should include activity view URL", ->
activity = JSON.parse(body)
assert.equal activity.url, "/activity/fe936972"
it "should include title", ->
activity = JSON.parse(body)
assert.equal activity.title, "Assaf posted."
it "should include labels", ->
activity = JSON.parse(body)
assert activity.labels.include("image")
assert activity.labels.include("funny")
describe "no such activity", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "*/*" }
request.get "http://localhost:3003/v1/activity/f0000002?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 404", ->
assert.equal statusCode, 404
it "should return an error message", ->
assert.equal body, "Not Found"
describe "no token", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "*/*" }
request.get "http://localhost:3003/v1/activity/f0000002", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Listing all activities --
describe "list activities", ->
statusCode = body = headers = null
before Helper.newIndex
before (done)->
file = require("fs").readFileSync("#{__dirname}/fixtures/activities.json")
Async.forEach JSON.parse(file), (activity, done)->
Activity.create activity, done
, ->
Activity.index().refresh done
describe "", ->
before (done)->
headers = { "Accept": "application/json" }
request.get "http://localhost:3003/v1/activity?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 200", ->
assert.equal statusCode, 200
it "should return a JSON document", ->
assert /application\/json/.test(headers['content-type'])
it "should return results count", ->
{ totalItems } = JSON.parse(body)
assert.equal totalItems, 3
it "should return activities", ->
{ items } = JSON.parse(body)
for activity in items
assert activity.actor?.displayName
it "should return most recent activity first", ->
{ items } = JSON.parse(body)
names = items.map("actor").map("displayName")
assert.deepEqual names, ["<NAME>", "<NAME>", "<NAME>"]
it "should include HTML representation", ->
{ items } = JSON.parse(body)
for activity in items
assert /^<div/.test(activity.html)
it "should include activity view URL", ->
{ items } = JSON.parse(body)
for activity in items
assert /^\/activity\/[0-9a-f]{8}$/.test(activity.url)
it "should include title", ->
{ items } = JSON.parse(body)
for activity in items
assert /(<NAME>|<NAME>|<NAME>) (started|continued|completed)\./.test(activity.title)
it "should return JSON url to full collection", ->
{ url } = JSON.parse(body)
assert.equal url, "/v1/activity"
describe "query", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&query=NOT+assaf"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only matching activities", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "<NAME>"
assert.equal items[1].actor.displayName, "<NAME>"
it "should not return link to next result set", ->
{ next } = JSON.parse(body)
assert !next
it "should not return link to previous result set", ->
{ prev } = JSON.parse(body)
assert !prev
it "should return JSON url to full collection", ->
{ url } = JSON.parse(body)
assert.equal url, "/v1/activity"
describe "limit", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&limit=2"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only N most recent activities", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "<NAME>"
assert.equal items[1].actor.displayName, "<NAME>"
it "should return link to next result set", ->
{ next } = JSON.parse(body)
assert.equal next, "/v1/activity?limit=2&offset=2"
it "should not return link to previous result set", ->
{ prev } = JSON.parse(body)
assert !prev
describe "offset", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&offset=1&limit=1"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only N most recent activities, from offset", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "<NAME>"
it "should return link to next result set", ->
{ next } = JSON.parse(body)
assert.equal next, "/v1/activity?limit=1&offset=2"
it "should return link to previous result set", ->
{ prev } = JSON.parse(body)
assert.equal prev, "/v1/activity?limit=1&offset=0"
describe "start", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&start=2011-03-18T18:51:00Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published at/after start time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "<NAME>"
assert.equal items[1].actor.displayName, "<NAME>"
describe "end", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&end=2011-03-18T18:51:00Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published before start time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "<NAME>"
describe "start/end", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&start=2011-03-18T18:50:30Z&end=2011-03-18T18:51:30Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published between start/end time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "<NAME>"
describe "no token", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?start=2011-03-18T18:50:30Z&end=2011-03-18T18:51:30Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Activity stream --
describe "activities stream", ->
# Collect events sent to event source.
events = []
before Helper.newIndex
before (done)->
# Fire up the event source, we need to be connected to receive anything.
event_source = new EventSource("http://localhost:3003/v1/activity/stream?access_token=secret")
# Wait until we're connected, then create activities and have then sent to event source.
event_source.onopen = ->
file = require("fs").readFileSync("#{__dirname}/fixtures/activities.json")
Async.forEach JSON.parse(file), (activity, done)->
Activity.create activity, done
# Process activities as they come in.
event_source.addEventListener "activity", (event)->
events.push event
# We only wait for the first three events
if events.length == 3
event_source.close()
done()
it "should receive all three events", ->
assert.equal events.length, 3
# Can't guarantee order of events, must sort
names = events.map((event)-> JSON.parse(event.data).actor.displayName).sort()
assert.deepEqual names, ["<NAME>", "<NAME>", "<NAME>"]
it "events should include url, title and content", ->
for event in events
activity = JSON.parse(event.data)
assert /\/activity\//.test(activity.url)
assert /(<NAME>|<NAME>|<NAME>) (started|continued|completed)\./.test(activity.title)
assert /<div/.test(activity.html)
# -- Deleting an activity --
describe "delete", ->
statusCode = null
before Helper.newIndex
before (done)->
activities = [
{ id: "015f13c4", actor: { displayName: "<NAME>" }, verb: "posted" },
{ id: "75b12975", actor: { displayName: "<NAME>" }, verb: "tested" }
]
Async.forEach activities, (activity, done)->
Activity.create activity, done
, done
it "should delete activity", (done)->
request.del "http://localhost:3003/v1/activity/015f13c4?access_token=secret", ->
Activity.get "015f13c4", (error, doc)->
assert !error && !doc
done()
it "should return 204", (done)->
request.del "http://localhost:3003/v1/activity/015f13c4?access_token=secret", (_, response)->
assert.equal response.statusCode, 204
done()
it "should not fail if no such activity", (done)->
request.del "http://localhost:3003/v1/activity/nosuch?access_token=secret", (error, response)->
assert.equal response.statusCode, 204
done()
it "should not delete unrelated activity", (done)->
Activity.get "75b12975", (error, doc)->
assert doc && doc.actor
done()
describe "no token", ->
before (done)->
request.del "http://localhost:3003/v1/activity/015f13c4", (_, response)->
{ statusCode } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
| true | Helper = require("./helper") # must be at top
assert = require("assert")
Async = require("async")
{ EventEmitter } = require("events")
request = require("request")
Activity = require("../models/activity")
EventSource = require("./event_source")
describe "API activity", ->
before Helper.once
# -- Creating an activity --
describe "post", ->
statusCode = body = headers = null
params =
id: "8fea081c"
actor: { displayName: "PI:NAME:<NAME>END_PI" }
verb: "posted"
published: new Date(1332348384734).toISOString()
before Helper.newIndex
describe "valid", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", json: params, (_, response)->
{ statusCode, headers, body } = response
done()
it "should create activity", (done)->
Activity.get "8fea081c", (error, activity)->
assert activity
assert.equal activity.actor.displayName, "PI:NAME:<NAME>END_PI"
done()
it "should return 201", ->
assert.equal statusCode, 201
it "should return location of new activity", ->
assert.equal headers["location"], "/v1/activity/8fea081c"
it "should return empty document", ->
assert.equal body, " "
describe "not valid", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", json: { }, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 400", ->
assert.equal statusCode, 400
it "should return error message", ->
assert.equal body, "Activity requires verb"
describe "no body", ->
before (done)->
request.post "http://localhost:3003/v1/activity?access_token=secret", (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 400", ->
assert.equal statusCode, 400
it "should return error message", ->
assert.equal body, "Activity requires verb"
describe "no token", ->
before (done)->
request.post "http://localhost:3003/v1/activity", json: params, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Getting an activity --
describe "get activity", ->
before Helper.newIndex
before (done)->
params =
id: "fe936972"
actor: { displayName: "PI:NAME:<NAME>END_PI" }
verb: "posted"
labels: ["image", "funny"]
Activity.create params, done
describe "", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "application/json" }
request.get "http://localhost:3003/v1/activity/fe936972?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 200", ->
assert.equal statusCode, 200
it "should return a JSON document", ->
assert /application\/json/.test(headers['content-type'])
it "should return the activity", ->
activity = JSON.parse(body)
assert.equal activity.id, "fe936972"
assert.equal activity.actor.displayName, "PI:NAME:<NAME>END_PI"
it "should include content", ->
activity = JSON.parse(body)
assert.equal activity.content, "Assaf posted."
it "should include HTML representation", ->
activity = JSON.parse(body)
assert /<div/.test(activity.html)
it "should include activity view URL", ->
activity = JSON.parse(body)
assert.equal activity.url, "/activity/fe936972"
it "should include title", ->
activity = JSON.parse(body)
assert.equal activity.title, "Assaf posted."
it "should include labels", ->
activity = JSON.parse(body)
assert activity.labels.include("image")
assert activity.labels.include("funny")
describe "no such activity", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "*/*" }
request.get "http://localhost:3003/v1/activity/f0000002?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 404", ->
assert.equal statusCode, 404
it "should return an error message", ->
assert.equal body, "Not Found"
describe "no token", ->
statusCode = body = headers = null
before (done)->
headers = { "Accept": "*/*" }
request.get "http://localhost:3003/v1/activity/f0000002", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Listing all activities --
describe "list activities", ->
statusCode = body = headers = null
before Helper.newIndex
before (done)->
file = require("fs").readFileSync("#{__dirname}/fixtures/activities.json")
Async.forEach JSON.parse(file), (activity, done)->
Activity.create activity, done
, ->
Activity.index().refresh done
describe "", ->
before (done)->
headers = { "Accept": "application/json" }
request.get "http://localhost:3003/v1/activity?access_token=secret", headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 200", ->
assert.equal statusCode, 200
it "should return a JSON document", ->
assert /application\/json/.test(headers['content-type'])
it "should return results count", ->
{ totalItems } = JSON.parse(body)
assert.equal totalItems, 3
it "should return activities", ->
{ items } = JSON.parse(body)
for activity in items
assert activity.actor?.displayName
it "should return most recent activity first", ->
{ items } = JSON.parse(body)
names = items.map("actor").map("displayName")
assert.deepEqual names, ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"]
it "should include HTML representation", ->
{ items } = JSON.parse(body)
for activity in items
assert /^<div/.test(activity.html)
it "should include activity view URL", ->
{ items } = JSON.parse(body)
for activity in items
assert /^\/activity\/[0-9a-f]{8}$/.test(activity.url)
it "should include title", ->
{ items } = JSON.parse(body)
for activity in items
assert /(PI:NAME:<NAME>END_PI|PI:NAME:<NAME>END_PI|PI:NAME:<NAME>END_PI) (started|continued|completed)\./.test(activity.title)
it "should return JSON url to full collection", ->
{ url } = JSON.parse(body)
assert.equal url, "/v1/activity"
describe "query", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&query=NOT+assaf"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only matching activities", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "PI:NAME:<NAME>END_PI"
assert.equal items[1].actor.displayName, "PI:NAME:<NAME>END_PI"
it "should not return link to next result set", ->
{ next } = JSON.parse(body)
assert !next
it "should not return link to previous result set", ->
{ prev } = JSON.parse(body)
assert !prev
it "should return JSON url to full collection", ->
{ url } = JSON.parse(body)
assert.equal url, "/v1/activity"
describe "limit", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&limit=2"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only N most recent activities", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "PI:NAME:<NAME>END_PI"
assert.equal items[1].actor.displayName, "PI:NAME:<NAME>END_PI"
it "should return link to next result set", ->
{ next } = JSON.parse(body)
assert.equal next, "/v1/activity?limit=2&offset=2"
it "should not return link to previous result set", ->
{ prev } = JSON.parse(body)
assert !prev
describe "offset", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&offset=1&limit=1"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only N most recent activities, from offset", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "PI:NAME:<NAME>END_PI"
it "should return link to next result set", ->
{ next } = JSON.parse(body)
assert.equal next, "/v1/activity?limit=1&offset=2"
it "should return link to previous result set", ->
{ prev } = JSON.parse(body)
assert.equal prev, "/v1/activity?limit=1&offset=0"
describe "start", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&start=2011-03-18T18:51:00Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published at/after start time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 2
assert.equal items[0].actor.displayName, "PI:NAME:<NAME>END_PI"
assert.equal items[1].actor.displayName, "PI:NAME:<NAME>END_PI"
describe "end", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&end=2011-03-18T18:51:00Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published before start time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "PI:NAME:<NAME>END_PI"
describe "start/end", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?access_token=secret&start=2011-03-18T18:50:30Z&end=2011-03-18T18:51:30Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return only activities published between start/end time", ->
{ items } = JSON.parse(body)
assert.equal items.length, 1
assert.equal items[0].actor.displayName, "PI:NAME:<NAME>END_PI"
describe "no token", ->
before (done)->
headers = { "Accept": "application/json" }
url = "http://localhost:3003/v1/activity?start=2011-03-18T18:50:30Z&end=2011-03-18T18:51:30Z"
request.get url, headers: headers, (_, response)->
{ statusCode, headers, body } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
# -- Activity stream --
describe "activities stream", ->
# Collect events sent to event source.
events = []
before Helper.newIndex
before (done)->
# Fire up the event source, we need to be connected to receive anything.
event_source = new EventSource("http://localhost:3003/v1/activity/stream?access_token=secret")
# Wait until we're connected, then create activities and have then sent to event source.
event_source.onopen = ->
file = require("fs").readFileSync("#{__dirname}/fixtures/activities.json")
Async.forEach JSON.parse(file), (activity, done)->
Activity.create activity, done
# Process activities as they come in.
event_source.addEventListener "activity", (event)->
events.push event
# We only wait for the first three events
if events.length == 3
event_source.close()
done()
it "should receive all three events", ->
assert.equal events.length, 3
# Can't guarantee order of events, must sort
names = events.map((event)-> JSON.parse(event.data).actor.displayName).sort()
assert.deepEqual names, ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"]
it "events should include url, title and content", ->
for event in events
activity = JSON.parse(event.data)
assert /\/activity\//.test(activity.url)
assert /(PI:NAME:<NAME>END_PI|PI:NAME:<NAME>END_PI|PI:NAME:<NAME>END_PI) (started|continued|completed)\./.test(activity.title)
assert /<div/.test(activity.html)
# -- Deleting an activity --
describe "delete", ->
statusCode = null
before Helper.newIndex
before (done)->
activities = [
{ id: "015f13c4", actor: { displayName: "PI:NAME:<NAME>END_PI" }, verb: "posted" },
{ id: "75b12975", actor: { displayName: "PI:NAME:<NAME>END_PI" }, verb: "tested" }
]
Async.forEach activities, (activity, done)->
Activity.create activity, done
, done
it "should delete activity", (done)->
request.del "http://localhost:3003/v1/activity/015f13c4?access_token=secret", ->
Activity.get "015f13c4", (error, doc)->
assert !error && !doc
done()
it "should return 204", (done)->
request.del "http://localhost:3003/v1/activity/015f13c4?access_token=secret", (_, response)->
assert.equal response.statusCode, 204
done()
it "should not fail if no such activity", (done)->
request.del "http://localhost:3003/v1/activity/nosuch?access_token=secret", (error, response)->
assert.equal response.statusCode, 204
done()
it "should not delete unrelated activity", (done)->
Activity.get "75b12975", (error, doc)->
assert doc && doc.actor
done()
describe "no token", ->
before (done)->
request.del "http://localhost:3003/v1/activity/015f13c4", (_, response)->
{ statusCode } = response
done()
it "should return 401", ->
assert.equal statusCode, 401
|
[
{
"context": "Neznámý blok'\n\tformat:\n\t\tbold: \"Tučně\"\n\t\titalic: \"Kurzíva\"\n\t\tlink: \"Odkaz\"\n\t\tlinkPlaceholder: \"Zadejte nebo",
"end": 152,
"score": 0.8290058970451355,
"start": 145,
"tag": "NAME",
"value": "Kurzíva"
}
] | locales/cs.coffee | Teyras/goated | 0 | G = window.Goated ?= {}
G.locale ?= {}
G.locale.cs =
addBlock: 'Přidat blok'
unknownBlock: 'Neznámý blok'
format:
bold: "Tučně"
italic: "Kurzíva"
link: "Odkaz"
linkPlaceholder: "Zadejte nebo vložte adresu..."
blocks:
'goated-text':
title: 'Text'
placeholder: 'Text'
'goated-heading':
title: 'Nadpis'
placeholder: 'Nadpis'
config:
level: 'Úroveň'
'goated-list':
title: 'Seznam'
config:
ordered: 'Číslovaný seznam'
'goated-image':
title: 'Obrázek'
config:
align: 'Umístění'
alignLeft: 'Obtékat zprava'
alignBlock: 'Samostatně'
alignRight: 'Obtékat zleva'
title: 'Titulek'
url: 'Adresa obrázku (zmenšený)'
full: 'Adresa obrázku (plná velikost)'
sameWindow: 'Otevírat zvětšený obrázek přímo'
upload: 'Pokud chcete nahrát soubor, přetáhněte ho sem'
'goated-file':
title: 'Soubor'
config:
title: 'Popisek'
url: 'Adresa souboru'
upload: 'Pokud chcete nahrát soubor, přetáhněte ho sem'
'goated-album':
title: 'Album obrázků'
config:
upload: 'Pokud chcete nahrát soubory, přetáhněte je sem'
title: 'Popisek'
| 96478 | G = window.Goated ?= {}
G.locale ?= {}
G.locale.cs =
addBlock: 'Přidat blok'
unknownBlock: 'Neznámý blok'
format:
bold: "Tučně"
italic: "<NAME>"
link: "Odkaz"
linkPlaceholder: "Zadejte nebo vložte adresu..."
blocks:
'goated-text':
title: 'Text'
placeholder: 'Text'
'goated-heading':
title: 'Nadpis'
placeholder: 'Nadpis'
config:
level: 'Úroveň'
'goated-list':
title: 'Seznam'
config:
ordered: 'Číslovaný seznam'
'goated-image':
title: 'Obrázek'
config:
align: 'Umístění'
alignLeft: 'Obtékat zprava'
alignBlock: 'Samostatně'
alignRight: 'Obtékat zleva'
title: 'Titulek'
url: 'Adresa obrázku (zmenšený)'
full: 'Adresa obrázku (plná velikost)'
sameWindow: 'Otevírat zvětšený obrázek přímo'
upload: 'Pokud chcete nahrát soubor, přetáhněte ho sem'
'goated-file':
title: 'Soubor'
config:
title: 'Popisek'
url: 'Adresa souboru'
upload: 'Pokud chcete nahrát soubor, přetáhněte ho sem'
'goated-album':
title: 'Album obrázků'
config:
upload: 'Pokud chcete nahrát soubory, přetáhněte je sem'
title: 'Popisek'
| true | G = window.Goated ?= {}
G.locale ?= {}
G.locale.cs =
addBlock: 'Přidat blok'
unknownBlock: 'Neznámý blok'
format:
bold: "Tučně"
italic: "PI:NAME:<NAME>END_PI"
link: "Odkaz"
linkPlaceholder: "Zadejte nebo vložte adresu..."
blocks:
'goated-text':
title: 'Text'
placeholder: 'Text'
'goated-heading':
title: 'Nadpis'
placeholder: 'Nadpis'
config:
level: 'Úroveň'
'goated-list':
title: 'Seznam'
config:
ordered: 'Číslovaný seznam'
'goated-image':
title: 'Obrázek'
config:
align: 'Umístění'
alignLeft: 'Obtékat zprava'
alignBlock: 'Samostatně'
alignRight: 'Obtékat zleva'
title: 'Titulek'
url: 'Adresa obrázku (zmenšený)'
full: 'Adresa obrázku (plná velikost)'
sameWindow: 'Otevírat zvětšený obrázek přímo'
upload: 'Pokud chcete nahrát soubor, přetáhněte ho sem'
'goated-file':
title: 'Soubor'
config:
title: 'Popisek'
url: 'Adresa souboru'
upload: 'Pokud chcete nahrát soubor, přetáhněte ho sem'
'goated-album':
title: 'Album obrázků'
config:
upload: 'Pokud chcete nahrát soubory, přetáhněte je sem'
title: 'Popisek'
|
[
{
"context": "'log').clear()\n\n sponsor: (->\n {\n name: \"Blue Box\"\n url: \"http://bluebox.net\"\n }\n ).proper",
"end": 1973,
"score": 0.9964684247970581,
"start": 1965,
"tag": "NAME",
"value": "Blue Box"
}
] | app/models/job.coffee | 2947721120/travis-web | 0 | `import { durationFrom, configKeys, compact } from 'travis/utils/helpers'`
`import configKeysMap from 'travis/utils/keys-map'`
`import Ember from 'ember'`
`import Model from 'travis/models/model'`
`import Log from 'travis/models/log'`
`import DurationCalculations from 'travis/utils/duration-calculations'`
Job = Model.extend DurationCalculations,
ajax: Ember.inject.service()
logId: DS.attr()
queue: DS.attr()
state: DS.attr()
number: DS.attr()
_startedAt: DS.attr()
_finishedAt: DS.attr()
allowFailure: DS.attr('boolean')
tags: DS.attr()
repositoryPrivate: DS.attr()
repositorySlug: DS.attr()
repo: DS.belongsTo('repo', async: true)
build: DS.belongsTo('build', async: true)
commit: DS.belongsTo('commit', async: true)
branch: Ember.computed.alias('build.branch')
annotations: DS.hasMany('annotation')
_config: DS.attr('object')
log: ( ->
@set('isLogAccessed', true)
Log.create(job: this, ajax: @get('ajax'))
).property()
startedAt: (->
unless @get('notStarted')
@get('_startedAt')
).property('_startedAt', 'notStarted')
finishedAt: (->
unless @get('notStarted')
@get('_finishedAt')
).property('_finishedAt', 'notStarted')
repoSlug: (->
@get('repositorySlug')
).property('repositorySlug')
config: (->
if config = @get('_config')
compact(config)
else if @get('currentState.stateName') != 'root.loading'
return if @get('isFetchingConfig')
@set 'isFetchingConfig', true
@reload()
).property('_config')
isFinished: (->
@get('state') in ['passed', 'failed', 'errored', 'canceled']
).property('state')
notStarted: (->
@get('state') in ['queued', 'created', 'received']
).property('state')
clearLog: ->
# This is needed if we don't want to fetch log just to clear it
if @get('isLogAccessed')
@get('log').clear()
sponsor: (->
{
name: "Blue Box"
url: "http://bluebox.net"
}
).property()
configValues: (->
config = @get('config')
keys = @get('build.rawConfigKeys')
if config && keys
keys.map (key) -> config[key]
else
[]
).property('config', 'build.rawConfigKeys.length')
canCancel: (->
!@get('isFinished')
).property('state')
canRestart: Ember.computed.alias('isFinished')
cancel: (->
@get('ajax').post "/jobs/#{@get('id')}/cancel"
)
removeLog: ->
@get('ajax').patch("/jobs/#{@get('id')}/log").then =>
@reloadLog()
reloadLog: ->
@clearLog()
@get('log').fetch()
restart: ->
@get('ajax').post "/jobs/#{@get('id')}/restart"
appendLog: (part) ->
@get('log').append part
subscribe: ->
return if @get('subscribed')
@set('subscribed', true)
if Travis.pusher
Travis.pusher.subscribe "job-#{@get('id')}"
unsubscribe: ->
return unless @get('subscribed')
@set('subscribed', false)
if Travis.pusher
Travis.pusher.unsubscribe "job-#{@get('id')}"
onStateChange: (->
@unsubscribe() if @get('state') == 'finished' && Travis.pusher
).observes('state')
# TODO: such formattings should be done in controller, but in order
# to use it there easily, I would have to refactor job and build
# controllers
formattedFinishedAt: (->
if finishedAt = @get('finishedAt')
moment(finishedAt).format('lll')
).property('finishedAt')
canRemoveLog: (->
!@get('log.removed')
).property('log.removed')
slug: (->
"#{@get('repo.slug')} ##{@get('number')}"
).property()
isLegacyInfrastructure: (->
if @get('queue') == 'builds.linux'
true
).property('queue')
displayGceNotice: (->
if @get('queue') == 'builds.gce' && @get('config.dist') == 'precise'
true
else
false
).property('queue', 'config.dist')
`export default Job`
| 104840 | `import { durationFrom, configKeys, compact } from 'travis/utils/helpers'`
`import configKeysMap from 'travis/utils/keys-map'`
`import Ember from 'ember'`
`import Model from 'travis/models/model'`
`import Log from 'travis/models/log'`
`import DurationCalculations from 'travis/utils/duration-calculations'`
Job = Model.extend DurationCalculations,
ajax: Ember.inject.service()
logId: DS.attr()
queue: DS.attr()
state: DS.attr()
number: DS.attr()
_startedAt: DS.attr()
_finishedAt: DS.attr()
allowFailure: DS.attr('boolean')
tags: DS.attr()
repositoryPrivate: DS.attr()
repositorySlug: DS.attr()
repo: DS.belongsTo('repo', async: true)
build: DS.belongsTo('build', async: true)
commit: DS.belongsTo('commit', async: true)
branch: Ember.computed.alias('build.branch')
annotations: DS.hasMany('annotation')
_config: DS.attr('object')
log: ( ->
@set('isLogAccessed', true)
Log.create(job: this, ajax: @get('ajax'))
).property()
startedAt: (->
unless @get('notStarted')
@get('_startedAt')
).property('_startedAt', 'notStarted')
finishedAt: (->
unless @get('notStarted')
@get('_finishedAt')
).property('_finishedAt', 'notStarted')
repoSlug: (->
@get('repositorySlug')
).property('repositorySlug')
config: (->
if config = @get('_config')
compact(config)
else if @get('currentState.stateName') != 'root.loading'
return if @get('isFetchingConfig')
@set 'isFetchingConfig', true
@reload()
).property('_config')
isFinished: (->
@get('state') in ['passed', 'failed', 'errored', 'canceled']
).property('state')
notStarted: (->
@get('state') in ['queued', 'created', 'received']
).property('state')
clearLog: ->
# This is needed if we don't want to fetch log just to clear it
if @get('isLogAccessed')
@get('log').clear()
sponsor: (->
{
name: "<NAME>"
url: "http://bluebox.net"
}
).property()
configValues: (->
config = @get('config')
keys = @get('build.rawConfigKeys')
if config && keys
keys.map (key) -> config[key]
else
[]
).property('config', 'build.rawConfigKeys.length')
canCancel: (->
!@get('isFinished')
).property('state')
canRestart: Ember.computed.alias('isFinished')
cancel: (->
@get('ajax').post "/jobs/#{@get('id')}/cancel"
)
removeLog: ->
@get('ajax').patch("/jobs/#{@get('id')}/log").then =>
@reloadLog()
reloadLog: ->
@clearLog()
@get('log').fetch()
restart: ->
@get('ajax').post "/jobs/#{@get('id')}/restart"
appendLog: (part) ->
@get('log').append part
subscribe: ->
return if @get('subscribed')
@set('subscribed', true)
if Travis.pusher
Travis.pusher.subscribe "job-#{@get('id')}"
unsubscribe: ->
return unless @get('subscribed')
@set('subscribed', false)
if Travis.pusher
Travis.pusher.unsubscribe "job-#{@get('id')}"
onStateChange: (->
@unsubscribe() if @get('state') == 'finished' && Travis.pusher
).observes('state')
# TODO: such formattings should be done in controller, but in order
# to use it there easily, I would have to refactor job and build
# controllers
formattedFinishedAt: (->
if finishedAt = @get('finishedAt')
moment(finishedAt).format('lll')
).property('finishedAt')
canRemoveLog: (->
!@get('log.removed')
).property('log.removed')
slug: (->
"#{@get('repo.slug')} ##{@get('number')}"
).property()
isLegacyInfrastructure: (->
if @get('queue') == 'builds.linux'
true
).property('queue')
displayGceNotice: (->
if @get('queue') == 'builds.gce' && @get('config.dist') == 'precise'
true
else
false
).property('queue', 'config.dist')
`export default Job`
| true | `import { durationFrom, configKeys, compact } from 'travis/utils/helpers'`
`import configKeysMap from 'travis/utils/keys-map'`
`import Ember from 'ember'`
`import Model from 'travis/models/model'`
`import Log from 'travis/models/log'`
`import DurationCalculations from 'travis/utils/duration-calculations'`
Job = Model.extend DurationCalculations,
ajax: Ember.inject.service()
logId: DS.attr()
queue: DS.attr()
state: DS.attr()
number: DS.attr()
_startedAt: DS.attr()
_finishedAt: DS.attr()
allowFailure: DS.attr('boolean')
tags: DS.attr()
repositoryPrivate: DS.attr()
repositorySlug: DS.attr()
repo: DS.belongsTo('repo', async: true)
build: DS.belongsTo('build', async: true)
commit: DS.belongsTo('commit', async: true)
branch: Ember.computed.alias('build.branch')
annotations: DS.hasMany('annotation')
_config: DS.attr('object')
log: ( ->
@set('isLogAccessed', true)
Log.create(job: this, ajax: @get('ajax'))
).property()
startedAt: (->
unless @get('notStarted')
@get('_startedAt')
).property('_startedAt', 'notStarted')
finishedAt: (->
unless @get('notStarted')
@get('_finishedAt')
).property('_finishedAt', 'notStarted')
repoSlug: (->
@get('repositorySlug')
).property('repositorySlug')
config: (->
if config = @get('_config')
compact(config)
else if @get('currentState.stateName') != 'root.loading'
return if @get('isFetchingConfig')
@set 'isFetchingConfig', true
@reload()
).property('_config')
isFinished: (->
@get('state') in ['passed', 'failed', 'errored', 'canceled']
).property('state')
notStarted: (->
@get('state') in ['queued', 'created', 'received']
).property('state')
clearLog: ->
# This is needed if we don't want to fetch log just to clear it
if @get('isLogAccessed')
@get('log').clear()
sponsor: (->
{
name: "PI:NAME:<NAME>END_PI"
url: "http://bluebox.net"
}
).property()
configValues: (->
config = @get('config')
keys = @get('build.rawConfigKeys')
if config && keys
keys.map (key) -> config[key]
else
[]
).property('config', 'build.rawConfigKeys.length')
canCancel: (->
!@get('isFinished')
).property('state')
canRestart: Ember.computed.alias('isFinished')
cancel: (->
@get('ajax').post "/jobs/#{@get('id')}/cancel"
)
removeLog: ->
@get('ajax').patch("/jobs/#{@get('id')}/log").then =>
@reloadLog()
reloadLog: ->
@clearLog()
@get('log').fetch()
restart: ->
@get('ajax').post "/jobs/#{@get('id')}/restart"
appendLog: (part) ->
@get('log').append part
subscribe: ->
return if @get('subscribed')
@set('subscribed', true)
if Travis.pusher
Travis.pusher.subscribe "job-#{@get('id')}"
unsubscribe: ->
return unless @get('subscribed')
@set('subscribed', false)
if Travis.pusher
Travis.pusher.unsubscribe "job-#{@get('id')}"
onStateChange: (->
@unsubscribe() if @get('state') == 'finished' && Travis.pusher
).observes('state')
# TODO: such formattings should be done in controller, but in order
# to use it there easily, I would have to refactor job and build
# controllers
formattedFinishedAt: (->
if finishedAt = @get('finishedAt')
moment(finishedAt).format('lll')
).property('finishedAt')
canRemoveLog: (->
!@get('log.removed')
).property('log.removed')
slug: (->
"#{@get('repo.slug')} ##{@get('number')}"
).property()
isLegacyInfrastructure: (->
if @get('queue') == 'builds.linux'
true
).property('queue')
displayGceNotice: (->
if @get('queue') == 'builds.gce' && @get('config.dist') == 'precise'
true
else
false
).property('queue', 'config.dist')
`export default Job`
|
[
{
"context": "ter (c) -> c.confirmed\n post = new Post(name: \"test\")\n post.confirmedComments\n post.comments = ",
"end": 2918,
"score": 0.883811891078949,
"start": 2914,
"tag": "NAME",
"value": "test"
},
{
"context": "body: 'Hello', confirmed: true }, { id: 8, body: 'Monk... | test/model/has_many.spec.coffee | jnicklas/serenade.js | 1 | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe 'Serenade.Model.hasMany', ->
it 'allows objects to be added and retrieved', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
comment1 = new Comment(body: 'Hello')
comment2 = new Comment(body: 'Monkey')
post = new Post(comments: [comment1, comment2])
expect(post.comments[0]).to.eql(comment1)
expect(post.comments[1]).to.eql(comment2)
it 'uses the given constructor', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
post = new Post(comments: [{ body: 'Hello' }, { body: 'Monkey' }])
expect(post.comments[0].constructor).to.eql(Comment)
expect(post.comments[1].constructor).to.eql(Comment)
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('Monkey')
it 'creates plain objects if there is no constructor given', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments'
post = new Post(comments: [{ body: 'Hello' }, { body: 'Monkey' }])
expect(post.comments[0].constructor).to.eql(Object)
expect(post.comments[1].constructor).to.eql(Object)
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('Monkey')
it 'updates the ids property as it changes', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
post = new Post(comments: [{ id: 4 }, { id: 3 }])
expect(post.commentsIds[0]).to.eql(4)
expect(post.commentsIds[1]).to.eql(3)
post.comments = [{id: 12}]
expect(post.commentsIds[0]).to.eql(12)
expect(post.commentsIds[1]).to.not.exist
it 'is updated if the ids property changes', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
comment = new Comment(id: 5, body: 'Hello')
comment = new Comment(id: 8, body: 'World')
comment = new Comment(id: 9, body: 'Cat')
post = new Post(commentsIds: [5,8], body: 'Hello')
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('World')
post.commentsIds = [8,9]
expect(post.comments[0].body).to.eql('World')
expect(post.comments[1].body).to.eql('Cat')
it 'can observe changes to items in the collection', ->
class Comment extends Serenade.Model
@attribute 'confirmed'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
@property 'confirmedComments', dependsOn: 'comments:confirmed', get: ->
@comments.filter (c) -> c.confirmed
post = new Post(name: "test")
post.confirmedComments
post.comments = [{ id: 5, body: 'Hello', confirmed: true }, { id: 8, body: 'Monkey', confirmed: false }]
comment = post.comments[1]
expect(-> comment.confirmed = true).to.emit(post["@confirmedComments"])
it 'can set itself on its inverse relation', ->
class Comment extends Serenade.Model
@belongsTo "post"
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post(comments: [{ body: "hey" }])
expect(post.comments[0].post).to.eql(post)
it 'can handle circular inverse associations', ->
class Comment extends Serenade.Model
@belongsTo "post", inverseOf: "comments", as: -> Post
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post(comments: [{ body: "hey" }])
expect(post.comments[0].post).to.eql(post)
expect(post.comments.length).to.eql(1)
it 'does not push itself twice to its inverse association', ->
class Comment extends Serenade.Model
@belongsTo "post", inverseOf: "comments", as: -> Post
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post()
post.comments.push({})
expect(post.comments[0].post).to.eql(post)
expect(post.comments.length).to.eql(1)
it 'adds a count property', ->
class Post extends Serenade.Model
@hasMany 'comments'
post = new Post()
post.comments = [{ body: "Hello" }, { body: "World" }]
expect(post.commentsCount).to.eql(2)
expect(-> post.comments.push({ body: "Test" })).to.emit(post["@commentsCount"])
| 218033 | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe 'Serenade.Model.hasMany', ->
it 'allows objects to be added and retrieved', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
comment1 = new Comment(body: 'Hello')
comment2 = new Comment(body: 'Monkey')
post = new Post(comments: [comment1, comment2])
expect(post.comments[0]).to.eql(comment1)
expect(post.comments[1]).to.eql(comment2)
it 'uses the given constructor', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
post = new Post(comments: [{ body: 'Hello' }, { body: 'Monkey' }])
expect(post.comments[0].constructor).to.eql(Comment)
expect(post.comments[1].constructor).to.eql(Comment)
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('Monkey')
it 'creates plain objects if there is no constructor given', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments'
post = new Post(comments: [{ body: 'Hello' }, { body: 'Monkey' }])
expect(post.comments[0].constructor).to.eql(Object)
expect(post.comments[1].constructor).to.eql(Object)
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('Monkey')
it 'updates the ids property as it changes', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
post = new Post(comments: [{ id: 4 }, { id: 3 }])
expect(post.commentsIds[0]).to.eql(4)
expect(post.commentsIds[1]).to.eql(3)
post.comments = [{id: 12}]
expect(post.commentsIds[0]).to.eql(12)
expect(post.commentsIds[1]).to.not.exist
it 'is updated if the ids property changes', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
comment = new Comment(id: 5, body: 'Hello')
comment = new Comment(id: 8, body: 'World')
comment = new Comment(id: 9, body: 'Cat')
post = new Post(commentsIds: [5,8], body: 'Hello')
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('World')
post.commentsIds = [8,9]
expect(post.comments[0].body).to.eql('World')
expect(post.comments[1].body).to.eql('Cat')
it 'can observe changes to items in the collection', ->
class Comment extends Serenade.Model
@attribute 'confirmed'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
@property 'confirmedComments', dependsOn: 'comments:confirmed', get: ->
@comments.filter (c) -> c.confirmed
post = new Post(name: "<NAME>")
post.confirmedComments
post.comments = [{ id: 5, body: 'Hello', confirmed: true }, { id: 8, body: '<NAME>', confirmed: false }]
comment = post.comments[1]
expect(-> comment.confirmed = true).to.emit(post["@confirmedComments"])
it 'can set itself on its inverse relation', ->
class Comment extends Serenade.Model
@belongsTo "post"
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post(comments: [{ body: "hey" }])
expect(post.comments[0].post).to.eql(post)
it 'can handle circular inverse associations', ->
class Comment extends Serenade.Model
@belongsTo "post", inverseOf: "comments", as: -> Post
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post(comments: [{ body: "hey" }])
expect(post.comments[0].post).to.eql(post)
expect(post.comments.length).to.eql(1)
it 'does not push itself twice to its inverse association', ->
class Comment extends Serenade.Model
@belongsTo "post", inverseOf: "comments", as: -> Post
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post()
post.comments.push({})
expect(post.comments[0].post).to.eql(post)
expect(post.comments.length).to.eql(1)
it 'adds a count property', ->
class Post extends Serenade.Model
@hasMany 'comments'
post = new Post()
post.comments = [{ body: "Hello" }, { body: "World" }]
expect(post.commentsCount).to.eql(2)
expect(-> post.comments.push({ body: "Test" })).to.emit(post["@commentsCount"])
| true | require './../spec_helper'
Serenade = require('../../lib/serenade')
describe 'Serenade.Model.hasMany', ->
it 'allows objects to be added and retrieved', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
comment1 = new Comment(body: 'Hello')
comment2 = new Comment(body: 'Monkey')
post = new Post(comments: [comment1, comment2])
expect(post.comments[0]).to.eql(comment1)
expect(post.comments[1]).to.eql(comment2)
it 'uses the given constructor', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
post = new Post(comments: [{ body: 'Hello' }, { body: 'Monkey' }])
expect(post.comments[0].constructor).to.eql(Comment)
expect(post.comments[1].constructor).to.eql(Comment)
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('Monkey')
it 'creates plain objects if there is no constructor given', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments'
post = new Post(comments: [{ body: 'Hello' }, { body: 'Monkey' }])
expect(post.comments[0].constructor).to.eql(Object)
expect(post.comments[1].constructor).to.eql(Object)
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('Monkey')
it 'updates the ids property as it changes', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
post = new Post(comments: [{ id: 4 }, { id: 3 }])
expect(post.commentsIds[0]).to.eql(4)
expect(post.commentsIds[1]).to.eql(3)
post.comments = [{id: 12}]
expect(post.commentsIds[0]).to.eql(12)
expect(post.commentsIds[1]).to.not.exist
it 'is updated if the ids property changes', ->
class Comment extends Serenade.Model
@attribute 'body'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
comment = new Comment(id: 5, body: 'Hello')
comment = new Comment(id: 8, body: 'World')
comment = new Comment(id: 9, body: 'Cat')
post = new Post(commentsIds: [5,8], body: 'Hello')
expect(post.comments[0].body).to.eql('Hello')
expect(post.comments[1].body).to.eql('World')
post.commentsIds = [8,9]
expect(post.comments[0].body).to.eql('World')
expect(post.comments[1].body).to.eql('Cat')
it 'can observe changes to items in the collection', ->
class Comment extends Serenade.Model
@attribute 'confirmed'
class Post extends Serenade.Model
@hasMany 'comments', as: -> Comment
@property 'confirmedComments', dependsOn: 'comments:confirmed', get: ->
@comments.filter (c) -> c.confirmed
post = new Post(name: "PI:NAME:<NAME>END_PI")
post.confirmedComments
post.comments = [{ id: 5, body: 'Hello', confirmed: true }, { id: 8, body: 'PI:NAME:<NAME>END_PI', confirmed: false }]
comment = post.comments[1]
expect(-> comment.confirmed = true).to.emit(post["@confirmedComments"])
it 'can set itself on its inverse relation', ->
class Comment extends Serenade.Model
@belongsTo "post"
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post(comments: [{ body: "hey" }])
expect(post.comments[0].post).to.eql(post)
it 'can handle circular inverse associations', ->
class Comment extends Serenade.Model
@belongsTo "post", inverseOf: "comments", as: -> Post
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post(comments: [{ body: "hey" }])
expect(post.comments[0].post).to.eql(post)
expect(post.comments.length).to.eql(1)
it 'does not push itself twice to its inverse association', ->
class Comment extends Serenade.Model
@belongsTo "post", inverseOf: "comments", as: -> Post
class Post extends Serenade.Model
@hasMany 'comments', inverseOf: "post", as: -> Comment
post = new Post()
post.comments.push({})
expect(post.comments[0].post).to.eql(post)
expect(post.comments.length).to.eql(1)
it 'adds a count property', ->
class Post extends Serenade.Model
@hasMany 'comments'
post = new Post()
post.comments = [{ body: "Hello" }, { body: "World" }]
expect(post.commentsCount).to.eql(2)
expect(-> post.comments.push({ body: "Test" })).to.emit(post["@commentsCount"])
|
[
{
"context": "ount.should.be.equal 1\n\n coll.find({name: 'loki'}).count (err, count) ->\n return done(er",
"end": 3212,
"score": 0.5362040996551514,
"start": 3208,
"tag": "NAME",
"value": "loki"
}
] | test/runner.coffee | clark-zhao-hs/mongodb-migrations | 0 | path = require 'path'
{ MigrationsRunner } = require '../src/migrations-runner'
testsCommon = require './common'
describe 'MigrationsRunner', ->
dbConfig = null
coll = null
beforeEach (done) ->
testsCommon.beforeEach (res) ->
{db, config} = res
dbConfig = config
coll = db.collection 'test'
coll.remove {}, ->
done()
it 'should set default migrations collection', (done) ->
config1 =
host: 'localhost'
port: 27017
db: '_mm'
m1 = new MigrationsRunner config1, null
m1._collName.should.be.equal('_migrations')
config2 =
host: 'localhost'
port: 27017
db: '_mm'
collection: '_custom'
m2 = new MigrationsRunner config2, null
m2._collName.should.be.equal('_custom')
done()
it 'should run migrations up in order', (done) ->
migrations = [
{
id: '1'
up: (done) ->
@db.collection('test').insert({ x: 2, runnerTest: true }, done)
}
{
id: '2'
up: (done) ->
@db.collection('test').update({ runnerTest: true }, { $mul: x: 2 }, done)
}
{
id: '3'
up: (done) ->
@db.collection('test').update({ runnerTest: true }, { $inc: x: 3 }, done)
}
]
runner = new MigrationsRunner dbConfig
runner.runUp migrations, (err, res) ->
return done(err) if err
(!!res).should.be.ok()
res['1'].status.should.be.equal('ok')
res['2'].status.should.be.equal('ok')
res['3'].status.should.be.equal('ok')
coll.find({}).toArray (err, docs) ->
return done(err) if err
(docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].x.should.be.equal(7)
done()
return
it 'should run migrations down in order', (done) ->
migrations = [
{
id: '3'
down: (done) ->
@db.collection('test').insert({ x: 2, runnerTest: true }, done)
}
{
id: '2'
down: (done) ->
@db.collection('test').update({ runnerTest: true }, { $mul: x: 2 }, done)
}
{
id: '1'
down: (done) ->
@db.collection('test').update({ runnerTest: true }, { $inc: x: 3 }, done)
}
]
runner = new MigrationsRunner dbConfig
runner.runUp migrations.slice().reverse(), (err, res) ->
return done(err) if err
runner.runDown migrations, (err, res) ->
return done(err) if err
(!!res).should.be.ok()
res['1'].status.should.be.equal('ok')
res['2'].status.should.be.equal('ok')
res['3'].status.should.be.equal('ok')
coll.find({}).toArray (err, docs) ->
return done(err) if err
(docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].x.should.be.equal(7)
done()
return
it 'should run migrations up from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'tobi'}).count (err, count) ->
return done(err) if err
count.should.be.equal 1
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal 1
coll.find({ok: 1}).count (err, count) ->
return done(err) if err
count.should.be.equal 3
done()
it 'should run migrations down from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal(1)
runner.runDownFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal 0
done()
it 'should run specific migrations up from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runSpecificUpFromDir dir, ['1', 'test3'], (err, res) ->
return done(err) if err
coll.find({}).toArray (err, docs) ->
return done(err) if err
(!!docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].name.should.be.equal('tobi')
docs[0].ok.should.be.equal(1)
done()
it 'should run specific migrations down from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
runner.runSpecificDownFromDir dir, ['test3', '2-test2.js'], (err, res) ->
return done(err) if err
coll.find(name: $in: ['tobi', 'loki']).sort(name: 1).toArray (err, docs) ->
return done(err) if err
(!!docs).should.be.ok()
docs.length.should.be.equal(2)
docs[0].name.should.be.equal('loki')
docs[0].ok.should.be.equal(2)
docs[1].name.should.be.equal('tobi')
docs[1].ok.should.be.equal(2)
done()
| 224218 | path = require 'path'
{ MigrationsRunner } = require '../src/migrations-runner'
testsCommon = require './common'
describe 'MigrationsRunner', ->
dbConfig = null
coll = null
beforeEach (done) ->
testsCommon.beforeEach (res) ->
{db, config} = res
dbConfig = config
coll = db.collection 'test'
coll.remove {}, ->
done()
it 'should set default migrations collection', (done) ->
config1 =
host: 'localhost'
port: 27017
db: '_mm'
m1 = new MigrationsRunner config1, null
m1._collName.should.be.equal('_migrations')
config2 =
host: 'localhost'
port: 27017
db: '_mm'
collection: '_custom'
m2 = new MigrationsRunner config2, null
m2._collName.should.be.equal('_custom')
done()
it 'should run migrations up in order', (done) ->
migrations = [
{
id: '1'
up: (done) ->
@db.collection('test').insert({ x: 2, runnerTest: true }, done)
}
{
id: '2'
up: (done) ->
@db.collection('test').update({ runnerTest: true }, { $mul: x: 2 }, done)
}
{
id: '3'
up: (done) ->
@db.collection('test').update({ runnerTest: true }, { $inc: x: 3 }, done)
}
]
runner = new MigrationsRunner dbConfig
runner.runUp migrations, (err, res) ->
return done(err) if err
(!!res).should.be.ok()
res['1'].status.should.be.equal('ok')
res['2'].status.should.be.equal('ok')
res['3'].status.should.be.equal('ok')
coll.find({}).toArray (err, docs) ->
return done(err) if err
(docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].x.should.be.equal(7)
done()
return
it 'should run migrations down in order', (done) ->
migrations = [
{
id: '3'
down: (done) ->
@db.collection('test').insert({ x: 2, runnerTest: true }, done)
}
{
id: '2'
down: (done) ->
@db.collection('test').update({ runnerTest: true }, { $mul: x: 2 }, done)
}
{
id: '1'
down: (done) ->
@db.collection('test').update({ runnerTest: true }, { $inc: x: 3 }, done)
}
]
runner = new MigrationsRunner dbConfig
runner.runUp migrations.slice().reverse(), (err, res) ->
return done(err) if err
runner.runDown migrations, (err, res) ->
return done(err) if err
(!!res).should.be.ok()
res['1'].status.should.be.equal('ok')
res['2'].status.should.be.equal('ok')
res['3'].status.should.be.equal('ok')
coll.find({}).toArray (err, docs) ->
return done(err) if err
(docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].x.should.be.equal(7)
done()
return
it 'should run migrations up from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'tobi'}).count (err, count) ->
return done(err) if err
count.should.be.equal 1
coll.find({name: '<NAME>'}).count (err, count) ->
return done(err) if err
count.should.be.equal 1
coll.find({ok: 1}).count (err, count) ->
return done(err) if err
count.should.be.equal 3
done()
it 'should run migrations down from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal(1)
runner.runDownFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal 0
done()
it 'should run specific migrations up from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runSpecificUpFromDir dir, ['1', 'test3'], (err, res) ->
return done(err) if err
coll.find({}).toArray (err, docs) ->
return done(err) if err
(!!docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].name.should.be.equal('tobi')
docs[0].ok.should.be.equal(1)
done()
it 'should run specific migrations down from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
runner.runSpecificDownFromDir dir, ['test3', '2-test2.js'], (err, res) ->
return done(err) if err
coll.find(name: $in: ['tobi', 'loki']).sort(name: 1).toArray (err, docs) ->
return done(err) if err
(!!docs).should.be.ok()
docs.length.should.be.equal(2)
docs[0].name.should.be.equal('loki')
docs[0].ok.should.be.equal(2)
docs[1].name.should.be.equal('tobi')
docs[1].ok.should.be.equal(2)
done()
| true | path = require 'path'
{ MigrationsRunner } = require '../src/migrations-runner'
testsCommon = require './common'
describe 'MigrationsRunner', ->
dbConfig = null
coll = null
beforeEach (done) ->
testsCommon.beforeEach (res) ->
{db, config} = res
dbConfig = config
coll = db.collection 'test'
coll.remove {}, ->
done()
it 'should set default migrations collection', (done) ->
config1 =
host: 'localhost'
port: 27017
db: '_mm'
m1 = new MigrationsRunner config1, null
m1._collName.should.be.equal('_migrations')
config2 =
host: 'localhost'
port: 27017
db: '_mm'
collection: '_custom'
m2 = new MigrationsRunner config2, null
m2._collName.should.be.equal('_custom')
done()
it 'should run migrations up in order', (done) ->
migrations = [
{
id: '1'
up: (done) ->
@db.collection('test').insert({ x: 2, runnerTest: true }, done)
}
{
id: '2'
up: (done) ->
@db.collection('test').update({ runnerTest: true }, { $mul: x: 2 }, done)
}
{
id: '3'
up: (done) ->
@db.collection('test').update({ runnerTest: true }, { $inc: x: 3 }, done)
}
]
runner = new MigrationsRunner dbConfig
runner.runUp migrations, (err, res) ->
return done(err) if err
(!!res).should.be.ok()
res['1'].status.should.be.equal('ok')
res['2'].status.should.be.equal('ok')
res['3'].status.should.be.equal('ok')
coll.find({}).toArray (err, docs) ->
return done(err) if err
(docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].x.should.be.equal(7)
done()
return
it 'should run migrations down in order', (done) ->
migrations = [
{
id: '3'
down: (done) ->
@db.collection('test').insert({ x: 2, runnerTest: true }, done)
}
{
id: '2'
down: (done) ->
@db.collection('test').update({ runnerTest: true }, { $mul: x: 2 }, done)
}
{
id: '1'
down: (done) ->
@db.collection('test').update({ runnerTest: true }, { $inc: x: 3 }, done)
}
]
runner = new MigrationsRunner dbConfig
runner.runUp migrations.slice().reverse(), (err, res) ->
return done(err) if err
runner.runDown migrations, (err, res) ->
return done(err) if err
(!!res).should.be.ok()
res['1'].status.should.be.equal('ok')
res['2'].status.should.be.equal('ok')
res['3'].status.should.be.equal('ok')
coll.find({}).toArray (err, docs) ->
return done(err) if err
(docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].x.should.be.equal(7)
done()
return
it 'should run migrations up from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'tobi'}).count (err, count) ->
return done(err) if err
count.should.be.equal 1
coll.find({name: 'PI:NAME:<NAME>END_PI'}).count (err, count) ->
return done(err) if err
count.should.be.equal 1
coll.find({ok: 1}).count (err, count) ->
return done(err) if err
count.should.be.equal 3
done()
it 'should run migrations down from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal(1)
runner.runDownFromDir dir, (err, res) ->
return done(err) if err
coll.find({name: 'loki'}).count (err, count) ->
return done(err) if err
count.should.be.equal 0
done()
it 'should run specific migrations up from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runSpecificUpFromDir dir, ['1', 'test3'], (err, res) ->
return done(err) if err
coll.find({}).toArray (err, docs) ->
return done(err) if err
(!!docs).should.be.ok()
docs.length.should.be.equal(1)
docs[0].name.should.be.equal('tobi')
docs[0].ok.should.be.equal(1)
done()
it 'should run specific migrations down from directory', (done) ->
dir = path.join __dirname, 'migrations'
runner = new MigrationsRunner dbConfig
runner.runUpFromDir dir, (err, res) ->
return done(err) if err
runner.runSpecificDownFromDir dir, ['test3', '2-test2.js'], (err, res) ->
return done(err) if err
coll.find(name: $in: ['tobi', 'loki']).sort(name: 1).toArray (err, docs) ->
return done(err) if err
(!!docs).should.be.ok()
docs.length.should.be.equal(2)
docs[0].name.should.be.equal('loki')
docs[0].ok.should.be.equal(2)
docs[1].name.should.be.equal('tobi')
docs[1].ok.should.be.equal(2)
done()
|
[
{
"context": "# Copyright (c) 2015 Jesse Grosjean. All rights reserved.\n\nVelocity = require 'veloci",
"end": 35,
"score": 0.9996825456619263,
"start": 21,
"tag": "NAME",
"value": "Jesse Grosjean"
}
] | atom/packages/foldingtext-for-atom/lib/editor/animations/insert-animation.coffee | prookie/dotfiles-1 | 0 | # Copyright (c) 2015 Jesse Grosjean. All rights reserved.
Velocity = require 'velocity-animate'
module.exports =
class InsertAnimation
@id = 'InsertAnimation'
constructor: (id, item, itemRenderer) ->
@itemRenderer = itemRenderer
@_id = id
@_item = item
@_insertLI = null
@_targetHeight = 0
fastForward: ->
@_insertLI?.style.height = @_targetHeight + 'px'
complete: ->
@itemRenderer.completedAnimation @_id
if @_insertLI
Velocity @_insertLI, 'stop', true
@_insertLI.style.height = null
@_insertLI.style.overflowY = null
insert: (LI, context) ->
targetHeight = LI.clientHeight
@_insertLI = LI
@_targetHeight = targetHeight
Velocity
e: LI
p:
height: targetHeight
o:
easing: context.easing
duration: context.duration
begin: (elements) ->
LI.style.height = '0px'
LI.style.overflowY = 'hidden'
complete: (elements) =>
@complete() | 138169 | # Copyright (c) 2015 <NAME>. All rights reserved.
Velocity = require 'velocity-animate'
module.exports =
class InsertAnimation
@id = 'InsertAnimation'
constructor: (id, item, itemRenderer) ->
@itemRenderer = itemRenderer
@_id = id
@_item = item
@_insertLI = null
@_targetHeight = 0
fastForward: ->
@_insertLI?.style.height = @_targetHeight + 'px'
complete: ->
@itemRenderer.completedAnimation @_id
if @_insertLI
Velocity @_insertLI, 'stop', true
@_insertLI.style.height = null
@_insertLI.style.overflowY = null
insert: (LI, context) ->
targetHeight = LI.clientHeight
@_insertLI = LI
@_targetHeight = targetHeight
Velocity
e: LI
p:
height: targetHeight
o:
easing: context.easing
duration: context.duration
begin: (elements) ->
LI.style.height = '0px'
LI.style.overflowY = 'hidden'
complete: (elements) =>
@complete() | true | # Copyright (c) 2015 PI:NAME:<NAME>END_PI. All rights reserved.
Velocity = require 'velocity-animate'
module.exports =
class InsertAnimation
@id = 'InsertAnimation'
constructor: (id, item, itemRenderer) ->
@itemRenderer = itemRenderer
@_id = id
@_item = item
@_insertLI = null
@_targetHeight = 0
fastForward: ->
@_insertLI?.style.height = @_targetHeight + 'px'
complete: ->
@itemRenderer.completedAnimation @_id
if @_insertLI
Velocity @_insertLI, 'stop', true
@_insertLI.style.height = null
@_insertLI.style.overflowY = null
insert: (LI, context) ->
targetHeight = LI.clientHeight
@_insertLI = LI
@_targetHeight = targetHeight
Velocity
e: LI
p:
height: targetHeight
o:
easing: context.easing
duration: context.duration
begin: (elements) ->
LI.style.height = '0px'
LI.style.overflowY = 'hidden'
complete: (elements) =>
@complete() |
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999120831489563,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/_components/bbcode-editor.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { button, div, em, form, i, label, option, select, span, strong, textarea } from 'react-dom-factories'
el = React.createElement
export class BBCodeEditor extends React.Component
componentDidMount: =>
@sizeSelect.value = ''
@body.selectionEnd = 0
@body.focus()
render: =>
blockClass = 'post-editor'
blockClass += " post-editor--#{modifier}" for modifier in @props.modifiers ? []
form className: blockClass,
textarea
className: 'post-editor__textarea'
name: 'body'
placeholder: @props.placeholder
defaultValue: @props.rawValue
disabled: @props.disabled
onKeyDown: @onKeyDown
ref: @setBody
div className: 'post-editor__footer',
div className: 'post-editor-footer',
div className: 'post-editor-footer__col post-editor-footer__col--toolbar',
div className: 'post-box-toolbar',
@toolbarButton 'bold', i(className: 'fas fa-bold')
@toolbarButton 'italic', i(className: 'fas fa-italic')
@toolbarButton 'strikethrough', i(className: 'fas fa-strikethrough')
@toolbarButton 'heading', i(className: 'fas fa-heading')
@toolbarButton 'link', i(className: 'fas fa-link')
@toolbarButton 'spoilerbox', i(className: 'fas fa-barcode')
@toolbarButton 'list-numbered', i(className: 'fas fa-list-ol')
@toolbarButton 'list', i(className: 'fas fa-list')
@toolbarButton 'image', i(className: 'fas fa-image')
label
className: 'bbcode-size-select'
title: osu.trans('bbcode.size._')
span className: "bbcode-size-select__label", osu.trans('bbcode.size._')
i className: "fas fa-chevron-down"
select
className: 'bbcode-size-select__select js-bbcode-btn--size'
disabled: @props.disabled
ref: @setSizeSelect
option value: '50', osu.trans('bbcode.size.tiny')
option value: '85', osu.trans('bbcode.size.small')
option value: '100', osu.trans('bbcode.size.normal')
option value: '150', osu.trans('bbcode.size.large')
div className: 'post-editor-footer__col post-editor-footer__col--actions',
@actionButton @_cancel, osu.trans('common.buttons.cancel')
@actionButton @_reset, osu.trans('common.buttons.reset')
@actionButton @_save, osu.trans('common.buttons.save')
actionButton: (action, title) =>
button
className: 'btn-osu btn-osu--post-editor btn-osu-default'
disabled: @props.disabled
type: 'button'
onClick: action
title
toolbarButton: (name, content) =>
button
className: "btn-circle btn-circle--bbcode js-bbcode-btn--#{name}"
disabled: @props.disabled
title: osu.trans("bbcode.#{_.snakeCase name}")
type: 'button'
span className: 'btn-circle__content', content
setBody: (element) =>
@body = element
setSizeSelect: (element) =>
@sizeSelect = element
onKeyDown: (e) =>
e.keyCode == 27 && @_cancel()
_cancel: (event) =>
return if @body.value != @props.rawValue && !confirm(osu.trans('common.confirmation_unsaved'))
@body.value = @props.rawValue
@sendOnChange(event: event, type: 'cancel')
_reset: (event) =>
@body.value = @props.rawValue
@sendOnChange(event: event, type: 'reset')
@body.focus()
_save: (event) =>
@sendOnChange(event: event, type: 'save')
sendOnChange: ({event, type}) =>
@props.onChange?(
event: event
type: type
value: @body.value
hasChanged: @body.value != @props.rawValue
)
| 53120 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { button, div, em, form, i, label, option, select, span, strong, textarea } from 'react-dom-factories'
el = React.createElement
export class BBCodeEditor extends React.Component
componentDidMount: =>
@sizeSelect.value = ''
@body.selectionEnd = 0
@body.focus()
render: =>
blockClass = 'post-editor'
blockClass += " post-editor--#{modifier}" for modifier in @props.modifiers ? []
form className: blockClass,
textarea
className: 'post-editor__textarea'
name: 'body'
placeholder: @props.placeholder
defaultValue: @props.rawValue
disabled: @props.disabled
onKeyDown: @onKeyDown
ref: @setBody
div className: 'post-editor__footer',
div className: 'post-editor-footer',
div className: 'post-editor-footer__col post-editor-footer__col--toolbar',
div className: 'post-box-toolbar',
@toolbarButton 'bold', i(className: 'fas fa-bold')
@toolbarButton 'italic', i(className: 'fas fa-italic')
@toolbarButton 'strikethrough', i(className: 'fas fa-strikethrough')
@toolbarButton 'heading', i(className: 'fas fa-heading')
@toolbarButton 'link', i(className: 'fas fa-link')
@toolbarButton 'spoilerbox', i(className: 'fas fa-barcode')
@toolbarButton 'list-numbered', i(className: 'fas fa-list-ol')
@toolbarButton 'list', i(className: 'fas fa-list')
@toolbarButton 'image', i(className: 'fas fa-image')
label
className: 'bbcode-size-select'
title: osu.trans('bbcode.size._')
span className: "bbcode-size-select__label", osu.trans('bbcode.size._')
i className: "fas fa-chevron-down"
select
className: 'bbcode-size-select__select js-bbcode-btn--size'
disabled: @props.disabled
ref: @setSizeSelect
option value: '50', osu.trans('bbcode.size.tiny')
option value: '85', osu.trans('bbcode.size.small')
option value: '100', osu.trans('bbcode.size.normal')
option value: '150', osu.trans('bbcode.size.large')
div className: 'post-editor-footer__col post-editor-footer__col--actions',
@actionButton @_cancel, osu.trans('common.buttons.cancel')
@actionButton @_reset, osu.trans('common.buttons.reset')
@actionButton @_save, osu.trans('common.buttons.save')
actionButton: (action, title) =>
button
className: 'btn-osu btn-osu--post-editor btn-osu-default'
disabled: @props.disabled
type: 'button'
onClick: action
title
toolbarButton: (name, content) =>
button
className: "btn-circle btn-circle--bbcode js-bbcode-btn--#{name}"
disabled: @props.disabled
title: osu.trans("bbcode.#{_.snakeCase name}")
type: 'button'
span className: 'btn-circle__content', content
setBody: (element) =>
@body = element
setSizeSelect: (element) =>
@sizeSelect = element
onKeyDown: (e) =>
e.keyCode == 27 && @_cancel()
_cancel: (event) =>
return if @body.value != @props.rawValue && !confirm(osu.trans('common.confirmation_unsaved'))
@body.value = @props.rawValue
@sendOnChange(event: event, type: 'cancel')
_reset: (event) =>
@body.value = @props.rawValue
@sendOnChange(event: event, type: 'reset')
@body.focus()
_save: (event) =>
@sendOnChange(event: event, type: 'save')
sendOnChange: ({event, type}) =>
@props.onChange?(
event: event
type: type
value: @body.value
hasChanged: @body.value != @props.rawValue
)
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { button, div, em, form, i, label, option, select, span, strong, textarea } from 'react-dom-factories'
el = React.createElement
export class BBCodeEditor extends React.Component
componentDidMount: =>
@sizeSelect.value = ''
@body.selectionEnd = 0
@body.focus()
render: =>
blockClass = 'post-editor'
blockClass += " post-editor--#{modifier}" for modifier in @props.modifiers ? []
form className: blockClass,
textarea
className: 'post-editor__textarea'
name: 'body'
placeholder: @props.placeholder
defaultValue: @props.rawValue
disabled: @props.disabled
onKeyDown: @onKeyDown
ref: @setBody
div className: 'post-editor__footer',
div className: 'post-editor-footer',
div className: 'post-editor-footer__col post-editor-footer__col--toolbar',
div className: 'post-box-toolbar',
@toolbarButton 'bold', i(className: 'fas fa-bold')
@toolbarButton 'italic', i(className: 'fas fa-italic')
@toolbarButton 'strikethrough', i(className: 'fas fa-strikethrough')
@toolbarButton 'heading', i(className: 'fas fa-heading')
@toolbarButton 'link', i(className: 'fas fa-link')
@toolbarButton 'spoilerbox', i(className: 'fas fa-barcode')
@toolbarButton 'list-numbered', i(className: 'fas fa-list-ol')
@toolbarButton 'list', i(className: 'fas fa-list')
@toolbarButton 'image', i(className: 'fas fa-image')
label
className: 'bbcode-size-select'
title: osu.trans('bbcode.size._')
span className: "bbcode-size-select__label", osu.trans('bbcode.size._')
i className: "fas fa-chevron-down"
select
className: 'bbcode-size-select__select js-bbcode-btn--size'
disabled: @props.disabled
ref: @setSizeSelect
option value: '50', osu.trans('bbcode.size.tiny')
option value: '85', osu.trans('bbcode.size.small')
option value: '100', osu.trans('bbcode.size.normal')
option value: '150', osu.trans('bbcode.size.large')
div className: 'post-editor-footer__col post-editor-footer__col--actions',
@actionButton @_cancel, osu.trans('common.buttons.cancel')
@actionButton @_reset, osu.trans('common.buttons.reset')
@actionButton @_save, osu.trans('common.buttons.save')
actionButton: (action, title) =>
button
className: 'btn-osu btn-osu--post-editor btn-osu-default'
disabled: @props.disabled
type: 'button'
onClick: action
title
toolbarButton: (name, content) =>
button
className: "btn-circle btn-circle--bbcode js-bbcode-btn--#{name}"
disabled: @props.disabled
title: osu.trans("bbcode.#{_.snakeCase name}")
type: 'button'
span className: 'btn-circle__content', content
setBody: (element) =>
@body = element
setSizeSelect: (element) =>
@sizeSelect = element
onKeyDown: (e) =>
e.keyCode == 27 && @_cancel()
_cancel: (event) =>
return if @body.value != @props.rawValue && !confirm(osu.trans('common.confirmation_unsaved'))
@body.value = @props.rawValue
@sendOnChange(event: event, type: 'cancel')
_reset: (event) =>
@body.value = @props.rawValue
@sendOnChange(event: event, type: 'reset')
@body.focus()
_save: (event) =>
@sendOnChange(event: event, type: 'save')
sendOnChange: ({event, type}) =>
@props.onChange?(
event: event
type: type
value: @body.value
hasChanged: @body.value != @props.rawValue
)
|
[
{
"context": "ou have any questions, please feel free to contact Helaine Blumenthal (helaine@wikiedu.org). We look forward to working",
"end": 815,
"score": 0.9998928308486938,
"start": 797,
"tag": "NAME",
"value": "Helaine Blumenthal"
},
{
"context": ", please feel free to contact H... | source/javascripts/data/LoginContent.coffee | WikiEducationFoundation/WikiEduWizard | 1 | LoginContent =
id: "intro"
title: '<center>Welcome to the<br />Assignment Design Wizard!</center>'
login_instructions: 'Click Login with Wikipedia to get started'
instructions: ''
inputs: []
sections: [
{
content: [
"<p class='large'>Thank you for visiting the Assignment Design Wizard. Wiki Ed is excited to announce that we will be launching a new set of tools for creating and tracking a Wikipedia assignment for the Fall 2015 term. You may look through the wizard now, but we are advising instructors teaching with Wikipedia in the fall to wait for our new tools to be ready to plan their assignments. Those teaching during the summer, should go through the current wizard to plan their Wikipedia projects. IF you have any questions, please feel free to contact Helaine Blumenthal (helaine@wikiedu.org). We look forward to working with you in the Fall!</p>"
"<p class='large'>This tool will help you to easily create a customized Wikipedia classroom assignment and customized syllabus for your course.</p>"
"<p class='large'>When you’re finished, you'll have a ready-to-use lesson plan, with weekly assignments, published directly onto a sandbox page on Wikipedia where you can customize it even further.</p>"
"<p class='large'>Let’s start by filling in some basics about you and your course:</p>"
]
}
]
module.exports = LoginContent
| 124646 | LoginContent =
id: "intro"
title: '<center>Welcome to the<br />Assignment Design Wizard!</center>'
login_instructions: 'Click Login with Wikipedia to get started'
instructions: ''
inputs: []
sections: [
{
content: [
"<p class='large'>Thank you for visiting the Assignment Design Wizard. Wiki Ed is excited to announce that we will be launching a new set of tools for creating and tracking a Wikipedia assignment for the Fall 2015 term. You may look through the wizard now, but we are advising instructors teaching with Wikipedia in the fall to wait for our new tools to be ready to plan their assignments. Those teaching during the summer, should go through the current wizard to plan their Wikipedia projects. IF you have any questions, please feel free to contact <NAME> (<EMAIL>). We look forward to working with you in the Fall!</p>"
"<p class='large'>This tool will help you to easily create a customized Wikipedia classroom assignment and customized syllabus for your course.</p>"
"<p class='large'>When you’re finished, you'll have a ready-to-use lesson plan, with weekly assignments, published directly onto a sandbox page on Wikipedia where you can customize it even further.</p>"
"<p class='large'>Let’s start by filling in some basics about you and your course:</p>"
]
}
]
module.exports = LoginContent
| true | LoginContent =
id: "intro"
title: '<center>Welcome to the<br />Assignment Design Wizard!</center>'
login_instructions: 'Click Login with Wikipedia to get started'
instructions: ''
inputs: []
sections: [
{
content: [
"<p class='large'>Thank you for visiting the Assignment Design Wizard. Wiki Ed is excited to announce that we will be launching a new set of tools for creating and tracking a Wikipedia assignment for the Fall 2015 term. You may look through the wizard now, but we are advising instructors teaching with Wikipedia in the fall to wait for our new tools to be ready to plan their assignments. Those teaching during the summer, should go through the current wizard to plan their Wikipedia projects. IF you have any questions, please feel free to contact PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI). We look forward to working with you in the Fall!</p>"
"<p class='large'>This tool will help you to easily create a customized Wikipedia classroom assignment and customized syllabus for your course.</p>"
"<p class='large'>When you’re finished, you'll have a ready-to-use lesson plan, with weekly assignments, published directly onto a sandbox page on Wikipedia where you can customize it even further.</p>"
"<p class='large'>Let’s start by filling in some basics about you and your course:</p>"
]
}
]
module.exports = LoginContent
|
[
{
"context": "pt]\n# burger - find burger place\n#\n# Author:\n# robertocarroll\n\nburger_places = [\n {\n \"nam",
"end": 184,
"score": 0.8465903997421265,
"start": 182,
"tag": "NAME",
"value": "ro"
},
{
"context": "\n# burger - find burger place\n#\n# Author:\n# ro... | scripts/burger.coffee | robertocarroll/chatui | 0 | # Description:
# Find burger place
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Command: [not included in help script]
# burger - find burger place
#
# Author:
# robertocarroll
burger_places = [
{
"name": "Almost Famous",
"cost": "7.5",
"rating": "4",
"minutes": "4",
"distance": "0.2",
"address": "High Street",
"postcode": "M4 1HP",
"latitude": "53.484476",
"longitude": "-2.236841"
},
{
"name": "Archies Burgers and Shakes",
"cost": "-",
"rating": "4",
"minutes": "20",
"distance": "1",
"address": "Oxford Street",
"postcode": "M1 5NH",
"latitude": "53.474221",
"longitude": "-2.241047"
},
{
"name": "Byron",
"cost": "6.75",
"rating": "4",
"minutes": "9",
"distance": "0.5",
"address": "Piccadilly Gardens",
"postcode": "M1 1RG",
"latitude": "53.480333",
"longitude": "-2.236441"
},
{
"name": "Burger and Lobster",
"cost": "20",
"rating": "4",
"minutes": "10",
"distance": "0.5",
"address": "King Street",
"postcode": "M2 4WU",
"latitude": "53.480581",
"longitude": "-2.242922"
},
{
"name": "Dog Bowl",
"cost": "7.50",
"rating": "4",
"minutes": "19",
"distance": "1",
"address": "Whitworth Street West",
"postcode": "M1 5WW",
"latitude": "53.473974",
"longitude": "-2.243336"
},
{
"name": "Dukes 92",
"cost": "8.95",
"rating": "4",
"minutes": "24",
"distance": "1.2",
"address": "Castle Street",
"postcode": "M3 4LZ",
"latitude": "53.474435",
"longitude": "-2.254759"
},
{
"name": "Filthy Cow",
"cost": "6",
"rating": "-",
"minutes": "10",
"distance": "0.5",
"address": "Tib Lane",
"postcode": "M2 4JB",
"latitude": "53.480362",
"longitude": "-2.244578"
},
{
"name": "Gourmet Burger Kitchen",
"cost": "9.75",
"rating": "3",
"minutes": "15",
"distance": "0.8",
"address": "Spinningfields",
"postcode": "M3 3ER",
"latitude": "53.481341",
"longitude": "-2.253384"
},
{
"name": "Handmade Burger Company",
"cost": "6.45",
"rating": "4",
"minutes": "14",
"distance": "0.7",
"address": "Deansgate",
"postcode": "M3 3WB",
"latitude": "53.479201",
"longitude": "-2.249303"
},
{
"name": "Hard Rock Cafe",
"cost": "-",
"rating": "4",
"minutes": "2",
"distance": "0.1",
"address": "The Printworks",
"postcode": "M4 2BS",
"latitude": "53.485262",
"longitude": "-2.240696"
},
{
"name": "Red's True Barebecue",
"cost": "8.95",
"rating": "4",
"minutes": "12",
"distance": "0.6",
"address": "Lloyd Street",
"postcode": "M2 5WA",
"latitude": "53.478955",
"longitude": "-2.2468"
},
{
"name": "Smoak Bar and Grill",
"cost": "15.5",
"rating": "3.5",
"minutes": "7",
"distance": "0.3",
"address": "Malmaison Hotel",
"postcode": "M1 1LZ",
"latitude": "53.481876",
"longitude": "-2.237911"
},
{
"name": "Solita Bar and Grill",
"cost": "9.5",
"rating": "4",
"minutes": "5",
"distance": "0.2",
"address": "Northern Quarter",
"postcode": "M4 1DN",
"latitude": "53.483559",
"longitude": "-2.236851"
},
{
"name": "TGI Fridays",
"cost": "10",
"rating": "3.5",
"minutes": "6",
"distance": "0.3",
"address": "Cross Street",
"postcode": "M2 7DH",
"latitude": "53.481676",
"longitude": "-2.244633"
},
{
"name": "Tib Street Tavern",
"cost": "6.95",
"rating": "4",
"minutes": "6",
"distance": "0.3",
"address": "74 Tib Street",
"postcode": "M4 1LG",
"latitude": "53.483725",
"longitude": "-2.234621"
}
]
shuffle = (a) ->
i = a.length
while --i > 0
j = ~~(Math.random() * (i + 1))
t = a[j]
a[j] = a[i]
a[i] = t
a
module.exports = (robot) ->
robot.hear /burger/i, (msg) ->
random_burger_places = shuffle (burger_places)
random_burger = random_burger_places[0]
burger_name = random_burger.name
burger_cost = random_burger.cost
burger_address = random_burger.address
burger_lat = random_burger.latitude
burger_lon = random_burger.longitude
if random_burger.rating >= 5
burger_rating = 'great'
else if random_burger.rating >= 4
burger_rating = 'good'
else if random_burger.rating >= 3
burger_rating = 'OK'
else if random_burger.rating >= 2
burger_rating = 'not bad'
else if random_burger.rating >= 1
burger_rating = 'poor'
else
burger_rating = 'not been rated'
msg.send "Try #{burger_name} on #{burger_address}. It's #{burger_rating} and a burger costs £#{burger_cost}. Here's how to get there: https://www.google.com/maps?saddr=My+Location&daddr=#{burger_lat},#{burger_lon}" | 63199 | # Description:
# Find burger place
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Command: [not included in help script]
# burger - find burger place
#
# Author:
# <NAME>bertocar<NAME>
burger_places = [
{
"name": "<NAME>",
"cost": "7.5",
"rating": "4",
"minutes": "4",
"distance": "0.2",
"address": "High Street",
"postcode": "M4 1HP",
"latitude": "53.484476",
"longitude": "-2.236841"
},
{
"name": "<NAME>",
"cost": "-",
"rating": "4",
"minutes": "20",
"distance": "1",
"address": "Oxford Street",
"postcode": "M1 5NH",
"latitude": "53.474221",
"longitude": "-2.241047"
},
{
"name": "<NAME>",
"cost": "6.75",
"rating": "4",
"minutes": "9",
"distance": "0.5",
"address": "Piccadilly Gardens",
"postcode": "M1 1RG",
"latitude": "53.480333",
"longitude": "-2.236441"
},
{
"name": "<NAME>",
"cost": "20",
"rating": "4",
"minutes": "10",
"distance": "0.5",
"address": "King Street",
"postcode": "M2 4WU",
"latitude": "53.480581",
"longitude": "-2.242922"
},
{
"name": "<NAME>",
"cost": "7.50",
"rating": "4",
"minutes": "19",
"distance": "1",
"address": "Whitworth Street West",
"postcode": "M1 5WW",
"latitude": "53.473974",
"longitude": "-2.243336"
},
{
"name": "<NAME>",
"cost": "8.95",
"rating": "4",
"minutes": "24",
"distance": "1.2",
"address": "Castle Street",
"postcode": "M3 4LZ",
"latitude": "53.474435",
"longitude": "-2.254759"
},
{
"name": "<NAME>",
"cost": "6",
"rating": "-",
"minutes": "10",
"distance": "0.5",
"address": "Tib Lane",
"postcode": "M2 4JB",
"latitude": "53.480362",
"longitude": "-2.244578"
},
{
"name": "<NAME>",
"cost": "9.75",
"rating": "3",
"minutes": "15",
"distance": "0.8",
"address": "Spinningfields",
"postcode": "M3 3ER",
"latitude": "53.481341",
"longitude": "-2.253384"
},
{
"name": "<NAME>",
"cost": "6.45",
"rating": "4",
"minutes": "14",
"distance": "0.7",
"address": "Deansgate",
"postcode": "M3 3WB",
"latitude": "53.479201",
"longitude": "-2.249303"
},
{
"name": "<NAME>",
"cost": "-",
"rating": "4",
"minutes": "2",
"distance": "0.1",
"address": "The Printworks",
"postcode": "M4 2BS",
"latitude": "53.485262",
"longitude": "-2.240696"
},
{
"name": "<NAME>",
"cost": "8.95",
"rating": "4",
"minutes": "12",
"distance": "0.6",
"address": "Lloyd Street",
"postcode": "M2 5WA",
"latitude": "53.478955",
"longitude": "-2.2468"
},
{
"name": "<NAME>",
"cost": "15.5",
"rating": "3.5",
"minutes": "7",
"distance": "0.3",
"address": "Malmaison Hotel",
"postcode": "M1 1LZ",
"latitude": "53.481876",
"longitude": "-2.237911"
},
{
"name": "<NAME>",
"cost": "9.5",
"rating": "4",
"minutes": "5",
"distance": "0.2",
"address": "Northern Quarter",
"postcode": "M4 1DN",
"latitude": "53.483559",
"longitude": "-2.236851"
},
{
"name": "<NAME>",
"cost": "10",
"rating": "3.5",
"minutes": "6",
"distance": "0.3",
"address": "Cross Street",
"postcode": "M2 7DH",
"latitude": "53.481676",
"longitude": "-2.244633"
},
{
"name": "<NAME>",
"cost": "6.95",
"rating": "4",
"minutes": "6",
"distance": "0.3",
"address": "74 Tib Street",
"postcode": "M4 1LG",
"latitude": "53.483725",
"longitude": "-2.234621"
}
]
shuffle = (a) ->
i = a.length
while --i > 0
j = ~~(Math.random() * (i + 1))
t = a[j]
a[j] = a[i]
a[i] = t
a
module.exports = (robot) ->
robot.hear /burger/i, (msg) ->
random_burger_places = shuffle (burger_places)
random_burger = random_burger_places[0]
burger_name = random_burger.name
burger_cost = random_burger.cost
burger_address = random_burger.address
burger_lat = random_burger.latitude
burger_lon = random_burger.longitude
if random_burger.rating >= 5
burger_rating = 'great'
else if random_burger.rating >= 4
burger_rating = 'good'
else if random_burger.rating >= 3
burger_rating = 'OK'
else if random_burger.rating >= 2
burger_rating = 'not bad'
else if random_burger.rating >= 1
burger_rating = 'poor'
else
burger_rating = 'not been rated'
msg.send "Try #{burger_name} on #{burger_address}. It's #{burger_rating} and a burger costs £#{burger_cost}. Here's how to get there: https://www.google.com/maps?saddr=My+Location&daddr=#{burger_lat},#{burger_lon}" | true | # Description:
# Find burger place
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Command: [not included in help script]
# burger - find burger place
#
# Author:
# PI:NAME:<NAME>END_PIbertocarPI:NAME:<NAME>END_PI
burger_places = [
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "7.5",
"rating": "4",
"minutes": "4",
"distance": "0.2",
"address": "High Street",
"postcode": "M4 1HP",
"latitude": "53.484476",
"longitude": "-2.236841"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "-",
"rating": "4",
"minutes": "20",
"distance": "1",
"address": "Oxford Street",
"postcode": "M1 5NH",
"latitude": "53.474221",
"longitude": "-2.241047"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "6.75",
"rating": "4",
"minutes": "9",
"distance": "0.5",
"address": "Piccadilly Gardens",
"postcode": "M1 1RG",
"latitude": "53.480333",
"longitude": "-2.236441"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "20",
"rating": "4",
"minutes": "10",
"distance": "0.5",
"address": "King Street",
"postcode": "M2 4WU",
"latitude": "53.480581",
"longitude": "-2.242922"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "7.50",
"rating": "4",
"minutes": "19",
"distance": "1",
"address": "Whitworth Street West",
"postcode": "M1 5WW",
"latitude": "53.473974",
"longitude": "-2.243336"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "8.95",
"rating": "4",
"minutes": "24",
"distance": "1.2",
"address": "Castle Street",
"postcode": "M3 4LZ",
"latitude": "53.474435",
"longitude": "-2.254759"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "6",
"rating": "-",
"minutes": "10",
"distance": "0.5",
"address": "Tib Lane",
"postcode": "M2 4JB",
"latitude": "53.480362",
"longitude": "-2.244578"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "9.75",
"rating": "3",
"minutes": "15",
"distance": "0.8",
"address": "Spinningfields",
"postcode": "M3 3ER",
"latitude": "53.481341",
"longitude": "-2.253384"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "6.45",
"rating": "4",
"minutes": "14",
"distance": "0.7",
"address": "Deansgate",
"postcode": "M3 3WB",
"latitude": "53.479201",
"longitude": "-2.249303"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "-",
"rating": "4",
"minutes": "2",
"distance": "0.1",
"address": "The Printworks",
"postcode": "M4 2BS",
"latitude": "53.485262",
"longitude": "-2.240696"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "8.95",
"rating": "4",
"minutes": "12",
"distance": "0.6",
"address": "Lloyd Street",
"postcode": "M2 5WA",
"latitude": "53.478955",
"longitude": "-2.2468"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "15.5",
"rating": "3.5",
"minutes": "7",
"distance": "0.3",
"address": "Malmaison Hotel",
"postcode": "M1 1LZ",
"latitude": "53.481876",
"longitude": "-2.237911"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "9.5",
"rating": "4",
"minutes": "5",
"distance": "0.2",
"address": "Northern Quarter",
"postcode": "M4 1DN",
"latitude": "53.483559",
"longitude": "-2.236851"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "10",
"rating": "3.5",
"minutes": "6",
"distance": "0.3",
"address": "Cross Street",
"postcode": "M2 7DH",
"latitude": "53.481676",
"longitude": "-2.244633"
},
{
"name": "PI:NAME:<NAME>END_PI",
"cost": "6.95",
"rating": "4",
"minutes": "6",
"distance": "0.3",
"address": "74 Tib Street",
"postcode": "M4 1LG",
"latitude": "53.483725",
"longitude": "-2.234621"
}
]
shuffle = (a) ->
i = a.length
while --i > 0
j = ~~(Math.random() * (i + 1))
t = a[j]
a[j] = a[i]
a[i] = t
a
module.exports = (robot) ->
robot.hear /burger/i, (msg) ->
random_burger_places = shuffle (burger_places)
random_burger = random_burger_places[0]
burger_name = random_burger.name
burger_cost = random_burger.cost
burger_address = random_burger.address
burger_lat = random_burger.latitude
burger_lon = random_burger.longitude
if random_burger.rating >= 5
burger_rating = 'great'
else if random_burger.rating >= 4
burger_rating = 'good'
else if random_burger.rating >= 3
burger_rating = 'OK'
else if random_burger.rating >= 2
burger_rating = 'not bad'
else if random_burger.rating >= 1
burger_rating = 'poor'
else
burger_rating = 'not been rated'
msg.send "Try #{burger_name} on #{burger_address}. It's #{burger_rating} and a burger costs £#{burger_cost}. Here's how to get there: https://www.google.com/maps?saddr=My+Location&daddr=#{burger_lat},#{burger_lon}" |
[
{
"context": "ool, i in @props.task.tools\n# tool._key ?= Math.random()\n# toolMarks = (mark for mark in @props.",
"end": 2097,
"score": 0.9635794758796692,
"start": 2086,
"tag": "KEY",
"value": "Math.random"
},
{
"context": "ark, i in toolMarks\n# mar... | app/assets/javascripts/components/core-tools/pick-many.cjsx | ogugugugugua/PRED-Crowdsourcing-2020 | 88 | React = require 'react'
GenericTask = require './generic'
# Markdown = require '../../components/markdown'
NOOP = Function.prototype
icons =
point: <svg viewBox="0 0 100 100">
<circle className="shape" r="30" cx="50" cy="50" />
<line className="shape" x1="50" y1="5" x2="50" y2="40" />
<line className="shape" x1="95" y1="50" x2="60" y2="50" />
<line className="shape" x1="50" y1="95" x2="50" y2="60" />
<line className="shape" x1="5" y1="50" x2="40" y2="50" />
</svg>
line: <svg viewBox="0 0 100 100">
<line className="shape" x1="25" y1="90" x2="75" y2="10" />
</svg>
rectangle: <svg viewBox="0 0 100 100">
<rect className="shape" x="10" y="30" width="80" height="40" />
</svg>
polygon: <svg viewBox="0 0 100 100">
<polyline className="shape" points="50, 5 90, 90 50, 70 5, 90 50, 5" />
</svg>
circle: <svg viewBox="0 0 100 100">
<ellipse className="shape" rx="33" ry="33" cx="50" cy="50" />
</svg>
ellipse: <svg viewBox="0 0 100 100">
<ellipse className="shape" rx="45" ry="25" cx="50" cy="50" transform="rotate(-30, 50, 50)" />
</svg>
# Summary = React.createClass
# displayName: 'SingleChoiceSummary'
# getDefaultProps: ->
# task: null
# annotation: null
# expanded: false
# getInitialState: ->
# expanded: @props.expanded
# render: ->
# <div className="classification-task-summary">
# <div className="question">
# {@props.task.instruction}
# {if @state.expanded
# <button type="button" className="toggle-more" onClick={@setState.bind this, expanded: false, null}>Less</button>
# else
# <button type="button" className="toggle-more" onClick={@setState.bind this, expanded: true, null}>More</button>}
# {if @props.onToggle?
# if @props.inactive
# <button type="button"><i className="fa fa-eye fa-fw"></i></button>
# else
# <button type="button"><i className="fa fa-eye-slash fa-fw"></i></button>}
# </div>
# {for tool, i in @props.task.tools
# tool._key ?= Math.random()
# toolMarks = (mark for mark in @props.annotation.value when mark.tool is i)
# if @state.expanded or toolMarks.length isnt 0
# <div key={tool._key} className="answer">
# {tool.type} <strong>{tool.label}</strong> ({[].concat toolMarks.length})
# {if @state.expanded
# for mark, i in toolMarks
# mark._key ?= Math.random()
# <div key={mark._key}>
# {i + 1}){' '}
# {for key, value of mark when key not in ['tool', 'sources'] and key.charAt(0) isnt '_'
# <code key={key}><strong>{key}</strong>: {JSON.stringify value} </code>}
# </div>}
# </div>}
# </div>
module.exports = React.createClass
displayName: 'PickMany'
statics:
# Summary: Summary
getDefaultAnnotation: ->
_toolIndex: 0
value: []
getDefaultProps: ->
task: null
onChange: NOOP
getInitialState: ->
annotation: [] #@props.annotation ? {}
render: ->
options = for option, i in @props.task.tool_config.options
option._key ?= Math.random()
isChecked = option.value in @state.annotation
<label
key={option._key}
className="answer minor-button #{if isChecked then 'active' else ''}"
>
<span
className='drawing-tool-icon'
style={color: option.color}>{icons[option.value]}
</span>{' '}
<input
type='checkbox'
className='drawing-tool-input'
checked={isChecked}
ref={'inp-' + i}
onChange={ @handleChange.bind this, i }
/>
<span>
{option.label}
</span>
{if option.help?
<span className="help" data-text="#{option.help}"><i className="fa fa-question"></i></span>
}
</label>
<GenericTask question={@props.task.instruction} onShowHelp={@props.onShowHelp} answers={options} />
handleChange: (index, e) ->
inp = @refs["inp-#{index}"]
annotation = @state.annotation
value = @props.task.tool_config.options[index].value
isChecked = annotation.indexOf(value) >= 0
# toggle checkmark
if isChecked
annotation.splice(annotation.indexOf(value), 1) # remove entry
else
annotation.push value
@setState
annotation: annotation, () =>
@props.onChange? annotation
| 128812 | React = require 'react'
GenericTask = require './generic'
# Markdown = require '../../components/markdown'
NOOP = Function.prototype
icons =
point: <svg viewBox="0 0 100 100">
<circle className="shape" r="30" cx="50" cy="50" />
<line className="shape" x1="50" y1="5" x2="50" y2="40" />
<line className="shape" x1="95" y1="50" x2="60" y2="50" />
<line className="shape" x1="50" y1="95" x2="50" y2="60" />
<line className="shape" x1="5" y1="50" x2="40" y2="50" />
</svg>
line: <svg viewBox="0 0 100 100">
<line className="shape" x1="25" y1="90" x2="75" y2="10" />
</svg>
rectangle: <svg viewBox="0 0 100 100">
<rect className="shape" x="10" y="30" width="80" height="40" />
</svg>
polygon: <svg viewBox="0 0 100 100">
<polyline className="shape" points="50, 5 90, 90 50, 70 5, 90 50, 5" />
</svg>
circle: <svg viewBox="0 0 100 100">
<ellipse className="shape" rx="33" ry="33" cx="50" cy="50" />
</svg>
ellipse: <svg viewBox="0 0 100 100">
<ellipse className="shape" rx="45" ry="25" cx="50" cy="50" transform="rotate(-30, 50, 50)" />
</svg>
# Summary = React.createClass
# displayName: 'SingleChoiceSummary'
# getDefaultProps: ->
# task: null
# annotation: null
# expanded: false
# getInitialState: ->
# expanded: @props.expanded
# render: ->
# <div className="classification-task-summary">
# <div className="question">
# {@props.task.instruction}
# {if @state.expanded
# <button type="button" className="toggle-more" onClick={@setState.bind this, expanded: false, null}>Less</button>
# else
# <button type="button" className="toggle-more" onClick={@setState.bind this, expanded: true, null}>More</button>}
# {if @props.onToggle?
# if @props.inactive
# <button type="button"><i className="fa fa-eye fa-fw"></i></button>
# else
# <button type="button"><i className="fa fa-eye-slash fa-fw"></i></button>}
# </div>
# {for tool, i in @props.task.tools
# tool._key ?= <KEY>()
# toolMarks = (mark for mark in @props.annotation.value when mark.tool is i)
# if @state.expanded or toolMarks.length isnt 0
# <div key={tool._key} className="answer">
# {tool.type} <strong>{tool.label}</strong> ({[].concat toolMarks.length})
# {if @state.expanded
# for mark, i in toolMarks
# mark._key ?= <KEY>()
# <div key={mark._key}>
# {i + 1}){' '}
# {for key, value of mark when key not in ['tool', 'sources'] and key.charAt(0) isnt '_'
# <code key={key}><strong>{key}</strong>: {JSON.stringify value} </code>}
# </div>}
# </div>}
# </div>
module.exports = React.createClass
displayName: 'PickMany'
statics:
# Summary: Summary
getDefaultAnnotation: ->
_toolIndex: 0
value: []
getDefaultProps: ->
task: null
onChange: NOOP
getInitialState: ->
annotation: [] #@props.annotation ? {}
render: ->
options = for option, i in @props.task.tool_config.options
option._key ?= Math.random()
isChecked = option.value in @state.annotation
<label
key={option._key}
className="answer minor-button #{if isChecked then 'active' else ''}"
>
<span
className='drawing-tool-icon'
style={color: option.color}>{icons[option.value]}
</span>{' '}
<input
type='checkbox'
className='drawing-tool-input'
checked={isChecked}
ref={'inp-' + i}
onChange={ @handleChange.bind this, i }
/>
<span>
{option.label}
</span>
{if option.help?
<span className="help" data-text="#{option.help}"><i className="fa fa-question"></i></span>
}
</label>
<GenericTask question={@props.task.instruction} onShowHelp={@props.onShowHelp} answers={options} />
handleChange: (index, e) ->
inp = @refs["inp-#{index}"]
annotation = @state.annotation
value = @props.task.tool_config.options[index].value
isChecked = annotation.indexOf(value) >= 0
# toggle checkmark
if isChecked
annotation.splice(annotation.indexOf(value), 1) # remove entry
else
annotation.push value
@setState
annotation: annotation, () =>
@props.onChange? annotation
| true | React = require 'react'
GenericTask = require './generic'
# Markdown = require '../../components/markdown'
NOOP = Function.prototype
icons =
point: <svg viewBox="0 0 100 100">
<circle className="shape" r="30" cx="50" cy="50" />
<line className="shape" x1="50" y1="5" x2="50" y2="40" />
<line className="shape" x1="95" y1="50" x2="60" y2="50" />
<line className="shape" x1="50" y1="95" x2="50" y2="60" />
<line className="shape" x1="5" y1="50" x2="40" y2="50" />
</svg>
line: <svg viewBox="0 0 100 100">
<line className="shape" x1="25" y1="90" x2="75" y2="10" />
</svg>
rectangle: <svg viewBox="0 0 100 100">
<rect className="shape" x="10" y="30" width="80" height="40" />
</svg>
polygon: <svg viewBox="0 0 100 100">
<polyline className="shape" points="50, 5 90, 90 50, 70 5, 90 50, 5" />
</svg>
circle: <svg viewBox="0 0 100 100">
<ellipse className="shape" rx="33" ry="33" cx="50" cy="50" />
</svg>
ellipse: <svg viewBox="0 0 100 100">
<ellipse className="shape" rx="45" ry="25" cx="50" cy="50" transform="rotate(-30, 50, 50)" />
</svg>
# Summary = React.createClass
# displayName: 'SingleChoiceSummary'
# getDefaultProps: ->
# task: null
# annotation: null
# expanded: false
# getInitialState: ->
# expanded: @props.expanded
# render: ->
# <div className="classification-task-summary">
# <div className="question">
# {@props.task.instruction}
# {if @state.expanded
# <button type="button" className="toggle-more" onClick={@setState.bind this, expanded: false, null}>Less</button>
# else
# <button type="button" className="toggle-more" onClick={@setState.bind this, expanded: true, null}>More</button>}
# {if @props.onToggle?
# if @props.inactive
# <button type="button"><i className="fa fa-eye fa-fw"></i></button>
# else
# <button type="button"><i className="fa fa-eye-slash fa-fw"></i></button>}
# </div>
# {for tool, i in @props.task.tools
# tool._key ?= PI:KEY:<KEY>END_PI()
# toolMarks = (mark for mark in @props.annotation.value when mark.tool is i)
# if @state.expanded or toolMarks.length isnt 0
# <div key={tool._key} className="answer">
# {tool.type} <strong>{tool.label}</strong> ({[].concat toolMarks.length})
# {if @state.expanded
# for mark, i in toolMarks
# mark._key ?= PI:KEY:<KEY>END_PI()
# <div key={mark._key}>
# {i + 1}){' '}
# {for key, value of mark when key not in ['tool', 'sources'] and key.charAt(0) isnt '_'
# <code key={key}><strong>{key}</strong>: {JSON.stringify value} </code>}
# </div>}
# </div>}
# </div>
module.exports = React.createClass
displayName: 'PickMany'
statics:
# Summary: Summary
getDefaultAnnotation: ->
_toolIndex: 0
value: []
getDefaultProps: ->
task: null
onChange: NOOP
getInitialState: ->
annotation: [] #@props.annotation ? {}
render: ->
options = for option, i in @props.task.tool_config.options
option._key ?= Math.random()
isChecked = option.value in @state.annotation
<label
key={option._key}
className="answer minor-button #{if isChecked then 'active' else ''}"
>
<span
className='drawing-tool-icon'
style={color: option.color}>{icons[option.value]}
</span>{' '}
<input
type='checkbox'
className='drawing-tool-input'
checked={isChecked}
ref={'inp-' + i}
onChange={ @handleChange.bind this, i }
/>
<span>
{option.label}
</span>
{if option.help?
<span className="help" data-text="#{option.help}"><i className="fa fa-question"></i></span>
}
</label>
<GenericTask question={@props.task.instruction} onShowHelp={@props.onShowHelp} answers={options} />
handleChange: (index, e) ->
inp = @refs["inp-#{index}"]
annotation = @state.annotation
value = @props.task.tool_config.options[index].value
isChecked = annotation.indexOf(value) >= 0
# toggle checkmark
if isChecked
annotation.splice(annotation.indexOf(value), 1) # remove entry
else
annotation.push value
@setState
annotation: annotation, () =>
@props.onChange? annotation
|
[
{
"context": "peed: -> 100\n glassJoe = \n name: \"Joe\"\n getSpeed: -> 25\n speed.add litt",
"end": 2073,
"score": 0.9994896650314331,
"start": 2070,
"tag": "NAME",
"value": "Joe"
}
] | _old/node/test/scheduler/schedulerSpeedTest.coffee | lagdotcom/rot.js | 1,653 | # schedulerSpeedTest.coffee
#----------------------------------------------------------------------------
_ = require "underscore"
should = require "should"
ROT = require "../../lib/rot"
NO_REPEAT = false
YES_REPEAT = true
describe "scheduler", ->
it "should export ROT.Scheduler.Speed", ->
ROT.should.have.property "Scheduler"
ROT.Scheduler.should.have.property "Speed"
it "should be possible to create a Speed", ->
speed = new ROT.Scheduler.Speed()
speed.should.be.ok
describe "Speed", ->
speed = null
beforeEach ->
speed = new ROT.Scheduler.Speed()
speed.should.be.ok
it "should extend Scheduler", ->
speed.should.be.an.instanceof ROT.Scheduler
speed.should.be.an.instanceof ROT.Scheduler.Speed
describe "add", ->
it "should call the getSpeed method on added events", (done) ->
MOCK_event =
getSpeed: ->
done()
return 50
speed.add MOCK_event, NO_REPEAT
it "should add the item to the backing queue", (done) ->
MOCK_event =
getSpeed: -> 50
speed._queue.add = -> done()
speed.add MOCK_event, NO_REPEAT
describe "next", ->
it "should return the next item from the backing queue", ->
MOCK_event =
getSpeed: -> 50
speed.add MOCK_event, NO_REPEAT
event = speed.next()
event.should.equal MOCK_event
it "should return repeating events to the queue", (done) ->
MOCK_event1 =
getSpeed: -> 50
MOCK_event2 =
getSpeed: -> 50
almostDone = _.after 2, done
speed.add MOCK_event1, YES_REPEAT
speed.add MOCK_event2, YES_REPEAT
speed._queue.add = -> almostDone()
event = speed.next()
event.should.equal MOCK_event1
event = speed.next()
event.should.equal MOCK_event2
speed.next()
it "should respect the speed of the actors", ->
littleMac =
name: "Mac"
getSpeed: -> 100
glassJoe =
name: "Joe"
getSpeed: -> 25
speed.add littleMac, YES_REPEAT
speed.add glassJoe, YES_REPEAT
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql glassJoe
for i in [0..100]
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql glassJoe
#----------------------------------------------------------------------------
# end of schedulerSpeedTest.coffee
| 181534 | # schedulerSpeedTest.coffee
#----------------------------------------------------------------------------
_ = require "underscore"
should = require "should"
ROT = require "../../lib/rot"
NO_REPEAT = false
YES_REPEAT = true
describe "scheduler", ->
it "should export ROT.Scheduler.Speed", ->
ROT.should.have.property "Scheduler"
ROT.Scheduler.should.have.property "Speed"
it "should be possible to create a Speed", ->
speed = new ROT.Scheduler.Speed()
speed.should.be.ok
describe "Speed", ->
speed = null
beforeEach ->
speed = new ROT.Scheduler.Speed()
speed.should.be.ok
it "should extend Scheduler", ->
speed.should.be.an.instanceof ROT.Scheduler
speed.should.be.an.instanceof ROT.Scheduler.Speed
describe "add", ->
it "should call the getSpeed method on added events", (done) ->
MOCK_event =
getSpeed: ->
done()
return 50
speed.add MOCK_event, NO_REPEAT
it "should add the item to the backing queue", (done) ->
MOCK_event =
getSpeed: -> 50
speed._queue.add = -> done()
speed.add MOCK_event, NO_REPEAT
describe "next", ->
it "should return the next item from the backing queue", ->
MOCK_event =
getSpeed: -> 50
speed.add MOCK_event, NO_REPEAT
event = speed.next()
event.should.equal MOCK_event
it "should return repeating events to the queue", (done) ->
MOCK_event1 =
getSpeed: -> 50
MOCK_event2 =
getSpeed: -> 50
almostDone = _.after 2, done
speed.add MOCK_event1, YES_REPEAT
speed.add MOCK_event2, YES_REPEAT
speed._queue.add = -> almostDone()
event = speed.next()
event.should.equal MOCK_event1
event = speed.next()
event.should.equal MOCK_event2
speed.next()
it "should respect the speed of the actors", ->
littleMac =
name: "Mac"
getSpeed: -> 100
glassJoe =
name: "<NAME>"
getSpeed: -> 25
speed.add littleMac, YES_REPEAT
speed.add glassJoe, YES_REPEAT
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql glassJoe
for i in [0..100]
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql glassJoe
#----------------------------------------------------------------------------
# end of schedulerSpeedTest.coffee
| true | # schedulerSpeedTest.coffee
#----------------------------------------------------------------------------
_ = require "underscore"
should = require "should"
ROT = require "../../lib/rot"
NO_REPEAT = false
YES_REPEAT = true
describe "scheduler", ->
it "should export ROT.Scheduler.Speed", ->
ROT.should.have.property "Scheduler"
ROT.Scheduler.should.have.property "Speed"
it "should be possible to create a Speed", ->
speed = new ROT.Scheduler.Speed()
speed.should.be.ok
describe "Speed", ->
speed = null
beforeEach ->
speed = new ROT.Scheduler.Speed()
speed.should.be.ok
it "should extend Scheduler", ->
speed.should.be.an.instanceof ROT.Scheduler
speed.should.be.an.instanceof ROT.Scheduler.Speed
describe "add", ->
it "should call the getSpeed method on added events", (done) ->
MOCK_event =
getSpeed: ->
done()
return 50
speed.add MOCK_event, NO_REPEAT
it "should add the item to the backing queue", (done) ->
MOCK_event =
getSpeed: -> 50
speed._queue.add = -> done()
speed.add MOCK_event, NO_REPEAT
describe "next", ->
it "should return the next item from the backing queue", ->
MOCK_event =
getSpeed: -> 50
speed.add MOCK_event, NO_REPEAT
event = speed.next()
event.should.equal MOCK_event
it "should return repeating events to the queue", (done) ->
MOCK_event1 =
getSpeed: -> 50
MOCK_event2 =
getSpeed: -> 50
almostDone = _.after 2, done
speed.add MOCK_event1, YES_REPEAT
speed.add MOCK_event2, YES_REPEAT
speed._queue.add = -> almostDone()
event = speed.next()
event.should.equal MOCK_event1
event = speed.next()
event.should.equal MOCK_event2
speed.next()
it "should respect the speed of the actors", ->
littleMac =
name: "Mac"
getSpeed: -> 100
glassJoe =
name: "PI:NAME:<NAME>END_PI"
getSpeed: -> 25
speed.add littleMac, YES_REPEAT
speed.add glassJoe, YES_REPEAT
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql glassJoe
for i in [0..100]
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql littleMac
speed.next().should.eql glassJoe
#----------------------------------------------------------------------------
# end of schedulerSpeedTest.coffee
|
[
{
"context": "\"\n description: \"description\"\n author: \"me <em@i.l> (http://url)\"\n scripts:\n tes",
"end": 328,
"score": 0.7809420228004456,
"start": 326,
"tag": "USERNAME",
"value": "me"
},
{
"context": " description: \"description\"\n author: \"m... | deps/npm/node_modules/init-package-json/test/basic.coffee | lxe/io.coffee | 0 | tap = require("tap")
init = require("../")
rimraf = require("rimraf")
tap.test "the basics", (t) ->
i = __dirname + "/basic.input"
dir = __dirname
init dir, i,
foo: "bar"
, (er, data) ->
throw er if er
expect =
name: "the-name"
version: "1.2.5"
description: "description"
author: "me <em@i.l> (http://url)"
scripts:
test: "make test"
main: "main.js"
config:
foo: "bar"
package: {}
t.same data, expect
t.end()
return
setTimeout (->
process.stdin.emit "data", "the-name\n"
return
), 50
setTimeout (->
process.stdin.emit "data", "description\n"
return
), 100
setTimeout (->
process.stdin.emit "data", "yes\n"
return
), 150
return
tap.test "teardown", (t) ->
rimraf __dirname + "/package.json", t.end.bind(t)
return
| 73629 | tap = require("tap")
init = require("../")
rimraf = require("rimraf")
tap.test "the basics", (t) ->
i = __dirname + "/basic.input"
dir = __dirname
init dir, i,
foo: "bar"
, (er, data) ->
throw er if er
expect =
name: "the-name"
version: "1.2.5"
description: "description"
author: "me <<EMAIL>> (http://url)"
scripts:
test: "make test"
main: "main.js"
config:
foo: "bar"
package: {}
t.same data, expect
t.end()
return
setTimeout (->
process.stdin.emit "data", "the-name\n"
return
), 50
setTimeout (->
process.stdin.emit "data", "description\n"
return
), 100
setTimeout (->
process.stdin.emit "data", "yes\n"
return
), 150
return
tap.test "teardown", (t) ->
rimraf __dirname + "/package.json", t.end.bind(t)
return
| true | tap = require("tap")
init = require("../")
rimraf = require("rimraf")
tap.test "the basics", (t) ->
i = __dirname + "/basic.input"
dir = __dirname
init dir, i,
foo: "bar"
, (er, data) ->
throw er if er
expect =
name: "the-name"
version: "1.2.5"
description: "description"
author: "me <PI:EMAIL:<EMAIL>END_PI> (http://url)"
scripts:
test: "make test"
main: "main.js"
config:
foo: "bar"
package: {}
t.same data, expect
t.end()
return
setTimeout (->
process.stdin.emit "data", "the-name\n"
return
), 50
setTimeout (->
process.stdin.emit "data", "description\n"
return
), 100
setTimeout (->
process.stdin.emit "data", "yes\n"
return
), 150
return
tap.test "teardown", (t) ->
rimraf __dirname + "/package.json", t.end.bind(t)
return
|
[
{
"context": " next(null, null) if not user?\n\t\t\t\tuser.password = req.body.newPassword\n\t\t\t\tuser.save cb\n\t\t], (err, user) ->\n\t\t\treturn ne",
"end": 502,
"score": 0.99615478515625,
"start": 482,
"tag": "PASSWORD",
"value": "req.body.newPassword"
}
] | backend/app/controllers/user.coffee | bafcar/bafcar | 0 | # User controller
async = require 'async'
User = require '../models/user'
module.exports =
create: (req, res, next) ->
user = User.create req.body, (err, user) ->
return next(err) if err?
res.status(201).json user
current: (req, res) -> res.json req.user
updatePassword: (req, res, next) ->
async.waterfall [
(cb) -> User.authenticate req.user.username, req.body.currentPassword, cb
(user, cb) ->
return next(null, null) if not user?
user.password = req.body.newPassword
user.save cb
], (err, user) ->
return next(err) if err?
res.json user | 64927 | # User controller
async = require 'async'
User = require '../models/user'
module.exports =
create: (req, res, next) ->
user = User.create req.body, (err, user) ->
return next(err) if err?
res.status(201).json user
current: (req, res) -> res.json req.user
updatePassword: (req, res, next) ->
async.waterfall [
(cb) -> User.authenticate req.user.username, req.body.currentPassword, cb
(user, cb) ->
return next(null, null) if not user?
user.password = <PASSWORD>
user.save cb
], (err, user) ->
return next(err) if err?
res.json user | true | # User controller
async = require 'async'
User = require '../models/user'
module.exports =
create: (req, res, next) ->
user = User.create req.body, (err, user) ->
return next(err) if err?
res.status(201).json user
current: (req, res) -> res.json req.user
updatePassword: (req, res, next) ->
async.waterfall [
(cb) -> User.authenticate req.user.username, req.body.currentPassword, cb
(user, cb) ->
return next(null, null) if not user?
user.password = PI:PASSWORD:<PASSWORD>END_PI
user.save cb
], (err, user) ->
return next(err) if err?
res.json user |
[
{
"context": " * @var {String}\n ###\n COMPOSER_COMMIT: 'd0310b646229c3dc57b71bfea2f14ed6c560a5bd'\n\n ###*\n * @var {String}\n ###\n phpBi",
"end": 518,
"score": 0.9660202264785767,
"start": 478,
"tag": "PASSWORD",
"value": "d0310b646229c3dc57b71bfea2f14ed6c560a5bd"
... | lib/ComposerService.coffee | php-integrator/atom-base-legacy-php56 | 0 | fs = require 'fs'
path = require 'path'
download = require 'download'
child_process = require 'child_process'
module.exports =
##*
# Handles usage of Composer (PHP package manager).
##
class ComposerService
###*
* The commit to download from the Composer repository.
*
* Currently set to version 1.2.4.
*
* @see https://getcomposer.org/doc/faqs/how-to-install-composer-programmatically.md
*
* @var {String}
###
COMPOSER_COMMIT: 'd0310b646229c3dc57b71bfea2f14ed6c560a5bd'
###*
* @var {String}
###
phpBinary: null
###*
* @var {String}
###
folder: null
###*
* @param {String} phpBinary
* @param {String} folder
###
constructor: (@phpBinary, @folder) ->
###*
* @param {Array} parameters
* @param {String|null} workingDirectory
*
* @return {Promise}
###
run: (parameters, workingDirectory = null) ->
return @installIfNecessary().then () =>
options = {}
if workingDirectory?
options.cwd = workingDirectory
return new Promise (resolve, reject) =>
process = child_process.spawn(@phpBinary, [@getPath()].concat(parameters), options)
process.stdout.on 'data', (data) =>
console.debug('Composer has something to say:', data.toString())
process.stderr.on 'data', (data) =>
console.warn('Composer has errors to report:', data.toString())
process.on 'close', (code) =>
console.debug('Composer exited with status code:', code)
if code != 0
reject()
else
resolve()
###*
* @return {Promise}
###
installIfNecessary: () ->
if @isInstalled()
return new Promise (resolve, reject) ->
resolve()
return @install()
###*
* @param {Boolean}
###
isInstalled: () ->
return true if fs.existsSync(@getPath())
###*
* @return {Promise}
###
install: () ->
@download().then () =>
parameters = [
@getInstallerFileFilePath(),
'--install-dir=' + @folder + '',
'--filename=' + @getFileName()
]
return new Promise (resolve, reject) =>
process = child_process.spawn(@phpBinary, parameters)
process.stdout.on 'data', (data) =>
console.debug('Composer installer has something to say:', data.toString())
process.stderr.on 'data', (data) =>
console.warn('Composer installer has errors to report:', data.toString())
process.on 'close', (code) =>
console.debug('Composer installer exited with status code:', code)
if code != 0
reject()
else
resolve()
###*
* @return {Promise}
###
download: () ->
return download(
'https://raw.githubusercontent.com/composer/getcomposer.org/' + @COMPOSER_COMMIT + '/web/installer',
@getInstallerFilePath()
)
###*
* @return {String}
###
getInstallerFilePath: () ->
return @folder
###*
* @return {String}
###
getInstallerFileFileName: () ->
return 'installer'
###*
* @return {String}
###
getInstallerFileFilePath: () ->
return path.join(@getInstallerFilePath(), @getInstallerFileFileName())
###*
* @return {String}
###
getPath: () ->
return path.join(@folder, @getFileName())
###*
* @return {String}
###
getFileName: () ->
return 'composer.phar'
| 192135 | fs = require 'fs'
path = require 'path'
download = require 'download'
child_process = require 'child_process'
module.exports =
##*
# Handles usage of Composer (PHP package manager).
##
class ComposerService
###*
* The commit to download from the Composer repository.
*
* Currently set to version 1.2.4.
*
* @see https://getcomposer.org/doc/faqs/how-to-install-composer-programmatically.md
*
* @var {String}
###
COMPOSER_COMMIT: '<PASSWORD>'
###*
* @var {String}
###
phpBinary: null
###*
* @var {String}
###
folder: null
###*
* @param {String} phpBinary
* @param {String} folder
###
constructor: (@phpBinary, @folder) ->
###*
* @param {Array} parameters
* @param {String|null} workingDirectory
*
* @return {Promise}
###
run: (parameters, workingDirectory = null) ->
return @installIfNecessary().then () =>
options = {}
if workingDirectory?
options.cwd = workingDirectory
return new Promise (resolve, reject) =>
process = child_process.spawn(@phpBinary, [@getPath()].concat(parameters), options)
process.stdout.on 'data', (data) =>
console.debug('Composer has something to say:', data.toString())
process.stderr.on 'data', (data) =>
console.warn('Composer has errors to report:', data.toString())
process.on 'close', (code) =>
console.debug('Composer exited with status code:', code)
if code != 0
reject()
else
resolve()
###*
* @return {Promise}
###
installIfNecessary: () ->
if @isInstalled()
return new Promise (resolve, reject) ->
resolve()
return @install()
###*
* @param {Boolean}
###
isInstalled: () ->
return true if fs.existsSync(@getPath())
###*
* @return {Promise}
###
install: () ->
@download().then () =>
parameters = [
@getInstallerFileFilePath(),
'--install-dir=' + @folder + '',
'--filename=' + @getFileName()
]
return new Promise (resolve, reject) =>
process = child_process.spawn(@phpBinary, parameters)
process.stdout.on 'data', (data) =>
console.debug('Composer installer has something to say:', data.toString())
process.stderr.on 'data', (data) =>
console.warn('Composer installer has errors to report:', data.toString())
process.on 'close', (code) =>
console.debug('Composer installer exited with status code:', code)
if code != 0
reject()
else
resolve()
###*
* @return {Promise}
###
download: () ->
return download(
'https://raw.githubusercontent.com/composer/getcomposer.org/' + @COMPOSER_COMMIT + '/web/installer',
@getInstallerFilePath()
)
###*
* @return {String}
###
getInstallerFilePath: () ->
return @folder
###*
* @return {String}
###
getInstallerFileFileName: () ->
return 'installer'
###*
* @return {String}
###
getInstallerFileFilePath: () ->
return path.join(@getInstallerFilePath(), @getInstallerFileFileName())
###*
* @return {String}
###
getPath: () ->
return path.join(@folder, @getFileName())
###*
* @return {String}
###
getFileName: () ->
return 'composer.phar'
| true | fs = require 'fs'
path = require 'path'
download = require 'download'
child_process = require 'child_process'
module.exports =
##*
# Handles usage of Composer (PHP package manager).
##
class ComposerService
###*
* The commit to download from the Composer repository.
*
* Currently set to version 1.2.4.
*
* @see https://getcomposer.org/doc/faqs/how-to-install-composer-programmatically.md
*
* @var {String}
###
COMPOSER_COMMIT: 'PI:PASSWORD:<PASSWORD>END_PI'
###*
* @var {String}
###
phpBinary: null
###*
* @var {String}
###
folder: null
###*
* @param {String} phpBinary
* @param {String} folder
###
constructor: (@phpBinary, @folder) ->
###*
* @param {Array} parameters
* @param {String|null} workingDirectory
*
* @return {Promise}
###
run: (parameters, workingDirectory = null) ->
return @installIfNecessary().then () =>
options = {}
if workingDirectory?
options.cwd = workingDirectory
return new Promise (resolve, reject) =>
process = child_process.spawn(@phpBinary, [@getPath()].concat(parameters), options)
process.stdout.on 'data', (data) =>
console.debug('Composer has something to say:', data.toString())
process.stderr.on 'data', (data) =>
console.warn('Composer has errors to report:', data.toString())
process.on 'close', (code) =>
console.debug('Composer exited with status code:', code)
if code != 0
reject()
else
resolve()
###*
* @return {Promise}
###
installIfNecessary: () ->
if @isInstalled()
return new Promise (resolve, reject) ->
resolve()
return @install()
###*
* @param {Boolean}
###
isInstalled: () ->
return true if fs.existsSync(@getPath())
###*
* @return {Promise}
###
install: () ->
@download().then () =>
parameters = [
@getInstallerFileFilePath(),
'--install-dir=' + @folder + '',
'--filename=' + @getFileName()
]
return new Promise (resolve, reject) =>
process = child_process.spawn(@phpBinary, parameters)
process.stdout.on 'data', (data) =>
console.debug('Composer installer has something to say:', data.toString())
process.stderr.on 'data', (data) =>
console.warn('Composer installer has errors to report:', data.toString())
process.on 'close', (code) =>
console.debug('Composer installer exited with status code:', code)
if code != 0
reject()
else
resolve()
###*
* @return {Promise}
###
download: () ->
return download(
'https://raw.githubusercontent.com/composer/getcomposer.org/' + @COMPOSER_COMMIT + '/web/installer',
@getInstallerFilePath()
)
###*
* @return {String}
###
getInstallerFilePath: () ->
return @folder
###*
* @return {String}
###
getInstallerFileFileName: () ->
return 'installer'
###*
* @return {String}
###
getInstallerFileFilePath: () ->
return path.join(@getInstallerFilePath(), @getInstallerFileFileName())
###*
* @return {String}
###
getPath: () ->
return path.join(@folder, @getFileName())
###*
* @return {String}
###
getFileName: () ->
return 'composer.phar'
|
[
{
"context": "pe = null\n $state = null\n url = null\n email = 'test@guc.edu.eg'\n beforeEach inject (_$controller_, _$httpBacken",
"end": 188,
"score": 0.9999313354492188,
"start": 173,
"tag": "EMAIL",
"value": "test@guc.edu.eg"
}
] | spec/controllers/send_verify_spec.coffee | ah450/guclink-auth-www | 0 | describe 'SendVerifyController', ->
beforeEach module 'guclinkAuth'
$controller = null
$httpBackend = null
$rootScope = null
$state = null
url = null
email = 'test@guc.edu.eg'
beforeEach inject (_$controller_, _$httpBackend_, _$rootScope_, _$state_) ->
$controller = _$controller_
$httpBackend = _$httpBackend_
$state = _$state_
$rootScope = _$rootScope_
url = ['http://localhost:3000/api', 'users',
encodeURIComponent(btoa(email)),
'resend_verify.json'].join '/'
beforeEach ->
$httpBackend.when('GET', 'http://localhost:3000/api/configurations.json')
.respond({
default_token_exp: 30,
pass_reset_resend_delay: 120,
pass_reset_expiration: 240,
user_verification_resend_delay: 120
})
it 'sets processing to true and done to false', ->
$httpBackend.when('GET', url)
.respond 200
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
expect($scope.processing).to.be.true
expect($scope.done).to.be.false
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it 'sets correct error on 404', ->
$httpBackend.when('GET', url)
.respond 404
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('User does not exist')
it 'sets correct error on 422', ->
$httpBackend.when('GET', url)
.respond 422, {message: 'error'}
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('error')
it 'sets correct error on 420', ->
$httpBackend.when('GET', url)
.respond 420
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('Must wait 2 minutes between requests')
it 'transitions to internal_error state otherwise', ->
$httpBackend.when('GET', url)
.respond 500
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
$rootScope.$apply()
expect($state.current.name).to.eql('public.internal_error')
it 'sets processing to false and done to true on success', ->
$httpBackend.when('GET', url)
.respond 200
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.true
| 32220 | describe 'SendVerifyController', ->
beforeEach module 'guclinkAuth'
$controller = null
$httpBackend = null
$rootScope = null
$state = null
url = null
email = '<EMAIL>'
beforeEach inject (_$controller_, _$httpBackend_, _$rootScope_, _$state_) ->
$controller = _$controller_
$httpBackend = _$httpBackend_
$state = _$state_
$rootScope = _$rootScope_
url = ['http://localhost:3000/api', 'users',
encodeURIComponent(btoa(email)),
'resend_verify.json'].join '/'
beforeEach ->
$httpBackend.when('GET', 'http://localhost:3000/api/configurations.json')
.respond({
default_token_exp: 30,
pass_reset_resend_delay: 120,
pass_reset_expiration: 240,
user_verification_resend_delay: 120
})
it 'sets processing to true and done to false', ->
$httpBackend.when('GET', url)
.respond 200
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
expect($scope.processing).to.be.true
expect($scope.done).to.be.false
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it 'sets correct error on 404', ->
$httpBackend.when('GET', url)
.respond 404
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('User does not exist')
it 'sets correct error on 422', ->
$httpBackend.when('GET', url)
.respond 422, {message: 'error'}
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('error')
it 'sets correct error on 420', ->
$httpBackend.when('GET', url)
.respond 420
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('Must wait 2 minutes between requests')
it 'transitions to internal_error state otherwise', ->
$httpBackend.when('GET', url)
.respond 500
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
$rootScope.$apply()
expect($state.current.name).to.eql('public.internal_error')
it 'sets processing to false and done to true on success', ->
$httpBackend.when('GET', url)
.respond 200
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.true
| true | describe 'SendVerifyController', ->
beforeEach module 'guclinkAuth'
$controller = null
$httpBackend = null
$rootScope = null
$state = null
url = null
email = 'PI:EMAIL:<EMAIL>END_PI'
beforeEach inject (_$controller_, _$httpBackend_, _$rootScope_, _$state_) ->
$controller = _$controller_
$httpBackend = _$httpBackend_
$state = _$state_
$rootScope = _$rootScope_
url = ['http://localhost:3000/api', 'users',
encodeURIComponent(btoa(email)),
'resend_verify.json'].join '/'
beforeEach ->
$httpBackend.when('GET', 'http://localhost:3000/api/configurations.json')
.respond({
default_token_exp: 30,
pass_reset_resend_delay: 120,
pass_reset_expiration: 240,
user_verification_resend_delay: 120
})
it 'sets processing to true and done to false', ->
$httpBackend.when('GET', url)
.respond 200
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
expect($scope.processing).to.be.true
expect($scope.done).to.be.false
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it 'sets correct error on 404', ->
$httpBackend.when('GET', url)
.respond 404
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('User does not exist')
it 'sets correct error on 422', ->
$httpBackend.when('GET', url)
.respond 422, {message: 'error'}
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('error')
it 'sets correct error on 420', ->
$httpBackend.when('GET', url)
.respond 420
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.false
expect($scope.error).to.eql('Must wait 2 minutes between requests')
it 'transitions to internal_error state otherwise', ->
$httpBackend.when('GET', url)
.respond 500
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
$rootScope.$apply()
expect($state.current.name).to.eql('public.internal_error')
it 'sets processing to false and done to true on success', ->
$httpBackend.when('GET', url)
.respond 200
$scope = {}
controller = $controller 'SendVerifyController', {$scope: $scope}
$scope.userData.email = email
$scope.verify()
$httpBackend.flush()
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
expect($scope.processing).to.be.false
expect($scope.done).to.be.true
|
[
{
"context": "###\n# test/karma.conf.coffee\n#\n# © 2014 Dan Nichols\n# See LICENSE for more details\n#\n# Configure karm",
"end": 51,
"score": 0.9994246363639832,
"start": 40,
"tag": "NAME",
"value": "Dan Nichols"
}
] | test/karma.conf.coffee | dlnichols/h_media | 0 | ###
# test/karma.conf.coffee
#
# © 2014 Dan Nichols
# See LICENSE for more details
#
# Configure karma
###
'use strict'
module.exports = (config) ->
config.set
basePath: '..'
frameworks: [ 'mocha', 'chai', 'sinon' ]
files: [
'lib/**/*.coffee'
'test/**/*.coffee'
]
exclude: []
preprocessors: {
'**/*.coffee': ['coffee']
}
reporters: [
'progress'
]
port: 9876
colors: true
# level of logging - possible: DISABLE,ERROR,WARN,INFO,DEBUG
logLevel: config.LOG_INFO
autoWatch: true
browsers: [ 'PhantomJS' ]
captureTimeout: 6000
singleRun: false
| 116566 | ###
# test/karma.conf.coffee
#
# © 2014 <NAME>
# See LICENSE for more details
#
# Configure karma
###
'use strict'
module.exports = (config) ->
config.set
basePath: '..'
frameworks: [ 'mocha', 'chai', 'sinon' ]
files: [
'lib/**/*.coffee'
'test/**/*.coffee'
]
exclude: []
preprocessors: {
'**/*.coffee': ['coffee']
}
reporters: [
'progress'
]
port: 9876
colors: true
# level of logging - possible: DISABLE,ERROR,WARN,INFO,DEBUG
logLevel: config.LOG_INFO
autoWatch: true
browsers: [ 'PhantomJS' ]
captureTimeout: 6000
singleRun: false
| true | ###
# test/karma.conf.coffee
#
# © 2014 PI:NAME:<NAME>END_PI
# See LICENSE for more details
#
# Configure karma
###
'use strict'
module.exports = (config) ->
config.set
basePath: '..'
frameworks: [ 'mocha', 'chai', 'sinon' ]
files: [
'lib/**/*.coffee'
'test/**/*.coffee'
]
exclude: []
preprocessors: {
'**/*.coffee': ['coffee']
}
reporters: [
'progress'
]
port: 9876
colors: true
# level of logging - possible: DISABLE,ERROR,WARN,INFO,DEBUG
logLevel: config.LOG_INFO
autoWatch: true
browsers: [ 'PhantomJS' ]
captureTimeout: 6000
singleRun: false
|
[
{
"context": "ckage_json.version}\"\n contactEmail: 'npmjs@commercetools.com'\n httpConfig:\n host: prog",
"end": 7527,
"score": 0.9999301433563232,
"start": 7504,
"tag": "EMAIL",
"value": "npmjs@commercetools.com"
},
{
"context": "ckage_json.vers... | src/coffee/run.coffee | daern91/sphere-node-product-csv-sync | 0 | _ = require 'underscore'
program = require 'commander'
prompt = require 'prompt'
Csv = require 'csv'
Promise = require 'bluebird'
fs = Promise.promisifyAll require('fs')
{ProjectCredentialsConfig} = require 'sphere-node-utils'
Importer = require './import'
Exporter = require './export'
CONS = require './constants'
GLOBALS = require './globals'
package_json = require '../package.json'
# CLI entry class for the product CSV sync tool. Wires up the commander
# program with the `import`, `state`, `export` and `template` sub-commands.
# Fixes in this revision:
#  - the import sub-command reported itself as "<name> - Export" in
#    `userAgentConfig.libraryName` (copy-paste from the export command;
#    the legacy `user_agent` string correctly says "Import") - now "Import".
#  - help-text typo "produt" -> "product".
module.exports = class

  # Split a comma separated CLI option value into a list of trimmed strings.
  @_list: (val) -> _.map val.split(','), (v) -> v.trim()

  # Build a product filter predicate from `opts`. When `opts.csv` is given,
  # the file must contain a single column headed by either the id header or
  # the SKU header; the resolved predicate then matches only listed products.
  # Without a CSV a match-all predicate is resolved. Rejects on an unknown
  # header; exits the process (code 2) if the file cannot be read.
  @_getFilterFunction: (opts) ->
    new Promise (resolve, reject) ->
      if opts.csv
        fs.readFileAsync opts.csv, 'utf8'
        .catch (err) ->
          console.error "Problems on reading identity file '#{opts.csv}': #{err}"
          process.exit 2
        .then (content) ->
          Csv().from.string(content)
          .to.array (data, count) ->
            # The first cell of the header row decides how products are matched.
            identHeader = data[0][0]
            if identHeader is CONS.HEADER_ID
              productIds = _.flatten _.rest data
              f = (product) -> _.contains productIds, product.id
              resolve f
            else if identHeader is CONS.HEADER_SKU
              skus = _.flatten _.rest data
              f = (product) ->
                product.variants or= []
                # Check the master variant together with all other variants.
                variants = [product.masterVariant].concat(product.variants)
                v = _.find variants, (variant) ->
                  _.contains skus, variant.sku
                v?
              resolve f
            else
              reject "CSV does not fit! You only need one column - either '#{CONS.HEADER_ID}' or '#{CONS.HEADER_SKU}'."
            # TODO: you may define a custom attribute to filter on
            # customAttributeName = ''
            # customAttributeType = ''
            # customAttributeValues = []
            # filterFunction = (product) ->
            #   product.variants or= []
            #   variants = [product.masterVariant].concat(product.variants)
            #   _.find variants, (variant) ->
            #     variant.attributes or= []
            #     _.find variant.attributes, (attribute) ->
            #       attribute.name is customAttributeName and
            #       # TODO: pass function for getValueOfType
            #       value = switch customAttributeType
            #         when CONS.ATTRIBUTE_ENUM, CONS.ATTRIBUTE_LENUM then attribute.value.key
            #         else attribute.value
            #       _.contains customAttributeValues, value
      else
        # No identity CSV given - process all products.
        f = (product) -> true
        resolve f

  # Resolve SPHERE.IO client credentials as a Promise. Precedence:
  # explicit access token, then clientId/clientSecret pair, then the
  # local project credentials config files.
  @_ensureCredentials: (argv) ->
    if argv.accessToken
      Promise.resolve
        projectKey: argv.projectKey
        accessToken: argv.accessToken
    else if argv.clientId and argv.clientSecret
      Promise.resolve
        projectKey: argv.projectKey
        credentials:
          clientId: argv.clientId
          clientSecret: argv.clientSecret
    else
      ProjectCredentialsConfig.create()
      .then (credentials) ->
        # NOTE(review): assumes enrichCredentials lets CLI-provided values
        # override the config-file values - confirm in sphere-node-utils.
        { project_key, client_id, client_secret } = credentials.enrichCredentials
          project_key: argv.projectKey
          client_id: argv.clientId
          client_secret: argv.clientSecret
        Promise.resolve
          projectKey: project_key
          credentials:
            clientId: client_id
            clientSecret: client_secret

  # Entry point: define global options and sub-commands, then parse `argv`.
  @run: (argv) ->
    # Print the given sub-command's help and quit with a failure code.
    _subCommandHelp = (cmd) ->
      program.emit(cmd, null, ['--help'])
      process.exit 1
    program
      .version package_json.version
      .usage '[globals] [sub-command] [options]'
      .option '-p, --projectKey <key>', 'your SPHERE.IO project-key'
      .option '-i, --clientId <id>', 'your OAuth client id for the SPHERE.IO API'
      .option '-s, --clientSecret <secret>', 'your OAuth client secret for the SPHERE.IO API'
      .option '--accessToken <token>', 'an OAuth access token for the SPHERE.IO API, used instead of clientId and clientSecret'
      .option '--sphereHost <host>', 'SPHERE.IO API host to connect to', 'https://api.commercetools.com'
      .option '--sphereAuthHost <host>', 'SPHERE.IO OAuth host to connect to', 'https://auth.commercetools.com'
      .option '--timeout [millis]', 'Set timeout for requests (default is 300000)', parseInt, 300000
      .option '--verbose', 'give more feedback during action'
      .option '--debug', 'give as many feedback as possible'
    # TODO: validate required options
    program
      .command 'import'
      .description 'Import your products from CSV into your SPHERE.IO project.'
      .option '-c, --csv <file>', 'CSV file containing products to import (alias for "in" parameter)'
      # add alias for csv parameter and "-i" is taken so use "-f" parameter
      .option '-f, --in <file>', 'File containing products to import'
      .option '-z, --zip', 'Input file is archived'
      .option '-x, --xlsx', 'Import from XLSX format'
      .option '-l, --language [lang]', 'Default language to using during import (for slug generation, category linking etc. - default is en)', 'en'
      .option '--csvDelimiter [delim]', 'CSV Delimiter that separates the cells (default is comma - ",")', ','
      .option '--multiValueDelimiter [delim]', 'Delimiter to separate values inside of a cell (default is semicolon - ";")', ';'
      .option '--customAttributesForCreationOnly <items>', 'List of comma-separated attributes to use when creating products (ignore when updating)', @_list
      .option '--continueOnProblems', 'When a product does not validate on the server side (400er response), ignore it and continue with the next products'
      .option '--suppressMissingHeaderWarning', 'Do not show which headers are missing per product type.'
      .option '--allowRemovalOfVariants', 'If given variants will be removed if there is no corresponding row in the CSV. Otherwise they are not touched.'
      .option '--mergeCategoryOrderHints', 'Merge category order hints instead of replacing them'
      .option '--publish', 'When given, all changes will be published immediately'
      .option '--updatesOnly', "Won't create any new products, only updates existing"
      .option '--dryRun', 'Will list all action that would be triggered, but will not POST them to SPHERE.IO'
      .option '-m, --matchBy [value]', 'Product attribute name which will be used to match products. Possible values: id, slug, sku, <custom_attribute_name>. Default: id. Localized attribute types are not supported for <custom_attribute_name> option', 'id'
      .option '-e, --encoding [encoding]', 'Encoding used when reading data from input file | default: utf8', 'utf8'
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --csv <file>'
      .action (opts) =>
        GLOBALS.DEFAULT_LANGUAGE = opts.language
        GLOBALS.DELIM_MULTI_VALUE = opts.multiValueDelimiter ? GLOBALS.DELIM_MULTI_VALUE
        return _subCommandHelp('import') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) ->
          options =
            timeout: program.timeout
            show_progress: true
            user_agent: "#{package_json.name} - Import - #{package_json.version}"
            csvDelimiter: opts.csvDelimiter
            encoding: opts.encoding
            importFormat: if opts.xlsx then 'xlsx' else 'csv'
            debug: Boolean(opts.parent.debug)
            mergeCategoryOrderHints: Boolean(opts.mergeCategoryOrderHints)
            authConfig: authConfig
            userAgentConfig:
              # was "- Export" (copy-paste bug); this is the import command
              libraryName: "#{package_json.name} - Import"
              libraryVersion: "#{package_json.version}"
              contactEmail: 'npmjs@commercetools.com'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          options.authConfig.host = program.sphereAuthHost
          options.continueOnProblems = opts.continueOnProblems or false
          # if program.verbose
          #   options.logConfig.streams = [
          #     {level: 'info', stream: process.stdout}
          #   ]
          # if program.debug
          #   options.logConfig.streams = [
          #     {level: 'debug', stream: process.stdout}
          #   ]
          importer = new Importer options
          importer.blackListedCustomAttributesForUpdate = opts.customAttributesForCreationOnly or []
          importer.suppressMissingHeaderWarning = opts.suppressMissingHeaderWarning
          importer.allowRemovalOfVariants = opts.allowRemovalOfVariants
          importer.publishProducts = opts.publish
          importer.updatesOnly = true if opts.updatesOnly
          importer.dryRun = true if opts.dryRun
          importer.matchBy = opts.matchBy
          # params: importManager (filePath, isArchived)
          importer.importManager opts.in || opts.csv, opts.zip
          .then (result) ->
            console.warn result
            process.exit 0
          .catch (err) ->
            console.error err
            process.exit 1
        .catch (err) ->
          console.error "Problems on reading file '#{opts.csv}': #{err}"
          process.exit 2
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('import')
    program
      .command 'state'
      .description 'Allows to publish, unpublish or delete (all) products of your SPHERE.IO project.'
      .option '--changeTo <publish,unpublish,delete>', 'publish unpublished products / unpublish published products / delete unpublished products'
      .option '--csv <file>', 'processes products defined in a CSV file by either "sku" or "id". Otherwise all products are processed.'
      .option '--continueOnProblems', 'When a there is a problem on changing a product\'s state (400er response), ignore it and continue with the next products'
      .option '--forceDelete', 'whether to force deletion without asking for confirmation', false
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --changeTo <state>'
      .action (opts) =>
        return _subCommandHelp('state') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) =>
          options =
            timeout: program.timeout
            show_progress: true
            authConfig: authConfig
            userAgentConfig:
              libraryName: "#{package_json.name} - State"
              libraryVersion: "#{package_json.version}"
              contactEmail: 'npmjs@commercetools.com'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          # logConfig:
          #   streams: [
          #     {level: 'warn', stream: process.stdout}
          #   ]
          options.authConfig.host = program.sphereAuthHost
          # 'delete' implies publishing first so unpublished copies are removed too
          remove = opts.changeTo is 'delete'
          publish = switch opts.changeTo
            when 'publish','delete' then true
            when 'unpublish' then false
            else
              console.error "Unknown argument '#{opts.changeTo}' for option changeTo!"
              process.exit 3
          run = =>
            @_getFilterFunction(opts)
            .then (filterFunction) ->
              importer = new Importer options
              importer.continueOnProblems = opts.continueOnProblems
              importer.changeState(publish, remove, filterFunction)
            .then (result) ->
              console.warn result
              process.exit 0
            .catch (err) ->
              if err.stack then console.error(err.stack)
              console.error err
              process.exit 1
          if remove
            if opts.forceDelete
              run options
            else
              # interactive confirmation before destructive deletion
              prompt.start()
              property =
                name: 'ask'
                message: 'Do you really want to delete products?'
                validator: /y[es]*|n[o]?/
                warning: 'Please answer with yes or no'
                default: 'no'
              prompt.get property, (err, result) ->
                if _.isString(result.ask) and result.ask.match(/y(es){0,1}/i)
                  run options
                else
                  console.error 'Aborted.'
                  process.exit 9
          else
            run options
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('state')
    program
      .command 'export'
      .description 'Export your products from your SPHERE.IO project to CSV using.'
      .option '-t, --template <file>', 'CSV file containing your header that defines what you want to export'
      .option '-o, --out <file>', 'Path to the file the exporter will write the resulting CSV in'
      .option '-x, --xlsx', 'Export in XLSX format'
      .option '-f, --fullExport', 'Do a full export.'
      .option '-q, --queryString <query>', 'Query string to specify the sub-set of products to export'
      .option '-l, --language [lang]', 'Language used on export for localised attributes (except lenums) and category names (default is en)'
      .option '--queryEncoded', 'Whether the given query string is already encoded or not', false
      .option '--current', 'Will export current product version instead of staged one', false
      .option '--fillAllRows', 'When given product attributes like name will be added to each variant row.'
      .option '--categoryBy <name>', 'Define which identifier should be used for the categories column - either slug or externalId. If nothing given the named path is used.'
      .option '--categoryOrderHintBy <name>', 'Define which identifier should be used for the categoryOrderHints column - either id or externalId. If nothing given the category id is used.', 'id'
      .option '--filterVariantsByAttributes <query>', 'Query string to filter variants of products'
      .option '--filterPrices <query>', 'Query string to filter prices of products'
      .option '--templateDelimiter <delimiter>', 'Delimiter used in template | default: ,', ","
      .option '--outputDelimiter <delimiter>', 'Delimiter used to separate cells in output file | default: ,', ","
      .option '-e, --encoding [encoding]', 'Encoding used when saving data to output file | default: utf8', 'utf8'
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --template <file> --out <file>'
      .action (opts) =>
        if opts.language
          GLOBALS.DEFAULT_LANGUAGE = opts.language
        return _subCommandHelp('export') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) ->
          options =
            encoding: opts.encoding
            exportFormat: if opts.xlsx then 'xlsx' else 'csv'
            outputDelimiter: opts.outputDelimiter
            templateDelimiter: opts.templateDelimiter
            fillAllRows: opts.fillAllRows
            categoryBy: opts.categoryBy
            categoryOrderHintBy: opts.categoryOrderHintBy || 'id'
            debug: Boolean(opts.parent.debug)
            export:
              show_progress: true
              queryString: opts.queryString
              isQueryEncoded: opts.queryEncoded or false
              filterVariantsByAttributes: opts.filterVariantsByAttributes
              filterPrices: opts.filterPrices
            authConfig: authConfig
            userAgentConfig:
              libraryName: "#{package_json.name} - Export"
              libraryVersion: "#{package_json.version}"
              contactEmail: 'npmjs@commercetools.com'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          options.authConfig.host = program.sphereAuthHost
          exporter = new Exporter options
          # Template source: none for a full export, the given file, or stdin.
          (if opts.fullExport then Promise.resolve false
          else if opts.template? then fs.readFileAsync opts.template, 'utf8'
          else new Promise (resolve) ->
            console.warn 'Reading from stdin...'
            chunks = []
            process.stdin.on 'data', (chunk) -> chunks.push chunk
            process.stdin.on 'end', () -> resolve Buffer.concat chunks
          )
          .then (content) ->
            (if content
              exporter.exportDefault(content, opts.out, not opts.current)
            else
              exporter.exportFull(opts.out, not opts.current)
            )
            .then (result) ->
              console.warn result
              process.exit 0
            .catch (err) ->
              if err.stack then console.error(err.stack)
              console.error err
              process.exit 1
          .catch (err) ->
            console.error "Problems on reading template input: #{err}"
            console.error err
            process.exit 2
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('export')
    program
      .command 'template'
      .description 'Create a template for a product type of your SPHERE.IO project.'
      .option '-o, --out <file>', 'Path to the file the exporter will write the resulting CSV in'
      .option '--outputDelimiter <delimiter>', 'Delimiter used to separate cells in output file | default: ,', ","
      .option '-l, --languages [lang,lang]', 'List of languages to use for template (default is [en])', @_list, ['en']
      .option '--all', 'Generates one template for all product types - if not given you will be ask which product type to use'
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --out <file>'
      .action (opts) =>
        return _subCommandHelp('template') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) ->
          options =
            outputDelimiter: opts.outputDelimiter
            timeout: program.timeout
            show_progress: true
            authConfig: authConfig
            userAgentConfig:
              libraryName: "#{package_json.name} - Template"
              libraryVersion: "#{package_json.version}"
              contactEmail: 'npmjs@commercetools.com'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          # logConfig:
          #   streams: [
          #     {level: 'warn', stream: process.stdout}
          #   ]
          options.authConfig.host = program.sphereAuthHost
          exporter = new Exporter options
          exporter.createTemplate(opts.languages, opts.out, opts.all)
          .then (result) ->
            console.warn result
            process.exit 0
          .catch (err) ->
            console.error err
            process.exit 1
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('template')
    program.parse argv
    program.help() if program.args.length is 0
# Auto-run the CLI with the current process arguments when this file is loaded.
module.exports.run process.argv
| 185515 | _ = require 'underscore'
program = require 'commander'
prompt = require 'prompt'
Csv = require 'csv'
Promise = require 'bluebird'
fs = Promise.promisifyAll require('fs')
{ProjectCredentialsConfig} = require 'sphere-node-utils'
Importer = require './import'
Exporter = require './export'
CONS = require './constants'
GLOBALS = require './globals'
package_json = require '../package.json'
# CLI entry class for the product CSV sync tool. Wires up the commander
# program with the `import`, `state`, `export` and `template` sub-commands.
# Fixes in this revision:
#  - the import sub-command reported itself as "<name> - Export" in
#    `userAgentConfig.libraryName` (copy-paste from the export command;
#    the legacy `user_agent` string correctly says "Import") - now "Import".
#  - help-text typo "produt" -> "product".
module.exports = class

  # Split a comma separated CLI option value into a list of trimmed strings.
  @_list: (val) -> _.map val.split(','), (v) -> v.trim()

  # Build a product filter predicate from `opts`. When `opts.csv` is given,
  # the file must contain a single column headed by either the id header or
  # the SKU header; the resolved predicate then matches only listed products.
  # Without a CSV a match-all predicate is resolved. Rejects on an unknown
  # header; exits the process (code 2) if the file cannot be read.
  @_getFilterFunction: (opts) ->
    new Promise (resolve, reject) ->
      if opts.csv
        fs.readFileAsync opts.csv, 'utf8'
        .catch (err) ->
          console.error "Problems on reading identity file '#{opts.csv}': #{err}"
          process.exit 2
        .then (content) ->
          Csv().from.string(content)
          .to.array (data, count) ->
            # The first cell of the header row decides how products are matched.
            identHeader = data[0][0]
            if identHeader is CONS.HEADER_ID
              productIds = _.flatten _.rest data
              f = (product) -> _.contains productIds, product.id
              resolve f
            else if identHeader is CONS.HEADER_SKU
              skus = _.flatten _.rest data
              f = (product) ->
                product.variants or= []
                # Check the master variant together with all other variants.
                variants = [product.masterVariant].concat(product.variants)
                v = _.find variants, (variant) ->
                  _.contains skus, variant.sku
                v?
              resolve f
            else
              reject "CSV does not fit! You only need one column - either '#{CONS.HEADER_ID}' or '#{CONS.HEADER_SKU}'."
            # TODO: you may define a custom attribute to filter on
            # customAttributeName = ''
            # customAttributeType = ''
            # customAttributeValues = []
            # filterFunction = (product) ->
            #   product.variants or= []
            #   variants = [product.masterVariant].concat(product.variants)
            #   _.find variants, (variant) ->
            #     variant.attributes or= []
            #     _.find variant.attributes, (attribute) ->
            #       attribute.name is customAttributeName and
            #       # TODO: pass function for getValueOfType
            #       value = switch customAttributeType
            #         when CONS.ATTRIBUTE_ENUM, CONS.ATTRIBUTE_LENUM then attribute.value.key
            #         else attribute.value
            #       _.contains customAttributeValues, value
      else
        # No identity CSV given - process all products.
        f = (product) -> true
        resolve f

  # Resolve SPHERE.IO client credentials as a Promise. Precedence:
  # explicit access token, then clientId/clientSecret pair, then the
  # local project credentials config files.
  @_ensureCredentials: (argv) ->
    if argv.accessToken
      Promise.resolve
        projectKey: argv.projectKey
        accessToken: argv.accessToken
    else if argv.clientId and argv.clientSecret
      Promise.resolve
        projectKey: argv.projectKey
        credentials:
          clientId: argv.clientId
          clientSecret: argv.clientSecret
    else
      ProjectCredentialsConfig.create()
      .then (credentials) ->
        # NOTE(review): assumes enrichCredentials lets CLI-provided values
        # override the config-file values - confirm in sphere-node-utils.
        { project_key, client_id, client_secret } = credentials.enrichCredentials
          project_key: argv.projectKey
          client_id: argv.clientId
          client_secret: argv.clientSecret
        Promise.resolve
          projectKey: project_key
          credentials:
            clientId: client_id
            clientSecret: client_secret

  # Entry point: define global options and sub-commands, then parse `argv`.
  @run: (argv) ->
    # Print the given sub-command's help and quit with a failure code.
    _subCommandHelp = (cmd) ->
      program.emit(cmd, null, ['--help'])
      process.exit 1
    program
      .version package_json.version
      .usage '[globals] [sub-command] [options]'
      .option '-p, --projectKey <key>', 'your SPHERE.IO project-key'
      .option '-i, --clientId <id>', 'your OAuth client id for the SPHERE.IO API'
      .option '-s, --clientSecret <secret>', 'your OAuth client secret for the SPHERE.IO API'
      .option '--accessToken <token>', 'an OAuth access token for the SPHERE.IO API, used instead of clientId and clientSecret'
      .option '--sphereHost <host>', 'SPHERE.IO API host to connect to', 'https://api.commercetools.com'
      .option '--sphereAuthHost <host>', 'SPHERE.IO OAuth host to connect to', 'https://auth.commercetools.com'
      .option '--timeout [millis]', 'Set timeout for requests (default is 300000)', parseInt, 300000
      .option '--verbose', 'give more feedback during action'
      .option '--debug', 'give as many feedback as possible'
    # TODO: validate required options
    program
      .command 'import'
      .description 'Import your products from CSV into your SPHERE.IO project.'
      .option '-c, --csv <file>', 'CSV file containing products to import (alias for "in" parameter)'
      # add alias for csv parameter and "-i" is taken so use "-f" parameter
      .option '-f, --in <file>', 'File containing products to import'
      .option '-z, --zip', 'Input file is archived'
      .option '-x, --xlsx', 'Import from XLSX format'
      .option '-l, --language [lang]', 'Default language to using during import (for slug generation, category linking etc. - default is en)', 'en'
      .option '--csvDelimiter [delim]', 'CSV Delimiter that separates the cells (default is comma - ",")', ','
      .option '--multiValueDelimiter [delim]', 'Delimiter to separate values inside of a cell (default is semicolon - ";")', ';'
      .option '--customAttributesForCreationOnly <items>', 'List of comma-separated attributes to use when creating products (ignore when updating)', @_list
      .option '--continueOnProblems', 'When a product does not validate on the server side (400er response), ignore it and continue with the next products'
      .option '--suppressMissingHeaderWarning', 'Do not show which headers are missing per product type.'
      .option '--allowRemovalOfVariants', 'If given variants will be removed if there is no corresponding row in the CSV. Otherwise they are not touched.'
      .option '--mergeCategoryOrderHints', 'Merge category order hints instead of replacing them'
      .option '--publish', 'When given, all changes will be published immediately'
      .option '--updatesOnly', "Won't create any new products, only updates existing"
      .option '--dryRun', 'Will list all action that would be triggered, but will not POST them to SPHERE.IO'
      .option '-m, --matchBy [value]', 'Product attribute name which will be used to match products. Possible values: id, slug, sku, <custom_attribute_name>. Default: id. Localized attribute types are not supported for <custom_attribute_name> option', 'id'
      .option '-e, --encoding [encoding]', 'Encoding used when reading data from input file | default: utf8', 'utf8'
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --csv <file>'
      .action (opts) =>
        GLOBALS.DEFAULT_LANGUAGE = opts.language
        GLOBALS.DELIM_MULTI_VALUE = opts.multiValueDelimiter ? GLOBALS.DELIM_MULTI_VALUE
        return _subCommandHelp('import') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) ->
          options =
            timeout: program.timeout
            show_progress: true
            user_agent: "#{package_json.name} - Import - #{package_json.version}"
            csvDelimiter: opts.csvDelimiter
            encoding: opts.encoding
            importFormat: if opts.xlsx then 'xlsx' else 'csv'
            debug: Boolean(opts.parent.debug)
            mergeCategoryOrderHints: Boolean(opts.mergeCategoryOrderHints)
            authConfig: authConfig
            userAgentConfig:
              # was "- Export" (copy-paste bug); this is the import command
              libraryName: "#{package_json.name} - Import"
              libraryVersion: "#{package_json.version}"
              contactEmail: '<EMAIL>'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          options.authConfig.host = program.sphereAuthHost
          options.continueOnProblems = opts.continueOnProblems or false
          # if program.verbose
          #   options.logConfig.streams = [
          #     {level: 'info', stream: process.stdout}
          #   ]
          # if program.debug
          #   options.logConfig.streams = [
          #     {level: 'debug', stream: process.stdout}
          #   ]
          importer = new Importer options
          importer.blackListedCustomAttributesForUpdate = opts.customAttributesForCreationOnly or []
          importer.suppressMissingHeaderWarning = opts.suppressMissingHeaderWarning
          importer.allowRemovalOfVariants = opts.allowRemovalOfVariants
          importer.publishProducts = opts.publish
          importer.updatesOnly = true if opts.updatesOnly
          importer.dryRun = true if opts.dryRun
          importer.matchBy = opts.matchBy
          # params: importManager (filePath, isArchived)
          importer.importManager opts.in || opts.csv, opts.zip
          .then (result) ->
            console.warn result
            process.exit 0
          .catch (err) ->
            console.error err
            process.exit 1
        .catch (err) ->
          console.error "Problems on reading file '#{opts.csv}': #{err}"
          process.exit 2
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('import')
    program
      .command 'state'
      .description 'Allows to publish, unpublish or delete (all) products of your SPHERE.IO project.'
      .option '--changeTo <publish,unpublish,delete>', 'publish unpublished products / unpublish published products / delete unpublished products'
      .option '--csv <file>', 'processes products defined in a CSV file by either "sku" or "id". Otherwise all products are processed.'
      .option '--continueOnProblems', 'When a there is a problem on changing a product\'s state (400er response), ignore it and continue with the next products'
      .option '--forceDelete', 'whether to force deletion without asking for confirmation', false
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --changeTo <state>'
      .action (opts) =>
        return _subCommandHelp('state') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) =>
          options =
            timeout: program.timeout
            show_progress: true
            authConfig: authConfig
            userAgentConfig:
              libraryName: "#{package_json.name} - State"
              libraryVersion: "#{package_json.version}"
              contactEmail: '<EMAIL>'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          # logConfig:
          #   streams: [
          #     {level: 'warn', stream: process.stdout}
          #   ]
          options.authConfig.host = program.sphereAuthHost
          # 'delete' implies publishing first so unpublished copies are removed too
          remove = opts.changeTo is 'delete'
          publish = switch opts.changeTo
            when 'publish','delete' then true
            when 'unpublish' then false
            else
              console.error "Unknown argument '#{opts.changeTo}' for option changeTo!"
              process.exit 3
          run = =>
            @_getFilterFunction(opts)
            .then (filterFunction) ->
              importer = new Importer options
              importer.continueOnProblems = opts.continueOnProblems
              importer.changeState(publish, remove, filterFunction)
            .then (result) ->
              console.warn result
              process.exit 0
            .catch (err) ->
              if err.stack then console.error(err.stack)
              console.error err
              process.exit 1
          if remove
            if opts.forceDelete
              run options
            else
              # interactive confirmation before destructive deletion
              prompt.start()
              property =
                name: 'ask'
                message: 'Do you really want to delete products?'
                validator: /y[es]*|n[o]?/
                warning: 'Please answer with yes or no'
                default: 'no'
              prompt.get property, (err, result) ->
                if _.isString(result.ask) and result.ask.match(/y(es){0,1}/i)
                  run options
                else
                  console.error 'Aborted.'
                  process.exit 9
          else
            run options
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('state')
    program
      .command 'export'
      .description 'Export your products from your SPHERE.IO project to CSV using.'
      .option '-t, --template <file>', 'CSV file containing your header that defines what you want to export'
      .option '-o, --out <file>', 'Path to the file the exporter will write the resulting CSV in'
      .option '-x, --xlsx', 'Export in XLSX format'
      .option '-f, --fullExport', 'Do a full export.'
      .option '-q, --queryString <query>', 'Query string to specify the sub-set of products to export'
      .option '-l, --language [lang]', 'Language used on export for localised attributes (except lenums) and category names (default is en)'
      .option '--queryEncoded', 'Whether the given query string is already encoded or not', false
      .option '--current', 'Will export current product version instead of staged one', false
      .option '--fillAllRows', 'When given product attributes like name will be added to each variant row.'
      .option '--categoryBy <name>', 'Define which identifier should be used for the categories column - either slug or externalId. If nothing given the named path is used.'
      .option '--categoryOrderHintBy <name>', 'Define which identifier should be used for the categoryOrderHints column - either id or externalId. If nothing given the category id is used.', 'id'
      .option '--filterVariantsByAttributes <query>', 'Query string to filter variants of products'
      .option '--filterPrices <query>', 'Query string to filter prices of products'
      .option '--templateDelimiter <delimiter>', 'Delimiter used in template | default: ,', ","
      .option '--outputDelimiter <delimiter>', 'Delimiter used to separate cells in output file | default: ,', ","
      .option '-e, --encoding [encoding]', 'Encoding used when saving data to output file | default: utf8', 'utf8'
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --template <file> --out <file>'
      .action (opts) =>
        if opts.language
          GLOBALS.DEFAULT_LANGUAGE = opts.language
        return _subCommandHelp('export') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) ->
          options =
            encoding: opts.encoding
            exportFormat: if opts.xlsx then 'xlsx' else 'csv'
            outputDelimiter: opts.outputDelimiter
            templateDelimiter: opts.templateDelimiter
            fillAllRows: opts.fillAllRows
            categoryBy: opts.categoryBy
            categoryOrderHintBy: opts.categoryOrderHintBy || 'id'
            debug: Boolean(opts.parent.debug)
            export:
              show_progress: true
              queryString: opts.queryString
              isQueryEncoded: opts.queryEncoded or false
              filterVariantsByAttributes: opts.filterVariantsByAttributes
              filterPrices: opts.filterPrices
            authConfig: authConfig
            userAgentConfig:
              libraryName: "#{package_json.name} - Export"
              libraryVersion: "#{package_json.version}"
              contactEmail: '<EMAIL>'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          options.authConfig.host = program.sphereAuthHost
          exporter = new Exporter options
          # Template source: none for a full export, the given file, or stdin.
          (if opts.fullExport then Promise.resolve false
          else if opts.template? then fs.readFileAsync opts.template, 'utf8'
          else new Promise (resolve) ->
            console.warn 'Reading from stdin...'
            chunks = []
            process.stdin.on 'data', (chunk) -> chunks.push chunk
            process.stdin.on 'end', () -> resolve Buffer.concat chunks
          )
          .then (content) ->
            (if content
              exporter.exportDefault(content, opts.out, not opts.current)
            else
              exporter.exportFull(opts.out, not opts.current)
            )
            .then (result) ->
              console.warn result
              process.exit 0
            .catch (err) ->
              if err.stack then console.error(err.stack)
              console.error err
              process.exit 1
          .catch (err) ->
            console.error "Problems on reading template input: #{err}"
            console.error err
            process.exit 2
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('export')
    program
      .command 'template'
      .description 'Create a template for a product type of your SPHERE.IO project.'
      .option '-o, --out <file>', 'Path to the file the exporter will write the resulting CSV in'
      .option '--outputDelimiter <delimiter>', 'Delimiter used to separate cells in output file | default: ,', ","
      .option '-l, --languages [lang,lang]', 'List of languages to use for template (default is [en])', @_list, ['en']
      .option '--all', 'Generates one template for all product types - if not given you will be ask which product type to use'
      .usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --out <file>'
      .action (opts) =>
        return _subCommandHelp('template') unless program.projectKey
        @_ensureCredentials(program)
        .then (authConfig) ->
          options =
            outputDelimiter: opts.outputDelimiter
            timeout: program.timeout
            show_progress: true
            authConfig: authConfig
            userAgentConfig:
              libraryName: "#{package_json.name} - Template"
              libraryVersion: "#{package_json.version}"
              contactEmail: '<EMAIL>'
            httpConfig:
              host: program.sphereHost
              enableRetry: true
          # logConfig:
          #   streams: [
          #     {level: 'warn', stream: process.stdout}
          #   ]
          options.authConfig.host = program.sphereAuthHost
          exporter = new Exporter options
          exporter.createTemplate(opts.languages, opts.out, opts.all)
          .then (result) ->
            console.warn result
            process.exit 0
          .catch (err) ->
            console.error err
            process.exit 1
        .catch (err) ->
          console.error "Problems on getting client credentials from config files: #{err}"
          _subCommandHelp('template')
    program.parse argv
    program.help() if program.args.length is 0
# Auto-run the CLI with the current process arguments when this file is loaded.
module.exports.run process.argv
| true | _ = require 'underscore'
program = require 'commander'
prompt = require 'prompt'
Csv = require 'csv'
Promise = require 'bluebird'
fs = Promise.promisifyAll require('fs')
{ProjectCredentialsConfig} = require 'sphere-node-utils'
Importer = require './import'
Exporter = require './export'
CONS = require './constants'
GLOBALS = require './globals'
package_json = require '../package.json'
module.exports = class
@_list: (val) -> _.map val.split(','), (v) -> v.trim()
@_getFilterFunction: (opts) ->
new Promise (resolve, reject) ->
if opts.csv
fs.readFileAsync opts.csv, 'utf8'
.catch (err) ->
console.error "Problems on reading identity file '#{opts.csv}': #{err}"
process.exit 2
.then (content) ->
Csv().from.string(content)
.to.array (data, count) ->
identHeader = data[0][0]
if identHeader is CONS.HEADER_ID
productIds = _.flatten _.rest data
f = (product) -> _.contains productIds, product.id
resolve f
else if identHeader is CONS.HEADER_SKU
skus = _.flatten _.rest data
f = (product) ->
product.variants or= []
variants = [product.masterVariant].concat(product.variants)
v = _.find variants, (variant) ->
_.contains skus, variant.sku
v?
resolve f
else
reject "CSV does not fit! You only need one column - either '#{CONS.HEADER_ID}' or '#{CONS.HEADER_SKU}'."
# TODO: you may define a custom attribute to filter on
# customAttributeName = ''
# customAttributeType = ''
# customAttributeValues = []
# filterFunction = (product) ->
# product.variants or= []
# variants = [product.masterVariant].concat(product.variants)
# _.find variants, (variant) ->
# variant.attributes or= []
# _.find variant.attributes, (attribute) ->
# attribute.name is customAttributeName and
# # TODO: pass function for getValueOfType
# value = switch customAttributeType
# when CONS.ATTRIBUTE_ENUM, CONS.ATTRIBUTE_LENUM then attribute.value.key
# else attribute.value
# _.contains customAttributeValues, value
else
f = (product) -> true
resolve f
@_ensureCredentials: (argv) ->
if argv.accessToken
Promise.resolve
projectKey: argv.projectKey
accessToken: argv.accessToken
else if argv.clientId and argv.clientSecret
Promise.resolve
projectKey: argv.projectKey
credentials:
clientId: argv.clientId
clientSecret: argv.clientSecret
else
ProjectCredentialsConfig.create()
.then (credentials) ->
{ project_key, client_id, client_secret } = credentials.enrichCredentials
project_key: argv.projectKey
client_id: argv.clientId
client_secret: argv.clientSecret
Promise.resolve
projectKey: project_key
credentials:
clientId: client_id
clientSecret: client_secret
@run: (argv) ->
_subCommandHelp = (cmd) ->
program.emit(cmd, null, ['--help'])
process.exit 1
program
.version package_json.version
.usage '[globals] [sub-command] [options]'
.option '-p, --projectKey <key>', 'your SPHERE.IO project-key'
.option '-i, --clientId <id>', 'your OAuth client id for the SPHERE.IO API'
.option '-s, --clientSecret <secret>', 'your OAuth client secret for the SPHERE.IO API'
.option '--accessToken <token>', 'an OAuth access token for the SPHERE.IO API, used instead of clientId and clientSecret'
.option '--sphereHost <host>', 'SPHERE.IO API host to connect to', 'https://api.commercetools.com'
.option '--sphereAuthHost <host>', 'SPHERE.IO OAuth host to connect to', 'https://auth.commercetools.com'
.option '--timeout [millis]', 'Set timeout for requests (default is 300000)', parseInt, 300000
.option '--verbose', 'give more feedback during action'
.option '--debug', 'give as many feedback as possible'
# TODO: validate required options
program
.command 'import'
.description 'Import your products from CSV into your SPHERE.IO project.'
.option '-c, --csv <file>', 'CSV file containing products to import (alias for "in" parameter)'
# add alias for csv parameter and "-i" is taken so use "-f" parameter
.option '-f, --in <file>', 'File containing products to import'
.option '-z, --zip', 'Input file is archived'
.option '-x, --xlsx', 'Import from XLSX format'
.option '-l, --language [lang]', 'Default language to using during import (for slug generation, category linking etc. - default is en)', 'en'
.option '--csvDelimiter [delim]', 'CSV Delimiter that separates the cells (default is comma - ",")', ','
.option '--multiValueDelimiter [delim]', 'Delimiter to separate values inside of a cell (default is semicolon - ";")', ';'
.option '--customAttributesForCreationOnly <items>', 'List of comma-separated attributes to use when creating products (ignore when updating)', @_list
.option '--continueOnProblems', 'When a product does not validate on the server side (400er response), ignore it and continue with the next products'
.option '--suppressMissingHeaderWarning', 'Do not show which headers are missing per produt type.'
.option '--allowRemovalOfVariants', 'If given variants will be removed if there is no corresponding row in the CSV. Otherwise they are not touched.'
.option '--mergeCategoryOrderHints', 'Merge category order hints instead of replacing them'
.option '--publish', 'When given, all changes will be published immediately'
.option '--updatesOnly', "Won't create any new products, only updates existing"
.option '--dryRun', 'Will list all action that would be triggered, but will not POST them to SPHERE.IO'
.option '-m, --matchBy [value]', 'Product attribute name which will be used to match products. Possible values: id, slug, sku, <custom_attribute_name>. Default: id. Localized attribute types are not supported for <custom_attribute_name> option', 'id'
.option '-e, --encoding [encoding]', 'Encoding used when reading data from input file | default: utf8', 'utf8'
.usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --csv <file>'
.action (opts) =>
GLOBALS.DEFAULT_LANGUAGE = opts.language
GLOBALS.DELIM_MULTI_VALUE = opts.multiValueDelimiter ? GLOBALS.DELIM_MULTI_VALUE
return _subCommandHelp('import') unless program.projectKey
@_ensureCredentials(program)
.then (authConfig) ->
options =
timeout: program.timeout
show_progress: true
user_agent: "#{package_json.name} - Import - #{package_json.version}"
csvDelimiter: opts.csvDelimiter
encoding: opts.encoding
importFormat: if opts.xlsx then 'xlsx' else 'csv'
debug: Boolean(opts.parent.debug)
mergeCategoryOrderHints: Boolean(opts.mergeCategoryOrderHints)
authConfig: authConfig
userAgentConfig:
libraryName: "#{package_json.name} - Export"
libraryVersion: "#{package_json.version}"
contactEmail: 'PI:EMAIL:<EMAIL>END_PI'
httpConfig:
host: program.sphereHost
enableRetry: true
options.authConfig.host = program.sphereAuthHost
options.continueOnProblems = opts.continueOnProblems or false
# if program.verbose
# options.logConfig.streams = [
# {level: 'info', stream: process.stdout}
# ]
# if program.debug
# options.logConfig.streams = [
# {level: 'debug', stream: process.stdout}
# ]
importer = new Importer options
importer.blackListedCustomAttributesForUpdate = opts.customAttributesForCreationOnly or []
importer.suppressMissingHeaderWarning = opts.suppressMissingHeaderWarning
importer.allowRemovalOfVariants = opts.allowRemovalOfVariants
importer.publishProducts = opts.publish
importer.updatesOnly = true if opts.updatesOnly
importer.dryRun = true if opts.dryRun
importer.matchBy = opts.matchBy
# params: importManager (filePath, isArchived)
importer.importManager opts.in || opts.csv, opts.zip
.then (result) ->
console.warn result
process.exit 0
.catch (err) ->
console.error err
process.exit 1
.catch (err) ->
console.error "Problems on reading file '#{opts.csv}': #{err}"
process.exit 2
.catch (err) ->
console.error "Problems on getting client credentials from config files: #{err}"
_subCommandHelp('import')
program
.command 'state'
.description 'Allows to publish, unpublish or delete (all) products of your SPHERE.IO project.'
.option '--changeTo <publish,unpublish,delete>', 'publish unpublished products / unpublish published products / delete unpublished products'
.option '--csv <file>', 'processes products defined in a CSV file by either "sku" or "id". Otherwise all products are processed.'
.option '--continueOnProblems', 'When a there is a problem on changing a product\'s state (400er response), ignore it and continue with the next products'
.option '--forceDelete', 'whether to force deletion without asking for confirmation', false
.usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --changeTo <state>'
.action (opts) =>
return _subCommandHelp('state') unless program.projectKey
@_ensureCredentials(program)
.then (authConfig) =>
options =
timeout: program.timeout
show_progress: true
authConfig: authConfig
userAgentConfig:
libraryName: "#{package_json.name} - State"
libraryVersion: "#{package_json.version}"
contactEmail: 'PI:EMAIL:<EMAIL>END_PI'
httpConfig:
host: program.sphereHost
enableRetry: true
# logConfig:
# streams: [
# {level: 'warn', stream: process.stdout}
# ]
options.authConfig.host = program.sphereAuthHost
remove = opts.changeTo is 'delete'
publish = switch opts.changeTo
when 'publish','delete' then true
when 'unpublish' then false
else
console.error "Unknown argument '#{opts.changeTo}' for option changeTo!"
process.exit 3
run = =>
@_getFilterFunction(opts)
.then (filterFunction) ->
importer = new Importer options
importer.continueOnProblems = opts.continueOnProblems
importer.changeState(publish, remove, filterFunction)
.then (result) ->
console.warn result
process.exit 0
.catch (err) ->
if err.stack then console.error(err.stack)
console.error err
process.exit 1
if remove
if opts.forceDelete
run options
else
prompt.start()
property =
name: 'ask'
message: 'Do you really want to delete products?'
validator: /y[es]*|n[o]?/
warning: 'Please answer with yes or no'
default: 'no'
prompt.get property, (err, result) ->
if _.isString(result.ask) and result.ask.match(/y(es){0,1}/i)
run options
else
console.error 'Aborted.'
process.exit 9
else
run options
.catch (err) ->
console.error "Problems on getting client credentials from config files: #{err}"
_subCommandHelp('state')
program
.command 'export'
.description 'Export your products from your SPHERE.IO project to CSV using.'
.option '-t, --template <file>', 'CSV file containing your header that defines what you want to export'
.option '-o, --out <file>', 'Path to the file the exporter will write the resulting CSV in'
.option '-x, --xlsx', 'Export in XLSX format'
.option '-f, --fullExport', 'Do a full export.'
.option '-q, --queryString <query>', 'Query string to specify the sub-set of products to export'
.option '-l, --language [lang]', 'Language used on export for localised attributes (except lenums) and category names (default is en)'
.option '--queryEncoded', 'Whether the given query string is already encoded or not', false
.option '--current', 'Will export current product version instead of staged one', false
.option '--fillAllRows', 'When given product attributes like name will be added to each variant row.'
.option '--categoryBy <name>', 'Define which identifier should be used for the categories column - either slug or externalId. If nothing given the named path is used.'
.option '--categoryOrderHintBy <name>', 'Define which identifier should be used for the categoryOrderHints column - either id or externalId. If nothing given the category id is used.', 'id'
.option '--filterVariantsByAttributes <query>', 'Query string to filter variants of products'
.option '--filterPrices <query>', 'Query string to filter prices of products'
.option '--templateDelimiter <delimiter>', 'Delimiter used in template | default: ,', ","
.option '--outputDelimiter <delimiter>', 'Delimiter used to separate cells in output file | default: ,', ","
.option '-e, --encoding [encoding]', 'Encoding used when saving data to output file | default: utf8', 'utf8'
.usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --template <file> --out <file>'
.action (opts) =>
if opts.language
GLOBALS.DEFAULT_LANGUAGE = opts.language
return _subCommandHelp('export') unless program.projectKey
@_ensureCredentials(program)
.then (authConfig) ->
options =
encoding: opts.encoding
exportFormat: if opts.xlsx then 'xlsx' else 'csv'
outputDelimiter: opts.outputDelimiter
templateDelimiter: opts.templateDelimiter
fillAllRows: opts.fillAllRows
categoryBy: opts.categoryBy
categoryOrderHintBy: opts.categoryOrderHintBy || 'id'
debug: Boolean(opts.parent.debug)
export:
show_progress: true
queryString: opts.queryString
isQueryEncoded: opts.queryEncoded or false
filterVariantsByAttributes: opts.filterVariantsByAttributes
filterPrices: opts.filterPrices
authConfig: authConfig
userAgentConfig:
libraryName: "#{package_json.name} - Export"
libraryVersion: "#{package_json.version}"
contactEmail: 'PI:EMAIL:<EMAIL>END_PI'
httpConfig:
host: program.sphereHost
enableRetry: true
options.authConfig.host = program.sphereAuthHost
exporter = new Exporter options
(if opts.fullExport then Promise.resolve false
else if opts.template? then fs.readFileAsync opts.template, 'utf8'
else new Promise (resolve) ->
console.warn 'Reading from stdin...'
chunks = []
process.stdin.on 'data', (chunk) -> chunks.push chunk
process.stdin.on 'end', () -> resolve Buffer.concat chunks
)
.then (content) ->
(if content
exporter.exportDefault(content, opts.out, not opts.current)
else
exporter.exportFull(opts.out, not opts.current)
)
.then (result) ->
console.warn result
process.exit 0
.catch (err) ->
if err.stack then console.error(err.stack)
console.error err
process.exit 1
.catch (err) ->
console.error "Problems on reading template input: #{err}"
console.error err
process.exit 2
.catch (err) ->
console.error "Problems on getting client credentials from config files: #{err}"
_subCommandHelp('export')
program
.command 'template'
.description 'Create a template for a product type of your SPHERE.IO project.'
.option '-o, --out <file>', 'Path to the file the exporter will write the resulting CSV in'
.option '--outputDelimiter <delimiter>', 'Delimiter used to separate cells in output file | default: ,', ","
.option '-l, --languages [lang,lang]', 'List of languages to use for template (default is [en])', @_list, ['en']
.option '--all', 'Generates one template for all product types - if not given you will be ask which product type to use'
.usage '--projectKey <project-key> --clientId <client-id> --clientSecret <client-secret> --out <file>'
.action (opts) =>
return _subCommandHelp('template') unless program.projectKey
@_ensureCredentials(program)
.then (authConfig) ->
options =
outputDelimiter: opts.outputDelimiter
timeout: program.timeout
show_progress: true
authConfig: authConfig
userAgentConfig:
libraryName: "#{package_json.name} - Template"
libraryVersion: "#{package_json.version}"
contactEmail: 'PI:EMAIL:<EMAIL>END_PI'
httpConfig:
host: program.sphereHost
enableRetry: true
# logConfig:
# streams: [
# {level: 'warn', stream: process.stdout}
# ]
options.authConfig.host = program.sphereAuthHost
exporter = new Exporter options
exporter.createTemplate(opts.languages, opts.out, opts.all)
.then (result) ->
console.warn result
process.exit 0
.catch (err) ->
console.error err
process.exit 1
.catch (err) ->
console.error "Problems on getting client credentials from config files: #{err}"
_subCommandHelp('template')
program.parse argv
program.help() if program.args.length is 0
module.exports.run process.argv
|
[
{
"context": "th: '12'\n exp_year: '2020'\n email: 'foo@koding.com'\n\n createTokenAction = stripe.createToken co",
"end": 2193,
"score": 0.9999140501022339,
"start": 2179,
"tag": "EMAIL",
"value": "foo@koding.com"
}
] | client/app/lib/redux/modules/stripe.test.coffee | lionheart1022/koding | 0 | expect = require 'expect'
configureStore = require 'redux-mock-store'
fixtures = require 'app/redux/services/fixtures/stripe'
{ reducer } = stripe = require './stripe'
describe 'redux/modules/stripe#reducer', ->
it 'should return the initial state', ->
expect(reducer undefined, {}).toEqual
errors: null
it 'should handle CREATE_TOKEN.FAIL', ->
state = reducer undefined, {}
state = reducer state,
type: stripe.CREATE_TOKEN.FAIL
error: fixtures.createTokenError.number
expect(state.errors).toExist()
expect(state.errors).toEqual([fixtures.createTokenError.number])
it 'should clear errors on CREATE_TOKEN.BEGIN and CREATE_TOKEN.SUCCESS', ->
state = reducer undefined, {}
# add some error to state
afterFail = reducer state,
type: stripe.CREATE_TOKEN.FAIL
error: fixtures.createTokenError.number
afterBegin = reducer afterFail,
type: stripe.CREATE_TOKEN.BEGIN
afterSuccess = reducer afterFail,
type: stripe.CREATE_TOKEN.SUCCESS
expect(afterBegin.errors).toBe null
expect(afterSuccess.errors).toBe null
describe 'redux/modules/stripe#actions', ->
# appendHeadElement does window stuff, it fails on mocha-webpack.
# so mock it out!
stripeService = require('inject!app/redux/services/stripe')({
'app/lib/appendHeadElement': ->
})
mockStore = configureStore [
require('app/redux/middleware/stripe')(stripeService, 'stripe_config_key')
require('app/redux/middleware/promise')
]
store = null
before ->
store = mockStore {}
beforeEach ->
# make sure there is a client. In theory this is `global.Stripe` when
# Stripe.js is loaded to page.
expect.spyOn(stripeService, 'ensureClient').andReturn Promise.resolve({})
store.clearActions()
afterEach -> expect.restoreSpies()
describe 'createToken', ->
it 'should dispatch success action', ->
expect
.spyOn stripeService, 'createToken'
.andReturn Promise.resolve(fixtures.createTokenSuccess)
correctCardOptions =
number: '4242 4242 4242 4242'
cvc: '111'
exp_month: '12'
exp_year: '2020'
email: 'foo@koding.com'
createTokenAction = stripe.createToken correctCardOptions
store.dispatch(createTokenAction).then ->
expect(store.getActions()).toEqual [
{ type: stripe.CREATE_TOKEN.BEGIN }
{ type: stripe.CREATE_TOKEN.SUCCESS, result: fixtures.createTokenSuccess }
]
it 'should dispatch fail action', ->
createTokenErrors = _.values fixtures.createTokenError
expect
.spyOn stripeService, 'createToken'
.andReturn Promise.reject(createTokenErrors)
wrongCardOpts =
number: ''
cvc: ''
exp_month: ''
exp_year: ''
email: ''
createTokenAction = stripe.createToken wrongCardOpts
store.dispatch(createTokenAction).then ->
expect(store.getActions()).toEqual [
{ type: stripe.CREATE_TOKEN.BEGIN }
{ type: stripe.CREATE_TOKEN.FAIL, error: createTokenErrors }
]
| 101452 | expect = require 'expect'
configureStore = require 'redux-mock-store'
fixtures = require 'app/redux/services/fixtures/stripe'
{ reducer } = stripe = require './stripe'
describe 'redux/modules/stripe#reducer', ->
it 'should return the initial state', ->
expect(reducer undefined, {}).toEqual
errors: null
it 'should handle CREATE_TOKEN.FAIL', ->
state = reducer undefined, {}
state = reducer state,
type: stripe.CREATE_TOKEN.FAIL
error: fixtures.createTokenError.number
expect(state.errors).toExist()
expect(state.errors).toEqual([fixtures.createTokenError.number])
it 'should clear errors on CREATE_TOKEN.BEGIN and CREATE_TOKEN.SUCCESS', ->
state = reducer undefined, {}
# add some error to state
afterFail = reducer state,
type: stripe.CREATE_TOKEN.FAIL
error: fixtures.createTokenError.number
afterBegin = reducer afterFail,
type: stripe.CREATE_TOKEN.BEGIN
afterSuccess = reducer afterFail,
type: stripe.CREATE_TOKEN.SUCCESS
expect(afterBegin.errors).toBe null
expect(afterSuccess.errors).toBe null
describe 'redux/modules/stripe#actions', ->
# appendHeadElement does window stuff, it fails on mocha-webpack.
# so mock it out!
stripeService = require('inject!app/redux/services/stripe')({
'app/lib/appendHeadElement': ->
})
mockStore = configureStore [
require('app/redux/middleware/stripe')(stripeService, 'stripe_config_key')
require('app/redux/middleware/promise')
]
store = null
before ->
store = mockStore {}
beforeEach ->
# make sure there is a client. In theory this is `global.Stripe` when
# Stripe.js is loaded to page.
expect.spyOn(stripeService, 'ensureClient').andReturn Promise.resolve({})
store.clearActions()
afterEach -> expect.restoreSpies()
describe 'createToken', ->
it 'should dispatch success action', ->
expect
.spyOn stripeService, 'createToken'
.andReturn Promise.resolve(fixtures.createTokenSuccess)
correctCardOptions =
number: '4242 4242 4242 4242'
cvc: '111'
exp_month: '12'
exp_year: '2020'
email: '<EMAIL>'
createTokenAction = stripe.createToken correctCardOptions
store.dispatch(createTokenAction).then ->
expect(store.getActions()).toEqual [
{ type: stripe.CREATE_TOKEN.BEGIN }
{ type: stripe.CREATE_TOKEN.SUCCESS, result: fixtures.createTokenSuccess }
]
it 'should dispatch fail action', ->
createTokenErrors = _.values fixtures.createTokenError
expect
.spyOn stripeService, 'createToken'
.andReturn Promise.reject(createTokenErrors)
wrongCardOpts =
number: ''
cvc: ''
exp_month: ''
exp_year: ''
email: ''
createTokenAction = stripe.createToken wrongCardOpts
store.dispatch(createTokenAction).then ->
expect(store.getActions()).toEqual [
{ type: stripe.CREATE_TOKEN.BEGIN }
{ type: stripe.CREATE_TOKEN.FAIL, error: createTokenErrors }
]
| true | expect = require 'expect'
configureStore = require 'redux-mock-store'
fixtures = require 'app/redux/services/fixtures/stripe'
{ reducer } = stripe = require './stripe'
describe 'redux/modules/stripe#reducer', ->
it 'should return the initial state', ->
expect(reducer undefined, {}).toEqual
errors: null
it 'should handle CREATE_TOKEN.FAIL', ->
state = reducer undefined, {}
state = reducer state,
type: stripe.CREATE_TOKEN.FAIL
error: fixtures.createTokenError.number
expect(state.errors).toExist()
expect(state.errors).toEqual([fixtures.createTokenError.number])
it 'should clear errors on CREATE_TOKEN.BEGIN and CREATE_TOKEN.SUCCESS', ->
state = reducer undefined, {}
# add some error to state
afterFail = reducer state,
type: stripe.CREATE_TOKEN.FAIL
error: fixtures.createTokenError.number
afterBegin = reducer afterFail,
type: stripe.CREATE_TOKEN.BEGIN
afterSuccess = reducer afterFail,
type: stripe.CREATE_TOKEN.SUCCESS
expect(afterBegin.errors).toBe null
expect(afterSuccess.errors).toBe null
describe 'redux/modules/stripe#actions', ->
# appendHeadElement does window stuff, it fails on mocha-webpack.
# so mock it out!
stripeService = require('inject!app/redux/services/stripe')({
'app/lib/appendHeadElement': ->
})
mockStore = configureStore [
require('app/redux/middleware/stripe')(stripeService, 'stripe_config_key')
require('app/redux/middleware/promise')
]
store = null
before ->
store = mockStore {}
beforeEach ->
# make sure there is a client. In theory this is `global.Stripe` when
# Stripe.js is loaded to page.
expect.spyOn(stripeService, 'ensureClient').andReturn Promise.resolve({})
store.clearActions()
afterEach -> expect.restoreSpies()
describe 'createToken', ->
it 'should dispatch success action', ->
expect
.spyOn stripeService, 'createToken'
.andReturn Promise.resolve(fixtures.createTokenSuccess)
correctCardOptions =
number: '4242 4242 4242 4242'
cvc: '111'
exp_month: '12'
exp_year: '2020'
email: 'PI:EMAIL:<EMAIL>END_PI'
createTokenAction = stripe.createToken correctCardOptions
store.dispatch(createTokenAction).then ->
expect(store.getActions()).toEqual [
{ type: stripe.CREATE_TOKEN.BEGIN }
{ type: stripe.CREATE_TOKEN.SUCCESS, result: fixtures.createTokenSuccess }
]
it 'should dispatch fail action', ->
createTokenErrors = _.values fixtures.createTokenError
expect
.spyOn stripeService, 'createToken'
.andReturn Promise.reject(createTokenErrors)
wrongCardOpts =
number: ''
cvc: ''
exp_month: ''
exp_year: ''
email: ''
createTokenAction = stripe.createToken wrongCardOpts
store.dispatch(createTokenAction).then ->
expect(store.getActions()).toEqual [
{ type: stripe.CREATE_TOKEN.BEGIN }
{ type: stripe.CREATE_TOKEN.FAIL, error: createTokenErrors }
]
|
[
{
"context": "cription: \"\"\n keywords: \"\"\n author: \"Lance Pollard\"\n year: \"2012\"\n copyright: \"© u",
"end": 118,
"score": 0.9998700618743896,
"start": 105,
"tag": "NAME",
"value": "Lance Pollard"
},
{
"context": ": \"2012\"\n copyrig... | test/example/app/config/shared/locales/en.coffee | jivagoalves/tower | 1 | module.exports =
title: "Test Tower App"
description: ""
keywords: ""
author: "Lance Pollard"
year: "2012"
copyright: "© undefined Lance Pollard. All rights reserved."
robots: "noodp,noydir,index,follow"
github: "viatropos"
email: "lancejpollard@gmail.com"
titles:
index: "%{name}"
show: "%{name} overview"
new: "Create a new %{name}"
edit: "Editing %{name}"
links:
default: "%{name}"
home: "Home"
docs: "Docs"
openGraph:
siteName: "Client"
title: "Client"
description: ""
type: "website"
url: ""
image: "" | 67797 | module.exports =
title: "Test Tower App"
description: ""
keywords: ""
author: "<NAME>"
year: "2012"
copyright: "© undefined <NAME>. All rights reserved."
robots: "noodp,noydir,index,follow"
github: "viatropos"
email: "<EMAIL>"
titles:
index: "%{name}"
show: "%{name} overview"
new: "Create a new %{name}"
edit: "Editing %{name}"
links:
default: "%{name}"
home: "Home"
docs: "Docs"
openGraph:
siteName: "Client"
title: "Client"
description: ""
type: "website"
url: ""
image: "" | true | module.exports =
title: "Test Tower App"
description: ""
keywords: ""
author: "PI:NAME:<NAME>END_PI"
year: "2012"
copyright: "© undefined PI:NAME:<NAME>END_PI. All rights reserved."
robots: "noodp,noydir,index,follow"
github: "viatropos"
email: "PI:EMAIL:<EMAIL>END_PI"
titles:
index: "%{name}"
show: "%{name} overview"
new: "Create a new %{name}"
edit: "Editing %{name}"
links:
default: "%{name}"
home: "Home"
docs: "Docs"
openGraph:
siteName: "Client"
title: "Client"
description: ""
type: "website"
url: ""
image: "" |
[
{
"context": "sed under the MIT License\nDate: 11-08-2015\nAuthor: Julio Cesar Fausto\nSource: https://github.com/jcfausto/jcfausto-com-",
"end": 104,
"score": 0.999872088432312,
"start": 86,
"tag": "NAME",
"value": "Julio Cesar Fausto"
},
{
"context": "or: Julio Cesar Fausto\nSource... | app/assets/javascripts/components/skill_item.js.jsx.coffee | jcfausto/jcfausto-rails-website | 1 | ###
SkillItem React Component
Released under the MIT License
Date: 11-08-2015
Author: Julio Cesar Fausto
Source: https://github.com/jcfausto/jcfausto-com-rails
###
@SkillItem = React.createClass
getInitialState: ->
title: this.props.title
description: this.props.description
order: this.props.order
render: ->
`<li>
<p>
<span>{this.state.title}</span>
<span>{this.state.description}</span>
</p>
</li>`
| 1680 | ###
SkillItem React Component
Released under the MIT License
Date: 11-08-2015
Author: <NAME>
Source: https://github.com/jcfausto/jcfausto-com-rails
###
@SkillItem = React.createClass
getInitialState: ->
title: this.props.title
description: this.props.description
order: this.props.order
render: ->
`<li>
<p>
<span>{this.state.title}</span>
<span>{this.state.description}</span>
</p>
</li>`
| true | ###
SkillItem React Component
Released under the MIT License
Date: 11-08-2015
Author: PI:NAME:<NAME>END_PI
Source: https://github.com/jcfausto/jcfausto-com-rails
###
@SkillItem = React.createClass
getInitialState: ->
title: this.props.title
description: this.props.description
order: this.props.order
render: ->
`<li>
<p>
<span>{this.state.title}</span>
<span>{this.state.description}</span>
</p>
</li>`
|
[
{
"context": "'\n\n\n# cheap user list\nusers =\n\troot:\n\t\tusername: 'root'\n\t\tpassword: 'password'\n\napp.post '/authenticate'",
"end": 1311,
"score": 0.9940837025642395,
"start": 1307,
"tag": "USERNAME",
"value": "root"
},
{
"context": "st\nusers =\n\troot:\n\t\tusername: 'roo... | server/app/application.coffee | shackspace/shack-hq | 1 | log4js = require 'log4js'
logger = log4js.getLogger 'shack-hq'
logger.setLevel if process.isTest then 'FATAL' else 'INFO'
path = require 'path'
fs = require 'fs'
clientPublic = path.normalize __dirname + '/../../client/public'
express = require 'express'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
expressSession = require 'express-session'
passport = require 'passport'
passportLogic = require './controllers/passport'
userRoutes = require './controllers/userRoutes'
# config = require '../server_config'
mediator = require './mediator'
app = module.exports = express()
# Load db stuff
mongoose = require 'mongoose'
mongoose.connect 'mongodb://localhost/shack-hq'
global.mongoose = mongoose
Project = mongoose.model 'member', require('./schemas/Member'), 'members'
User = mongoose.model 'User', require('./schemas/User'), 'users'
# Server config
app.use bodyParser()
app.use cookieParser 'ponies'
app.use expressSession()
app.use passport.initialize()
app.use passport.session()
# app.use log4js.connectLogger log4js.getLogger 'my-project-access'
app.use express.static clientPublic
# app.use express.errorHandler {dumpExceptions: true, showStack:true }
# admin routers
# ModelRouter = require './routers/Model'
# cheap user list
users =
root:
username: 'root'
password: 'password'
app.post '/authenticate', (req, res) ->
console.log req.body
passport.authenticate 'local', (err, user, info) =>
if err
return next(err)
if not user
req.session.message = [info.message]
return res.send 403
req.logIn user, (err) =>
if err
return next(err)
User.findOne {name: user.name}, (err, user) =>
if err and not user?
next(err)
return res.json user
app.post '/login', userRoutes.postLogin
# if req.body.user?
# user = users[req.body.user.username]
# if user? and user.password = req.body.user.password
# return res.json user
# res.send 404
# else
# if req.session?.user?
# return req.session.user
# res.send 401
# Catch all other requests and deliver client files.
app.get '*', (req, res) ->
res.sendfile path.join clientPublic, 'index.html'
server = app.listen 9000, ->
logger.info "Express server listening on port %d in %s mode", 9000, app.settings.env
io = require('socket.io').listen server
mediator.init io
memberController = new (require './controllers/Member')
# game = new (require './controllers/Game')
# io.sockets.on 'message', (msg) ->
# console.log 'message', msg
# io.sockets.on 'connection', (socket) ->
# # inject mediator
# $emit = socket.$emit
# socket.$emit = ->
# #args = Array.prototype.slice.call arguments
# mediator.emit.apply mediator, arguments
# $emit.apply socket, arguments
# # new IoRouter socket
| 88243 | log4js = require 'log4js'
logger = log4js.getLogger 'shack-hq'
logger.setLevel if process.isTest then 'FATAL' else 'INFO'
path = require 'path'
fs = require 'fs'
clientPublic = path.normalize __dirname + '/../../client/public'
express = require 'express'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
expressSession = require 'express-session'
passport = require 'passport'
passportLogic = require './controllers/passport'
userRoutes = require './controllers/userRoutes'
# config = require '../server_config'
mediator = require './mediator'
app = module.exports = express()
# Load db stuff
mongoose = require 'mongoose'
mongoose.connect 'mongodb://localhost/shack-hq'
global.mongoose = mongoose
Project = mongoose.model 'member', require('./schemas/Member'), 'members'
User = mongoose.model 'User', require('./schemas/User'), 'users'
# Server config
app.use bodyParser()
app.use cookieParser 'ponies'
app.use expressSession()
app.use passport.initialize()
app.use passport.session()
# app.use log4js.connectLogger log4js.getLogger 'my-project-access'
app.use express.static clientPublic
# app.use express.errorHandler {dumpExceptions: true, showStack:true }
# admin routers
# ModelRouter = require './routers/Model'
# cheap user list
users =
root:
username: 'root'
password: '<PASSWORD>'
app.post '/authenticate', (req, res) ->
console.log req.body
passport.authenticate 'local', (err, user, info) =>
if err
return next(err)
if not user
req.session.message = [info.message]
return res.send 403
req.logIn user, (err) =>
if err
return next(err)
User.findOne {name: user.name}, (err, user) =>
if err and not user?
next(err)
return res.json user
app.post '/login', userRoutes.postLogin
# if req.body.user?
# user = users[req.body.user.username]
# if user? and user.password = req.body.user.password
# return res.json user
# res.send 404
# else
# if req.session?.user?
# return req.session.user
# res.send 401
# Catch all other requests and deliver client files.
app.get '*', (req, res) ->
res.sendfile path.join clientPublic, 'index.html'
server = app.listen 9000, ->
logger.info "Express server listening on port %d in %s mode", 9000, app.settings.env
io = require('socket.io').listen server
mediator.init io
memberController = new (require './controllers/Member')
# game = new (require './controllers/Game')
# io.sockets.on 'message', (msg) ->
# console.log 'message', msg
# io.sockets.on 'connection', (socket) ->
# # inject mediator
# $emit = socket.$emit
# socket.$emit = ->
# #args = Array.prototype.slice.call arguments
# mediator.emit.apply mediator, arguments
# $emit.apply socket, arguments
# # new IoRouter socket
| true | log4js = require 'log4js'
logger = log4js.getLogger 'shack-hq'
logger.setLevel if process.isTest then 'FATAL' else 'INFO'
path = require 'path'
fs = require 'fs'
clientPublic = path.normalize __dirname + '/../../client/public'
express = require 'express'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
expressSession = require 'express-session'
passport = require 'passport'
passportLogic = require './controllers/passport'
userRoutes = require './controllers/userRoutes'
# config = require '../server_config'
mediator = require './mediator'
app = module.exports = express()
# Load db stuff
mongoose = require 'mongoose'
mongoose.connect 'mongodb://localhost/shack-hq'
global.mongoose = mongoose
Project = mongoose.model 'member', require('./schemas/Member'), 'members'
User = mongoose.model 'User', require('./schemas/User'), 'users'
# Server config
app.use bodyParser()
app.use cookieParser 'ponies'
app.use expressSession()
app.use passport.initialize()
app.use passport.session()
# app.use log4js.connectLogger log4js.getLogger 'my-project-access'
app.use express.static clientPublic
# app.use express.errorHandler {dumpExceptions: true, showStack:true }
# admin routers
# ModelRouter = require './routers/Model'
# cheap user list
users =
root:
username: 'root'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
app.post '/authenticate', (req, res) ->
console.log req.body
passport.authenticate 'local', (err, user, info) =>
if err
return next(err)
if not user
req.session.message = [info.message]
return res.send 403
req.logIn user, (err) =>
if err
return next(err)
User.findOne {name: user.name}, (err, user) =>
if err and not user?
next(err)
return res.json user
app.post '/login', userRoutes.postLogin
# if req.body.user?
# user = users[req.body.user.username]
# if user? and user.password = req.body.user.password
# return res.json user
# res.send 404
# else
# if req.session?.user?
# return req.session.user
# res.send 401
# Catch all other requests and deliver client files.
app.get '*', (req, res) ->
res.sendfile path.join clientPublic, 'index.html'
server = app.listen 9000, ->
logger.info "Express server listening on port %d in %s mode", 9000, app.settings.env
io = require('socket.io').listen server
mediator.init io
memberController = new (require './controllers/Member')
# game = new (require './controllers/Game')
# io.sockets.on 'message', (msg) ->
# console.log 'message', msg
# io.sockets.on 'connection', (socket) ->
# # inject mediator
# $emit = socket.$emit
# socket.$emit = ->
# #args = Array.prototype.slice.call arguments
# mediator.emit.apply mediator, arguments
# $emit.apply socket, arguments
# # new IoRouter socket
|
[
{
"context": " this.options = extend({}, options, {\n key: 'abcdeg'\n algorithm: 'sha1'\n encoding: 'hex'\n ",
"end": 162,
"score": 0.999427855014801,
"start": 156,
"tag": "KEY",
"value": "abcdeg"
}
] | index.coffee | pmoelgaard/nx-url-hash | 0 | extend = require('deep-extend')
crypto = require('crypto')
class URLHash
constructor: (options) ->
this.options = extend({}, options, {
key: 'abcdeg'
algorithm: 'sha1'
encoding: 'hex'
})
hash: (url) =>
options = this.options
hashProvider = crypto.createHmac(options.algorithm, options.key)
hashProvider.setEncoding(options.encoding)
hashProvider.write(url)
hashProvider.end()
hash = hashProvider.read()
return hash
module.exports = URLHash | 10866 | extend = require('deep-extend')
crypto = require('crypto')
class URLHash
constructor: (options) ->
this.options = extend({}, options, {
key: '<KEY>'
algorithm: 'sha1'
encoding: 'hex'
})
hash: (url) =>
options = this.options
hashProvider = crypto.createHmac(options.algorithm, options.key)
hashProvider.setEncoding(options.encoding)
hashProvider.write(url)
hashProvider.end()
hash = hashProvider.read()
return hash
module.exports = URLHash | true | extend = require('deep-extend')
crypto = require('crypto')
class URLHash
constructor: (options) ->
this.options = extend({}, options, {
key: 'PI:KEY:<KEY>END_PI'
algorithm: 'sha1'
encoding: 'hex'
})
hash: (url) =>
options = this.options
hashProvider = crypto.createHmac(options.algorithm, options.key)
hashProvider.setEncoding(options.encoding)
hashProvider.write(url)
hashProvider.end()
hash = hashProvider.read()
return hash
module.exports = URLHash |
[
{
"context": "rue\n auth:\n username: uuid\n password: token\n\n request.post options, (error, response, body) ",
"end": 3449,
"score": 0.9982953667640686,
"start": 3444,
"tag": "PASSWORD",
"value": "token"
}
] | migrate-to-devices.coffee | octoblu/nanocyte-flow-deploy-service | 0 | _ = require 'lodash'
async = require 'async'
mongojs = require 'mongojs'
Redis = require 'ioredis'
request = require 'request'
MeshbluHttp = require 'meshblu-http'
throw new Error("INTERVAL_REDIS_URI is required") unless process.env.INTERVAL_REDIS_URI?
client = new Redis process.env.REDIS_URI, dropBufferSupport: true
intervalClient = new Redis process.env.INTERVAL_REDIS_URI, dropBufferSupport: true
database = mongojs process.env.MONGODB_URI, ['instances']
datastore = database.instances
convertFlow = (record, callback) =>
{flowId, instanceId} = record
flowId = flowId.replace /-stop/, ''
console.log {flowId}
datastore.findOne {flowId, instanceId}, (error, record) =>
flowData = JSON.parse record.flowData
{uuid, token} = flowData['engine-output']?.config
return callback() unless uuid? && token?
meshbluHttp = new MeshbluHttp {uuid, token, hostname: 'meshblu-http.octoblu.com'}
meshbluHttp.whoami (error) =>
return callback() if error?
intervals = getIntervals flowData
uniqIntervals = _.uniqBy intervals, 'id'
async.map uniqIntervals, async.apply(convertToDevice, uuid, token), (error, data) =>
return callback error if error?
data = _.compact data
async.each data, async.apply(updateInterval, flowId, instanceId, intervals), (error) =>
updatePermissions {uuid, token, data}, (error) =>
return callback error if error?
updateMongoFlow {flowId, instanceId, flowData}, (error) =>
return callback error if error?
update =
$set:
intervalDeviceMigration: Date.now()
datastore.update flowId: "#{flowId}-stop", update, callback
updateMongoFlow = ({flowId, instanceId, flowData}, callback) =>
update =
$set:
flowData: JSON.stringify(flowData)
intervalDeviceMigration: Date.now()
datastore.update {flowId, instanceId}, update, callback
updatePermissions = ({uuid, token, data}, callback) =>
deviceIds = _.map data, 'uuid'
updateSendWhitelist =
$addToSet:
sendWhitelist:
$each: deviceIds
meshbluHttp = new MeshbluHttp {uuid, token, hostname: 'meshblu-http.octoblu.com'}
meshbluHttp.updateDangerously uuid, updateSendWhitelist, callback
updateInterval = (flowId, instanceId, intervals, intervalDevice, callback) =>
intervals = _.filter intervals, id: intervalDevice.nodeId
async.each intervals, (interval, callback) =>
interval.deviceId = intervalDevice.deviceId
nodeId = interval.id
client.hset flowId, "#{instanceId}/#{interval.id}/config", JSON.stringify(interval), (error) =>
return callback error if error?
redisData = [
"interval/uuid/#{flowId}/#{nodeId}"
intervalDevice.uuid
"interval/token/#{flowId}/#{nodeId}"
intervalDevice.token
]
intervalClient.mset redisData, callback
, callback
convertToDevice = (uuid, token, interval, callback) =>
return callback() unless interval.deviceId == '765bd3a4-546d-45e6-a62f-1157281083f0'
createFlowDevice {uuid, token, nodeId: interval.id}, (error, response) =>
return callback error if error?
callback null, nodeId: interval.id, uuid: response.uuid, token: response.token
createFlowDevice = ({uuid, token, nodeId}, callback) =>
options =
uri: "https://interval.octoblu.com/nodes/#{nodeId}/intervals"
json: true
auth:
username: uuid
password: token
request.post options, (error, response, body) =>
return callback error if error?
return callback new Error "Bad response: #{response.statusCode}" unless response.statusCode < 300
callback null, body
getIntervals = (flowData) =>
nodes = _.map _.values(flowData), 'config'
_.filter nodes, (node) =>
return _.includes ['interval', 'schedule', 'throttle', 'debounce', 'delay', 'leading-edge-debounce'], node?.class
query =
flowId: /-stop$/
intervalDeviceMigration: $eq: null
cursor = datastore.find(query).limit 1000, (error, records) =>
throw error if error?
async.eachSeries records, convertFlow, (error) =>
throw error if error?
process.exit 0
| 91893 | _ = require 'lodash'
async = require 'async'
mongojs = require 'mongojs'
Redis = require 'ioredis'
request = require 'request'
MeshbluHttp = require 'meshblu-http'
throw new Error("INTERVAL_REDIS_URI is required") unless process.env.INTERVAL_REDIS_URI?
client = new Redis process.env.REDIS_URI, dropBufferSupport: true
intervalClient = new Redis process.env.INTERVAL_REDIS_URI, dropBufferSupport: true
database = mongojs process.env.MONGODB_URI, ['instances']
datastore = database.instances
convertFlow = (record, callback) =>
{flowId, instanceId} = record
flowId = flowId.replace /-stop/, ''
console.log {flowId}
datastore.findOne {flowId, instanceId}, (error, record) =>
flowData = JSON.parse record.flowData
{uuid, token} = flowData['engine-output']?.config
return callback() unless uuid? && token?
meshbluHttp = new MeshbluHttp {uuid, token, hostname: 'meshblu-http.octoblu.com'}
meshbluHttp.whoami (error) =>
return callback() if error?
intervals = getIntervals flowData
uniqIntervals = _.uniqBy intervals, 'id'
async.map uniqIntervals, async.apply(convertToDevice, uuid, token), (error, data) =>
return callback error if error?
data = _.compact data
async.each data, async.apply(updateInterval, flowId, instanceId, intervals), (error) =>
updatePermissions {uuid, token, data}, (error) =>
return callback error if error?
updateMongoFlow {flowId, instanceId, flowData}, (error) =>
return callback error if error?
update =
$set:
intervalDeviceMigration: Date.now()
datastore.update flowId: "#{flowId}-stop", update, callback
updateMongoFlow = ({flowId, instanceId, flowData}, callback) =>
update =
$set:
flowData: JSON.stringify(flowData)
intervalDeviceMigration: Date.now()
datastore.update {flowId, instanceId}, update, callback
updatePermissions = ({uuid, token, data}, callback) =>
deviceIds = _.map data, 'uuid'
updateSendWhitelist =
$addToSet:
sendWhitelist:
$each: deviceIds
meshbluHttp = new MeshbluHttp {uuid, token, hostname: 'meshblu-http.octoblu.com'}
meshbluHttp.updateDangerously uuid, updateSendWhitelist, callback
updateInterval = (flowId, instanceId, intervals, intervalDevice, callback) =>
intervals = _.filter intervals, id: intervalDevice.nodeId
async.each intervals, (interval, callback) =>
interval.deviceId = intervalDevice.deviceId
nodeId = interval.id
client.hset flowId, "#{instanceId}/#{interval.id}/config", JSON.stringify(interval), (error) =>
return callback error if error?
redisData = [
"interval/uuid/#{flowId}/#{nodeId}"
intervalDevice.uuid
"interval/token/#{flowId}/#{nodeId}"
intervalDevice.token
]
intervalClient.mset redisData, callback
, callback
convertToDevice = (uuid, token, interval, callback) =>
return callback() unless interval.deviceId == '765bd3a4-546d-45e6-a62f-1157281083f0'
createFlowDevice {uuid, token, nodeId: interval.id}, (error, response) =>
return callback error if error?
callback null, nodeId: interval.id, uuid: response.uuid, token: response.token
createFlowDevice = ({uuid, token, nodeId}, callback) =>
options =
uri: "https://interval.octoblu.com/nodes/#{nodeId}/intervals"
json: true
auth:
username: uuid
password: <PASSWORD>
request.post options, (error, response, body) =>
return callback error if error?
return callback new Error "Bad response: #{response.statusCode}" unless response.statusCode < 300
callback null, body
getIntervals = (flowData) =>
nodes = _.map _.values(flowData), 'config'
_.filter nodes, (node) =>
return _.includes ['interval', 'schedule', 'throttle', 'debounce', 'delay', 'leading-edge-debounce'], node?.class
query =
flowId: /-stop$/
intervalDeviceMigration: $eq: null
cursor = datastore.find(query).limit 1000, (error, records) =>
throw error if error?
async.eachSeries records, convertFlow, (error) =>
throw error if error?
process.exit 0
| true | _ = require 'lodash'
async = require 'async'
mongojs = require 'mongojs'
Redis = require 'ioredis'
request = require 'request'
MeshbluHttp = require 'meshblu-http'
throw new Error("INTERVAL_REDIS_URI is required") unless process.env.INTERVAL_REDIS_URI?
client = new Redis process.env.REDIS_URI, dropBufferSupport: true
intervalClient = new Redis process.env.INTERVAL_REDIS_URI, dropBufferSupport: true
database = mongojs process.env.MONGODB_URI, ['instances']
datastore = database.instances
convertFlow = (record, callback) =>
{flowId, instanceId} = record
flowId = flowId.replace /-stop/, ''
console.log {flowId}
datastore.findOne {flowId, instanceId}, (error, record) =>
flowData = JSON.parse record.flowData
{uuid, token} = flowData['engine-output']?.config
return callback() unless uuid? && token?
meshbluHttp = new MeshbluHttp {uuid, token, hostname: 'meshblu-http.octoblu.com'}
meshbluHttp.whoami (error) =>
return callback() if error?
intervals = getIntervals flowData
uniqIntervals = _.uniqBy intervals, 'id'
async.map uniqIntervals, async.apply(convertToDevice, uuid, token), (error, data) =>
return callback error if error?
data = _.compact data
async.each data, async.apply(updateInterval, flowId, instanceId, intervals), (error) =>
updatePermissions {uuid, token, data}, (error) =>
return callback error if error?
updateMongoFlow {flowId, instanceId, flowData}, (error) =>
return callback error if error?
update =
$set:
intervalDeviceMigration: Date.now()
datastore.update flowId: "#{flowId}-stop", update, callback
updateMongoFlow = ({flowId, instanceId, flowData}, callback) =>
update =
$set:
flowData: JSON.stringify(flowData)
intervalDeviceMigration: Date.now()
datastore.update {flowId, instanceId}, update, callback
updatePermissions = ({uuid, token, data}, callback) =>
deviceIds = _.map data, 'uuid'
updateSendWhitelist =
$addToSet:
sendWhitelist:
$each: deviceIds
meshbluHttp = new MeshbluHttp {uuid, token, hostname: 'meshblu-http.octoblu.com'}
meshbluHttp.updateDangerously uuid, updateSendWhitelist, callback
updateInterval = (flowId, instanceId, intervals, intervalDevice, callback) =>
intervals = _.filter intervals, id: intervalDevice.nodeId
async.each intervals, (interval, callback) =>
interval.deviceId = intervalDevice.deviceId
nodeId = interval.id
client.hset flowId, "#{instanceId}/#{interval.id}/config", JSON.stringify(interval), (error) =>
return callback error if error?
redisData = [
"interval/uuid/#{flowId}/#{nodeId}"
intervalDevice.uuid
"interval/token/#{flowId}/#{nodeId}"
intervalDevice.token
]
intervalClient.mset redisData, callback
, callback
convertToDevice = (uuid, token, interval, callback) =>
return callback() unless interval.deviceId == '765bd3a4-546d-45e6-a62f-1157281083f0'
createFlowDevice {uuid, token, nodeId: interval.id}, (error, response) =>
return callback error if error?
callback null, nodeId: interval.id, uuid: response.uuid, token: response.token
createFlowDevice = ({uuid, token, nodeId}, callback) =>
options =
uri: "https://interval.octoblu.com/nodes/#{nodeId}/intervals"
json: true
auth:
username: uuid
password: PI:PASSWORD:<PASSWORD>END_PI
request.post options, (error, response, body) =>
return callback error if error?
return callback new Error "Bad response: #{response.statusCode}" unless response.statusCode < 300
callback null, body
getIntervals = (flowData) =>
nodes = _.map _.values(flowData), 'config'
_.filter nodes, (node) =>
return _.includes ['interval', 'schedule', 'throttle', 'debounce', 'delay', 'leading-edge-debounce'], node?.class
query =
flowId: /-stop$/
intervalDeviceMigration: $eq: null
cursor = datastore.find(query).limit 1000, (error, records) =>
throw error if error?
async.eachSeries records, convertFlow, (error) =>
throw error if error?
process.exit 0
|
[
{
"context": "#\n# Copyright (c) 2012 Konstantin Bender.\n#\n# Permission is hereby granted, free of charge",
"end": 40,
"score": 0.9998767971992493,
"start": 23,
"tag": "NAME",
"value": "Konstantin Bender"
},
{
"context": "'@name'\n person[instance_variable_name] = \"Peter\"\... | tests/test-function.coffee | konstantinbe/milk | 0 | #
# Copyright (c) 2012 Konstantin Bender.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
describe "Function", ->
describe "#new()", ->
it "instantiates an object with that constructor", ->
class Person
person = Person.new()
expect(person).to_be_instance_of Person
it "passes arguments to the constructor", ->
class Person
constructor: (one, two, three) ->
@one = one
@two = two
@three = three
person = Person.new 1, 2, 3
expect(person['one']).to_be 1
expect(person['two']).to_be 2
expect(person['three']).to_be 3
describe "#has()", ->
describe "when no options are passed", ->
person = null
instance_variable_name = null
class Person
@has 'name'
before ->
person = new Person()
instance_variable_name = '@name'
person[instance_variable_name] = "Peter"
it "defines a getter method returning the value of the instance variable", ->
expect(person).to_respond_to 'name'
expect(person.name()).to_be "Peter"
it "defines a setter method updating the instance variable", ->
expect(person).to_respond_to 'set_name'
person.set_name 'Anna'
expect(person[instance_variable_name]).to_be "Anna"
it "sets the initial value to be null", ->
person = new Person()
expect(person.name()).to_be null
describe "when option 'initial: value' is passed", ->
class Person
@has 'name', initial: "New"
it "sets the initial value", ->
person = new Person()
expect(person.name()).to_be "New"
describe "when option 'secret: yes' is passed", ->
class Person
@has 'age', secret: yes
it "makes the getter and setter non-enumerable", ->
person = new Person()
keys = (key for key of person)
expect(keys).not.to_contain 'age'
expect(keys).not.to_contain 'set_age'
describe "when option 'readonly: yes' is passed", ->
class Person
@has 'age', readonly: yes
| 112878 | #
# Copyright (c) 2012 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
describe "Function", ->
describe "#new()", ->
it "instantiates an object with that constructor", ->
class Person
person = Person.new()
expect(person).to_be_instance_of Person
it "passes arguments to the constructor", ->
class Person
constructor: (one, two, three) ->
@one = one
@two = two
@three = three
person = Person.new 1, 2, 3
expect(person['one']).to_be 1
expect(person['two']).to_be 2
expect(person['three']).to_be 3
describe "#has()", ->
describe "when no options are passed", ->
person = null
instance_variable_name = null
class Person
@has 'name'
before ->
person = new Person()
instance_variable_name = '@name'
person[instance_variable_name] = "<NAME>"
it "defines a getter method returning the value of the instance variable", ->
expect(person).to_respond_to 'name'
expect(person.name()).to_be "Peter"
it "defines a setter method updating the instance variable", ->
expect(person).to_respond_to 'set_name'
person.set_name '<NAME>'
expect(person[instance_variable_name]).to_be "<NAME>"
it "sets the initial value to be null", ->
person = new Person()
expect(person.name()).to_be null
describe "when option 'initial: value' is passed", ->
class Person
@has 'name', initial: "New"
it "sets the initial value", ->
person = new Person()
expect(person.name()).to_be "New"
describe "when option 'secret: yes' is passed", ->
class Person
@has 'age', secret: yes
it "makes the getter and setter non-enumerable", ->
person = new Person()
keys = (key for key of person)
expect(keys).not.to_contain 'age'
expect(keys).not.to_contain 'set_<KEY>'
describe "when option 'readonly: yes' is passed", ->
class Person
@has 'age', readonly: yes
| true | #
# Copyright (c) 2012 PI:NAME:<NAME>END_PI.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
describe "Function", ->
describe "#new()", ->
it "instantiates an object with that constructor", ->
class Person
person = Person.new()
expect(person).to_be_instance_of Person
it "passes arguments to the constructor", ->
class Person
constructor: (one, two, three) ->
@one = one
@two = two
@three = three
person = Person.new 1, 2, 3
expect(person['one']).to_be 1
expect(person['two']).to_be 2
expect(person['three']).to_be 3
describe "#has()", ->
describe "when no options are passed", ->
person = null
instance_variable_name = null
class Person
@has 'name'
before ->
person = new Person()
instance_variable_name = '@name'
person[instance_variable_name] = "PI:NAME:<NAME>END_PI"
it "defines a getter method returning the value of the instance variable", ->
expect(person).to_respond_to 'name'
expect(person.name()).to_be "Peter"
it "defines a setter method updating the instance variable", ->
expect(person).to_respond_to 'set_name'
person.set_name 'PI:NAME:<NAME>END_PI'
expect(person[instance_variable_name]).to_be "PI:NAME:<NAME>END_PI"
it "sets the initial value to be null", ->
person = new Person()
expect(person.name()).to_be null
describe "when option 'initial: value' is passed", ->
class Person
@has 'name', initial: "New"
it "sets the initial value", ->
person = new Person()
expect(person.name()).to_be "New"
describe "when option 'secret: yes' is passed", ->
class Person
@has 'age', secret: yes
it "makes the getter and setter non-enumerable", ->
person = new Person()
keys = (key for key of person)
expect(keys).not.to_contain 'age'
expect(keys).not.to_contain 'set_PI:KEY:<KEY>END_PI'
describe "when option 'readonly: yes' is passed", ->
class Person
@has 'age', readonly: yes
|
[
{
"context": "ferWriter buf\n cleverBufferWriter.writeString 'HELLOWORLD',\n length:5\n offset: 5\n\n #Writes hel",
"end": 6510,
"score": 0.6201989650726318,
"start": 6500,
"tag": "NAME",
"value": "HELLOWORLD"
}
] | test/clever-buffer-writer.spec.coffee | vickvu/clever-buffer | 0 | should = require 'should'
CleverBufferWriter = require "#{SRC}/clever-buffer-writer"
{ writeToStupidBuffer,
writeToCleverBuffer } = require './support/test-helper'
describe 'CleverBuffer', ->
NUMBER_OF_ITERATIONS = 16
it 'should write Uint8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt8 value
buf.should.eql cleverBuffer
it 'should write int8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt8 value
buf.should.eql cleverBuffer
it 'should write Uint16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write Uint32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write bytes', ->
buf = new Buffer 11
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeBytes [0x20, 0x6d, 0x65, 0x20, 0x57, 0x6f, 0x72, 0x72, 0x79, 0x21]
cleverBufferWriter.writeBytes [0x20]
cleverBufferWriter.writeBytes [0x57, 0x68, 0x61, 0x74], {offset: 2}
cleverBufferWriter.getBuffer().should.eql new Buffer [0x20, 0x6d, 0x57, 0x68, 0x61, 0x74, 0x72, 0x72, 0x79, 0x21, 0x20]
it 'should skip bytes', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skip 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x00, 0x20]
it 'should skip to set offset', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skipTo 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x20, 0x00]
it 'should write string', ->
buf = new Buffer 32
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'EXPECTED RETURN!'
cleverBufferWriter.writeString 'RETURN OF $2.00!'
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x45,0x58,0x50,0x45,0x43,0x54,0x45,0x44,0x20,0x52,0x45,0x54,0x55,0x52,0x4e,0x21,
0x52,0x45,0x54,0x55,0x52,0x4e,0x20,0x4f,0x46,0x20,0x24,0x32,0x2e,0x30,0x30,0x21
]
it 'should write string of specified length', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLOWORLD', length:5
#Only writes hello
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x48, 0x45, 0x4C, 0x4C, 0x4F, 0x00, 0x00, 0x00, 0x00, 0x00
]
cleverBufferWriter.getOffset().should.eql 5
it 'should write string of specified length at a specified offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLOWORLD',
length:5
offset: 5
#Writes hello starting at offset 5
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x45, 0x4C, 0x4C, 0x4F
]
cleverBufferWriter.getOffset().should.eql 0
it 'should be able to writeUInt8 at a specific offset', ->
buf = new Buffer 5
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 1
cleverBufferWriter.writeUInt8 2
cleverBufferWriter.writeUInt8 3
cleverBufferWriter.writeUInt8 4
cleverBufferWriter.writeUInt8 5
cleverBufferWriter.writeUInt8 6, 1
#Writes 6 at position 1
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x06, 0x03, 0x04, 0x05
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 5
it 'should be able to writeUInt16 at a specific offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt16 1
cleverBufferWriter.writeUInt16 2
cleverBufferWriter.writeUInt16 3
cleverBufferWriter.writeUInt16 4
cleverBufferWriter.writeUInt16 5
cleverBufferWriter.writeUInt16 6, 2
#Writes 6 at position 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x00, 0x06, 0x00, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 10
it 'should write Uint64 little endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 big endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x46, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
]
it 'should write Uint64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x46
]
it 'should write int64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write int64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 at specified offset, currentOffset should not increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615', 2)
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
cleverBuffer.getOffset().should.eql 0
it 'should write Uint64 at current offset, currentOffset should increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.skip 1
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00
]
cleverBuffer.getOffset().should.eql 9
| 215525 | should = require 'should'
CleverBufferWriter = require "#{SRC}/clever-buffer-writer"
{ writeToStupidBuffer,
writeToCleverBuffer } = require './support/test-helper'
describe 'CleverBuffer', ->
NUMBER_OF_ITERATIONS = 16
it 'should write Uint8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt8 value
buf.should.eql cleverBuffer
it 'should write int8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt8 value
buf.should.eql cleverBuffer
it 'should write Uint16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write Uint32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write bytes', ->
buf = new Buffer 11
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeBytes [0x20, 0x6d, 0x65, 0x20, 0x57, 0x6f, 0x72, 0x72, 0x79, 0x21]
cleverBufferWriter.writeBytes [0x20]
cleverBufferWriter.writeBytes [0x57, 0x68, 0x61, 0x74], {offset: 2}
cleverBufferWriter.getBuffer().should.eql new Buffer [0x20, 0x6d, 0x57, 0x68, 0x61, 0x74, 0x72, 0x72, 0x79, 0x21, 0x20]
it 'should skip bytes', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skip 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x00, 0x20]
it 'should skip to set offset', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skipTo 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x20, 0x00]
it 'should write string', ->
buf = new Buffer 32
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'EXPECTED RETURN!'
cleverBufferWriter.writeString 'RETURN OF $2.00!'
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x45,0x58,0x50,0x45,0x43,0x54,0x45,0x44,0x20,0x52,0x45,0x54,0x55,0x52,0x4e,0x21,
0x52,0x45,0x54,0x55,0x52,0x4e,0x20,0x4f,0x46,0x20,0x24,0x32,0x2e,0x30,0x30,0x21
]
it 'should write string of specified length', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLOWORLD', length:5
#Only writes hello
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x48, 0x45, 0x4C, 0x4C, 0x4F, 0x00, 0x00, 0x00, 0x00, 0x00
]
cleverBufferWriter.getOffset().should.eql 5
it 'should write string of specified length at a specified offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString '<NAME>',
length:5
offset: 5
#Writes hello starting at offset 5
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x45, 0x4C, 0x4C, 0x4F
]
cleverBufferWriter.getOffset().should.eql 0
it 'should be able to writeUInt8 at a specific offset', ->
buf = new Buffer 5
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 1
cleverBufferWriter.writeUInt8 2
cleverBufferWriter.writeUInt8 3
cleverBufferWriter.writeUInt8 4
cleverBufferWriter.writeUInt8 5
cleverBufferWriter.writeUInt8 6, 1
#Writes 6 at position 1
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x06, 0x03, 0x04, 0x05
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 5
it 'should be able to writeUInt16 at a specific offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt16 1
cleverBufferWriter.writeUInt16 2
cleverBufferWriter.writeUInt16 3
cleverBufferWriter.writeUInt16 4
cleverBufferWriter.writeUInt16 5
cleverBufferWriter.writeUInt16 6, 2
#Writes 6 at position 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x00, 0x06, 0x00, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 10
it 'should write Uint64 little endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 big endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x46, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
]
it 'should write Uint64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x46
]
it 'should write int64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write int64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 at specified offset, currentOffset should not increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615', 2)
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
cleverBuffer.getOffset().should.eql 0
it 'should write Uint64 at current offset, currentOffset should increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.skip 1
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00
]
cleverBuffer.getOffset().should.eql 9
| true | should = require 'should'
CleverBufferWriter = require "#{SRC}/clever-buffer-writer"
{ writeToStupidBuffer,
writeToCleverBuffer } = require './support/test-helper'
describe 'CleverBuffer', ->
NUMBER_OF_ITERATIONS = 16
it 'should write Uint8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt8 value
buf.should.eql cleverBuffer
it 'should write int8', ->
numberOfBytesPerWord = 1
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt8 value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt8 value
buf.should.eql cleverBuffer
it 'should write Uint16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Little Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt16 value
buf.should.eql cleverBuffer
it 'should write int16 Big Endian', ->
numberOfBytesPerWord = 2
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt16BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt16 value
buf.should.eql cleverBuffer
it 'should write Uint32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Little Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32LE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, false, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write Uint32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeUInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeUInt32 value
buf.should.eql cleverBuffer
it 'should write int32 Big Endian', ->
numberOfBytesPerWord = 4
buf = writeToStupidBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, (buf, value, offset) ->
buf.writeInt32BE value, offset, true
cleverBuffer = writeToCleverBuffer NUMBER_OF_ITERATIONS, numberOfBytesPerWord, true, (cleverBufferWriter, value) ->
cleverBufferWriter.writeInt32 value
buf.should.eql cleverBuffer
it 'should write bytes', ->
buf = new Buffer 11
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeBytes [0x20, 0x6d, 0x65, 0x20, 0x57, 0x6f, 0x72, 0x72, 0x79, 0x21]
cleverBufferWriter.writeBytes [0x20]
cleverBufferWriter.writeBytes [0x57, 0x68, 0x61, 0x74], {offset: 2}
cleverBufferWriter.getBuffer().should.eql new Buffer [0x20, 0x6d, 0x57, 0x68, 0x61, 0x74, 0x72, 0x72, 0x79, 0x21, 0x20]
it 'should skip bytes', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skip 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x00, 0x20]
it 'should skip to set offset', ->
buf = new Buffer 4
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 0x10
cleverBufferWriter.skipTo 2
cleverBufferWriter.writeUInt8 0x20
cleverBufferWriter.getBuffer().should.eql new Buffer [0x10, 0x00, 0x20, 0x00]
it 'should write string', ->
buf = new Buffer 32
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'EXPECTED RETURN!'
cleverBufferWriter.writeString 'RETURN OF $2.00!'
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x45,0x58,0x50,0x45,0x43,0x54,0x45,0x44,0x20,0x52,0x45,0x54,0x55,0x52,0x4e,0x21,
0x52,0x45,0x54,0x55,0x52,0x4e,0x20,0x4f,0x46,0x20,0x24,0x32,0x2e,0x30,0x30,0x21
]
it 'should write string of specified length', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'HELLOWORLD', length:5
#Only writes hello
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x48, 0x45, 0x4C, 0x4C, 0x4F, 0x00, 0x00, 0x00, 0x00, 0x00
]
cleverBufferWriter.getOffset().should.eql 5
it 'should write string of specified length at a specified offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeString 'PI:NAME:<NAME>END_PI',
length:5
offset: 5
#Writes hello starting at offset 5
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x45, 0x4C, 0x4C, 0x4F
]
cleverBufferWriter.getOffset().should.eql 0
it 'should be able to writeUInt8 at a specific offset', ->
buf = new Buffer 5
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt8 1
cleverBufferWriter.writeUInt8 2
cleverBufferWriter.writeUInt8 3
cleverBufferWriter.writeUInt8 4
cleverBufferWriter.writeUInt8 5
cleverBufferWriter.writeUInt8 6, 1
#Writes 6 at position 1
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x06, 0x03, 0x04, 0x05
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 5
it 'should be able to writeUInt16 at a specific offset', ->
buf = new Buffer 10
buf.fill 0
cleverBufferWriter = new CleverBufferWriter buf
cleverBufferWriter.writeUInt16 1
cleverBufferWriter.writeUInt16 2
cleverBufferWriter.writeUInt16 3
cleverBufferWriter.writeUInt16 4
cleverBufferWriter.writeUInt16 5
cleverBufferWriter.writeUInt16 6, 2
#Writes 6 at position 2
cleverBufferWriter.getBuffer().should.eql new Buffer [
0x01, 0x00, 0x06, 0x00, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00
]
#Does not increment the offset
cleverBufferWriter.getOffset().should.eql 10
it 'should write Uint64 little endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 big endian MAX', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x46, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
]
it 'should write Uint64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('4294967366')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x46
]
it 'should write int64 little endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write int64 big endian', ->
buf = new Buffer 8
buf.fill 0
cleverBuffer = new CleverBufferWriter buf, {bigEndian:true}
cleverBuffer.writeUInt64('-1')
cleverBuffer.getBuffer().should.eql new Buffer [
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
it 'should write Uint64 at specified offset, currentOffset should not increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.writeUInt64('18446744073709551615', 2)
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
]
cleverBuffer.getOffset().should.eql 0
it 'should write Uint64 at current offset, currentOffset should increment', ->
buf = new Buffer 10
buf.fill 0
cleverBuffer = new CleverBufferWriter buf
cleverBuffer.skip 1
cleverBuffer.writeUInt64('18446744073709551615')
cleverBuffer.getBuffer().should.eql new Buffer [
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00
]
cleverBuffer.getOffset().should.eql 9
|
[
{
"context": "ion(req, res) {\",\n \"// res.send({ name: 'example' });\",\n \"// });\",\n \"\",\n \"if ",
"end": 3899,
"score": 0.5249502658843994,
"start": 3892,
"tag": "NAME",
"value": "example"
}
] | test/scaffolder.coffee | sundarsy/osprey-cli | 8 | Scaffolder = require '../src/scaffolder'
should = require 'should'
logger = require '../src/utils/logger'
ramlParser = require 'raml-parser'
path = require 'path'
describe 'TOOLKIT SCAFFOLDER', ->
describe 'SCAFFOLDER READ RAML RESOURCES', ->
describe 'SCAFFOLDER GENERATION', ->
it 'Should correctly generate app.coffee', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'coffeescript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createApp options, "coffee"
# Assert
fileWriter.target.should.eql 'target/src/app.coffee'
fileWriter.content.trim().should.eql [
"express = require 'express'",
"path = require 'path'",
"osprey = require 'osprey'",
"",
"app = module.exports = express()",
"",
"app.use express.bodyParser()",
"app.use express.methodOverride()",
"app.use express.compress()",
"app.use express.logger('dev')",
"",
"app.set 'port', process.env.PORT || 3000",
"",
"api = osprey.create '/hello', app,",
" ramlFile: path.join(__dirname, '/assets/raml/" + options.raml + "'),",
" logLevel: 'debug' # logLevel: off->No logs | info->Show Osprey modules initializations | debug->Show all",
"",
"# Adding business logic to a valid RAML Resource",
"# api.get '/examples/:exampleId', (req, res) ->",
"# res.send({ name: 'example' })",
"",
"unless module.parent",
" port = app.get('port')",
" app.listen port",
' console.log "listening on port #{port}"'
].join('\n').trim()
done()
it 'Should correctly generate app.js', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createApp options, "coffee"
# Assert
fileWriter.target.should.eql 'target/src/app.coffee'
fileWriter.content.trim().should.eql [
"var express = require('express');",
"var path = require('path');",
"var osprey = require('osprey');",
"",
"var app = module.exports = express();",
"",
"app.use(express.bodyParser());",
"app.use(express.methodOverride());",
"app.use(express.compress());",
"app.use(express.logger('dev'));",
"",
"app.set('port', process.env.PORT || 3000);",
"",
"api = osprey.create('/hello', app, {",
" ramlFile: path.join(__dirname, '/assets/raml/" + options.raml + "'),",
" logLevel: 'debug' // logLevel: off->No logs | info->Show Osprey modules initializations | debug->Show all",
"});",
"",
"// Adding business logic to a valid RAML Resource",
"// api.get('/examples/:exampleId', function(req, res) {",
"// res.send({ name: 'example' });",
"// });",
"",
"if (!module.parent) {",
" var port = app.get('port');",
" app.listen(port);",
" console.log('listening on port ' + port);",
"}"
].join('\n').trim()
done()
it 'Should correctly generate package file', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createPackage options
# Assert
fileWriter.target.should.eql 'target/package.json'
fileWriter.content.should.eql [
'{',
' "name": "demo",',
' "version": "0.0.1",',
' "private": true,',
' "dependencies": {',
' "express": "3.4.4",',
' "osprey": "0.1.1"',
' },',
' "devDependencies": {',
' "grunt": "~0.4.2",',
' "grunt-contrib-watch": "~0.5.3",',
' "grunt-contrib-copy": "~0.4.1",',
' "grunt-contrib-clean": "~0.5.0",',
' "grunt-mocha-test": "~0.8.1",',
' "mocha": "1.15.1",',
' "should": "2.1.1",',
' "grunt-express-server": "~0.4.13",',
' "load-grunt-tasks": "~0.2.1",',
' "supertest": "~0.8.2",',
' "grunt-contrib-jshint": "~0.8.0"',
' }',
'}',
''
].join('\n')
done()
it 'Should correctly generate Gruntfile.coffee', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) ->
copy: (location,target)=>
@target = target
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'coffeescript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createGruntfile options, 'coffee'
templatePath = path.join __dirname, '../src/templates', options.language, 'Gruntfile.swig'
params =
appName: options.name
content = scaffolder.render templatePath, params
# Assert
fileWriter.target.should.eql 'target/Gruntfile.coffee'
content.should.eql [
"path = require 'path'",
"",
"module.exports = (grunt) ->",
" require('load-grunt-tasks') grunt",
"",
" grunt.initConfig(",
" pkg: grunt.file.readJSON('package.json')",
"",
" coffee:",
" compile:",
" expand: true",
" flatten: false",
" cwd: 'src'",
" src: ['**/*.coffee']",
" dest: './dist'",
" ext: '.js'",
"",
" coffeelint:",
" app: ['src/**/*.coffee']",
" options:",
" max_line_length:",
" level: 'ignore'",
"",
" express:",
" options:",
" cmd: 'coffee'",
" port: process.env.PORT || 3000",
" script: 'src/app.coffee'",
" development:",
" options:",
" node_env: 'development'",
" test:",
" options:",
" node_env: 'test'",
" port: 3001",
" ",
" watch:",
" express:",
" files: ['src/**/*.coffee', 'src/assets/raml/**/*.*']",
" tasks: ['coffeelint', 'express:development']",
" options:",
" spawn: false",
" atBegin: true",
" )",
"",
" grunt.registerTask 'default', ['watch']"
].join('\n')
done()
it 'Should correctly generate Gruntfile.js', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) ->
copy: (location,target)=>
@target = target
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createGruntfile options, 'js'
templatePath = path.join __dirname, '../src/templates', options.language, 'Gruntfile.swig'
params =
appName: options.name
content = scaffolder.render templatePath, params
# Assert
fileWriter.target.should.eql 'target/Gruntfile.js'
content.should.eql [
"var path = require('path');",
"",
"module.exports = function(grunt) {",
" grunt.initConfig({",
" pkg: grunt.file.readJSON('package.json'),",
" jshint: {",
" all: ['src/**/*.js']",
" },",
"",
" express: {",
" options: {",
" port: process.env.PORT || 3000,",
" script: 'src/app.js'",
" },",
" development: {",
" options: {",
" node_env: 'development'",
" }",
" },",
" test: {",
" options: {",
" node_env: 'test',",
" port: 3001",
" }",
" }",
" },",
"",
" watch: {",
" express: {",
" files: ['src/**/*.js', 'src/assets/raml/**/*.*'],",
" tasks: ['jshint', 'express:development'],",
" options: {",
" spawn: false,",
" atBegin: true",
" }",
" }",
" }",
" });",
"",
" require('load-grunt-tasks')(grunt);",
"",
" grunt.registerTask('default', ['watch']);",
"};",
""
].join('\n')
done()
it 'Should correctly generate default raml file having the correct name inside', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
scaffolder.copyRaml options
# Assert
fileWriter.target.should.eql 'target/src/assets/raml/api.raml'
fileWriter.content.should.eql "#%RAML 0.8\n---\ntitle: \"demo\""
done()
| 171289 | Scaffolder = require '../src/scaffolder'
should = require 'should'
logger = require '../src/utils/logger'
ramlParser = require 'raml-parser'
path = require 'path'
describe 'TOOLKIT SCAFFOLDER', ->
describe 'SCAFFOLDER READ RAML RESOURCES', ->
describe 'SCAFFOLDER GENERATION', ->
it 'Should correctly generate app.coffee', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'coffeescript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createApp options, "coffee"
# Assert
fileWriter.target.should.eql 'target/src/app.coffee'
fileWriter.content.trim().should.eql [
"express = require 'express'",
"path = require 'path'",
"osprey = require 'osprey'",
"",
"app = module.exports = express()",
"",
"app.use express.bodyParser()",
"app.use express.methodOverride()",
"app.use express.compress()",
"app.use express.logger('dev')",
"",
"app.set 'port', process.env.PORT || 3000",
"",
"api = osprey.create '/hello', app,",
" ramlFile: path.join(__dirname, '/assets/raml/" + options.raml + "'),",
" logLevel: 'debug' # logLevel: off->No logs | info->Show Osprey modules initializations | debug->Show all",
"",
"# Adding business logic to a valid RAML Resource",
"# api.get '/examples/:exampleId', (req, res) ->",
"# res.send({ name: 'example' })",
"",
"unless module.parent",
" port = app.get('port')",
" app.listen port",
' console.log "listening on port #{port}"'
].join('\n').trim()
done()
it 'Should correctly generate app.js', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createApp options, "coffee"
# Assert
fileWriter.target.should.eql 'target/src/app.coffee'
fileWriter.content.trim().should.eql [
"var express = require('express');",
"var path = require('path');",
"var osprey = require('osprey');",
"",
"var app = module.exports = express();",
"",
"app.use(express.bodyParser());",
"app.use(express.methodOverride());",
"app.use(express.compress());",
"app.use(express.logger('dev'));",
"",
"app.set('port', process.env.PORT || 3000);",
"",
"api = osprey.create('/hello', app, {",
" ramlFile: path.join(__dirname, '/assets/raml/" + options.raml + "'),",
" logLevel: 'debug' // logLevel: off->No logs | info->Show Osprey modules initializations | debug->Show all",
"});",
"",
"// Adding business logic to a valid RAML Resource",
"// api.get('/examples/:exampleId', function(req, res) {",
"// res.send({ name: '<NAME>' });",
"// });",
"",
"if (!module.parent) {",
" var port = app.get('port');",
" app.listen(port);",
" console.log('listening on port ' + port);",
"}"
].join('\n').trim()
done()
it 'Should correctly generate package file', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createPackage options
# Assert
fileWriter.target.should.eql 'target/package.json'
fileWriter.content.should.eql [
'{',
' "name": "demo",',
' "version": "0.0.1",',
' "private": true,',
' "dependencies": {',
' "express": "3.4.4",',
' "osprey": "0.1.1"',
' },',
' "devDependencies": {',
' "grunt": "~0.4.2",',
' "grunt-contrib-watch": "~0.5.3",',
' "grunt-contrib-copy": "~0.4.1",',
' "grunt-contrib-clean": "~0.5.0",',
' "grunt-mocha-test": "~0.8.1",',
' "mocha": "1.15.1",',
' "should": "2.1.1",',
' "grunt-express-server": "~0.4.13",',
' "load-grunt-tasks": "~0.2.1",',
' "supertest": "~0.8.2",',
' "grunt-contrib-jshint": "~0.8.0"',
' }',
'}',
''
].join('\n')
done()
it 'Should correctly generate Gruntfile.coffee', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) ->
copy: (location,target)=>
@target = target
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'coffeescript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createGruntfile options, 'coffee'
templatePath = path.join __dirname, '../src/templates', options.language, 'Gruntfile.swig'
params =
appName: options.name
content = scaffolder.render templatePath, params
# Assert
fileWriter.target.should.eql 'target/Gruntfile.coffee'
content.should.eql [
"path = require 'path'",
"",
"module.exports = (grunt) ->",
" require('load-grunt-tasks') grunt",
"",
" grunt.initConfig(",
" pkg: grunt.file.readJSON('package.json')",
"",
" coffee:",
" compile:",
" expand: true",
" flatten: false",
" cwd: 'src'",
" src: ['**/*.coffee']",
" dest: './dist'",
" ext: '.js'",
"",
" coffeelint:",
" app: ['src/**/*.coffee']",
" options:",
" max_line_length:",
" level: 'ignore'",
"",
" express:",
" options:",
" cmd: 'coffee'",
" port: process.env.PORT || 3000",
" script: 'src/app.coffee'",
" development:",
" options:",
" node_env: 'development'",
" test:",
" options:",
" node_env: 'test'",
" port: 3001",
" ",
" watch:",
" express:",
" files: ['src/**/*.coffee', 'src/assets/raml/**/*.*']",
" tasks: ['coffeelint', 'express:development']",
" options:",
" spawn: false",
" atBegin: true",
" )",
"",
" grunt.registerTask 'default', ['watch']"
].join('\n')
done()
it 'Should correctly generate Gruntfile.js', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) ->
copy: (location,target)=>
@target = target
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createGruntfile options, 'js'
templatePath = path.join __dirname, '../src/templates', options.language, 'Gruntfile.swig'
params =
appName: options.name
content = scaffolder.render templatePath, params
# Assert
fileWriter.target.should.eql 'target/Gruntfile.js'
content.should.eql [
"var path = require('path');",
"",
"module.exports = function(grunt) {",
" grunt.initConfig({",
" pkg: grunt.file.readJSON('package.json'),",
" jshint: {",
" all: ['src/**/*.js']",
" },",
"",
" express: {",
" options: {",
" port: process.env.PORT || 3000,",
" script: 'src/app.js'",
" },",
" development: {",
" options: {",
" node_env: 'development'",
" }",
" },",
" test: {",
" options: {",
" node_env: 'test',",
" port: 3001",
" }",
" }",
" },",
"",
" watch: {",
" express: {",
" files: ['src/**/*.js', 'src/assets/raml/**/*.*'],",
" tasks: ['jshint', 'express:development'],",
" options: {",
" spawn: false,",
" atBegin: true",
" }",
" }",
" }",
" });",
"",
" require('load-grunt-tasks')(grunt);",
"",
" grunt.registerTask('default', ['watch']);",
"};",
""
].join('\n')
done()
it 'Should correctly generate default raml file having the correct name inside', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
scaffolder.copyRaml options
# Assert
fileWriter.target.should.eql 'target/src/assets/raml/api.raml'
fileWriter.content.should.eql "#%RAML 0.8\n---\ntitle: \"demo\""
done()
| true | Scaffolder = require '../src/scaffolder'
should = require 'should'
logger = require '../src/utils/logger'
ramlParser = require 'raml-parser'
path = require 'path'
describe 'TOOLKIT SCAFFOLDER', ->
describe 'SCAFFOLDER READ RAML RESOURCES', ->
describe 'SCAFFOLDER GENERATION', ->
it 'Should correctly generate app.coffee', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'coffeescript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createApp options, "coffee"
# Assert
fileWriter.target.should.eql 'target/src/app.coffee'
fileWriter.content.trim().should.eql [
"express = require 'express'",
"path = require 'path'",
"osprey = require 'osprey'",
"",
"app = module.exports = express()",
"",
"app.use express.bodyParser()",
"app.use express.methodOverride()",
"app.use express.compress()",
"app.use express.logger('dev')",
"",
"app.set 'port', process.env.PORT || 3000",
"",
"api = osprey.create '/hello', app,",
" ramlFile: path.join(__dirname, '/assets/raml/" + options.raml + "'),",
" logLevel: 'debug' # logLevel: off->No logs | info->Show Osprey modules initializations | debug->Show all",
"",
"# Adding business logic to a valid RAML Resource",
"# api.get '/examples/:exampleId', (req, res) ->",
"# res.send({ name: 'example' })",
"",
"unless module.parent",
" port = app.get('port')",
" app.listen port",
' console.log "listening on port #{port}"'
].join('\n').trim()
done()
it 'Should correctly generate app.js', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createApp options, "coffee"
# Assert
fileWriter.target.should.eql 'target/src/app.coffee'
fileWriter.content.trim().should.eql [
"var express = require('express');",
"var path = require('path');",
"var osprey = require('osprey');",
"",
"var app = module.exports = express();",
"",
"app.use(express.bodyParser());",
"app.use(express.methodOverride());",
"app.use(express.compress());",
"app.use(express.logger('dev'));",
"",
"app.set('port', process.env.PORT || 3000);",
"",
"api = osprey.create('/hello', app, {",
" ramlFile: path.join(__dirname, '/assets/raml/" + options.raml + "'),",
" logLevel: 'debug' // logLevel: off->No logs | info->Show Osprey modules initializations | debug->Show all",
"});",
"",
"// Adding business logic to a valid RAML Resource",
"// api.get('/examples/:exampleId', function(req, res) {",
"// res.send({ name: 'PI:NAME:<NAME>END_PI' });",
"// });",
"",
"if (!module.parent) {",
" var port = app.get('port');",
" app.listen(port);",
" console.log('listening on port ' + port);",
"}"
].join('\n').trim()
done()
it 'Should correctly generate package file', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createPackage options
# Assert
fileWriter.target.should.eql 'target/package.json'
fileWriter.content.should.eql [
'{',
' "name": "demo",',
' "version": "0.0.1",',
' "private": true,',
' "dependencies": {',
' "express": "3.4.4",',
' "osprey": "0.1.1"',
' },',
' "devDependencies": {',
' "grunt": "~0.4.2",',
' "grunt-contrib-watch": "~0.5.3",',
' "grunt-contrib-copy": "~0.4.1",',
' "grunt-contrib-clean": "~0.5.0",',
' "grunt-mocha-test": "~0.8.1",',
' "mocha": "1.15.1",',
' "should": "2.1.1",',
' "grunt-express-server": "~0.4.13",',
' "load-grunt-tasks": "~0.2.1",',
' "supertest": "~0.8.2",',
' "grunt-contrib-jshint": "~0.8.0"',
' }',
'}',
''
].join('\n')
done()
it 'Should correctly generate Gruntfile.coffee', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) ->
copy: (location,target)=>
@target = target
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'coffeescript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createGruntfile options, 'coffee'
templatePath = path.join __dirname, '../src/templates', options.language, 'Gruntfile.swig'
params =
appName: options.name
content = scaffolder.render templatePath, params
# Assert
fileWriter.target.should.eql 'target/Gruntfile.coffee'
content.should.eql [
"path = require 'path'",
"",
"module.exports = (grunt) ->",
" require('load-grunt-tasks') grunt",
"",
" grunt.initConfig(",
" pkg: grunt.file.readJSON('package.json')",
"",
" coffee:",
" compile:",
" expand: true",
" flatten: false",
" cwd: 'src'",
" src: ['**/*.coffee']",
" dest: './dist'",
" ext: '.js'",
"",
" coffeelint:",
" app: ['src/**/*.coffee']",
" options:",
" max_line_length:",
" level: 'ignore'",
"",
" express:",
" options:",
" cmd: 'coffee'",
" port: process.env.PORT || 3000",
" script: 'src/app.coffee'",
" development:",
" options:",
" node_env: 'development'",
" test:",
" options:",
" node_env: 'test'",
" port: 3001",
" ",
" watch:",
" express:",
" files: ['src/**/*.coffee', 'src/assets/raml/**/*.*']",
" tasks: ['coffeelint', 'express:development']",
" options:",
" spawn: false",
" atBegin: true",
" )",
"",
" grunt.registerTask 'default', ['watch']"
].join('\n')
done()
it 'Should correctly generate Gruntfile.js', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) ->
copy: (location,target)=>
@target = target
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
options.raml = 'hello.raml'
scaffolder.createGruntfile options, 'js'
templatePath = path.join __dirname, '../src/templates', options.language, 'Gruntfile.swig'
params =
appName: options.name
content = scaffolder.render templatePath, params
# Assert
fileWriter.target.should.eql 'target/Gruntfile.js'
content.should.eql [
"var path = require('path');",
"",
"module.exports = function(grunt) {",
" grunt.initConfig({",
" pkg: grunt.file.readJSON('package.json'),",
" jshint: {",
" all: ['src/**/*.js']",
" },",
"",
" express: {",
" options: {",
" port: process.env.PORT || 3000,",
" script: 'src/app.js'",
" },",
" development: {",
" options: {",
" node_env: 'development'",
" }",
" },",
" test: {",
" options: {",
" node_env: 'test',",
" port: 3001",
" }",
" }",
" },",
"",
" watch: {",
" express: {",
" files: ['src/**/*.js', 'src/assets/raml/**/*.*'],",
" tasks: ['jshint', 'express:development'],",
" options: {",
" spawn: false,",
" atBegin: true",
" }",
" }",
" }",
" });",
"",
" require('load-grunt-tasks')(grunt);",
"",
" grunt.registerTask('default', ['watch']);",
"};",
""
].join('\n')
done()
it 'Should correctly generate default raml file having the correct name inside', (done) ->
# Arrange
fileWriter = new (class FileWriter
writeFile: (target, content) =>
@target = target
@content = content
copy: (src, target)->
lstatSync: (src)->
return new (class directory
isDirectory:()->
return true
)
copyRecursive: ()->
)
scaffolder = new Scaffolder logger, fileWriter
# Act
options = new Object()
options.baseUri = 'hello'
options.language = 'javascript'
options.target = './target'
options.name = 'demo'
scaffolder.copyRaml options
# Assert
fileWriter.target.should.eql 'target/src/assets/raml/api.raml'
fileWriter.content.should.eql "#%RAML 0.8\n---\ntitle: \"demo\""
done()
|
[
{
"context": "uildEndpoint(resource, data)\n username : 'x'\n password : @apiToken\n dataType ",
"end": 2062,
"score": 0.9058769345283508,
"start": 2061,
"tag": "USERNAME",
"value": "x"
},
{
"context": " data)\n username : 'x'\n password : @api... | app/assets/javascripts/utility_box/connection_manager.js.coffee | digitalopera/utility_box | 1 | #= require utility_box/utility_box_namespace
#= require utility_box/forms
# @class connectionManager
# A simple global interface for ajax calls and handlebars templates
#
# @method get
# resource [Handlebar Template]
# data [Object]
#
# @method post
# resource [Handlebar Template]
# data [Object]
#
# @method put
# resource [Handlebar Template]
# data [Object]
#
# @method destroy
# resource [Handlebar Template]
# data [Object]
#
# Example:
#
# file team_url.hbs
# /api/v1/team/{{id}}
#
# file script.js
# var conn = window.utilityBox.connectionManager()
# conn.get('team_url', {id: 1})
# conn.success(function(data){
# console.log(data)
# });
class window.utilityBox.connectionManager
constructor : ->
# gather tokens for each ajax call
@apiToken = $('meta[name="api-token"]').attr('content')
@csrfToken = $('meta[name="csrf-token"]').attr('content')
# add progress event to ajax calls
originalXhr = $.ajaxSettings.xhr
$.ajaxSetup({
progress: $.noop
xhr: ->
xhr = originalXhr()
if xhr instanceof window.XMLHttpRequest
xhr.addEventListener('progress', this.progress, false)
if xhr.upload
xhr.upload.addEventListener('progress', this.progress, false)
xhr.setRequestHeader 'X-CSRF-Token', @csrfToken
xhr
})
get : (resource, data={}) ->
@_send @_buildParams('GET', resource, data)
post : (resource, data={}) ->
@_send @_buildParams('POST', resource, data)
put : (resource, data={}) ->
@_send @_buildParams('PUT', resource, data)
destroy : (resource, data={}) ->
@_send @_buildParams('DELETE', resource, data)
_send : (params) ->
return $.ajax(params)
_buildParams : (method = 'POST', resource, data={}) ->
if method == 'GET'
formData = $.param(data)
else
formData = new FormData()
for key, val of window.utilityBox.parameterizeObject(data)
formData.append key, val
params = {
type : method
url : @buildEndpoint(resource, data)
username : 'x'
password : @apiToken
dataType : 'json'
data : formData
cache : false,
contentType : false,
processData : false
}
buildEndpoint : (resource='base_uri', data) ->
JST["endpoints/#{resource}"](data) | 5862 | #= require utility_box/utility_box_namespace
#= require utility_box/forms
# @class connectionManager
# A simple global interface for ajax calls and handlebars templates
#
# @method get
# resource [Handlebar Template]
# data [Object]
#
# @method post
# resource [Handlebar Template]
# data [Object]
#
# @method put
# resource [Handlebar Template]
# data [Object]
#
# @method destroy
# resource [Handlebar Template]
# data [Object]
#
# Example:
#
# file team_url.hbs
# /api/v1/team/{{id}}
#
# file script.js
# var conn = window.utilityBox.connectionManager()
# conn.get('team_url', {id: 1})
# conn.success(function(data){
# console.log(data)
# });
class window.utilityBox.connectionManager
constructor : ->
# gather tokens for each ajax call
@apiToken = $('meta[name="api-token"]').attr('content')
@csrfToken = $('meta[name="csrf-token"]').attr('content')
# add progress event to ajax calls
originalXhr = $.ajaxSettings.xhr
$.ajaxSetup({
progress: $.noop
xhr: ->
xhr = originalXhr()
if xhr instanceof window.XMLHttpRequest
xhr.addEventListener('progress', this.progress, false)
if xhr.upload
xhr.upload.addEventListener('progress', this.progress, false)
xhr.setRequestHeader 'X-CSRF-Token', @csrfToken
xhr
})
get : (resource, data={}) ->
@_send @_buildParams('GET', resource, data)
post : (resource, data={}) ->
@_send @_buildParams('POST', resource, data)
put : (resource, data={}) ->
@_send @_buildParams('PUT', resource, data)
destroy : (resource, data={}) ->
@_send @_buildParams('DELETE', resource, data)
_send : (params) ->
return $.ajax(params)
_buildParams : (method = 'POST', resource, data={}) ->
if method == 'GET'
formData = $.param(data)
else
formData = new FormData()
for key, val of window.utilityBox.parameterizeObject(data)
formData.append key, val
params = {
type : method
url : @buildEndpoint(resource, data)
username : 'x'
password : <PASSWORD>
dataType : 'json'
data : formData
cache : false,
contentType : false,
processData : false
}
buildEndpoint : (resource='base_uri', data) ->
JST["endpoints/#{resource}"](data) | true | #= require utility_box/utility_box_namespace
#= require utility_box/forms
# @class connectionManager
# A simple global interface for ajax calls and handlebars templates
#
# @method get
# resource [Handlebar Template]
# data [Object]
#
# @method post
# resource [Handlebar Template]
# data [Object]
#
# @method put
# resource [Handlebar Template]
# data [Object]
#
# @method destroy
# resource [Handlebar Template]
# data [Object]
#
# Example:
#
# file team_url.hbs
# /api/v1/team/{{id}}
#
# file script.js
# var conn = window.utilityBox.connectionManager()
# conn.get('team_url', {id: 1})
# conn.success(function(data){
# console.log(data)
# });
class window.utilityBox.connectionManager
constructor : ->
# gather tokens for each ajax call
@apiToken = $('meta[name="api-token"]').attr('content')
@csrfToken = $('meta[name="csrf-token"]').attr('content')
# add progress event to ajax calls
originalXhr = $.ajaxSettings.xhr
$.ajaxSetup({
progress: $.noop
xhr: ->
xhr = originalXhr()
if xhr instanceof window.XMLHttpRequest
xhr.addEventListener('progress', this.progress, false)
if xhr.upload
xhr.upload.addEventListener('progress', this.progress, false)
xhr.setRequestHeader 'X-CSRF-Token', @csrfToken
xhr
})
get : (resource, data={}) ->
@_send @_buildParams('GET', resource, data)
post : (resource, data={}) ->
@_send @_buildParams('POST', resource, data)
put : (resource, data={}) ->
@_send @_buildParams('PUT', resource, data)
destroy : (resource, data={}) ->
@_send @_buildParams('DELETE', resource, data)
_send : (params) ->
return $.ajax(params)
_buildParams : (method = 'POST', resource, data={}) ->
if method == 'GET'
formData = $.param(data)
else
formData = new FormData()
for key, val of window.utilityBox.parameterizeObject(data)
formData.append key, val
params = {
type : method
url : @buildEndpoint(resource, data)
username : 'x'
password : PI:PASSWORD:<PASSWORD>END_PI
dataType : 'json'
data : formData
cache : false,
contentType : false,
processData : false
}
buildEndpoint : (resource='base_uri', data) ->
JST["endpoints/#{resource}"](data) |
[
{
"context": "Algorithm API for JavaScript\n# https://github.com/kzokm/ga.js\n#\n# Copyright (c) 2014 OKAMURA, Kazuhide\n#\n",
"end": 69,
"score": 0.9996655583381653,
"start": 64,
"tag": "USERNAME",
"value": "kzokm"
},
{
"context": "ps://github.com/kzokm/ga.js\n#\n# Copyright (c) 201... | lib/selector.coffee | kzokm/ga.js | 4 | ###
# Genetic Algorithm API for JavaScript
# https://github.com/kzokm/ga.js
#
# Copyright (c) 2014 OKAMURA, Kazuhide
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
###
{randomInt} = require './utils'
class Selector
constructor: (@next)->
@roulette: (population)->
S = population.fitness.sum()
new Selector ->
r = Math.random() * S
s = 0
population.sample (I)->
(s += I.fitness) > r
@tournament: (population, size = @tournament.defaultSize)->
N = population.size()
selector = new Selector ->
group = for [1..size]
population.get randomInt N
(group.sort population.comparator)[0]
Object.defineProperty selector, 'size', value: size
Object.defineProperty @tournament, 'defaultSize', value: 4
module.exports = Selector
| 75568 | ###
# Genetic Algorithm API for JavaScript
# https://github.com/kzokm/ga.js
#
# Copyright (c) 2014 <NAME>, <NAME>
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
###
{randomInt} = require './utils'
class Selector
constructor: (@next)->
@roulette: (population)->
S = population.fitness.sum()
new Selector ->
r = Math.random() * S
s = 0
population.sample (I)->
(s += I.fitness) > r
@tournament: (population, size = @tournament.defaultSize)->
N = population.size()
selector = new Selector ->
group = for [1..size]
population.get randomInt N
(group.sort population.comparator)[0]
Object.defineProperty selector, 'size', value: size
Object.defineProperty @tournament, 'defaultSize', value: 4
module.exports = Selector
| true | ###
# Genetic Algorithm API for JavaScript
# https://github.com/kzokm/ga.js
#
# Copyright (c) 2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
###
{randomInt} = require './utils'
class Selector
constructor: (@next)->
@roulette: (population)->
S = population.fitness.sum()
new Selector ->
r = Math.random() * S
s = 0
population.sample (I)->
(s += I.fitness) > r
@tournament: (population, size = @tournament.defaultSize)->
N = population.size()
selector = new Selector ->
group = for [1..size]
population.get randomInt N
(group.sort population.comparator)[0]
Object.defineProperty selector, 'size', value: size
Object.defineProperty @tournament, 'defaultSize', value: 4
module.exports = Selector
|
[
{
"context": " courseModel(\n cno: i + ''\n cname: \"Thinkins in #{i}\"\n credit: 6\n teacher: 'Stev",
"end": 213,
"score": 0.8197946548461914,
"start": 205,
"tag": "NAME",
"value": "Thinkins"
},
{
"context": "kins in #{i}\"\n credit: 6\n tea... | test/courseTest.coffee | ycjcl868/StudentInfo | 0 | db = require('../config/database');
mongoose = require 'mongoose'
courseModel = mongoose.model('Course')
for i in [200002..200010]
courseEntity = new courseModel(
cno: i + ''
cname: "Thinkins in #{i}"
credit: 6
teacher: 'Steve Jobs'
)
courseEntity.save (err)->
if err?
console.log(err)
| 135932 | db = require('../config/database');
mongoose = require 'mongoose'
courseModel = mongoose.model('Course')
for i in [200002..200010]
courseEntity = new courseModel(
cno: i + ''
cname: "<NAME> in #{i}"
credit: 6
teacher: '<NAME>'
)
courseEntity.save (err)->
if err?
console.log(err)
| true | db = require('../config/database');
mongoose = require 'mongoose'
courseModel = mongoose.model('Course')
for i in [200002..200010]
courseEntity = new courseModel(
cno: i + ''
cname: "PI:NAME:<NAME>END_PI in #{i}"
credit: 6
teacher: 'PI:NAME:<NAME>END_PI'
)
courseEntity.save (err)->
if err?
console.log(err)
|
[
{
"context": "# Copyright (c) 2012 Mattes Groeger\n#\n# Permission is hereby granted, free of charge,",
"end": 35,
"score": 0.9998874664306641,
"start": 21,
"tag": "NAME",
"value": "Mattes Groeger"
}
] | src/link_grabber.coffee | MattesGroeger/chrome-open-links | 14 | # Copyright (c) 2012 Mattes Groeger
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
BLACKLIST = [/^mailto\:/i, /^javascript/i]
class window.LinkGrabber
constructor: (@html, @selection) ->
@links = {}
@fromSelection: (selection) ->
appendChild = (container, selection, rangeIndex) ->
container.appendChild selection.getRangeAt(rangeIndex).cloneContents()
container = document.createElement "div"
appendChild(container, selection, i) for i in [0...selection.rangeCount]
new LinkGrabber(container, selection.toString())
@fromHTMLString: (htmlString) ->
container = document.createElement "div"
container.innerHTML = htmlString
new LinkGrabber(container, container.innerText)
allLinks: ->
this.gatherHTMLLinks()
this.gatherPlainLinks()
this.createLinkList()
gatherHTMLLinks: ->
aTags = @html.getElementsByTagName "a"
for tag in aTags when tag.href and not this.onBlackList(tag.href)
@links[tag.href] = true
onBlackList: (link) ->
for pattern in BLACKLIST
if link.match(pattern)
return true
return false
gatherPlainLinks: ->
regex = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig
matches = @selection.match(regex)
if matches
for match in matches
@links[match] = true
createLinkList: ->
url for url of @links | 170504 | # Copyright (c) 2012 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
BLACKLIST = [/^mailto\:/i, /^javascript/i]
class window.LinkGrabber
constructor: (@html, @selection) ->
@links = {}
@fromSelection: (selection) ->
appendChild = (container, selection, rangeIndex) ->
container.appendChild selection.getRangeAt(rangeIndex).cloneContents()
container = document.createElement "div"
appendChild(container, selection, i) for i in [0...selection.rangeCount]
new LinkGrabber(container, selection.toString())
@fromHTMLString: (htmlString) ->
container = document.createElement "div"
container.innerHTML = htmlString
new LinkGrabber(container, container.innerText)
allLinks: ->
this.gatherHTMLLinks()
this.gatherPlainLinks()
this.createLinkList()
gatherHTMLLinks: ->
aTags = @html.getElementsByTagName "a"
for tag in aTags when tag.href and not this.onBlackList(tag.href)
@links[tag.href] = true
onBlackList: (link) ->
for pattern in BLACKLIST
if link.match(pattern)
return true
return false
gatherPlainLinks: ->
regex = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig
matches = @selection.match(regex)
if matches
for match in matches
@links[match] = true
createLinkList: ->
url for url of @links | true | # Copyright (c) 2012 PI:NAME:<NAME>END_PI
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Link protocols we never want to collect.
BLACKLIST = [/^mailto\:/i, /^javascript/i]

# Collects the set of unique URLs found in a chunk of HTML plus its
# plain-text rendering: anchor hrefs (minus blacklisted protocols) and
# bare URLs appearing in the text.
class window.LinkGrabber
  # html: a DOM node scanned for <a> tags
  # selection: the plain-text counterpart, scanned with a URL regex
  constructor: (@html, @selection) ->
    @links = {}

  # Build a grabber from a browser Selection by cloning every selected
  # range into a detached <div>.
  @fromSelection: (selection) ->
    container = document.createElement "div"
    for rangeIndex in [0...selection.rangeCount]
      container.appendChild selection.getRangeAt(rangeIndex).cloneContents()
    new LinkGrabber(container, selection.toString())

  # Build a grabber from raw HTML markup.
  @fromHTMLString: (htmlString) ->
    container = document.createElement "div"
    container.innerHTML = htmlString
    new LinkGrabber(container, container.innerText)

  # Gather links from both sources and return them as a de-duplicated list.
  allLinks: ->
    @gatherHTMLLinks()
    @gatherPlainLinks()
    @createLinkList()

  # Record the href of every anchor tag, skipping blacklisted protocols.
  gatherHTMLLinks: ->
    anchors = @html.getElementsByTagName "a"
    for anchor in anchors when anchor.href and not @onBlackList(anchor.href)
      @links[anchor.href] = true

  # True when the link matches any blacklisted protocol pattern.
  onBlackList: (link) ->
    BLACKLIST.some (pattern) -> link.match(pattern)?

  # Record every bare URL found in the plain-text selection.
  gatherPlainLinks: ->
    urlPattern = /(\b(https?|ftp|file):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/ig
    found = @selection.match(urlPattern)
    if found
      @links[hit] = true for hit in found

  # The keys of @links, i.e. each URL exactly once.
  createLinkList: ->
    link for link of @links
[
{
"context": "equest\n headers:\n Authorization: 'chuckDefinesHisOwnTokens'\n uri: \"#{app.get('url')}v1/DailyReportPin",
"end": 519,
"score": 0.6726542115211487,
"start": 495,
"tag": "KEY",
"value": "chuckDefinesHisOwnTokens"
},
{
expect = require('chai').expect
request = require 'request'
fixtures = require '../utils/fixtures'
app = require '../../src/server'

# Integration tests for the DailyReportPing collection endpoint.
describe 'api/DailyReportPing endpoint', ->
  server = null

  # Boot the HTTP server once for this suite; tear it down afterwards.
  before (done) ->
    server = app.listen(done)

  after (done) ->
    server.close(done)

  # Reset fixture data around every test so cases stay independent.
  beforeEach ->
    fixtures.loadAll(app)

  afterEach ->
    fixtures.deleteAll(app)

  describe 'POST /', ->
    it 'should set time', (done) ->
      request
        headers:
          Authorization: 'chuckDefinesHisOwnTokens'
        uri: "#{app.get('url')}v1/DailyReportPings"
        method: 'POST'
        json: true
        body:
          name: 'test'
      , (err, response, body) ->
        # The created document echoes the name and carries a parseable
        # createdAt timestamp set by the server.
        expect(body.name).to.eql 'test'
        expect(Date.parse(body.createdAt)).not.to.be.NaN
        done()
| 98038 | expect = require('chai').expect
request = require 'request'
fixtures = require '../utils/fixtures'
app = require '../../src/server'
describe 'api/DailyReportPing endpoint', ->
server = null
before (done) ->
server = app.listen(done)
after (done) ->
server.close(done)
beforeEach ->
fixtures.loadAll(app)
afterEach ->
fixtures.deleteAll(app)
describe 'POST /', ->
it 'should set time', (done) ->
request
headers:
Authorization: '<KEY>'
uri: "#{app.get('url')}v1/DailyReportPings"
method: 'POST'
json: true
body:
name: '<NAME>'
, (err, response, body) ->
expect(body.name).to.eql '<NAME>'
expect(Date.parse(body.createdAt)).not.to.be.NaN
done()
| true | expect = require('chai').expect
request = require 'request'
fixtures = require '../utils/fixtures'
app = require '../../src/server'
describe 'api/DailyReportPing endpoint', ->
server = null
before (done) ->
server = app.listen(done)
after (done) ->
server.close(done)
beforeEach ->
fixtures.loadAll(app)
afterEach ->
fixtures.deleteAll(app)
describe 'POST /', ->
it 'should set time', (done) ->
request
headers:
Authorization: 'PI:KEY:<KEY>END_PI'
uri: "#{app.get('url')}v1/DailyReportPings"
method: 'POST'
json: true
body:
name: 'PI:NAME:<NAME>END_PI'
, (err, response, body) ->
expect(body.name).to.eql 'PI:NAME:<NAME>END_PI'
expect(Date.parse(body.createdAt)).not.to.be.NaN
done()
|
[
{
"context": " key: '1.1', stop: true, ->\n @call key: '1.1.1', ({tools: {find}}) ->\n find ({config}",
"end": 1576,
"score": 0.7880939841270447,
"start": 1571,
"tag": "KEY",
"value": "1.1.1"
},
{
"context": "l key: null, stop: true, ->\n @call key: '1.1.... | packages/core/test/plugins/tools/find.coffee | shivaylamba/meilisearch-gatsby-plugin-guide | 31 |
{tags} = require '../../test'
nikita = require '../../../src'

# Tests for the `tools.find` plugin: walk the action hierarchy upward and
# resolve with the first non-undefined value produced by the iterator.
describe 'plugins.tools.find', ->
  return unless tags.api

  describe 'action', ->

    it 'start in current action', ->
      nikita.call a_key: 'a value', ->
        @tools.find (action) ->
          action.config.a_key
      .should.be.resolvedWith 'a value'

    it 'traverse the parent hierarchy', ->
      nikita.call a_key: 'a value', ->
        @call ->
          visited = 0
          @tools.find (action) ->
            visited++
            return unless action.config.a_key
            a_key: action.config.a_key, depth: action.metadata.depth
          .should.be.resolvedWith a_key: 'a value', depth: 1
          .then -> visited.should.eql 2

    it 'traverse from parent', ->
      nikita.call a_key: 'a value', ->
        @call -> @call -> @call -> @call (action) ->
          visited = 0
          @tools.find action.parent.parent, (action) ->
            visited++
            return unless action.config.a_key
            a_key: action.config.a_key, depth: action.metadata.depth
          .should.be.resolvedWith a_key: 'a value', depth: 1
          .then -> visited.should.eql 2

  describe 'function', ->

    it 'start in current action', ->
      nikita.call a_key: 'a value', ({tools}) ->
        tools.find (action) ->
          action.config.a_key
      .should.be.resolvedWith 'a value'

  describe 'usage', ->

    it 'return the first value found', ->
      nikita
      .call key: '1', stop: true, ->
        @call key: '1.1', stop: true, ->
          @call key: '1.1.1', ({tools: {find}}) ->
            find ({config}) ->
              config.key if config.stop
      .should.be.resolvedWith '1.1'

    it 'null is interpreted as a value', ->
      nikita
      .call key: '1', stop: true, ->
        @call key: null, stop: true, ->
          @call key: '1.1.1', ({tools: {find}}) ->
            find ({config}) ->
              config.key if config.stop
      .should.be.resolvedWith null
| 71546 |
{tags} = require '../../test'
nikita = require '../../../src'
describe 'plugins.tools.find', ->
return unless tags.api
describe 'action', ->
it 'start in current action', ->
nikita.call a_key: 'a value', ->
@tools.find (action) ->
action.config.a_key
.should.be.resolvedWith 'a value'
it 'traverse the parent hierarchy', ->
nikita.call a_key: 'a value', ->
@call ->
count = 0
@tools.find (action) ->
count++
return unless action.config.a_key
a_key: action.config.a_key, depth: action.metadata.depth
.should.be.resolvedWith a_key: 'a value', depth: 1
.then -> count.should.eql 2
it 'traverse from parent', ->
nikita.call a_key: 'a value', ->
@call -> @call -> @call -> @call (action) ->
count = 0
@tools.find action.parent.parent, (action) ->
count++
return unless action.config.a_key
a_key: action.config.a_key, depth: action.metadata.depth
.should.be.resolvedWith a_key: 'a value', depth: 1
.then -> count.should.eql 2
describe 'function', ->
it 'start in current action', ->
nikita.call a_key: 'a value', ({tools}) ->
tools.find (action) ->
action.config.a_key
.should.be.resolvedWith 'a value'
describe 'usage', ->
it 'return the first value found', ->
nikita
.call key: '1', stop: true, ->
@call key: '1.1', stop: true, ->
@call key: '<KEY>', ({tools: {find}}) ->
find ({config}) ->
config.key if config.stop
.should.be.resolvedWith '1.1'
it 'null is interpreted as a value', ->
nikita
.call key: '1', stop: true, ->
@call key: null, stop: true, ->
@call key: '<KEY>', ({tools: {find}}) ->
find ({config}) ->
config.key if config.stop
.should.be.resolvedWith null
| true |
{tags} = require '../../test'
nikita = require '../../../src'
describe 'plugins.tools.find', ->
return unless tags.api
describe 'action', ->
it 'start in current action', ->
nikita.call a_key: 'a value', ->
@tools.find (action) ->
action.config.a_key
.should.be.resolvedWith 'a value'
it 'traverse the parent hierarchy', ->
nikita.call a_key: 'a value', ->
@call ->
count = 0
@tools.find (action) ->
count++
return unless action.config.a_key
a_key: action.config.a_key, depth: action.metadata.depth
.should.be.resolvedWith a_key: 'a value', depth: 1
.then -> count.should.eql 2
it 'traverse from parent', ->
nikita.call a_key: 'a value', ->
@call -> @call -> @call -> @call (action) ->
count = 0
@tools.find action.parent.parent, (action) ->
count++
return unless action.config.a_key
a_key: action.config.a_key, depth: action.metadata.depth
.should.be.resolvedWith a_key: 'a value', depth: 1
.then -> count.should.eql 2
describe 'function', ->
it 'start in current action', ->
nikita.call a_key: 'a value', ({tools}) ->
tools.find (action) ->
action.config.a_key
.should.be.resolvedWith 'a value'
describe 'usage', ->
it 'return the first value found', ->
nikita
.call key: '1', stop: true, ->
@call key: '1.1', stop: true, ->
@call key: 'PI:KEY:<KEY>END_PI', ({tools: {find}}) ->
find ({config}) ->
config.key if config.stop
.should.be.resolvedWith '1.1'
it 'null is interpreted as a value', ->
nikita
.call key: '1', stop: true, ->
@call key: null, stop: true, ->
@call key: 'PI:KEY:<KEY>END_PI', ({tools: {find}}) ->
find ({config}) ->
config.key if config.stop
.should.be.resolvedWith null
|
[
{
"context": "#################\n# Copyright (C) 2014-2017 by Vaughn Iverson\n# meteor-job-class is free software released ",
"end": 124,
"score": 0.9997482895851135,
"start": 110,
"tag": "NAME",
"value": "Vaughn Iverson"
},
{
"context": " on a saved job', () ->\n ... | test/index.coffee | vsivsi/meteor-job | 37 | ############################################################################
# Copyright (C) 2014-2017 by Vaughn Iverson
# meteor-job-class is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
# Unit tests
assert = require('chai').assert
rewire = require 'rewire'
sinon = require 'sinon'
Fiber = require 'fibers'
Job = rewire '../src/job_class.coffee'
# Mock DDP class
#
# Emulates the subset of a DDP connection used by these tests: a `call`
# method that answers a few well-known fake server methods either
# synchronously (no callback) or node-style on the next tick (with a
# callback), plus no-op lifecycle stubs.
class DDP

  # Invoke a fake server method.
  # Without a callback: returns the result or throws the error directly.
  # With a callback: delivers (err, result) on the next tick.
  call: (name, params, cb = null) ->
    unless cb? and typeof cb is 'function'
      switch name
        when 'root_true'
          return true
        when 'root_false'
          return false
        when 'root_param'
          return params[0]
        when 'root_error'
          throw new Error "Method failed"
        else
          throw new Error "Bad method in call"
    else
      switch name
        when 'root_true'
          process.nextTick () -> cb null, true
        when 'root_false'
          process.nextTick () -> cb null, false
        when 'root_param'
          process.nextTick () -> cb null, params[0]
        when 'root_error'
          process.nextTick () -> cb new Error "Method failed"
        else
          process.nextTick () -> cb new Error "Bad method in call"
    return

  # BUG FIX: these lifecycle stubs invoked `cb(null)` but declared no `cb`
  # parameter and no `cb` existed in scope, so each call would throw a
  # ReferenceError on the next tick. They now accept an optional callback
  # and invoke it only when provided.
  connect: (cb) ->
    process.nextTick () -> cb?(null)

  close: (cb) ->
    process.nextTick () -> cb?(null)

  subscribe: (cb) ->
    process.nextTick () -> cb?(null)

  observe: (cb) ->
    process.nextTick () -> cb?(null)
# Wrap `action` — a function returning an [err, res] pair — in a function
# with the DDP-apply signature: async (err, res) via callback when one is
# given, otherwise a synchronous return or throw.
makeDdpStub = (action) ->
  (name, params, cb) ->
    [err, res] = action name, params
    if cb?
      process.nextTick () -> cb err, res
    else
      throw err if err
      res
###########################################
describe 'Job', () ->
it 'has class constants', () ->
assert.isNumber Job.forever
assert.isObject Job.jobPriorities
assert.lengthOf Object.keys(Job.jobPriorities), 5
assert.isArray Job.jobRetryBackoffMethods
assert.lengthOf Job.jobRetryBackoffMethods, 2
assert.isArray Job.jobStatuses
assert.lengthOf Job.jobStatuses, 7
assert.isArray Job.jobLogLevels
assert.lengthOf Job.jobLogLevels, 4
assert.isArray Job.jobStatusCancellable
assert.lengthOf Job.jobStatusCancellable, 4
assert.isArray Job.jobStatusPausable
assert.lengthOf Job.jobStatusPausable, 2
assert.isArray Job.jobStatusRemovable
assert.lengthOf Job.jobStatusRemovable, 3
assert.isArray Job.jobStatusRestartable
assert.lengthOf Job.jobStatusRestartable, 2
assert.isArray Job.ddpPermissionLevels
assert.lengthOf Job.ddpPermissionLevels , 4
assert.isArray Job.ddpMethods
assert.lengthOf Job.ddpMethods, 18
assert.isObject Job.ddpMethodPermissions
assert.lengthOf Object.keys(Job.ddpMethodPermissions), Job.ddpMethods.length
it 'has a _ddp_apply class variable that defaults as undefined outside of Meteor', () ->
assert.isUndefined Job._ddp_apply
it 'has a processJobs method that is the JobQueue constructor', () ->
assert.equal Job.processJobs, Job.__get__ "JobQueue"
describe 'setDDP', () ->
ddp = new DDP()
describe 'default setup', () ->
it 'throws if given a non-ddp object', () ->
assert.throws (() -> Job.setDDP({})), /Bad ddp object/
it 'properly sets the default _ddp_apply class variable', (done) ->
sinon.stub(ddp, "call").yieldsAsync()
Job.setDDP ddp
Job._ddp_apply 'test', [], () ->
assert ddp.call.calledOnce
ddp.call.restore()
done()
it 'fails if subsequently called with a collection name', (done) ->
assert.throws (() -> Job.setDDP ddp, 'test1'), /Job.setDDP must specify/
done()
after () ->
Job._ddp_apply = undefined
describe 'setup with collection name', () ->
it 'properly sets the _ddp_apply class variable', (done) ->
sinon.stub(ddp, "call").yieldsAsync()
Job.setDDP ddp, 'test1'
Job._ddp_apply.test1 'test', [], () ->
assert ddp.call.calledOnce
ddp.call.restore()
done()
it 'properly sets the _ddp_apply class variable when called with array', (done) ->
sinon.stub(ddp, "call").yieldsAsync()
Job.setDDP ddp, ['test2','test3']
Job._ddp_apply.test2 'test', [], () ->
Job._ddp_apply.test3 'test', [], () ->
assert.equal ddp.call.callCount, 2
ddp.call.restore()
done()
it 'fails if subsequently called without a collection name', (done) ->
assert.throws (() -> Job.setDDP ddp), /Job.setDDP must specify/
done()
after () ->
Job._ddp_apply = undefined
describe 'Fiber support', () ->
ddp = new DDP()
it 'accepts a valid collection name and Fiber object and properly yields and runs', (done) ->
sinon.stub(ddp, "call").yieldsAsync()
Job.setDDP ddp, 'test1', Fiber
fib = Fiber () ->
Job._ddp_apply.test1 'test', []
fib.run()
assert ddp.call.calledOnce
ddp.call.restore()
done()
it 'accepts a default collection name and valid Fiber object and properly yields and runs', (done) ->
sinon.stub(ddp, "call").yieldsAsync()
Job.setDDP ddp, Fiber
fib = Fiber () ->
Job._ddp_apply 'test', []
fib.run()
assert ddp.call.calledOnce
ddp.call.restore()
done()
it 'properly returns values from method calls', (done) ->
Job.setDDP ddp, Fiber
fib = Fiber () ->
assert.isTrue Job._ddp_apply('root_true', [])
assert.isFalse Job._ddp_apply('root_false', [])
assert.deepEqual Job._ddp_apply('root_param', [['a', 1, null]]), ['a', 1, null]
done()
fib.run()
it 'properly propagates thrown errors within a Fiber', (done) ->
Job.setDDP ddp, Fiber
fib = Fiber () ->
assert.throws (() -> Job._ddp_apply 'root_error', []), /Method failed/
assert.throws (() -> Job._ddp_apply 'bad_method', []), /Bad method in call/
done()
fib.run()
afterEach () ->
Job._ddp_apply = undefined
describe 'private function', () ->
# Note! These are internal helper functions, NOT part of the external API!
describe 'methodCall', () ->
ddp = new DDP()
before () ->
sinon.spy(ddp, "call")
Job.setDDP ddp
methodCall = Job.__get__ 'methodCall'
it 'should be a function', () ->
assert.isFunction methodCall
it 'should invoke the correct ddp method', (done) ->
methodCall "root", "true", [], (err, res) ->
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_true")
assert.isTrue res
done()
it 'should pass the correct method parameters', (done) ->
methodCall "root", "param", ['a', 1, [1,2,3], { foo: 'bar'}], (err, res) ->
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_param", ['a', 1, [1,2,3], { foo: 'bar'}])
assert.equal res, 'a'
done()
it 'should invoke the after callback when provided', (done) ->
after = sinon.stub().returns(true)
methodCall("root", "false", []
(err, res) ->
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_false", [])
assert after.calledOnce
assert.isTrue res
done()
after
)
it "shouldn't invoke the after callback when error", (done) ->
after = sinon.stub().returns(true)
methodCall("root", "error", []
(err, res) ->
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_error", [])
assert.equal after.callCount, 0, "After shouldn't be called"
assert.isUndefined res, "Result isn't undefined"
assert.throws (() -> throw err), /Method failed/
done()
after
)
it 'should invoke the correct ddp method without callback', () ->
res = methodCall "root", "true", []
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_true")
assert.isTrue res
it 'should pass the correct method parameters without callback', () ->
res = methodCall "root", "param", ['a', 1, [1,2,3], { foo: 'bar'}]
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_param", ['a', 1, [1,2,3], { foo: 'bar'}])
assert.equal res, 'a'
it 'should invoke the after callback when provided without callback', () ->
after = sinon.stub().returns(true)
res = methodCall "root", "false", [], undefined, after
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_false", [])
assert after.calledOnce
assert.isTrue res
it "should throw on error when invoked without callback", () ->
after = sinon.stub().returns(true)
res = undefined
assert.throws (() -> res = methodCall("root", "error", [], undefined, after)), /Method failed/
assert ddp.call.calledOnce
assert ddp.call.calledWith("root_error", [])
assert.equal after.callCount, 0, "After shouldn't be called"
assert.isUndefined res, "Result isn't undefined"
afterEach () ->
ddp.call.reset()
after () ->
Job._ddp_apply = undefined
describe 'optionsHelp', () ->
optionsHelp = Job.__get__ 'optionsHelp'
foo = { bar: "bat" }
gizmo = () ->
it 'should return options and a callback when both are provided', () ->
res = optionsHelp [foo], gizmo
assert.deepEqual res, [foo, gizmo]
it 'should handle a missing callback and return only options', () ->
res = optionsHelp [foo]
assert.deepEqual res, [foo, undefined]
it 'should handle missing options and return empty options and the callback', () ->
res = optionsHelp [], gizmo
assert.deepEqual res, [{}, gizmo]
it 'should handle when both options and callback are missing', () ->
res = optionsHelp([], undefined)
assert.deepEqual res, [{}, undefined]
it 'should throw an error when an invalid callback is provided', () ->
assert.throws (()-> optionsHelp([foo], 5)), /options not an object or bad callback/
it 'should throw an error when a non-array is passed for options', () ->
assert.throws (()-> optionsHelp(foo, gizmo)), /must be an Array with zero or one elements/
it 'should throw an error when a bad options array is passed', () ->
assert.throws (()-> optionsHelp([foo, 5], gizmo)), /must be an Array with zero or one elements/
describe 'splitLongArray', () ->
splitLongArray = Job.__get__ 'splitLongArray'
longArray = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ]
it 'should properly split an array', () ->
res = splitLongArray longArray, 4
assert.deepEqual res, [ [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11] ]
it 'should handle remainders correctly', () ->
res = splitLongArray longArray, 5
assert.deepEqual res, [ [0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [10, 11] ]
it 'should handle an empty array', () ->
res = splitLongArray [], 5
assert.deepEqual res, []
it 'should handle a single element array', () ->
res = splitLongArray [0], 5
assert.deepEqual res, [ [0] ]
it 'should throw if not given an array', () ->
assert.throws (() -> splitLongArray { foo: "bar"}, 5), /splitLongArray: bad params/
it 'should throw if given an out of range max value', () ->
assert.throws (() -> splitLongArray longArray, 0), /splitLongArray: bad params/
it 'should throw if given an invalid max value', () ->
assert.throws (() -> splitLongArray longArray, "cow"), /splitLongArray: bad params/
describe 'concatReduce', () ->
concatReduce = Job.__get__ 'concatReduce'
it 'should concat a to b', () ->
assert.deepEqual concatReduce([1],2), [1,2]
it 'should work with non array for the first param', () ->
assert.deepEqual concatReduce(1,2), [1,2]
describe 'reduceCallbacks', () ->
reduceCallbacks = Job.__get__ 'reduceCallbacks'
it 'should return undefined if given a falsy callback', () ->
assert.isUndefined reduceCallbacks(undefined, 5)
it 'should properly absorb the specified number of callbacks', () ->
spy = sinon.spy()
cb = reduceCallbacks spy, 3
cb null, true
cb null, false
cb null, true
assert spy.calledOnce
assert spy.calledWith null, true
it 'should properly reduce the callback results', () ->
spy = sinon.spy()
cb = reduceCallbacks spy, 3
cb null, false
cb null, false
cb null, false
assert spy.calledOnce
assert spy.calledWith null, false
it 'should properly reduce with a custom reduce function', () ->
concatReduce = Job.__get__ 'concatReduce'
spy = sinon.spy()
cb = reduceCallbacks spy, 3, concatReduce, []
cb null, false
cb null, true
cb null, false
assert spy.calledOnce, 'callback called too many times'
assert spy.calledWith(null, [false, true, false]), 'Returned wrong result'
it 'should throw if called too many times', () ->
spy = sinon.spy()
cb = reduceCallbacks spy, 2
cb null, true
cb null, true
assert.throws cb, /reduceCallbacks callback invoked more than requested/
it 'should throw if given a non-function callback', () ->
assert.throws (() -> reduceCallbacks 5), /Bad params given to reduceCallbacks/
it 'should throw if given an invalid number of callbacks to absorb', () ->
assert.throws (() -> reduceCallbacks (() -> ), 'cow'), /Bad params given to reduceCallbacks/
it 'should throw if given an out of range number of callbacks to absorb', () ->
assert.throws (() -> reduceCallbacks (() -> ), 0), /Bad params given to reduceCallbacks/
it 'should throw if given a non-function reduce', () ->
assert.throws (() -> reduceCallbacks (() -> ), 5, 5), /Bad params given to reduceCallbacks/
describe '_setImmediate', () ->
_setImmediate = Job.__get__ '_setImmediate'
it 'should invoke the provided callback with args', (done) ->
cb = (a, b) ->
assert.equal a, "foo"
assert.equal b, "bar"
done()
_setImmediate cb, "foo", "bar"
describe '_setInterval', () ->
_setInterval = Job.__get__ '_setInterval'
_clearInterval = Job.__get__ '_clearInterval'
it 'should invoke the provided callback repeatedly with args', (done) ->
cancel = null
count = 0
cb = (a, b) ->
assert.equal a, "foo"
assert.equal b, "bar"
count++
if count is 2
_clearInterval cancel
done()
else if count > 2
throw "Interval called too many times"
cancel = _setInterval cb, 10, "foo", "bar"
describe 'Job constructor', () ->
checkJob = (job) ->
assert.instanceOf job, Job
assert.equal job.root, 'root'
assert.equal job.type, 'work'
assert.deepEqual job.data, { foo: "bar" }
assert.isObject job._doc
doc = job._doc
assert.notProperty doc, '_id'
assert.isNull doc.runId
assert.equal job.type, doc.type
assert.deepEqual job.data, doc.data
assert.isString doc.status
assert.instanceOf doc.updated, Date
assert.isArray doc.depends
assert.isArray doc.resolved
assert.isNumber doc.priority
assert.isNumber doc.retries
assert.isNumber doc.retryWait
assert.isNumber doc.retried
assert.isString doc.retryBackoff
assert.instanceOf doc.retryUntil, Date
assert.isNumber doc.repeats
assert.isNumber doc.repeatWait
assert.isNumber doc.repeated
assert.instanceOf doc.repeatUntil, Date
assert.instanceOf doc.after, Date
assert.isArray doc.log
assert.isObject doc.progress
assert.instanceOf doc.created, Date
it 'should return a new valid Job object', () ->
job = new Job('root', 'work', { foo: "bar" })
checkJob job
it 'should work without "new"', () ->
job = Job('root', 'work', { foo: "bar" })
checkJob job
it 'should throw when given bad parameters', () ->
assert.throw Job, /new Job: bad parameter/
it 'should support using a valid job document', () ->
job = new Job('root', 'work', { foo: "bar" })
checkJob job
job2 = new Job('root', job.doc)
checkJob job2
it 'should support using a valid oobject for root', () ->
job = new Job({ root: 'root'}, 'work', { foo: "bar" })
checkJob job
job2 = new Job({ root: 'root'}, job.doc)
checkJob job2
describe 'job mutator method', () ->
job = null
doc = null
beforeEach () ->
job = Job('root', 'work', {})
doc = job._doc
describe '.depends()', () ->
it 'should properly update the depends property', () ->
jobA = Job('root', 'work', {})
jobA._doc._id = 'foo'
jobB = Job('root', 'work', {})
jobB._doc._id = 'bar'
j = job.depends [ jobA, jobB ]
assert.equal j, job
assert.deepEqual doc.depends, [ 'foo', 'bar' ]
it 'should accept a singlet Job', () ->
jobA = Job('root', 'work', {})
jobA._doc._id = 'foo'
j = job.depends jobA
assert.equal j, job
assert.deepEqual doc.depends, [ 'foo' ]
it 'should accept an empty deps array and return the job unchanged', () ->
jobA = Job('root', 'work', {})
jobA._doc._id = 'foo'
j = job.depends jobA
assert.equal j, job
assert.deepEqual doc.depends, [ 'foo' ]
j = job.depends []
assert.equal j, job
assert.deepEqual doc.depends, [ 'foo' ]
it 'should clear dependencies when passed a falsy value', () ->
jobA = Job('root', 'work', {})
jobA._doc._id = 'foo'
j = job.depends jobA
assert.equal j, job
assert.deepEqual doc.depends, [ 'foo' ]
job.depends null
assert.lengthOf doc.depends, 0
it 'should throw when given a bad parameter', () ->
assert.throw (() -> job.depends "badness"), /Bad input parameter/
it 'should throw when given an array containing non Jobs', () ->
assert.throw (() -> job.depends ["Badness"]), /Each provided object/
it 'should throw when given an array containing unsaved Jobs without an _id', () ->
jobA = Job('root', 'work', {})
assert.throw (() -> job.depends [ jobA ]), /Each provided object/
describe '.priority()', () ->
it 'should accept a numeric priority', () ->
j = job.priority 3
assert.equal j, job
assert.equal doc.priority, 3
it 'should accept a valid string priority', () ->
j = job.priority 'normal'
assert.equal j, job
assert.equal doc.priority, Job.jobPriorities['normal']
it 'should throw when given an invalid priority level', () ->
assert.throw (() -> job.priority 'super'), /Invalid string priority level provided/
it 'should throw when given an invalid parameter', () ->
assert.throw (() -> job.priority []), /priority must be an integer or valid priority level/
it 'should throw when given a non-integer', () ->
assert.throw (() -> job.priority 3.14), /priority must be an integer or valid priority level/
describe '.retry()', () ->
it 'should accept a non-negative integer parameter', () ->
j = job.retry 3
assert.equal j, job
assert.equal doc.retries, 3 + 1 # This is correct, it adds one.
assert.equal doc.retryWait, 5*60*1000
assert.equal doc.retryBackoff, 'constant'
it 'should accept an option object', () ->
j = job.retry { retries: 3, until: new Date(new Date().valueOf() + 60000), wait: 5000, backoff: 'exponential' }
assert.equal j, job
assert.equal doc.retries, 3 + 1
assert.ok doc.retryUntil > new Date()
assert.equal doc.retryWait, 5000
assert.equal doc.retryBackoff, 'exponential'
it 'should throw when given a bad parameter', () ->
assert.throw (() -> job.retry 'badness'), /bad parameter: accepts either an integer/
it 'should throw when given a negative integer', () ->
assert.throw (() -> job.retry -1), /bad parameter: accepts either an integer/
it 'should throw when given a numeric non-integer', () ->
assert.throw (() -> job.retry 3.14), /bad parameter: accepts either an integer/
it 'should throw when given bad options', () ->
assert.throw (() -> job.retry { retries: 'badness' }), /bad option: retries must be an integer/
assert.throw (() -> job.retry { retries: -1 }), /bad option: retries must be an integer/
assert.throw (() -> job.retry { retries: 3.14 }), /bad option: retries must be an integer/
assert.throw (() -> job.retry { wait: 'badness' }), /bad option: wait must be an integer/
assert.throw (() -> job.retry { wait: -1 }), /bad option: wait must be an integer/
assert.throw (() -> job.retry { wait: 3.14 }), /bad option: wait must be an integer/
assert.throw (() -> job.retry { backoff: 'bogus' }), /bad option: invalid retry backoff method/
assert.throw (() -> job.retry { until: 'bogus' }), /bad option: until must be a Date object/
describe '.repeat()', () ->
it 'should accept a non-negative integer parameter', () ->
j = job.repeat 3
assert.equal j, job
assert.equal doc.repeats, 3
it 'should accept an option object', () ->
j = job.repeat { repeats: 3, until: new Date(new Date().valueOf() + 60000), wait: 5000 }
assert.equal j, job
assert.equal doc.repeats, 3
assert.ok(doc.repeatUntil > new Date())
assert.equal doc.repeatWait, 5000
it 'should accept an option object with later.js object', () ->
j = job.repeat { schedule: { schedules: [{h:[10]}], exceptions: [], other: () -> 0 }}
assert.equal j, job
assert.deepEqual doc.repeatWait, { schedules: [{h:[10]}], exceptions: [] }
it 'should throw when given a bad parameter', () ->
assert.throw (() -> job.repeat 'badness'), /bad parameter: accepts either an integer/
it 'should throw when given a negative integer', () ->
assert.throw (() -> job.repeat -1), /bad parameter: accepts either an integer/
it 'should throw when given a numeric non-integer', () ->
assert.throw (() -> job.repeat 3.14), /bad parameter: accepts either an integer/
it 'should throw when given bad options', () ->
assert.throw (() -> job.repeat { repeats: 'badness' }), /bad option: repeats must be an integer/
assert.throw (() -> job.repeat { repeats: -1 }), /bad option: repeats must be an integer/
assert.throw (() -> job.repeat { repeats: 3.14 }), /bad option: repeats must be an integer/
assert.throw (() -> job.repeat { wait: 'badness' }), /bad option: wait must be an integer/
assert.throw (() -> job.repeat { wait: -1 }), /bad option: wait must be an integer/
assert.throw (() -> job.repeat { wait: 3.14 }), /bad option: wait must be an integer/
assert.throw (() -> job.repeat { until: 'bogus' }), /bad option: until must be a Date object/
assert.throw (() -> job.repeat { wait: 5, schedule: {}}), /bad options: wait and schedule options are mutually exclusive/
assert.throw (() -> job.repeat { schedule: 'bogus' }), /bad option, schedule option must be an object/
assert.throw (() -> job.repeat { schedule: {}}), /bad option, schedule object requires a schedules attribute of type Array/
assert.throw (() -> job.repeat { schedule: { schedules: 5 }}), /bad option, schedule object requires a schedules attribute of type Array/
assert.throw (() -> job.repeat { schedule: { schedules: [], exceptions: 5 }}), /bad option, schedule object exceptions attribute must be an Array/
describe '.after()', () ->
it 'should accept a valid Date', () ->
d = new Date()
j = job.after d
assert.equal j, job
assert.equal doc.after, d
it 'should accept an undefined value', () ->
j = job.after()
assert.equal j, job
assert.instanceOf doc.after, Date
assert doc.after <= new Date()
it 'should throw if given a bad parameter', () ->
assert.throw (() -> job.after { foo: "bar" }), /Bad parameter, after requires a valid Date object/
assert.throw (() -> job.after 123), /Bad parameter, after requires a valid Date object/
assert.throw (() -> job.after false), /Bad parameter, after requires a valid Date object/
describe '.delay()', () ->
it 'should accept a valid delay', () ->
j = job.delay 5000
assert.equal j, job
assert.instanceOf doc.after, Date
assert.closeTo doc.after.valueOf(), new Date().valueOf() + 5000, 1000
it 'should accept an undefined parameter', () ->
j = job.delay()
assert.equal j, job
assert.instanceOf doc.after, Date
assert.closeTo doc.after.valueOf(), new Date().valueOf(), 1000
it 'should throw when given an invalid parameter', () ->
assert.throw (() -> job.delay -1.234), /Bad parameter, delay requires a non-negative integer/
assert.throw (() -> job.delay new Date()), /Bad parameter, delay requires a non-negative integer/
assert.throw (() -> job.delay false), /Bad parameter, delay requires a non-negative integer/
describe 'communicating', () ->
ddp = null
before () ->
ddp = new DDP()
Job.setDDP ddp
describe 'job status method', () ->
job = null
doc = null
beforeEach () ->
job = Job('root', 'work', {})
doc = job._doc
describe '.save()', () ->
before () ->
sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
throw new Error 'Bad method name' unless name is 'root_jobSave'
doc = params[0]
options = params[1]
if options.cancelRepeats
throw new Error 'cancelRepeats'
if typeof doc is 'object'
res = "newId"
else
res = null
return [null, res]
it 'should make valid DDP call when invoked', () ->
res = job.save()
assert.equal res, "newId"
it 'should work with a callback', (done) ->
job.save (err, res) ->
assert.equal res, "newId"
done()
it 'should properly pass cancelRepeats option', () ->
assert.throw (() -> job.save({ cancelRepeats: true })), /cancelRepeats/
it 'should properly pass cancelRepeats option with callback', () ->
assert.throw (() -> job.save({ cancelRepeats: true }, () -> )), /cancelRepeats/
afterEach () ->
Job._ddp_apply.resetHistory()
after () ->
Job._ddp_apply.restore()
      describe '.refresh()', () ->
        before () ->
          # Stub the server-side 'getJob' method: returns a replacement doc
          # only for id 'thisId', and throws if getLog was passed through.
          sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
            throw new Error 'Bad method name' unless name is 'root_getJob'
            id = params[0]
            options = params[1]
            if options.getLog
              throw new Error 'getLog'
            if id is 'thisId'
              res = { foo: 'bar' }
            else
              res = null
            return [null, res]
        it 'should make valid DDP call when invoked', () ->
          doc._id = 'thisId'
          res = job.refresh()
          # refresh() replaces the local document with the server's copy
          assert.deepEqual job._doc, { foo: 'bar' }
          assert.equal res, job
        it 'should work with a callback', (done) ->
          doc._id = 'thisId'
          job.refresh (err, res) ->
            assert.deepEqual job._doc, { foo: 'bar' }
            assert.equal res, job
            done()
        it "shouldn't modify job when not found on server", () ->
          doc._id = 'thatId'
          res = job.refresh()
          assert.isFalse res
          assert.deepEqual job._doc, doc
        it 'should properly pass getLog option', () ->
          doc._id = 'thisId'
          assert.throw (() -> job.refresh({ getLog: true })), /getLog/
        it 'should throw when called on an unsaved job', () ->
          assert.throw (() -> job.refresh()), /on an unsaved job/
        afterEach () ->
          Job._ddp_apply.resetHistory()
        after () ->
          Job._ddp_apply.restore()
      describe '.log()', () ->
        before () ->
          # Stub the server-side 'jobLog' method: echoes the log level back
          # when id/runId/message match and the level is valid.
          sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
            throw new Error 'Bad method name' unless name is 'root_jobLog'
            id = params[0]
            runId = params[1]
            msg = params[2]
            # 'gerinfo' is presumably a deliberately-invalid sentinel so that
            # a missing options object makes the check below fail — TODO confirm
            level = params[3]?.level ? 'gerinfo'
            if id is 'thisId' and runId is 'thatId' and msg is 'Hello' and level in Job.jobLogLevels
              res = level
            else
              res = false
            return [null, res]
        it 'should add a valid log entry to the local state when invoked before a job is saved', () ->
          j = job.log 'Hello', { level: 'success' }
          assert.equal j, job
          thisLog = doc.log[1] # [0] is the 'Created' log message
          assert.equal thisLog.message, 'Hello'
          assert.equal thisLog.level, 'success'
          assert.instanceOf thisLog.time, Date
          assert.closeTo thisLog.time.valueOf(), new Date().valueOf(), 1000
        it 'should make valid DDP call when invoked on a saved job', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          res = job.log 'Hello'
          # Default log level is 'info'
          assert.equal res, 'info'
        it 'should correctly pass level option', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          res = job.log 'Hello', { level: 'danger' }
          assert.equal res, 'danger'
        it 'should work with a callback', (done) ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          job.log 'Hello', { level: 'success' }, (err, res) ->
            assert.equal res, 'success'
            done()
        it 'should throw when passed an invalid message', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          assert.throw (() -> job.log 43, { level: 'danger' }), /Log message must be a string/
        it 'should throw when passed an invalid level', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          assert.throw (() -> job.log 'Hello', { level: 'blargh' }), /Log level options must be one of Job.jobLogLevels/
          assert.throw (() -> job.log 'Hello', { level: [] }), /Log level options must be one of Job.jobLogLevels/
        describe 'echo option', () ->
          jobConsole = null
          before () ->
            # Replace the module-private console so each method throws its
            # level name; a throw therefore proves an echo happened.
            jobConsole = Job.__get__ 'console'
            Job.__set__ 'console',
              info: (params...) -> throw new Error 'info'
              log: (params...) -> throw new Error 'success'
              warn: (params...) -> throw new Error 'warning'
              error: (params...) -> throw new Error 'danger'
          it 'should echo the log to the console at the level requested', () ->
            assert.doesNotThrow (() -> job.log 'Hello'), 'echo occurred without being requested'
            assert.doesNotThrow (() -> job.log 'Hello', { echo: false }), 'echo occurred when explicitly disabled'
            assert.throw (() -> job.log 'Hello', { echo: true }), /info/
            assert.throw (() -> job.log 'Hello', { echo: true, level: 'info' }), /info/
            assert.throw (() -> job.log 'Hello', { echo: true, level: 'success' }), /success/
            assert.throw (() -> job.log 'Hello', { echo: true, level: 'warning' }), /warning/
            assert.throw (() -> job.log 'Hello', { echo: true, level: 'danger' }), /danger/
          it "shouldn't echo the log to the console below the level requested", () ->
            assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning' })
            assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning', level: 'info' })
            assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning', level: 'success' })
            assert.throw (() -> job.log 'Hello', { echo: 'warning', level: 'warning' }), /warning/
            assert.throw (() -> job.log 'Hello', { echo: 'warning', level: 'danger' }), /danger/
          after () ->
            Job.__set__ 'console', jobConsole
        afterEach () ->
          Job._ddp_apply.resetHistory()
        after () ->
          Job._ddp_apply.restore()
      describe '.progress()', () ->
        before () ->
          # Stub the server-side 'jobProgress' method: returns the computed
          # percentage for valid (completed, total) pairs, false otherwise.
          sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
            throw new Error 'Bad method name' unless name is 'root_jobProgress'
            id = params[0]
            runId = params[1]
            completed = params[2]
            total = params[3]
            if ( id is 'thisId' and
                 runId is 'thatId' and
                 typeof completed is 'number' and
                 typeof total is 'number' and
                 0 <= completed <= total and
                 total > 0 )
              res = 100 * completed / total
            else
              res = false
            return [null, res]
        it 'should add a valid progress update to the local state when invoked before a job is saved', () ->
          j = job.progress 2.5, 10
          assert.equal j, job
          assert.deepEqual doc.progress, { completed: 2.5, total: 10, percent: 25 }
        it 'should make valid DDP call when invoked on a saved job', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          res = job.progress 5, 10
          assert.equal res, 50
        it 'should work with a callback', (done) ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          job.progress 7.5, 10, (err, res) ->
            assert.equal res, 75
            done()
        describe 'echo option', () ->
          jobConsole = null
          before () ->
            # Throwing console.info proves a progress echo occurred
            jobConsole = Job.__get__ 'console'
            Job.__set__ 'console',
              info: (params...) -> throw new Error 'info'
          it 'should progress updates to the console when requested', () ->
            assert.doesNotThrow (() -> job.progress 0, 100)
            assert.doesNotThrow (() -> job.progress 0, 100, { echo: false })
            assert.throw (() -> job.progress 0, 100, { echo: true }), /info/
          after () ->
            Job.__set__ 'console', jobConsole
        # NOTE(review): "paramters" typo in this test title (runtime string,
        # left unchanged here)
        it 'should throw when given invalid paramters', () ->
          assert.throw (() -> job.progress true, 100), /job.progress: something is wrong with progress params/
          assert.throw (() -> job.progress 0, "hundred"), /job.progress: something is wrong with progress params/
          assert.throw (() -> job.progress -1, 100), /job.progress: something is wrong with progress params/
          assert.throw (() -> job.progress 2, 1), /job.progress: something is wrong with progress params/
          assert.throw (() -> job.progress 0, 0), /job.progress: something is wrong with progress params/
          assert.throw (() -> job.progress 0, -1), /job.progress: something is wrong with progress params/
          assert.throw (() -> job.progress -2, -1), /job.progress: something is wrong with progress params/
        afterEach () ->
          Job._ddp_apply.resetHistory()
        after () ->
          Job._ddp_apply.restore()
describe '.done()', () ->
before () ->
sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
throw new Error 'Bad method name' unless name is 'root_jobDone'
id = params[0]
runId = params[1]
result = params[2]
options = params[3]
if ( id is 'thisId' and
runId is 'thatId' and
typeof result is 'object')
res = result
else if options.resultId
res = result.resultId
else
res = false
return [null, res]
it 'should make valid DDP call when invoked on a running job', () ->
doc._id = 'thisId'
doc.runId = 'thatId'
res = job.done()
assert.deepEqual res, {}
it 'should properly handle a result object', () ->
doc._id = 'thisId'
doc.runId = 'thatId'
result =
foo: 'bar'
status: 0
res = job.done result
assert.deepEqual res, result
it 'should properly handle a non-object result', () ->
doc._id = 'thisId'
doc.runId = 'thatId'
result = "Done!"
res = job.done result
assert.deepEqual res, { value: result }
it 'should work with a callback', (done) ->
doc._id = 'thisId'
doc.runId = 'thatId'
job.done (err, res) ->
assert.deepEqual res, {}
done()
it 'should throw when called on an unsaved job', () ->
assert.throw (() -> job.done()), /an unsaved or non-running job/
it 'should throw when called on a nonrunning job', () ->
doc._id = 'thisId'
assert.throw (() -> job.done()), /an unsaved or non-running job/
it 'should properly pass the repeatId option', () ->
doc._id = 'someId'
doc.runId = 'otherId'
job.done { repeatId: "testID" }, { repeatId: true }, (err, res) ->
assert.deepEqual res, "testID"
done()
afterEach () ->
Job._ddp_apply.resetHistory()
after () ->
Job._ddp_apply.restore()
      describe '.fail()', () ->
        before () ->
          # Stub the server-side 'jobFail' method: echoes the error object
          # back for the known id/runId pair, and throws when the fatal
          # option was passed through.
          sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
            throw new Error 'Bad method name' unless name is 'root_jobFail'
            id = params[0]
            runId = params[1]
            err = params[2]
            options = params[3]
            if ( id is 'thisId' and
                 runId is 'thatId' and
                 typeof err is 'object')
              if options.fatal
                throw new Error "Fatal Error!"
              res = err
            else
              res = false
            return [null, res]
        it 'should make valid DDP call when invoked on a running job', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          res = job.fail()
          # With no argument, a default error value is substituted
          assert.deepEqual res, { value: "No error information provided" }
        it 'should properly handle an error string', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          err = 'This is an error'
          res = job.fail err
          # String errors are wrapped in { value: ... }
          assert.deepEqual res, { value: err }
        it 'should properly handle an error object', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          err = { message: 'This is an error' }
          res = job.fail err
          assert.equal res, err
        it 'should work with a callback', (done) ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          job.fail (err, res) ->
            assert.equal res.value, "No error information provided"
            done()
        it 'should properly handle the fatal option', () ->
          doc._id = 'thisId'
          doc.runId = 'thatId'
          assert.throw (() -> job.fail "Fatal error!", { fatal: true }), /Fatal Error!/
        it 'should throw when called on an unsaved job', () ->
          assert.throw (() -> job.fail()), /an unsaved or non-running job/
        it 'should throw when called on a nonrunning job', () ->
          doc._id = 'thisId'
          assert.throw (() -> job.fail()), /an unsaved or non-running job/
        afterEach () ->
          Job._ddp_apply.resetHistory()
        after () ->
          Job._ddp_apply.restore()
      describe 'job control operation', () ->
        # Shared test factory: `op` is the Job instance method under test,
        # `method` is the corresponding server-side DDP method suffix.
        makeJobControl = (op, method) ->
          describe op, () ->
            before () ->
              # Stub returns true only for the known job id
              sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
                throw new Error "Bad method name: #{name}" unless name is "root_#{method}"
                id = params[0]
                if id is 'thisId'
                  res = true
                else
                  res = false
                return [null, res]
            it 'should properly invoke the DDP method', () ->
              assert.isFunction job[op]
              doc._id = 'thisId'
              res = job[op]()
              assert.isTrue res
            it 'should return false if the id is not on the server', () ->
              assert.isFunction job[op]
              doc._id = 'badId'
              res = job[op]()
              assert.isFalse res
            it 'should work with a callback', (done) ->
              assert.isFunction job[op]
              doc._id = 'thisId'
              res = job[op] (err, res) ->
                assert.isTrue res
                done()
            # pause/resume also work locally on unsaved jobs; the other
            # operations require the job to exist on the server
            if op in ['pause', 'resume']
              it 'should alter local state when called on an unsaved job', () ->
                bad = 'badStatus'
                doc.status = bad
                res = job[op]()
                assert.equal res, job
                assert.notEqual doc.status, bad
              it 'should alter local state when called on an unsaved job with callback', (done) ->
                bad = 'badStatus'
                doc.status = bad
                res = job[op] (err, res) ->
                  assert.isTrue res
                  assert.notEqual doc.status, bad
                  done()
            else
              it 'should throw when called on an unsaved job', () ->
                assert.throw (() -> job[op]()), /on an unsaved job/
            afterEach () ->
              Job._ddp_apply.resetHistory()
            after () ->
              Job._ddp_apply.restore()
        makeJobControl 'pause', 'jobPause'
        makeJobControl 'resume', 'jobResume'
        makeJobControl 'ready', 'jobReady'
        makeJobControl 'cancel', 'jobCancel'
        makeJobControl 'restart', 'jobRestart'
        makeJobControl 'rerun', 'jobRerun'
        makeJobControl 'remove', 'jobRemove'
    describe 'class method', () ->
      describe 'getWork', () ->
        before () ->
          # Stub the server-side 'getWork' method: returns maxJobs docs for
          # type 'work' and an empty array for type 'nowork'.
          sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
            throw new Error 'Bad method name' unless name is 'root_getWork'
            type = params[0][0]
            max = params[1]?.maxJobs ? 1
            res = switch type
              when 'work'
                ( Job('root', type, { i: 1 })._doc for i in [1..max] )
              when 'nowork'
                []
            return [null, res]
        it 'should make a DDP method call and return a Job by default without callback', () ->
          res = Job.getWork 'root', 'work', {}
          assert.instanceOf res, Job
        it 'should return undefined when no work is available without callback', () ->
          res = Job.getWork 'root', 'nowork', {}
          assert.isUndefined res
        it 'should return an array of Jobs when options.maxJobs > 1 without callback', () ->
          res = Job.getWork 'root', 'work', { maxJobs: 2 }
          assert.isArray res
          assert.lengthOf res, 2
          assert.instanceOf res[0], Job
        it 'should return an empty array when options.maxJobs > 1 and there is no work without callback', () ->
          res = Job.getWork 'root', 'nowork', { maxJobs: 2 }
          assert.isArray res
          assert.lengthOf res, 0
        it 'should throw when given on invalid value for the timeout option', () ->
          assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: "Bad" })), /must be a positive integer/
          assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: 0 })), /must be a positive integer/
          assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: -1 })), /must be a positive integer/
        afterEach () ->
          Job._ddp_apply.resetHistory()
        after () ->
          Job._ddp_apply.restore()
      # NOTE(review): despite the name, these tests exercise the
      # `new Job(root, doc)` document-rehydration constructor form
      describe 'makeJob', () ->
        # Build a plausible server-side job document with a Mongo-style _id
        jobDoc = () ->
          j = new Job('root', 'work', {})._doc
          j._id = { _str: 'skljfdf9s0ujfsdfl3' }
          return j
        it 'should return a valid job instance when called with a valid job document', () ->
          res = new Job 'root', jobDoc()
          assert.instanceOf res, Job
        it 'should throw when passed invalid params', () ->
          assert.throw (() -> new Job()), /bad parameter/
          assert.throw (() -> new Job(5, jobDoc())), /bad parameter/
          assert.throw (() -> new Job('work', {})), /bad parameter/
      describe 'get Job(s) by ID', () ->
        # Shared stub for the server-side 'getJob' method: handles both a
        # single id and an array of ids, resolving only 'goodID'.
        getJobStub = (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_getJob'
          ids = params[0]
          one = (id) ->
            j = switch id
              when 'goodID'
                Job('root', 'work', { i: 1 })._doc
              else
                undefined
            return j
          if ids instanceof Array
            res = (one(j) for j in ids when j is 'goodID')
          else
            res = one(ids)
          return [null, res]
        describe 'getJob', () ->
          before () ->
            sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub getJobStub
          it 'should return a valid job instance when called with a good id', () ->
            res = Job.getJob 'root', 'goodID'
            assert.instanceOf res, Job
          it 'should return undefined when called with a bad id', () ->
            res = Job.getJob 'root', 'badID'
            assert.isUndefined res
          afterEach () ->
            Job._ddp_apply.resetHistory()
          after () ->
            Job._ddp_apply.restore()
        describe 'getJobs', () ->
          before () ->
            sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub getJobStub
          it 'should return valid job instances for good IDs only', () ->
            res = Job.getJobs 'root', ['goodID', 'badID', 'goodID']
            # Batched lookup must resolve all ids in a single DDP call
            assert Job._ddp_apply.calledOnce, 'getJob method called more than once'
            assert.isArray res
            assert.lengthOf res, 2
            assert.instanceOf res[0], Job
            assert.instanceOf res[1], Job
          it 'should return an empty array for all bad IDs', () ->
            res = Job.getJobs 'root', ['badID', 'badID', 'badID']
            assert Job._ddp_apply.calledOnce, 'getJob method called more than once'
            assert.isArray res
            assert.lengthOf res, 0
          afterEach () ->
            Job._ddp_apply.resetHistory()
          after () ->
            Job._ddp_apply.restore()
      describe 'multijob operation', () ->
        # Shared test factory for the batch operations: `op` is the Job class
        # method, `method` the server-side DDP method suffix.  The stub
        # reports success if any id in the batch is 'goodID'.
        makeMulti = (op, method) ->
          describe op, () ->
            before () ->
              sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
                throw new Error "Bad method name: #{name}" unless name is "root_#{method}"
                ids = params[0]
                return [null, ids.indexOf('goodID') isnt -1]
            it 'should return true if there are any good IDs', () ->
              assert.isFunction Job[op]
              res = Job[op]('root', ['goodID', 'badID', 'goodID'])
              assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
              assert.isBoolean res
              assert.isTrue res
            it 'should return false if there are all bad IDs', () ->
              assert.isFunction Job[op]
              res = Job[op]('root', ['badID', 'badID'])
              assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
              assert.isBoolean res
              assert.isFalse res
            afterEach () ->
              Job._ddp_apply.resetHistory()
            after () ->
              Job._ddp_apply.restore()
        makeMulti 'pauseJobs', 'jobPause'
        makeMulti 'resumeJobs', 'jobResume'
        makeMulti 'cancelJobs', 'jobCancel'
        makeMulti 'restartJobs', 'jobRestart'
        makeMulti 'removeJobs', 'jobRemove'
      describe 'control method', () ->
        # Shared test factory for server-level controls, where the DDP method
        # name matches the Job class method name exactly.
        makeControl = (op) ->
          describe op, () ->
            before () ->
              sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
                throw new Error "Bad method name: #{name}" unless name is "root_#{op}"
                return [null, true]
            it 'should return a boolean', () ->
              assert.isFunction Job[op]
              res = Job[op]('root')
              assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
              assert.isBoolean res
            afterEach () ->
              Job._ddp_apply.resetHistory()
            after () ->
              Job._ddp_apply.restore()
        makeControl 'startJobs'
        makeControl 'stopJobs'
        makeControl 'startJobServer'
        makeControl 'shutdownJobServer'
###########################################
  # Tests for the JobQueue worker pool returned by Job.processJobs().
  # The stub below simulates the server: it counts done/fail calls and
  # serves canned work by job type ('work', 'workMax', 'noWork', 'returnError').
  describe 'JobQueue', () ->
    ddp = new DDP()
    failCalls = 0
    doneCalls = 0
    numJobs = 5
    before () ->
      # Clear any stub left by earlier suites before re-wiring DDP
      Job._ddp_apply = undefined
      Job.setDDP ddp
      sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
        err = null
        res = null
        # Fabricate a running job document with predictable ids
        makeJobDoc = (idx=0) ->
          job = new Job('root', 'work', { idx: idx })
          doc = job._doc
          doc._id = 'thisId' + idx
          doc.runId = 'thatId' + idx
          doc.status = 'running'
          return doc
        switch name
          when 'root_jobDone'
            doneCalls++
            res = true
          when 'root_jobFail'
            failCalls++
            res = true
          when 'root_getWork'
            type = params[0][0]
            max = params[1]?.maxJobs ? 1
            if numJobs is 0
              res = []
            else
              switch type
                when 'noWork'
                  res = []
                when 'work'
                  numJobs--
                  res = [ makeJobDoc() ]
                when 'workMax'
                  # NOTE(review): this looks inverted (it inflates `max` up to
                  # the remaining job count rather than capping it), but the
                  # expected call counts in the tests below depend on the
                  # current behavior — confirm before changing
                  if max < numJobs
                    max = numJobs
                  numJobs -= max
                  res = (makeJobDoc(i) for i in [1..max])
                when 'returnError'
                  err = new Error "MongoError: connection n to w.x.y.z:27017 timed out"
          else
            throw new Error "Bad method name: #{name}"
        return [err, res]
    beforeEach () ->
      # Reset shared counters so each test starts from a clean slate
      failCalls = 0
      doneCalls = 0
      numJobs = 5
    # Parameter validation: every bad root/type/option combination must throw
    it 'should throw when an invalid options are used', (done) ->
      assert.throws (() ->
        Job.processJobs 42, 'noWork', { }, (job, cb) -> ),
        /must be nonempty string/
      assert.throws (() ->
        Job.processJobs '', 'noWork', { }, (job, cb) -> ),
        /must be nonempty string/
      assert.throws (() ->
        Job.processJobs 'root', 42, { }, (job, cb) -> ),
        /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
        Job.processJobs 'root', '', { }, (job, cb) -> ),
        /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
        Job.processJobs 'root', [], { }, (job, cb) -> ),
        /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
        Job.processJobs 'root', [''], { }, (job, cb) -> ),
        /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
        Job.processJobs 'root', ['noWork',''], { }, (job, cb) -> ),
        /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { pollInterval: -1 }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { concurrency: 'Bad' }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { concurrency: -1 }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { payload: 'Bad' }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { payload: -1 }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { prefetch: 'Bad' }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { prefetch: -1 }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { workTimeout: 'Bad' }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { workTimeout: -1 }, (job, cb) -> ),
        /must be a positive integer/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { callbackStrict: 1 }, (job, cb) -> ),
        /must be a boolean/
      assert.throws (() ->
        Job.processJobs 'root', 'noWork', { errorCallback: 1 }, (job, cb) -> ),
        /must be a function/
      done()
    it 'should return a valid JobQueue when called', (done) ->
      q = Job.processJobs 'root', 'noWork', { pollInterval: 100 }, (job, cb) ->
        job.done()
        cb null
      assert.instanceOf q, Job.processJobs
      # 'noWork' yields no jobs, so the worker must never have run
      q.shutdown { quiet: true }, () ->
        assert.equal doneCalls, 0
        assert.equal failCalls, 0
        done()
    it 'should return a valid JobQueue when called with array of job types', (done) ->
      q = Job.processJobs 'root', ['noWork', 'noWork2'], { pollInterval: 100 }, (job, cb) ->
        job.done()
        cb null
      assert.instanceOf q, Job.processJobs
      q.shutdown { quiet: true }, () ->
        assert.equal doneCalls, 0
        assert.equal failCalls, 0
        done()
    it 'should send shutdown notice to console when quiet is false', (done) ->
      # NOTE(review): jobConsole is captured but never used to restore; the
      # revert() closure below performs the restore instead
      jobConsole = Job.__get__ 'console'
      # Each console method throws its level name so an emitted notice is
      # observable as a throw
      revert = Job.__set__
        console:
          info: (params...) -> throw new Error 'info'
          log: (params...) -> throw new Error 'success'
          warn: (params...) -> throw new Error 'warning'
          error: (params...) -> throw new Error 'danger'
      q = Job.processJobs 'root', 'noWork', { pollInterval: 100 }, (job, cb) ->
        job.done()
        cb null
      assert.instanceOf q, Job.processJobs
      assert.throws (() -> (q.shutdown () -> done())), /warning/
      revert()
      q.shutdown { quiet: true }, () ->
        assert.equal doneCalls, 0
        assert.equal failCalls, 0
        done()
    it 'should invoke worker when work is returned', (done) ->
      q = Job.processJobs 'root', 'work', { pollInterval: 100 }, (job, cb) ->
        job.done()
        q.shutdown { quiet: true }, () ->
          assert.equal doneCalls, 1
          assert.equal failCalls, 0
          done()
        cb null
    it 'should invoke worker when work is returned from a manual trigger', (done) ->
      q = Job.processJobs 'root', 'work', { pollInterval: 0 }, (job, cb) ->
        job.done()
        q.shutdown { quiet: true }, () ->
          assert.equal doneCalls, 1
          assert.equal failCalls, 0
          done()
        cb null
      # pollInterval: 0 disables automatic polling entirely
      assert.equal q.pollInterval, Job.forever
      assert.isNull q._interval
      setTimeout(
        () -> q.trigger()
        20
      )
    it 'should successfully start in paused state and resume', (done) ->
      flag = false
      q = Job.processJobs('root', 'work', { pollInterval: 10 }, (job, cb) ->
        # The worker must only run after resume() flips the flag
        assert.isTrue flag
        job.done()
        q.shutdown { quiet: true }, () ->
          assert.equal doneCalls, 1
          assert.equal failCalls, 0
          done()
        cb null
      ).pause()
      setTimeout(
        () ->
          flag = true
          q.resume()
        20
      )
    it 'should successfully accept multiple jobs from getWork', (done) ->
      count = 5
      # prefetch: 4 means one running job plus four queued locally
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, prefetch: 4 }, (job, cb) ->
        assert.equal q.length(), count-1, 'q.length is incorrect'
        assert.equal q.running(), 1, 'q.running is incorrect'
        if count is 5
          assert.isTrue q.full(), 'q.full should be true'
          assert.isFalse q.idle(), 'q.idle should be false'
        job.done()
        count--
        if count is 0
          q.shutdown { quiet: true }, () ->
            assert.equal doneCalls, 5, 'doneCalls is incorrect'
            assert.equal failCalls, 0, 'failCalls is incorrect'
            done()
        cb null
      )
    it 'should successfully accept and process multiple simultaneous jobs concurrently', (done) ->
      count = 0
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 5 }, (job, cb) ->
        count++
        # Delay completion so all five workers are observed running at once
        setTimeout(
          () ->
            assert.equal q.length(), 0
            assert.equal q.running(), count
            count--
            job.done()
            unless count > 0
              q.shutdown { quiet: true }, () ->
                assert.equal doneCalls, 5
                assert.equal failCalls, 0
                done()
            cb null
          25
        )
      )
    it 'should successfully accept and process multiple simultaneous jobs in one worker', (done) ->
      # payload: 5 delivers all five jobs to a single worker invocation
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5 }, (jobs, cb) ->
        assert.equal jobs.length, 5
        assert.equal q.length(), 0
        assert.equal q.running(), 1
        j.done() for j in jobs
        q.shutdown { quiet: true }, () ->
          assert.equal doneCalls, 5
          assert.equal failCalls, 0
          done()
        cb()
      )
    it 'should successfully accept and process multiple simultaneous jobs concurrently and within workers', (done) ->
      count = 0
      numJobs = 25
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5, concurrency: 5 }, (jobs, cb) ->
        count += jobs.length
        setTimeout(
          () ->
            assert.equal q.length(), 0
            # Each concurrent worker holds a payload of 5 jobs
            assert.equal q.running(), count / 5
            count -= jobs.length
            j.done() for j in jobs
            unless count > 0
              q.shutdown { quiet: true }, () ->
                assert.equal doneCalls, 25
                assert.equal failCalls, 0
                done()
            cb null
          25
        )
      )
    # Shutdown levels: 'soft' drains the local queue, 'normal' fails queued
    # jobs but lets running workers finish, 'hard' also fails running jobs.
    it 'should successfully perform a soft shutdown', (done) ->
      count = 5
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, prefetch: 4 }, (job, cb) ->
        count--
        assert.equal q.length(), count
        assert.equal q.running(), 1
        assert.isTrue q.full()
        job.done()
        if count is 4
          q.shutdown { quiet: true, level: 'soft' }, () ->
            # Soft shutdown processes everything already fetched
            assert count is 0
            assert.equal q.length(), 0
            assert.isFalse Job._ddp_apply.calledWith("root_jobFail")
            assert.equal doneCalls, 5
            assert.equal failCalls, 0
            done()
        cb null
      )
    it 'should successfully perform a normal shutdown', (done) ->
      count = 5
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 2, prefetch: 3 }, (job, cb) ->
        setTimeout(
          () ->
            count--
            job.done()
            if count is 4
              q.shutdown { quiet: true, level: 'normal' }, () ->
                # Two running jobs finish; the three prefetched ones fail
                assert.equal count, 3
                assert.equal q.length(), 0
                assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
                assert.equal doneCalls, 2
                assert.equal failCalls, 3
                done()
            cb null
          25
        )
      )
    it 'should successfully perform a normal shutdown with both payload and concurrency', (done) ->
      count = 0
      numJobs = 25
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5, concurrency: 2, prefetch: 15 }, (jobs, cb) ->
        count += jobs.length
        setTimeout(
          () ->
            assert.equal q.running(), count / 5
            count -= jobs.length
            j.done() for j in jobs
            if count is 5
              q.shutdown { quiet: true }, () ->
                assert.equal q.length(), 0, 'jobs remain in task list'
                assert.equal count, 0, 'count is wrong value'
                assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
                # 2 workers x payload 5 complete; the 15 prefetched jobs fail
                assert.equal doneCalls, 10
                assert.equal failCalls, 15
                done()
            cb null
          25
        )
      )
    it 'should successfully perform a hard shutdown', (done) ->
      count = 0
      time = 20
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 2, prefetch: 3 }, (job, cb) ->
        setTimeout(
          () ->
            count++
            if count is 1
              job.done()
              q.shutdown { level: 'hard', quiet: true }, () ->
                # Only the first job completes; all others are failed
                assert.equal q.length(), 0
                assert.equal count, 1
                assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
                assert.equal doneCalls, 1, 'wrong number of .done() calls'
                assert.equal failCalls, 4, 'wrong number of .fail() calls'
                done()
              cb null # Other workers will never call back
          time
        )
        # Stagger worker completion so the first finishes before the rest
        time += 20
      )
    it 'should throw when using callbackStrict option and multiple callback invokes happen', (done) ->
      q = Job.processJobs('root', 'work', { callbackStrict: true, pollInterval: 100, concurrency: 1, prefetch: 0 }, (job, cb) ->
        setTimeout(
          () ->
            job.done()
            cb()
            # Second invocation of the worker callback must be rejected
            assert.throws(cb, /callback was invoked multiple times/)
            q.shutdown { level: 'hard', quiet: true }, () ->
              assert.equal doneCalls, 1
              assert.equal failCalls, 0
              done()
          25
        )
      )
    it 'should throw when using callbackStrict option and multiple callback invokes happen 2', (done) ->
      q = Job.processJobs('root', 'work', { callbackStrict: true, pollInterval: 100, concurrency: 1, prefetch: 0 }, (job, cb) ->
        setTimeout(
          () ->
            # job.done() with a callback implicitly consumes the worker
            # callback, so the later explicit cb() is the duplicate
            job.done () ->
              assert.throws(cb, /callback was invoked multiple times/)
              q.shutdown { level: 'hard', quiet: true }, () ->
                assert.equal doneCalls, 1
                assert.equal failCalls, 0
                done()
            cb()
          25
        )
      )
    it 'should invoke errorCallback when an error is returned from getWork', (done) ->
      # The 'returnError' job type makes the stubbed getWork return an Error
      ecb = (err, res) ->
        assert.instanceOf err, Error
        q.shutdown { level: 'hard', quiet: true }, () ->
          assert.equal doneCalls, 0
          assert.equal failCalls, 0
          done()
      q = Job.processJobs('root', 'returnError', { pollInterval: 100, concurrency: 1, prefetch: 0, errorCallback: ecb }, (job, cb) -> )
    afterEach () ->
      Job._ddp_apply.resetHistory()
    after () ->
      Job._ddp_apply.restore()
| 151449 | ############################################################################
# Copyright (C) 2014-2017 by <NAME>
# meteor-job-class is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
# Unit tests
assert = require('chai').assert
rewire = require 'rewire'
sinon = require 'sinon'
Fiber = require 'fibers'
Job = rewire '../src/job_class.coffee'
# Minimal mock of a DDP connection, sufficient for Job.setDDP().
# `call` supports four canned method names used throughout these tests and
# mirrors real DDP's dual sync/async calling convention.
class DDP
  call: (name, params, cb = null) ->
    # Synchronous form: return or throw directly when no callback is given
    unless cb? and typeof cb is 'function'
      switch name
        when 'root_true'
          return true
        when 'root_false'
          return false
        when 'root_param'
          return params[0]
        when 'root_error'
          throw new Error "Method failed"
        else
          throw new Error "Bad method in call"
    else
      # Asynchronous form: deliver the result on the next tick, like real DDP
      switch name
        when 'root_true'
          process.nextTick () -> cb null, true
        when 'root_false'
          process.nextTick () -> cb null, false
        when 'root_param'
          process.nextTick () -> cb null, params[0]
        when 'root_error'
          process.nextTick () -> cb new Error "Method failed"
        else
          process.nextTick () -> cb new Error "Bad method in call"
      return
  # Fixed: these four methods invoked an undeclared `cb` (a guaranteed
  # ReferenceError on the next tick); they now accept the callback
  # parameter the DDP connection interface expects.
  connect: (cb) ->
    process.nextTick () -> cb(null)
  close: (cb) ->
    process.nextTick () -> cb(null)
  subscribe: (cb) ->
    process.nextTick () -> cb(null)
  observe: (cb) ->
    process.nextTick () -> cb(null)
# Build a fake Job._ddp_apply implementation from an `action` function that
# maps (methodName, params) to an [error, result] pair. The returned stub
# mirrors DDP semantics: asynchronous delivery on the next tick when a
# callback is supplied, synchronous return (or throw on error) otherwise.
makeDdpStub = (action) ->
  (methodName, methodParams, callback) ->
    [error, result] = action methodName, methodParams
    unless callback?
      throw error if error
      return result
    process.nextTick () -> callback error, result
###########################################
# Top-level suite for the Job class: class constants, the _ddp_apply hook,
# and the three setDDP configuration modes (default, named collections,
# and Fiber-based synchronous operation).
describe 'Job', () ->
  # Pins the sizes of the public constant tables; a change here signals an
  # (intentional or not) API change in job_class.coffee.
  it 'has class constants', () ->
    assert.isNumber Job.forever
    assert.isObject Job.jobPriorities
    assert.lengthOf Object.keys(Job.jobPriorities), 5
    assert.isArray Job.jobRetryBackoffMethods
    assert.lengthOf Job.jobRetryBackoffMethods, 2
    assert.isArray Job.jobStatuses
    assert.lengthOf Job.jobStatuses, 7
    assert.isArray Job.jobLogLevels
    assert.lengthOf Job.jobLogLevels, 4
    assert.isArray Job.jobStatusCancellable
    assert.lengthOf Job.jobStatusCancellable, 4
    assert.isArray Job.jobStatusPausable
    assert.lengthOf Job.jobStatusPausable, 2
    assert.isArray Job.jobStatusRemovable
    assert.lengthOf Job.jobStatusRemovable, 3
    assert.isArray Job.jobStatusRestartable
    assert.lengthOf Job.jobStatusRestartable, 2
    assert.isArray Job.ddpPermissionLevels
    assert.lengthOf Job.ddpPermissionLevels , 4
    assert.isArray Job.ddpMethods
    assert.lengthOf Job.ddpMethods, 18
    assert.isObject Job.ddpMethodPermissions
    assert.lengthOf Object.keys(Job.ddpMethodPermissions), Job.ddpMethods.length
  it 'has a _ddp_apply class variable that defaults as undefined outside of Meteor', () ->
    assert.isUndefined Job._ddp_apply
  it 'has a processJobs method that is the JobQueue constructor', () ->
    assert.equal Job.processJobs, Job.__get__ "JobQueue"
  describe 'setDDP', () ->
    ddp = new DDP()
    # Mode 1: a single anonymous connection; _ddp_apply is a plain function.
    describe 'default setup', () ->
      it 'throws if given a non-ddp object', () ->
        assert.throws (() -> Job.setDDP({})), /Bad ddp object/
      it 'properly sets the default _ddp_apply class variable', (done) ->
        sinon.stub(ddp, "call").yieldsAsync()
        Job.setDDP ddp
        Job._ddp_apply 'test', [], () ->
          assert ddp.call.calledOnce
          ddp.call.restore()
          done()
      it 'fails if subsequently called with a collection name', (done) ->
        assert.throws (() -> Job.setDDP ddp, 'test1'), /Job.setDDP must specify/
        done()
      after () ->
        Job._ddp_apply = undefined
    # Mode 2: named collections; _ddp_apply becomes a map keyed by root name.
    describe 'setup with collection name', () ->
      it 'properly sets the _ddp_apply class variable', (done) ->
        sinon.stub(ddp, "call").yieldsAsync()
        Job.setDDP ddp, 'test1'
        Job._ddp_apply.test1 'test', [], () ->
          assert ddp.call.calledOnce
          ddp.call.restore()
          done()
      it 'properly sets the _ddp_apply class variable when called with array', (done) ->
        sinon.stub(ddp, "call").yieldsAsync()
        Job.setDDP ddp, ['test2','test3']
        Job._ddp_apply.test2 'test', [], () ->
          Job._ddp_apply.test3 'test', [], () ->
            assert.equal ddp.call.callCount, 2
            ddp.call.restore()
            done()
      it 'fails if subsequently called without a collection name', (done) ->
        assert.throws (() -> Job.setDDP ddp), /Job.setDDP must specify/
        done()
      after () ->
        Job._ddp_apply = undefined
    # Mode 3: passing a Fiber constructor makes _ddp_apply synchronous when
    # run inside a Fiber (yield/resume around the async DDP call).
    describe 'Fiber support', () ->
      ddp = new DDP()
      it 'accepts a valid collection name and Fiber object and properly yields and runs', (done) ->
        sinon.stub(ddp, "call").yieldsAsync()
        Job.setDDP ddp, 'test1', Fiber
        fib = Fiber () ->
          Job._ddp_apply.test1 'test', []
        fib.run()
        assert ddp.call.calledOnce
        ddp.call.restore()
        done()
      it 'accepts a default collection name and valid Fiber object and properly yields and runs', (done) ->
        sinon.stub(ddp, "call").yieldsAsync()
        Job.setDDP ddp, Fiber
        fib = Fiber () ->
          Job._ddp_apply 'test', []
        fib.run()
        assert ddp.call.calledOnce
        ddp.call.restore()
        done()
      it 'properly returns values from method calls', (done) ->
        Job.setDDP ddp, Fiber
        fib = Fiber () ->
          assert.isTrue Job._ddp_apply('root_true', [])
          assert.isFalse Job._ddp_apply('root_false', [])
          assert.deepEqual Job._ddp_apply('root_param', [['a', 1, null]]), ['a', 1, null]
          done()
        fib.run()
      it 'properly propagates thrown errors within a Fiber', (done) ->
        Job.setDDP ddp, Fiber
        fib = Fiber () ->
          assert.throws (() -> Job._ddp_apply 'root_error', []), /Method failed/
          assert.throws (() -> Job._ddp_apply 'bad_method', []), /Bad method in call/
          done()
        fib.run()
      afterEach () ->
        Job._ddp_apply = undefined
# Suite for module-private helpers, reached through rewire's __get__.
describe 'private function', () ->
  # Note! These are internal helper functions, NOT part of the external API!
  # methodCall(root, method, params, [cb], [after]) dispatches to
  # _ddp_apply as "#{root}_#{method}", sync or async depending on cb.
  describe 'methodCall', () ->
    ddp = new DDP()
    before () ->
      sinon.spy(ddp, "call")
      Job.setDDP ddp
    methodCall = Job.__get__ 'methodCall'
    it 'should be a function', () ->
      assert.isFunction methodCall
    it 'should invoke the correct ddp method', (done) ->
      methodCall "root", "true", [], (err, res) ->
        assert ddp.call.calledOnce
        assert ddp.call.calledWith("root_true")
        assert.isTrue res
        done()
    it 'should pass the correct method parameters', (done) ->
      methodCall "root", "param", ['a', 1, [1,2,3], { foo: 'bar'}], (err, res) ->
        assert ddp.call.calledOnce
        assert ddp.call.calledWith("root_param", ['a', 1, [1,2,3], { foo: 'bar'}])
        assert.equal res, 'a'
        done()
    it 'should invoke the after callback when provided', (done) ->
      after = sinon.stub().returns(true)
      methodCall("root", "false", []
        (err, res) ->
          assert ddp.call.calledOnce
          assert ddp.call.calledWith("root_false", [])
          assert after.calledOnce
          assert.isTrue res
          done()
        after
      )
    it "shouldn't invoke the after callback when error", (done) ->
      after = sinon.stub().returns(true)
      methodCall("root", "error", []
        (err, res) ->
          assert ddp.call.calledOnce
          assert ddp.call.calledWith("root_error", [])
          assert.equal after.callCount, 0, "After shouldn't be called"
          assert.isUndefined res, "Result isn't undefined"
          assert.throws (() -> throw err), /Method failed/
          done()
        after
      )
    it 'should invoke the correct ddp method without callback', () ->
      res = methodCall "root", "true", []
      assert ddp.call.calledOnce
      assert ddp.call.calledWith("root_true")
      assert.isTrue res
    it 'should pass the correct method parameters without callback', () ->
      res = methodCall "root", "param", ['a', 1, [1,2,3], { foo: 'bar'}]
      assert ddp.call.calledOnce
      assert ddp.call.calledWith("root_param", ['a', 1, [1,2,3], { foo: 'bar'}])
      assert.equal res, 'a'
    it 'should invoke the after callback when provided without callback', () ->
      after = sinon.stub().returns(true)
      res = methodCall "root", "false", [], undefined, after
      assert ddp.call.calledOnce
      assert ddp.call.calledWith("root_false", [])
      assert after.calledOnce
      assert.isTrue res
    it "should throw on error when invoked without callback", () ->
      after = sinon.stub().returns(true)
      res = undefined
      assert.throws (() -> res = methodCall("root", "error", [], undefined, after)), /Method failed/
      assert ddp.call.calledOnce
      assert ddp.call.calledWith("root_error", [])
      assert.equal after.callCount, 0, "After shouldn't be called"
      assert.isUndefined res, "Result isn't undefined"
    afterEach () ->
      ddp.call.reset()
    after () ->
      Job._ddp_apply = undefined
  # optionsHelp normalizes the trailing ([options], [callback]) arguments
  # used throughout the Job API into an [options, callback] pair.
  describe 'optionsHelp', () ->
    optionsHelp = Job.__get__ 'optionsHelp'
    foo = { bar: "bat" }
    gizmo = () ->
    it 'should return options and a callback when both are provided', () ->
      res = optionsHelp [foo], gizmo
      assert.deepEqual res, [foo, gizmo]
    it 'should handle a missing callback and return only options', () ->
      res = optionsHelp [foo]
      assert.deepEqual res, [foo, undefined]
    it 'should handle missing options and return empty options and the callback', () ->
      res = optionsHelp [], gizmo
      assert.deepEqual res, [{}, gizmo]
    it 'should handle when both options and callback are missing', () ->
      res = optionsHelp([], undefined)
      assert.deepEqual res, [{}, undefined]
    it 'should throw an error when an invalid callback is provided', () ->
      assert.throws (()-> optionsHelp([foo], 5)), /options not an object or bad callback/
    it 'should throw an error when a non-array is passed for options', () ->
      assert.throws (()-> optionsHelp(foo, gizmo)), /must be an Array with zero or one elements/
    it 'should throw an error when a bad options array is passed', () ->
      assert.throws (()-> optionsHelp([foo, 5], gizmo)), /must be an Array with zero or one elements/
  # splitLongArray chunks an array into sub-arrays of at most `max` items.
  describe 'splitLongArray', () ->
    splitLongArray = Job.__get__ 'splitLongArray'
    longArray = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ]
    it 'should properly split an array', () ->
      res = splitLongArray longArray, 4
      assert.deepEqual res, [ [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11] ]
    it 'should handle remainders correctly', () ->
      res = splitLongArray longArray, 5
      assert.deepEqual res, [ [0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [10, 11] ]
    it 'should handle an empty array', () ->
      res = splitLongArray [], 5
      assert.deepEqual res, []
    it 'should handle a single element array', () ->
      res = splitLongArray [0], 5
      assert.deepEqual res, [ [0] ]
    it 'should throw if not given an array', () ->
      assert.throws (() -> splitLongArray { foo: "bar"}, 5), /splitLongArray: bad params/
    it 'should throw if given an out of range max value', () ->
      assert.throws (() -> splitLongArray longArray, 0), /splitLongArray: bad params/
    it 'should throw if given an invalid max value', () ->
      assert.throws (() -> splitLongArray longArray, "cow"), /splitLongArray: bad params/
  # concatReduce is the reducer used to accumulate multi-call results.
  describe 'concatReduce', () ->
    concatReduce = Job.__get__ 'concatReduce'
    it 'should concat a to b', () ->
      assert.deepEqual concatReduce([1],2), [1,2]
    it 'should work with non array for the first param', () ->
      assert.deepEqual concatReduce(1,2), [1,2]
  # reduceCallbacks folds n expected callback invocations into a single
  # downstream callback, OR-ing results by default.
  describe 'reduceCallbacks', () ->
    reduceCallbacks = Job.__get__ 'reduceCallbacks'
    it 'should return undefined if given a falsy callback', () ->
      assert.isUndefined reduceCallbacks(undefined, 5)
    it 'should properly absorb the specified number of callbacks', () ->
      spy = sinon.spy()
      cb = reduceCallbacks spy, 3
      cb null, true
      cb null, false
      cb null, true
      assert spy.calledOnce
      assert spy.calledWith null, true
    it 'should properly reduce the callback results', () ->
      spy = sinon.spy()
      cb = reduceCallbacks spy, 3
      cb null, false
      cb null, false
      cb null, false
      assert spy.calledOnce
      assert spy.calledWith null, false
    it 'should properly reduce with a custom reduce function', () ->
      concatReduce = Job.__get__ 'concatReduce'
      spy = sinon.spy()
      cb = reduceCallbacks spy, 3, concatReduce, []
      cb null, false
      cb null, true
      cb null, false
      assert spy.calledOnce, 'callback called too many times'
      assert spy.calledWith(null, [false, true, false]), 'Returned wrong result'
    it 'should throw if called too many times', () ->
      spy = sinon.spy()
      cb = reduceCallbacks spy, 2
      cb null, true
      cb null, true
      assert.throws cb, /reduceCallbacks callback invoked more than requested/
    it 'should throw if given a non-function callback', () ->
      assert.throws (() -> reduceCallbacks 5), /Bad params given to reduceCallbacks/
    it 'should throw if given an invalid number of callbacks to absorb', () ->
      assert.throws (() -> reduceCallbacks (() -> ), 'cow'), /Bad params given to reduceCallbacks/
    it 'should throw if given an out of range number of callbacks to absorb', () ->
      assert.throws (() -> reduceCallbacks (() -> ), 0), /Bad params given to reduceCallbacks/
    it 'should throw if given a non-function reduce', () ->
      assert.throws (() -> reduceCallbacks (() -> ), 5, 5), /Bad params given to reduceCallbacks/
  # _setImmediate / _setInterval are the timer shims Job uses so they can
  # be swapped for Meteor-safe versions; verify args pass through.
  describe '_setImmediate', () ->
    _setImmediate = Job.__get__ '_setImmediate'
    it 'should invoke the provided callback with args', (done) ->
      cb = (a, b) ->
        assert.equal a, "foo"
        assert.equal b, "bar"
        done()
      _setImmediate cb, "foo", "bar"
  describe '_setInterval', () ->
    _setInterval = Job.__get__ '_setInterval'
    _clearInterval = Job.__get__ '_clearInterval'
    it 'should invoke the provided callback repeatedly with args', (done) ->
      cancel = null
      count = 0
      cb = (a, b) ->
        assert.equal a, "foo"
        assert.equal b, "bar"
        count++
        if count is 2
          _clearInterval cancel
          done()
        else if count > 2
          throw "Interval called too many times"
      cancel = _setInterval cb, 10, "foo", "bar"
# Construction paths: (root, type, data), without `new`, from an existing
# job document, and with an object carrying a `root` property.
describe 'Job constructor', () ->
  # Shared invariant check for a freshly constructed, unsaved job document.
  checkJob = (job) ->
    assert.instanceOf job, Job
    assert.equal job.root, 'root'
    assert.equal job.type, 'work'
    assert.deepEqual job.data, { foo: "bar" }
    assert.isObject job._doc
    doc = job._doc
    assert.notProperty doc, '_id'
    assert.isNull doc.runId
    assert.equal job.type, doc.type
    assert.deepEqual job.data, doc.data
    assert.isString doc.status
    assert.instanceOf doc.updated, Date
    assert.isArray doc.depends
    assert.isArray doc.resolved
    assert.isNumber doc.priority
    assert.isNumber doc.retries
    assert.isNumber doc.retryWait
    assert.isNumber doc.retried
    assert.isString doc.retryBackoff
    assert.instanceOf doc.retryUntil, Date
    assert.isNumber doc.repeats
    assert.isNumber doc.repeatWait
    assert.isNumber doc.repeated
    assert.instanceOf doc.repeatUntil, Date
    assert.instanceOf doc.after, Date
    assert.isArray doc.log
    assert.isObject doc.progress
    assert.instanceOf doc.created, Date
  it 'should return a new valid Job object', () ->
    job = new Job('root', 'work', { foo: "bar" })
    checkJob job
  it 'should work without "new"', () ->
    job = Job('root', 'work', { foo: "bar" })
    checkJob job
  it 'should throw when given bad parameters', () ->
    assert.throw Job, /new Job: bad parameter/
  it 'should support using a valid job document', () ->
    job = new Job('root', 'work', { foo: "bar" })
    checkJob job
    job2 = new Job('root', job.doc)
    checkJob job2
  # (test-name typo "oobject" left as-is: it is a runtime string)
  it 'should support using a valid oobject for root', () ->
    job = new Job({ root: 'root'}, 'work', { foo: "bar" })
    checkJob job
    job2 = new Job({ root: 'root'}, job.doc)
    checkJob job2
# Chainable mutators on an unsaved job. Each returns the job itself and
# writes into job._doc; `doc` aliases that document for assertions.
describe 'job mutator method', () ->
  job = null
  doc = null
  beforeEach () ->
    job = Job('root', 'work', {})
    doc = job._doc
  describe '.depends()', () ->
    it 'should properly update the depends property', () ->
      jobA = Job('root', 'work', {})
      jobA._doc._id = 'foo'
      jobB = Job('root', 'work', {})
      jobB._doc._id = 'bar'
      j = job.depends [ jobA, jobB ]
      assert.equal j, job
      assert.deepEqual doc.depends, [ 'foo', 'bar' ]
    it 'should accept a singlet Job', () ->
      jobA = Job('root', 'work', {})
      jobA._doc._id = 'foo'
      j = job.depends jobA
      assert.equal j, job
      assert.deepEqual doc.depends, [ 'foo' ]
    it 'should accept an empty deps array and return the job unchanged', () ->
      jobA = Job('root', 'work', {})
      jobA._doc._id = 'foo'
      j = job.depends jobA
      assert.equal j, job
      assert.deepEqual doc.depends, [ 'foo' ]
      j = job.depends []
      assert.equal j, job
      assert.deepEqual doc.depends, [ 'foo' ]
    it 'should clear dependencies when passed a falsy value', () ->
      jobA = Job('root', 'work', {})
      jobA._doc._id = 'foo'
      j = job.depends jobA
      assert.equal j, job
      assert.deepEqual doc.depends, [ 'foo' ]
      job.depends null
      assert.lengthOf doc.depends, 0
    it 'should throw when given a bad parameter', () ->
      assert.throw (() -> job.depends "badness"), /Bad input parameter/
    it 'should throw when given an array containing non Jobs', () ->
      assert.throw (() -> job.depends ["Badness"]), /Each provided object/
    it 'should throw when given an array containing unsaved Jobs without an _id', () ->
      jobA = Job('root', 'work', {})
      assert.throw (() -> job.depends [ jobA ]), /Each provided object/
  describe '.priority()', () ->
    it 'should accept a numeric priority', () ->
      j = job.priority 3
      assert.equal j, job
      assert.equal doc.priority, 3
    it 'should accept a valid string priority', () ->
      j = job.priority 'normal'
      assert.equal j, job
      assert.equal doc.priority, Job.jobPriorities['normal']
    it 'should throw when given an invalid priority level', () ->
      assert.throw (() -> job.priority 'super'), /Invalid string priority level provided/
    it 'should throw when given an invalid parameter', () ->
      assert.throw (() -> job.priority []), /priority must be an integer or valid priority level/
    it 'should throw when given a non-integer', () ->
      assert.throw (() -> job.priority 3.14), /priority must be an integer or valid priority level/
  describe '.retry()', () ->
    it 'should accept a non-negative integer parameter', () ->
      j = job.retry 3
      assert.equal j, job
      assert.equal doc.retries, 3 + 1 # This is correct, it adds one.
      assert.equal doc.retryWait, 5*60*1000
      assert.equal doc.retryBackoff, 'constant'
    it 'should accept an option object', () ->
      j = job.retry { retries: 3, until: new Date(new Date().valueOf() + 60000), wait: 5000, backoff: 'exponential' }
      assert.equal j, job
      assert.equal doc.retries, 3 + 1
      assert.ok doc.retryUntil > new Date()
      assert.equal doc.retryWait, 5000
      assert.equal doc.retryBackoff, 'exponential'
    it 'should throw when given a bad parameter', () ->
      assert.throw (() -> job.retry 'badness'), /bad parameter: accepts either an integer/
    it 'should throw when given a negative integer', () ->
      assert.throw (() -> job.retry -1), /bad parameter: accepts either an integer/
    it 'should throw when given a numeric non-integer', () ->
      assert.throw (() -> job.retry 3.14), /bad parameter: accepts either an integer/
    it 'should throw when given bad options', () ->
      assert.throw (() -> job.retry { retries: 'badness' }), /bad option: retries must be an integer/
      assert.throw (() -> job.retry { retries: -1 }), /bad option: retries must be an integer/
      assert.throw (() -> job.retry { retries: 3.14 }), /bad option: retries must be an integer/
      assert.throw (() -> job.retry { wait: 'badness' }), /bad option: wait must be an integer/
      assert.throw (() -> job.retry { wait: -1 }), /bad option: wait must be an integer/
      assert.throw (() -> job.retry { wait: 3.14 }), /bad option: wait must be an integer/
      assert.throw (() -> job.retry { backoff: 'bogus' }), /bad option: invalid retry backoff method/
      assert.throw (() -> job.retry { until: 'bogus' }), /bad option: until must be a Date object/
  describe '.repeat()', () ->
    it 'should accept a non-negative integer parameter', () ->
      j = job.repeat 3
      assert.equal j, job
      assert.equal doc.repeats, 3
    it 'should accept an option object', () ->
      j = job.repeat { repeats: 3, until: new Date(new Date().valueOf() + 60000), wait: 5000 }
      assert.equal j, job
      assert.equal doc.repeats, 3
      assert.ok(doc.repeatUntil > new Date())
      assert.equal doc.repeatWait, 5000
    # A later.js-style schedule is stored in repeatWait with only its
    # schedules/exceptions attributes retained.
    it 'should accept an option object with later.js object', () ->
      j = job.repeat { schedule: { schedules: [{h:[10]}], exceptions: [], other: () -> 0 }}
      assert.equal j, job
      assert.deepEqual doc.repeatWait, { schedules: [{h:[10]}], exceptions: [] }
    it 'should throw when given a bad parameter', () ->
      assert.throw (() -> job.repeat 'badness'), /bad parameter: accepts either an integer/
    it 'should throw when given a negative integer', () ->
      assert.throw (() -> job.repeat -1), /bad parameter: accepts either an integer/
    it 'should throw when given a numeric non-integer', () ->
      assert.throw (() -> job.repeat 3.14), /bad parameter: accepts either an integer/
    it 'should throw when given bad options', () ->
      assert.throw (() -> job.repeat { repeats: 'badness' }), /bad option: repeats must be an integer/
      assert.throw (() -> job.repeat { repeats: -1 }), /bad option: repeats must be an integer/
      assert.throw (() -> job.repeat { repeats: 3.14 }), /bad option: repeats must be an integer/
      assert.throw (() -> job.repeat { wait: 'badness' }), /bad option: wait must be an integer/
      assert.throw (() -> job.repeat { wait: -1 }), /bad option: wait must be an integer/
      assert.throw (() -> job.repeat { wait: 3.14 }), /bad option: wait must be an integer/
      assert.throw (() -> job.repeat { until: 'bogus' }), /bad option: until must be a Date object/
      assert.throw (() -> job.repeat { wait: 5, schedule: {}}), /bad options: wait and schedule options are mutually exclusive/
      assert.throw (() -> job.repeat { schedule: 'bogus' }), /bad option, schedule option must be an object/
      assert.throw (() -> job.repeat { schedule: {}}), /bad option, schedule object requires a schedules attribute of type Array/
      assert.throw (() -> job.repeat { schedule: { schedules: 5 }}), /bad option, schedule object requires a schedules attribute of type Array/
      assert.throw (() -> job.repeat { schedule: { schedules: [], exceptions: 5 }}), /bad option, schedule object exceptions attribute must be an Array/
  describe '.after()', () ->
    it 'should accept a valid Date', () ->
      d = new Date()
      j = job.after d
      assert.equal j, job
      assert.equal doc.after, d
    it 'should accept an undefined value', () ->
      j = job.after()
      assert.equal j, job
      assert.instanceOf doc.after, Date
      assert doc.after <= new Date()
    it 'should throw if given a bad parameter', () ->
      assert.throw (() -> job.after { foo: "bar" }), /Bad parameter, after requires a valid Date object/
      assert.throw (() -> job.after 123), /Bad parameter, after requires a valid Date object/
      assert.throw (() -> job.after false), /Bad parameter, after requires a valid Date object/
  describe '.delay()', () ->
    it 'should accept a valid delay', () ->
      j = job.delay 5000
      assert.equal j, job
      assert.instanceOf doc.after, Date
      assert.closeTo doc.after.valueOf(), new Date().valueOf() + 5000, 1000
    it 'should accept an undefined parameter', () ->
      j = job.delay()
      assert.equal j, job
      assert.instanceOf doc.after, Date
      assert.closeTo doc.after.valueOf(), new Date().valueOf(), 1000
    it 'should throw when given an invalid parameter', () ->
      assert.throw (() -> job.delay -1.234), /Bad parameter, delay requires a non-negative integer/
      assert.throw (() -> job.delay new Date()), /Bad parameter, delay requires a non-negative integer/
      assert.throw (() -> job.delay false), /Bad parameter, delay requires a non-negative integer/
# Suites below exercise methods that talk to the server; a fresh mock DDP
# connection is installed once for the whole group.
describe 'communicating', () ->
  ddp = null
  before () ->
    ddp = new DDP()
    Job.setDDP ddp
  # Each test gets a fresh unsaved job; `doc` aliases its document so tests
  # can plant _id/runId values the per-method DDP stubs key on.
  describe 'job status method', () ->
    job = null
    doc = null
    beforeEach () ->
      job = Job('root', 'work', {})
      doc = job._doc
    # .save(): stub accepts any object document, returns "newId", and throws
    # when the cancelRepeats option reaches the wire (proving pass-through).
    describe '.save()', () ->
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_jobSave'
          doc = params[0]
          options = params[1]
          if options.cancelRepeats
            throw new Error 'cancelRepeats'
          if typeof doc is 'object'
            res = "newId"
          else
            res = null
          return [null, res]
      it 'should make valid DDP call when invoked', () ->
        res = job.save()
        assert.equal res, "newId"
      it 'should work with a callback', (done) ->
        job.save (err, res) ->
          assert.equal res, "newId"
          done()
      it 'should properly pass cancelRepeats option', () ->
        assert.throw (() -> job.save({ cancelRepeats: true })), /cancelRepeats/
      it 'should properly pass cancelRepeats option with callback', () ->
        assert.throw (() -> job.save({ cancelRepeats: true }, () -> )), /cancelRepeats/
      afterEach () ->
        Job._ddp_apply.resetHistory()
      after () ->
        Job._ddp_apply.restore()
    # .refresh(): stub returns a replacement document only for id 'thisId';
    # getLog pass-through is proven by a sentinel throw.
    describe '.refresh()', () ->
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_getJob'
          id = params[0]
          options = params[1]
          if options.getLog
            throw new Error 'getLog'
          if id is 'thisId'
            res = { foo: 'bar' }
          else
            res = null
          return [null, res]
      it 'should make valid DDP call when invoked', () ->
        doc._id = 'thisId'
        res = job.refresh()
        assert.deepEqual job._doc, { foo: 'bar' }
        assert.equal res, job
      it 'should work with a callback', (done) ->
        doc._id = 'thisId'
        job.refresh (err, res) ->
          assert.deepEqual job._doc, { foo: 'bar' }
          assert.equal res, job
          done()
      it "shouldn't modify job when not found on server", () ->
        doc._id = 'thatId'
        res = job.refresh()
        assert.isFalse res
        assert.deepEqual job._doc, doc
      it 'should properly pass getLog option', () ->
        doc._id = 'thisId'
        assert.throw (() -> job.refresh({ getLog: true })), /getLog/
      it 'should throw when called on an unsaved job', () ->
        assert.throw (() -> job.refresh()), /on an unsaved job/
      afterEach () ->
        Job._ddp_apply.resetHistory()
      after () ->
        Job._ddp_apply.restore()
    # .log(): stub echoes the level back for a known id/runId/message so the
    # tests can observe which level job.log actually sent. The 'gerinfo'
    # default is a deliberate sentinel that is NOT a valid log level, so a
    # missing level option would surface as res = false.
    describe '.log()', () ->
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_jobLog'
          id = params[0]
          runId = params[1]
          msg = params[2]
          level = params[3]?.level ? 'gerinfo'
          if id is 'thisId' and runId is 'thatId' and msg is 'Hello' and level in Job.jobLogLevels
            res = level
          else
            res = false
          return [null, res]
      it 'should add a valid log entry to the local state when invoked before a job is saved', () ->
        j = job.log 'Hello', { level: 'success' }
        assert.equal j, job
        thisLog = doc.log[1] # [0] is the 'Created' log message
        assert.equal thisLog.message, 'Hello'
        assert.equal thisLog.level, 'success'
        assert.instanceOf thisLog.time, Date
        assert.closeTo thisLog.time.valueOf(), new Date().valueOf(), 1000
      it 'should make valid DDP call when invoked on a saved job', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        res = job.log 'Hello'
        assert.equal res, 'info'
      it 'should correctly pass level option', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        res = job.log 'Hello', { level: 'danger' }
        assert.equal res, 'danger'
      it 'should work with a callback', (done) ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        job.log 'Hello', { level: 'success' }, (err, res) ->
          assert.equal res, 'success'
          done()
      it 'should throw when passed an invalid message', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        assert.throw (() -> job.log 43, { level: 'danger' }), /Log message must be a string/
      it 'should throw when passed an invalid level', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        assert.throw (() -> job.log 'Hello', { level: 'blargh' }), /Log level options must be one of Job.jobLogLevels/
        assert.throw (() -> job.log 'Hello', { level: [] }), /Log level options must be one of Job.jobLogLevels/
      # console is swapped (via rewire) for throwing stubs, so the thrown
      # message identifies exactly which console method echoed.
      describe 'echo option', () ->
        jobConsole = null
        before () ->
          jobConsole = Job.__get__ 'console'
          Job.__set__ 'console',
            info: (params...) -> throw new Error 'info'
            log: (params...) -> throw new Error 'success'
            warn: (params...) -> throw new Error 'warning'
            error: (params...) -> throw new Error 'danger'
        it 'should echo the log to the console at the level requested', () ->
          assert.doesNotThrow (() -> job.log 'Hello'), 'echo occurred without being requested'
          assert.doesNotThrow (() -> job.log 'Hello', { echo: false }), 'echo occurred when explicitly disabled'
          assert.throw (() -> job.log 'Hello', { echo: true }), /info/
          assert.throw (() -> job.log 'Hello', { echo: true, level: 'info' }), /info/
          assert.throw (() -> job.log 'Hello', { echo: true, level: 'success' }), /success/
          assert.throw (() -> job.log 'Hello', { echo: true, level: 'warning' }), /warning/
          assert.throw (() -> job.log 'Hello', { echo: true, level: 'danger' }), /danger/
        it "shouldn't echo the log to the console below the level requested", () ->
          assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning' })
          assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning', level: 'info' })
          assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning', level: 'success' })
          assert.throw (() -> job.log 'Hello', { echo: 'warning', level: 'warning' }), /warning/
          assert.throw (() -> job.log 'Hello', { echo: 'warning', level: 'danger' }), /danger/
        after () ->
          Job.__set__ 'console', jobConsole
      afterEach () ->
        Job._ddp_apply.resetHistory()
      after () ->
        Job._ddp_apply.restore()
    # .progress(): stub validates the completed/total pair and echoes the
    # computed percentage so pass-through of both numbers is observable.
    describe '.progress()', () ->
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_jobProgress'
          id = params[0]
          runId = params[1]
          completed = params[2]
          total = params[3]
          if ( id is 'thisId' and
               runId is 'thatId' and
               typeof completed is 'number' and
               typeof total is 'number' and
               0 <= completed <= total and
               total > 0 )
            res = 100 * completed / total
          else
            res = false
          return [null, res]
      it 'should add a valid progress update to the local state when invoked before a job is saved', () ->
        j = job.progress 2.5, 10
        assert.equal j, job
        assert.deepEqual doc.progress, { completed: 2.5, total: 10, percent: 25 }
      it 'should make valid DDP call when invoked on a saved job', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        res = job.progress 5, 10
        assert.equal res, 50
      it 'should work with a callback', (done) ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        job.progress 7.5, 10, (err, res) ->
          assert.equal res, 75
          done()
      describe 'echo option', () ->
        jobConsole = null
        before () ->
          jobConsole = Job.__get__ 'console'
          Job.__set__ 'console',
            info: (params...) -> throw new Error 'info'
        it 'should progress updates to the console when requested', () ->
          assert.doesNotThrow (() -> job.progress 0, 100)
          assert.doesNotThrow (() -> job.progress 0, 100, { echo: false })
          assert.throw (() -> job.progress 0, 100, { echo: true }), /info/
        after () ->
          Job.__set__ 'console', jobConsole
      # (test-name typo "paramters" left as-is: it is a runtime string)
      it 'should throw when given invalid paramters', () ->
        assert.throw (() -> job.progress true, 100), /job.progress: something is wrong with progress params/
        assert.throw (() -> job.progress 0, "hundred"), /job.progress: something is wrong with progress params/
        assert.throw (() -> job.progress -1, 100), /job.progress: something is wrong with progress params/
        assert.throw (() -> job.progress 2, 1), /job.progress: something is wrong with progress params/
        assert.throw (() -> job.progress 0, 0), /job.progress: something is wrong with progress params/
        assert.throw (() -> job.progress 0, -1), /job.progress: something is wrong with progress params/
        assert.throw (() -> job.progress -2, -1), /job.progress: something is wrong with progress params/
      afterEach () ->
        Job._ddp_apply.resetHistory()
      after () ->
        Job._ddp_apply.restore()
describe '.done()', () ->
before () ->
sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
throw new Error 'Bad method name' unless name is 'root_jobDone'
id = params[0]
runId = params[1]
result = params[2]
options = params[3]
if ( id is 'thisId' and
runId is 'thatId' and
typeof result is 'object')
res = result
else if options.resultId
res = result.resultId
else
res = false
return [null, res]
it 'should make valid DDP call when invoked on a running job', () ->
doc._id = 'thisId'
doc.runId = 'thatId'
res = job.done()
assert.deepEqual res, {}
it 'should properly handle a result object', () ->
doc._id = 'thisId'
doc.runId = 'thatId'
result =
foo: 'bar'
status: 0
res = job.done result
assert.deepEqual res, result
it 'should properly handle a non-object result', () ->
doc._id = 'thisId'
doc.runId = 'thatId'
result = "Done!"
res = job.done result
assert.deepEqual res, { value: result }
it 'should work with a callback', (done) ->
doc._id = 'thisId'
doc.runId = 'thatId'
job.done (err, res) ->
assert.deepEqual res, {}
done()
it 'should throw when called on an unsaved job', () ->
assert.throw (() -> job.done()), /an unsaved or non-running job/
it 'should throw when called on a nonrunning job', () ->
doc._id = 'thisId'
assert.throw (() -> job.done()), /an unsaved or non-running job/
it 'should properly pass the repeatId option', () ->
doc._id = 'someId'
doc.runId = 'otherId'
job.done { repeatId: "testID" }, { repeatId: true }, (err, res) ->
assert.deepEqual res, "testID"
done()
afterEach () ->
Job._ddp_apply.resetHistory()
after () ->
Job._ddp_apply.restore()
    # .fail(): stub echoes the error object back for the known id/runId pair
    # and throws when the fatal option reaches the wire (pass-through proof).
    describe '.fail()', () ->
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_jobFail'
          id = params[0]
          runId = params[1]
          err = params[2]
          options = params[3]
          if ( id is 'thisId' and
               runId is 'thatId' and
               typeof err is 'object')
            if options.fatal
              throw new Error "Fatal Error!"
            res = err
          else
            res = false
          return [null, res]
      it 'should make valid DDP call when invoked on a running job', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        res = job.fail()
        assert.deepEqual res, { value: "No error information provided" }
      it 'should properly handle an error string', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        err = 'This is an error'
        res = job.fail err
        assert.deepEqual res, { value: err }
      it 'should properly handle an error object', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        err = { message: 'This is an error' }
        res = job.fail err
        assert.equal res, err
      it 'should work with a callback', (done) ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        job.fail (err, res) ->
          assert.equal res.value, "No error information provided"
          done()
      it 'should properly handle the fatal option', () ->
        doc._id = 'thisId'
        doc.runId = 'thatId'
        assert.throw (() -> job.fail "Fatal error!", { fatal: true }), /Fatal Error!/
      it 'should throw when called on an unsaved job', () ->
        assert.throw (() -> job.fail()), /an unsaved or non-running job/
      it 'should throw when called on a nonrunning job', () ->
        doc._id = 'thisId'
        assert.throw (() -> job.fail()), /an unsaved or non-running job/
      afterEach () ->
        Job._ddp_apply.resetHistory()
      after () ->
        Job._ddp_apply.restore()
  # All simple control operations (pause/resume/ready/cancel/restart/rerun/
  # remove) share one shape: a DDP method named root_<method> taking the job
  # id, returning true on success. makeJobControl generates the suite once
  # per operation; pause/resume additionally work locally on unsaved jobs.
  describe 'job control operation', () ->
    makeJobControl = (op, method) ->
      describe op, () ->
        before () ->
          sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
            throw new Error "Bad method name: #{name}" unless name is "root_#{method}"
            id = params[0]
            if id is 'thisId'
              res = true
            else
              res = false
            return [null, res]
        it 'should properly invoke the DDP method', () ->
          assert.isFunction job[op]
          doc._id = 'thisId'
          res = job[op]()
          assert.isTrue res
        it 'should return false if the id is not on the server', () ->
          assert.isFunction job[op]
          doc._id = 'badId'
          res = job[op]()
          assert.isFalse res
        it 'should work with a callback', (done) ->
          assert.isFunction job[op]
          doc._id = 'thisId'
          res = job[op] (err, res) ->
            assert.isTrue res
            done()
        if op in ['pause', 'resume']
          it 'should alter local state when called on an unsaved job', () ->
            bad = 'badStatus'
            doc.status = bad
            res = job[op]()
            assert.equal res, job
            assert.notEqual doc.status, bad
          it 'should alter local state when called on an unsaved job with callback', (done) ->
            bad = 'badStatus'
            doc.status = bad
            res = job[op] (err, res) ->
              assert.isTrue res
              assert.notEqual doc.status, bad
              done()
        else
          it 'should throw when called on an unsaved job', () ->
            assert.throw (() -> job[op]()), /on an unsaved job/
        afterEach () ->
          Job._ddp_apply.resetHistory()
        after () ->
          Job._ddp_apply.restore()
    makeJobControl 'pause', 'jobPause'
    makeJobControl 'resume', 'jobResume'
    makeJobControl 'ready', 'jobReady'
    makeJobControl 'cancel', 'jobCancel'
    makeJobControl 'restart', 'jobRestart'
    makeJobControl 'rerun', 'jobRerun'
    makeJobControl 'remove', 'jobRemove'
  describe 'class method', () ->
    # getWork: stub fabricates maxJobs job documents for type 'work' and an
    # empty list for 'nowork', letting the tests verify the single-job vs
    # array return shapes of Job.getWork.
    describe 'getWork', () ->
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error 'Bad method name' unless name is 'root_getWork'
          type = params[0][0]
          max = params[1]?.maxJobs ? 1
          res = switch type
            when 'work'
              ( Job('root', type, { i: 1 })._doc for i in [1..max] )
            when 'nowork'
              []
          return [null, res]
      it 'should make a DDP method call and return a Job by default without callback', () ->
        res = Job.getWork 'root', 'work', {}
        assert.instanceOf res, Job
      it 'should return undefined when no work is available without callback', () ->
        res = Job.getWork 'root', 'nowork', {}
        assert.isUndefined res
      it 'should return an array of Jobs when options.maxJobs > 1 without callback', () ->
        res = Job.getWork 'root', 'work', { maxJobs: 2 }
        assert.isArray res
        assert.lengthOf res, 2
        assert.instanceOf res[0], Job
      it 'should return an empty array when options.maxJobs > 1 and there is no work without callback', () ->
        res = Job.getWork 'root', 'nowork', { maxJobs: 2 }
        assert.isArray res
        assert.lengthOf res, 0
      it 'should throw when given on invalid value for the timeout option', () ->
        assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: "Bad" })), /must be a positive integer/
        assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: 0 })), /must be a positive integer/
        assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: -1 })), /must be a positive integer/
      afterEach () ->
        Job._ddp_apply.resetHistory()
      after () ->
        Job._ddp_apply.restore()
      # Constructing a Job from an existing server job document.
      describe 'makeJob', () ->
         jobDoc = () ->
            j = new Job('root', 'work', {})._doc
            # Mimic a Mongo ObjectID-like value as stored on the server
            j._id = { _str: 'skljfdf9s0ujfsdfl3' }
            return j
         it 'should return a valid job instance when called with a valid job document', () ->
            res = new Job 'root', jobDoc()
            assert.instanceOf res, Job
         it 'should throw when passed invalid params', () ->
            assert.throw (() -> new Job()), /bad parameter/
            assert.throw (() -> new Job(5, jobDoc())), /bad parameter/
            assert.throw (() -> new Job('work', {})), /bad parameter/
      describe 'get Job(s) by ID', () ->
         # Shared DDP stub: maps 'goodID' to a job document and anything else
         # to undefined; supports both a scalar id and an array of ids.
         getJobStub = (name, params) ->
            throw new Error 'Bad method name' unless name is 'root_getJob'
            ids = params[0]
            one = (id) ->
               j = switch id
                  when 'goodID'
                     Job('root', 'work', { i: 1 })._doc
                  else
                     undefined
               return j
            if ids instanceof Array
               res = (one(j) for j in ids when j is 'goodID')
            else
               res = one(ids)
            return [null, res]
         describe 'getJob', () ->
            before () ->
               sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub getJobStub
            it 'should return a valid job instance when called with a good id', () ->
               res = Job.getJob 'root', 'goodID'
               assert.instanceOf res, Job
            it 'should return undefined when called with a bad id', () ->
               res = Job.getJob 'root', 'badID'
               assert.isUndefined res
            afterEach () ->
               Job._ddp_apply.resetHistory()
            after () ->
               Job._ddp_apply.restore()
         describe 'getJobs', () ->
            before () ->
               sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub getJobStub
            it 'should return valid job instances for good IDs only', () ->
               res = Job.getJobs 'root', ['goodID', 'badID', 'goodID']
               # All ids must be fetched in a single DDP round trip
               assert Job._ddp_apply.calledOnce, 'getJob method called more than once'
               assert.isArray res
               assert.lengthOf res, 2
               assert.instanceOf res[0], Job
               assert.instanceOf res[1], Job
            it 'should return an empty array for all bad IDs', () ->
               res = Job.getJobs 'root', ['badID', 'badID', 'badID']
               assert Job._ddp_apply.calledOnce, 'getJob method called more than once'
               assert.isArray res
               assert.lengthOf res, 0
            afterEach () ->
               Job._ddp_apply.resetHistory()
            after () ->
               Job._ddp_apply.restore()
      # Factory-generated suites for the bulk-ID class methods. The DDP stub
      # returns true iff any supplied id is 'goodID'.
      describe 'multijob operation', () ->
         makeMulti = (op, method) ->
            describe op, () ->
               before () ->
                  sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
                     throw new Error "Bad method name: #{name}" unless name is "root_#{method}"
                     ids = params[0]
                     return [null, ids.indexOf('goodID') isnt -1]
               it 'should return true if there are any good IDs', () ->
                  assert.isFunction Job[op]
                  res = Job[op]('root', ['goodID', 'badID', 'goodID'])
                  assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
                  assert.isBoolean res
                  assert.isTrue res
               it 'should return false if there are all bad IDs', () ->
                  assert.isFunction Job[op]
                  res = Job[op]('root', ['badID', 'badID'])
                  assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
                  assert.isBoolean res
                  assert.isFalse res
               afterEach () ->
                  Job._ddp_apply.resetHistory()
               after () ->
                  Job._ddp_apply.restore()
         makeMulti 'pauseJobs', 'jobPause'
         makeMulti 'resumeJobs', 'jobResume'
         makeMulti 'cancelJobs', 'jobCancel'
         makeMulti 'restartJobs', 'jobRestart'
         makeMulti 'removeJobs', 'jobRemove'
      # Factory-generated suites for the server-level control methods, whose
      # DDP method name equals the class method name.
      describe 'control method', () ->
         makeControl = (op) ->
            describe op, () ->
               before () ->
                  sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
                     throw new Error "Bad method name: #{name}" unless name is "root_#{op}"
                     return [null, true]
               it 'should return a boolean', () ->
                  assert.isFunction Job[op]
                  res = Job[op]('root')
                  assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
                  assert.isBoolean res
               afterEach () ->
                  Job._ddp_apply.resetHistory()
               after () ->
                  Job._ddp_apply.restore()
         makeControl 'startJobs'
         makeControl 'stopJobs'
         makeControl 'startJobServer'
         makeControl 'shutdownJobServer'
###########################################
# JobQueue (Job.processJobs) suite. A single DDP stub simulates the server:
# it counts jobDone/jobFail calls and hands out fake jobs for the 'work' and
# 'workMax' types until the shared numJobs counter is exhausted.
describe 'JobQueue', () ->
   ddp = new DDP()
   failCalls = 0
   doneCalls = 0
   numJobs = 5
   before () ->
      Job._ddp_apply = undefined
      Job.setDDP ddp
      sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
         err = null
         res = null
         # Build a fake "running" job document with predictable ids
         makeJobDoc = (idx=0) ->
            job = new Job('root', 'work', { idx: idx })
            doc = job._doc
            doc._id = 'thisId' + idx
            doc.runId = 'thatId' + idx
            doc.status = 'running'
            return doc
         switch name
            when 'root_jobDone'
               doneCalls++
               res = true
            when 'root_jobFail'
               failCalls++
               res = true
            when 'root_getWork'
               type = params[0][0]
               max = params[1]?.maxJobs ? 1
               if numJobs is 0
                  res = []
               else
                  switch type
                     when 'noWork'
                        res = []
                     when 'work'
                        numJobs--
                        res = [ makeJobDoc() ]
                     when 'workMax'
                        # NOTE(review): this grows max up to numJobs when the
                        # request is smaller — looks inverted vs. a "cap at
                        # remaining" intent; tests depend on it, so left as-is.
                        if max < numJobs
                           max = numJobs
                        numJobs -= max
                        res = (makeJobDoc(i) for i in [1..max])
                     when 'returnError'
                        err = new Error "MongoError: connection n to w.x.y.z:27017 timed out"
            else
               throw new Error "Bad method name: #{name}"
         return [err, res]
   # Reset the shared counters before every test in this suite.
   beforeEach () ->
      failCalls = 0
      doneCalls = 0
      numJobs = 5
   # Exhaustive validation of processJobs parameter/option checking.
   it 'should throw when an invalid options are used', (done) ->
      assert.throws (() ->
         Job.processJobs 42, 'noWork', { }, (job, cb) -> ),
         /must be nonempty string/
      assert.throws (() ->
         Job.processJobs '', 'noWork', { }, (job, cb) -> ),
         /must be nonempty string/
      assert.throws (() ->
         Job.processJobs 'root', 42, { }, (job, cb) -> ),
         /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
         Job.processJobs 'root', '', { }, (job, cb) -> ),
         /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
         Job.processJobs 'root', [], { }, (job, cb) -> ),
         /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
         Job.processJobs 'root', [''], { }, (job, cb) -> ),
         /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
         Job.processJobs 'root', ['noWork',''], { }, (job, cb) -> ),
         /must be nonempty string or array of nonempty strings/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { pollInterval: -1 }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { concurrency: 'Bad' }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { concurrency: -1 }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { payload: 'Bad' }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { payload: -1 }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { prefetch: 'Bad' }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { prefetch: -1 }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { workTimeout: 'Bad' }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { workTimeout: -1 }, (job, cb) -> ),
         /must be a positive integer/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { callbackStrict: 1 }, (job, cb) -> ),
         /must be a boolean/
      assert.throws (() ->
         Job.processJobs 'root', 'noWork', { errorCallback: 1 }, (job, cb) -> ),
         /must be a function/
      done()
   # Construction sanity: processJobs returns a JobQueue instance and a clean
   # shutdown of an idle queue triggers no done/fail DDP calls.
   it 'should return a valid JobQueue when called', (done) ->
      q = Job.processJobs 'root', 'noWork', { pollInterval: 100 }, (job, cb) ->
         job.done()
         cb null
      assert.instanceOf q, Job.processJobs
      q.shutdown { quiet: true }, () ->
         assert.equal doneCalls, 0
         assert.equal failCalls, 0
         done()
   it 'should return a valid JobQueue when called with array of job types', (done) ->
      q = Job.processJobs 'root', ['noWork', 'noWork2'], { pollInterval: 100 }, (job, cb) ->
         job.done()
         cb null
      assert.instanceOf q, Job.processJobs
      q.shutdown { quiet: true }, () ->
         assert.equal doneCalls, 0
         assert.equal failCalls, 0
         done()
   it 'should send shutdown notice to console when quiet is false', (done) ->
      jobConsole = Job.__get__ 'console'
      # Replace the module's console with throwing stubs so that any console
      # output surfaces as a detectable exception.
      revert = Job.__set__
         console:
            info: (params...) -> throw new Error 'info'
            log: (params...) -> throw new Error 'success'
            warn: (params...) -> throw new Error 'warning'
            error: (params...) -> throw new Error 'danger'
      q = Job.processJobs 'root', 'noWork', { pollInterval: 100 }, (job, cb) ->
         job.done()
         cb null
      assert.instanceOf q, Job.processJobs
      assert.throws (() -> (q.shutdown () -> done())), /warning/
      revert()
      q.shutdown { quiet: true }, () ->
         assert.equal doneCalls, 0
         assert.equal failCalls, 0
         done()
   it 'should invoke worker when work is returned', (done) ->
      q = Job.processJobs 'root', 'work', { pollInterval: 100 }, (job, cb) ->
         job.done()
         q.shutdown { quiet: true }, () ->
            assert.equal doneCalls, 1
            assert.equal failCalls, 0
            done()
         cb null
   it 'should invoke worker when work is returned from a manual trigger', (done) ->
      # pollInterval: 0 disables automatic polling; work arrives via q.trigger()
      q = Job.processJobs 'root', 'work', { pollInterval: 0 }, (job, cb) ->
         job.done()
         q.shutdown { quiet: true }, () ->
            assert.equal doneCalls, 1
            assert.equal failCalls, 0
            done()
         cb null
      assert.equal q.pollInterval, Job.forever
      assert.isNull q._interval
      setTimeout(
         () -> q.trigger()
         20
      )
   it 'should successfully start in paused state and resume', (done) ->
      flag = false
      q = Job.processJobs('root', 'work', { pollInterval: 10 }, (job, cb) ->
         # The worker must not run until resume() has been called
         assert.isTrue flag
         job.done()
         q.shutdown { quiet: true }, () ->
            assert.equal doneCalls, 1
            assert.equal failCalls, 0
            done()
         cb null
      ).pause()
      setTimeout(
         () ->
            flag = true
            q.resume()
         20
      )
   # Prefetch: a batch of 5 jobs is fetched, then drained one at a time.
   it 'should successfully accept multiple jobs from getWork', (done) ->
      count = 5
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, prefetch: 4 }, (job, cb) ->
         assert.equal q.length(), count-1, 'q.length is incorrect'
         assert.equal q.running(), 1, 'q.running is incorrect'
         if count is 5
            assert.isTrue q.full(), 'q.full should be true'
            assert.isFalse q.idle(), 'q.idle should be false'
         job.done()
         count--
         if count is 0
            q.shutdown { quiet: true }, () ->
               assert.equal doneCalls, 5, 'doneCalls is incorrect'
               assert.equal failCalls, 0, 'failCalls is incorrect'
               done()
         cb null
      )
   it 'should successfully accept and process multiple simultaneous jobs concurrently', (done) ->
      count = 0
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 5 }, (job, cb) ->
         count++
         # Defer so all 5 workers are in flight before any assertions run
         setTimeout(
            () ->
               assert.equal q.length(), 0
               assert.equal q.running(), count
               count--
               job.done()
               unless count > 0
                  q.shutdown { quiet: true }, () ->
                     assert.equal doneCalls, 5
                     assert.equal failCalls, 0
                     done()
               cb null
            25
         )
      )
   it 'should successfully accept and process multiple simultaneous jobs in one worker', (done) ->
      # payload: 5 delivers all five jobs to a single worker invocation
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5 }, (jobs, cb) ->
         assert.equal jobs.length, 5
         assert.equal q.length(), 0
         assert.equal q.running(), 1
         j.done() for j in jobs
         q.shutdown { quiet: true }, () ->
            assert.equal doneCalls, 5
            assert.equal failCalls, 0
            done()
         cb()
      )
   it 'should successfully accept and process multiple simultaneous jobs concurrently and within workers', (done) ->
      count = 0
      numJobs = 25
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5, concurrency: 5 }, (jobs, cb) ->
         count += jobs.length
         setTimeout(
            () ->
               assert.equal q.length(), 0
               assert.equal q.running(), count / 5
               count -= jobs.length
               j.done() for j in jobs
               unless count > 0
                  q.shutdown { quiet: true }, () ->
                     assert.equal doneCalls, 25
                     assert.equal failCalls, 0
                     done()
               cb null
            25
         )
      )
   # Soft shutdown: already-queued jobs keep running to completion; nothing
   # is failed back to the server.
   it 'should successfully perform a soft shutdown', (done) ->
      count = 5
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, prefetch: 4 }, (job, cb) ->
         count--
         assert.equal q.length(), count
         assert.equal q.running(), 1
         assert.isTrue q.full()
         job.done()
         if count is 4
            q.shutdown { quiet: true, level: 'soft' }, () ->
               assert count is 0
               assert.equal q.length(), 0
               assert.isFalse Job._ddp_apply.calledWith("root_jobFail")
               assert.equal doneCalls, 5
               assert.equal failCalls, 0
               done()
         cb null
      )
   # Normal shutdown: running jobs finish, queued-but-unstarted jobs are
   # failed back to the server.
   it 'should successfully perform a normal shutdown', (done) ->
      count = 5
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 2, prefetch: 3 }, (job, cb) ->
         setTimeout(
            () ->
               count--
               job.done()
               if count is 4
                  q.shutdown { quiet: true, level: 'normal' }, () ->
                     assert.equal count, 3
                     assert.equal q.length(), 0
                     assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
                     assert.equal doneCalls, 2
                     assert.equal failCalls, 3
                     done()
               cb null
            25
         )
      )
   it 'should successfully perform a normal shutdown with both payload and concurrency', (done) ->
      count = 0
      numJobs = 25
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5, concurrency: 2, prefetch: 15 }, (jobs, cb) ->
         count += jobs.length
         setTimeout(
            () ->
               assert.equal q.running(), count / 5
               count -= jobs.length
               j.done() for j in jobs
               if count is 5
                  q.shutdown { quiet: true }, () ->
                     assert.equal q.length(), 0, 'jobs remain in task list'
                     assert.equal count, 0, 'count is wrong value'
                     assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
                     assert.equal doneCalls, 10
                     assert.equal failCalls, 15
                     done()
               cb null
            25
         )
      )
   # Hard shutdown: even running jobs are failed immediately; staggered
   # timeouts ensure only the first worker completes before shutdown.
   it 'should successfully perform a hard shutdown', (done) ->
      count = 0
      time = 20
      q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 2, prefetch: 3 }, (job, cb) ->
         setTimeout(
            () ->
               count++
               if count is 1
                  job.done()
                  q.shutdown { level: 'hard', quiet: true }, () ->
                     assert.equal q.length(), 0
                     assert.equal count, 1
                     assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
                     assert.equal doneCalls, 1, 'wrong number of .done() calls'
                     assert.equal failCalls, 4, 'wrong number of .fail() calls'
                     done()
                  cb null # Other workers will never call back
            time
         )
         time += 20
      )
   it 'should throw when using callbackStrict option and multiple callback invokes happen', (done) ->
      q = Job.processJobs('root', 'work', { callbackStrict: true, pollInterval: 100, concurrency: 1, prefetch: 0 }, (job, cb) ->
         setTimeout(
            () ->
               job.done()
               cb()
               # Second invocation of the worker callback must throw
               assert.throws(cb, /callback was invoked multiple times/)
               q.shutdown { level: 'hard', quiet: true }, () ->
                  assert.equal doneCalls, 1
                  assert.equal failCalls, 0
                  done()
            25
         )
      )
   it 'should throw when using callbackStrict option and multiple callback invokes happen 2', (done) ->
      q = Job.processJobs('root', 'work', { callbackStrict: true, pollInterval: 100, concurrency: 1, prefetch: 0 }, (job, cb) ->
         setTimeout(
            () ->
               job.done () ->
                  assert.throws(cb, /callback was invoked multiple times/)
                  q.shutdown { level: 'hard', quiet: true }, () ->
                     assert.equal doneCalls, 1
                     assert.equal failCalls, 0
                     done()
               cb()
            25
         )
      )
   it 'should invoke errorCallback when an error is returned from getWork', (done) ->
      ecb = (err, res) ->
         assert.instanceOf err, Error
         q.shutdown { level: 'hard', quiet: true }, () ->
            assert.equal doneCalls, 0
            assert.equal failCalls, 0
            done()
      q = Job.processJobs('root', 'returnError', { pollInterval: 100, concurrency: 1, prefetch: 0, errorCallback: ecb }, (job, cb) -> )
   afterEach () ->
      Job._ddp_apply.resetHistory()
   after () ->
      Job._ddp_apply.restore()
############################################################################
# Copyright (C) 2014-2017 by Vaughn Iverson
# meteor-job-class is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
# Unit tests
assert = require('chai').assert
rewire = require 'rewire'
sinon = require 'sinon'
Fiber = require 'fibers'
Job = rewire '../src/job_class.coffee'
# Mock DDP class
# Mock DDP class
# Minimal stand-in for a Meteor DDP connection. `call` emulates four fake
# server methods ("root_true", "root_false", "root_param", "root_error"),
# synchronously when no callback is given and asynchronously (deferred via
# process.nextTick) when one is.
class DDP
   # Invoke a fake DDP method.
   # name:   method name string
   # params: array of method parameters ("root_param" echoes params[0])
   # cb:     optional node-style callback; when absent the result is
   #         returned directly (or an Error thrown)
   call: (name, params, cb = null) ->
      unless cb? and typeof cb is 'function'
         switch name
            when 'root_true'
               return true
            when 'root_false'
               return false
            when 'root_param'
               return params[0]
            when 'root_error'
               throw new Error "Method failed"
            else
               throw new Error "Bad method in call"
      else
         switch name
            when 'root_true'
               process.nextTick () -> cb null, true
            when 'root_false'
               process.nextTick () -> cb null, false
            when 'root_param'
               process.nextTick () -> cb null, params[0]
            when 'root_error'
               process.nextTick () -> cb new Error "Method failed"
            else
               process.nextTick () -> cb new Error "Bad method in call"
      return
   # Bugfix: these no-op stubs previously closed over an undefined `cb`,
   # which would raise a ReferenceError on the next tick if ever invoked.
   # They now take an optional callback and invoke it safely.
   connect: (cb) ->
      process.nextTick () -> cb? null
   close: (cb) ->
      process.nextTick () -> cb? null
   subscribe: (cb) ->
      process.nextTick () -> cb? null
   observe: (cb) ->
      process.nextTick () -> cb? null
# Adapt a synchronous `action(name, params) -> [err, res]` pair into a
# DDP-apply-shaped function that works in both callback and direct styles:
# with a callback the outcome is delivered on the next tick; without one,
# an error is thrown and otherwise the result is returned.
makeDdpStub = (action) ->
   (name, params, cb) ->
      [err, res] = action name, params
      unless cb?
         throw err if err
         return res
      process.nextTick () -> cb err, res
###########################################
describe 'Job', () ->
   # Verify the public class-level constants and static wiring exist with
   # the expected shapes and sizes.
   it 'has class constants', () ->
      assert.isNumber Job.forever
      assert.isObject Job.jobPriorities
      assert.lengthOf Object.keys(Job.jobPriorities), 5
      assert.isArray Job.jobRetryBackoffMethods
      assert.lengthOf Job.jobRetryBackoffMethods, 2
      assert.isArray Job.jobStatuses
      assert.lengthOf Job.jobStatuses, 7
      assert.isArray Job.jobLogLevels
      assert.lengthOf Job.jobLogLevels, 4
      assert.isArray Job.jobStatusCancellable
      assert.lengthOf Job.jobStatusCancellable, 4
      assert.isArray Job.jobStatusPausable
      assert.lengthOf Job.jobStatusPausable, 2
      assert.isArray Job.jobStatusRemovable
      assert.lengthOf Job.jobStatusRemovable, 3
      assert.isArray Job.jobStatusRestartable
      assert.lengthOf Job.jobStatusRestartable, 2
      assert.isArray Job.ddpPermissionLevels
      assert.lengthOf Job.ddpPermissionLevels , 4
      assert.isArray Job.ddpMethods
      assert.lengthOf Job.ddpMethods, 18
      assert.isObject Job.ddpMethodPermissions
      # Every DDP method must have a corresponding permissions entry
      assert.lengthOf Object.keys(Job.ddpMethodPermissions), Job.ddpMethods.length
   it 'has a _ddp_apply class variable that defaults as undefined outside of Meteor', () ->
      assert.isUndefined Job._ddp_apply
   it 'has a processJobs method that is the JobQueue constructor', () ->
      assert.equal Job.processJobs, Job.__get__ "JobQueue"
   # Job.setDDP wires a DDP connection into the class, either as a single
   # default _ddp_apply or as per-collection entries.
   describe 'setDDP', () ->
      ddp = new DDP()
      describe 'default setup', () ->
         it 'throws if given a non-ddp object', () ->
            assert.throws (() -> Job.setDDP({})), /Bad ddp object/
         it 'properly sets the default _ddp_apply class variable', (done) ->
            sinon.stub(ddp, "call").yieldsAsync()
            Job.setDDP ddp
            Job._ddp_apply 'test', [], () ->
               assert ddp.call.calledOnce
               ddp.call.restore()
               done()
         it 'fails if subsequently called with a collection name', (done) ->
            # Default and per-collection modes cannot be mixed
            assert.throws (() -> Job.setDDP ddp, 'test1'), /Job.setDDP must specify/
            done()
         after () ->
            Job._ddp_apply = undefined
      describe 'setup with collection name', () ->
         it 'properly sets the _ddp_apply class variable', (done) ->
            sinon.stub(ddp, "call").yieldsAsync()
            Job.setDDP ddp, 'test1'
            Job._ddp_apply.test1 'test', [], () ->
               assert ddp.call.calledOnce
               ddp.call.restore()
               done()
         it 'properly sets the _ddp_apply class variable when called with array', (done) ->
            sinon.stub(ddp, "call").yieldsAsync()
            Job.setDDP ddp, ['test2','test3']
            Job._ddp_apply.test2 'test', [], () ->
               Job._ddp_apply.test3 'test', [], () ->
                  assert.equal ddp.call.callCount, 2
                  ddp.call.restore()
                  done()
         it 'fails if subsequently called without a collection name', (done) ->
            assert.throws (() -> Job.setDDP ddp), /Job.setDDP must specify/
            done()
         after () ->
            Job._ddp_apply = undefined
      # When a Fiber constructor is supplied, _ddp_apply runs synchronously
      # inside a fiber (yield/resume) instead of taking a callback.
      describe 'Fiber support', () ->
         ddp = new DDP()
         it 'accepts a valid collection name and Fiber object and properly yields and runs', (done) ->
            sinon.stub(ddp, "call").yieldsAsync()
            Job.setDDP ddp, 'test1', Fiber
            fib = Fiber () ->
               Job._ddp_apply.test1 'test', []
            fib.run()
            assert ddp.call.calledOnce
            ddp.call.restore()
            done()
         it 'accepts a default collection name and valid Fiber object and properly yields and runs', (done) ->
            sinon.stub(ddp, "call").yieldsAsync()
            Job.setDDP ddp, Fiber
            fib = Fiber () ->
               Job._ddp_apply 'test', []
            fib.run()
            assert ddp.call.calledOnce
            ddp.call.restore()
            done()
         it 'properly returns values from method calls', (done) ->
            Job.setDDP ddp, Fiber
            fib = Fiber () ->
               assert.isTrue Job._ddp_apply('root_true', [])
               assert.isFalse Job._ddp_apply('root_false', [])
               assert.deepEqual Job._ddp_apply('root_param', [['a', 1, null]]), ['a', 1, null]
               done()
            fib.run()
         it 'properly propagates thrown errors within a Fiber', (done) ->
            Job.setDDP ddp, Fiber
            fib = Fiber () ->
               assert.throws (() -> Job._ddp_apply 'root_error', []), /Method failed/
               assert.throws (() -> Job._ddp_apply 'bad_method', []), /Bad method in call/
               done()
            fib.run()
         afterEach () ->
            Job._ddp_apply = undefined
   describe 'private function', () ->
      # Note! These are internal helper functions, NOT part of the external API!
      # methodCall(root, method, params, cb?, after?) dispatches to the DDP
      # connection; `after` post-processes successful results only.
      describe 'methodCall', () ->
         ddp = new DDP()
         before () ->
            sinon.spy(ddp, "call")
            Job.setDDP ddp
         methodCall = Job.__get__ 'methodCall'
         it 'should be a function', () ->
            assert.isFunction methodCall
         it 'should invoke the correct ddp method', (done) ->
            methodCall "root", "true", [], (err, res) ->
               assert ddp.call.calledOnce
               assert ddp.call.calledWith("root_true")
               assert.isTrue res
               done()
         it 'should pass the correct method parameters', (done) ->
            methodCall "root", "param", ['a', 1, [1,2,3], { foo: 'bar'}], (err, res) ->
               assert ddp.call.calledOnce
               assert ddp.call.calledWith("root_param", ['a', 1, [1,2,3], { foo: 'bar'}])
               assert.equal res, 'a'
               done()
         it 'should invoke the after callback when provided', (done) ->
            after = sinon.stub().returns(true)
            methodCall("root", "false", []
               (err, res) ->
                  assert ddp.call.calledOnce
                  assert ddp.call.calledWith("root_false", [])
                  assert after.calledOnce
                  assert.isTrue res
                  done()
               after
            )
         it "shouldn't invoke the after callback when error", (done) ->
            after = sinon.stub().returns(true)
            methodCall("root", "error", []
               (err, res) ->
                  assert ddp.call.calledOnce
                  assert ddp.call.calledWith("root_error", [])
                  assert.equal after.callCount, 0, "After shouldn't be called"
                  assert.isUndefined res, "Result isn't undefined"
                  assert.throws (() -> throw err), /Method failed/
                  done()
               after
            )
         it 'should invoke the correct ddp method without callback', () ->
            res = methodCall "root", "true", []
            assert ddp.call.calledOnce
            assert ddp.call.calledWith("root_true")
            assert.isTrue res
         it 'should pass the correct method parameters without callback', () ->
            res = methodCall "root", "param", ['a', 1, [1,2,3], { foo: 'bar'}]
            assert ddp.call.calledOnce
            assert ddp.call.calledWith("root_param", ['a', 1, [1,2,3], { foo: 'bar'}])
            assert.equal res, 'a'
         it 'should invoke the after callback when provided without callback', () ->
            after = sinon.stub().returns(true)
            res = methodCall "root", "false", [], undefined, after
            assert ddp.call.calledOnce
            assert ddp.call.calledWith("root_false", [])
            assert after.calledOnce
            assert.isTrue res
         it "should throw on error when invoked without callback", () ->
            after = sinon.stub().returns(true)
            res = undefined
            assert.throws (() -> res = methodCall("root", "error", [], undefined, after)), /Method failed/
            assert ddp.call.calledOnce
            assert ddp.call.calledWith("root_error", [])
            assert.equal after.callCount, 0, "After shouldn't be called"
            assert.isUndefined res, "Result isn't undefined"
         afterEach () ->
            ddp.call.reset()
         after () ->
            Job._ddp_apply = undefined
      # optionsHelp normalizes a trailing ([options], callback) argument pair.
      describe 'optionsHelp', () ->
         optionsHelp = Job.__get__ 'optionsHelp'
         foo = { bar: "bat" }
         gizmo = () ->
         it 'should return options and a callback when both are provided', () ->
            res = optionsHelp [foo], gizmo
            assert.deepEqual res, [foo, gizmo]
         it 'should handle a missing callback and return only options', () ->
            res = optionsHelp [foo]
            assert.deepEqual res, [foo, undefined]
         it 'should handle missing options and return empty options and the callback', () ->
            res = optionsHelp [], gizmo
            assert.deepEqual res, [{}, gizmo]
         it 'should handle when both options and callback are missing', () ->
            res = optionsHelp([], undefined)
            assert.deepEqual res, [{}, undefined]
         it 'should throw an error when an invalid callback is provided', () ->
            assert.throws (()-> optionsHelp([foo], 5)), /options not an object or bad callback/
         it 'should throw an error when a non-array is passed for options', () ->
            assert.throws (()-> optionsHelp(foo, gizmo)), /must be an Array with zero or one elements/
         it 'should throw an error when a bad options array is passed', () ->
            assert.throws (()-> optionsHelp([foo, 5], gizmo)), /must be an Array with zero or one elements/
      # splitLongArray chunks an array into sub-arrays of at most `max` items.
      describe 'splitLongArray', () ->
         splitLongArray = Job.__get__ 'splitLongArray'
         longArray = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ]
         it 'should properly split an array', () ->
            res = splitLongArray longArray, 4
            assert.deepEqual res, [ [0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11] ]
         it 'should handle remainders correctly', () ->
            res = splitLongArray longArray, 5
            assert.deepEqual res, [ [0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [10, 11] ]
         it 'should handle an empty array', () ->
            res = splitLongArray [], 5
            assert.deepEqual res, []
         it 'should handle a single element array', () ->
            res = splitLongArray [0], 5
            assert.deepEqual res, [ [0] ]
         it 'should throw if not given an array', () ->
            assert.throws (() -> splitLongArray { foo: "bar"}, 5), /splitLongArray: bad params/
         it 'should throw if given an out of range max value', () ->
            assert.throws (() -> splitLongArray longArray, 0), /splitLongArray: bad params/
         it 'should throw if given an invalid max value', () ->
            assert.throws (() -> splitLongArray longArray, "cow"), /splitLongArray: bad params/
      # concatReduce flattens accumulated results into a single array.
      describe 'concatReduce', () ->
         concatReduce = Job.__get__ 'concatReduce'
         it 'should concat a to b', () ->
            assert.deepEqual concatReduce([1],2), [1,2]
         it 'should work with non array for the first param', () ->
            assert.deepEqual concatReduce(1,2), [1,2]
      # reduceCallbacks collapses N expected callback invocations into a
      # single call of the wrapped callback, folding results with a reducer.
      describe 'reduceCallbacks', () ->
         reduceCallbacks = Job.__get__ 'reduceCallbacks'
         it 'should return undefined if given a falsy callback', () ->
            assert.isUndefined reduceCallbacks(undefined, 5)
         it 'should properly absorb the specified number of callbacks', () ->
            spy = sinon.spy()
            cb = reduceCallbacks spy, 3
            cb null, true
            cb null, false
            cb null, true
            assert spy.calledOnce
            assert spy.calledWith null, true
         it 'should properly reduce the callback results', () ->
            spy = sinon.spy()
            cb = reduceCallbacks spy, 3
            cb null, false
            cb null, false
            cb null, false
            assert spy.calledOnce
            assert spy.calledWith null, false
         it 'should properly reduce with a custom reduce function', () ->
            concatReduce = Job.__get__ 'concatReduce'
            spy = sinon.spy()
            cb = reduceCallbacks spy, 3, concatReduce, []
            cb null, false
            cb null, true
            cb null, false
            assert spy.calledOnce, 'callback called too many times'
            assert spy.calledWith(null, [false, true, false]), 'Returned wrong result'
         it 'should throw if called too many times', () ->
            spy = sinon.spy()
            cb = reduceCallbacks spy, 2
            cb null, true
            cb null, true
            assert.throws cb, /reduceCallbacks callback invoked more than requested/
         it 'should throw if given a non-function callback', () ->
            assert.throws (() -> reduceCallbacks 5), /Bad params given to reduceCallbacks/
         it 'should throw if given an invalid number of callbacks to absorb', () ->
            assert.throws (() -> reduceCallbacks (() -> ), 'cow'), /Bad params given to reduceCallbacks/
         it 'should throw if given an out of range number of callbacks to absorb', () ->
            assert.throws (() -> reduceCallbacks (() -> ), 0), /Bad params given to reduceCallbacks/
         it 'should throw if given a non-function reduce', () ->
            assert.throws (() -> reduceCallbacks (() -> ), 5, 5), /Bad params given to reduceCallbacks/
      # Internal timer shims (environment-dependent setImmediate/setInterval).
      describe '_setImmediate', () ->
         _setImmediate = Job.__get__ '_setImmediate'
         it 'should invoke the provided callback with args', (done) ->
            cb = (a, b) ->
               assert.equal a, "foo"
               assert.equal b, "bar"
               done()
            _setImmediate cb, "foo", "bar"
      describe '_setInterval', () ->
         _setInterval = Job.__get__ '_setInterval'
         _clearInterval = Job.__get__ '_clearInterval'
         it 'should invoke the provided callback repeatedly with args', (done) ->
            cancel = null
            count = 0
            cb = (a, b) ->
               assert.equal a, "foo"
               assert.equal b, "bar"
               count++
               if count is 2
                  _clearInterval cancel
                  done()
               else if count > 2
                  throw "Interval called too many times"
            cancel = _setInterval cb, 10, "foo", "bar"
describe 'Job constructor', () ->
checkJob = (job) ->
assert.instanceOf job, Job
assert.equal job.root, 'root'
assert.equal job.type, 'work'
assert.deepEqual job.data, { foo: "bar" }
assert.isObject job._doc
doc = job._doc
assert.notProperty doc, '_id'
assert.isNull doc.runId
assert.equal job.type, doc.type
assert.deepEqual job.data, doc.data
assert.isString doc.status
assert.instanceOf doc.updated, Date
assert.isArray doc.depends
assert.isArray doc.resolved
assert.isNumber doc.priority
assert.isNumber doc.retries
assert.isNumber doc.retryWait
assert.isNumber doc.retried
assert.isString doc.retryBackoff
assert.instanceOf doc.retryUntil, Date
assert.isNumber doc.repeats
assert.isNumber doc.repeatWait
assert.isNumber doc.repeated
assert.instanceOf doc.repeatUntil, Date
assert.instanceOf doc.after, Date
assert.isArray doc.log
assert.isObject doc.progress
assert.instanceOf doc.created, Date
it 'should return a new valid Job object', () ->
job = new Job('root', 'work', { foo: "bar" })
checkJob job
it 'should work without "new"', () ->
job = Job('root', 'work', { foo: "bar" })
checkJob job
it 'should throw when given bad parameters', () ->
assert.throw Job, /new Job: bad parameter/
it 'should support using a valid job document', () ->
job = new Job('root', 'work', { foo: "bar" })
checkJob job
job2 = new Job('root', job.doc)
checkJob job2
it 'should support using a valid oobject for root', () ->
job = new Job({ root: 'root'}, 'work', { foo: "bar" })
checkJob job
job2 = new Job({ root: 'root'}, job.doc)
checkJob job2
   describe 'job mutator method', () ->
      job = null
      doc = null
      # Fresh job/doc pair before every mutator test.
      beforeEach () ->
         job = Job('root', 'work', {})
         doc = job._doc
      describe '.depends()', () ->
         it 'should properly update the depends property', () ->
            jobA = Job('root', 'work', {})
            jobA._doc._id = 'foo'
            jobB = Job('root', 'work', {})
            jobB._doc._id = 'bar'
            j = job.depends [ jobA, jobB ]
            # Mutators chain: each returns the job itself
            assert.equal j, job
            assert.deepEqual doc.depends, [ 'foo', 'bar' ]
         it 'should accept a singlet Job', () ->
            jobA = Job('root', 'work', {})
            jobA._doc._id = 'foo'
            j = job.depends jobA
            assert.equal j, job
            assert.deepEqual doc.depends, [ 'foo' ]
         it 'should accept an empty deps array and return the job unchanged', () ->
            jobA = Job('root', 'work', {})
            jobA._doc._id = 'foo'
            j = job.depends jobA
            assert.equal j, job
            assert.deepEqual doc.depends, [ 'foo' ]
            j = job.depends []
            assert.equal j, job
            assert.deepEqual doc.depends, [ 'foo' ]
         it 'should clear dependencies when passed a falsy value', () ->
            jobA = Job('root', 'work', {})
            jobA._doc._id = 'foo'
            j = job.depends jobA
            assert.equal j, job
            assert.deepEqual doc.depends, [ 'foo' ]
            job.depends null
            assert.lengthOf doc.depends, 0
         it 'should throw when given a bad parameter', () ->
            assert.throw (() -> job.depends "badness"), /Bad input parameter/
         it 'should throw when given an array containing non Jobs', () ->
            assert.throw (() -> job.depends ["Badness"]), /Each provided object/
         it 'should throw when given an array containing unsaved Jobs without an _id', () ->
            jobA = Job('root', 'work', {})
            assert.throw (() -> job.depends [ jobA ]), /Each provided object/
      describe '.priority()', () ->
         it 'should accept a numeric priority', () ->
            j = job.priority 3
            assert.equal j, job
            assert.equal doc.priority, 3
         it 'should accept a valid string priority', () ->
            j = job.priority 'normal'
            assert.equal j, job
            assert.equal doc.priority, Job.jobPriorities['normal']
         it 'should throw when given an invalid priority level', () ->
            assert.throw (() -> job.priority 'super'), /Invalid string priority level provided/
         it 'should throw when given an invalid parameter', () ->
            assert.throw (() -> job.priority []), /priority must be an integer or valid priority level/
         it 'should throw when given a non-integer', () ->
            assert.throw (() -> job.priority 3.14), /priority must be an integer or valid priority level/
describe '.retry()', () ->
it 'should accept a non-negative integer parameter', () ->
j = job.retry 3
assert.equal j, job
assert.equal doc.retries, 3 + 1 # This is correct, it adds one.
assert.equal doc.retryWait, 5*60*1000
assert.equal doc.retryBackoff, 'constant'
it 'should accept an option object', () ->
j = job.retry { retries: 3, until: new Date(new Date().valueOf() + 60000), wait: 5000, backoff: 'exponential' }
assert.equal j, job
assert.equal doc.retries, 3 + 1
assert.ok doc.retryUntil > new Date()
assert.equal doc.retryWait, 5000
assert.equal doc.retryBackoff, 'exponential'
it 'should throw when given a bad parameter', () ->
assert.throw (() -> job.retry 'badness'), /bad parameter: accepts either an integer/
it 'should throw when given a negative integer', () ->
assert.throw (() -> job.retry -1), /bad parameter: accepts either an integer/
it 'should throw when given a numeric non-integer', () ->
assert.throw (() -> job.retry 3.14), /bad parameter: accepts either an integer/
it 'should throw when given bad options', () ->
assert.throw (() -> job.retry { retries: 'badness' }), /bad option: retries must be an integer/
assert.throw (() -> job.retry { retries: -1 }), /bad option: retries must be an integer/
assert.throw (() -> job.retry { retries: 3.14 }), /bad option: retries must be an integer/
assert.throw (() -> job.retry { wait: 'badness' }), /bad option: wait must be an integer/
assert.throw (() -> job.retry { wait: -1 }), /bad option: wait must be an integer/
assert.throw (() -> job.retry { wait: 3.14 }), /bad option: wait must be an integer/
assert.throw (() -> job.retry { backoff: 'bogus' }), /bad option: invalid retry backoff method/
assert.throw (() -> job.retry { until: 'bogus' }), /bad option: until must be a Date object/
describe '.repeat()', () ->
it 'should accept a non-negative integer parameter', () ->
j = job.repeat 3
assert.equal j, job
assert.equal doc.repeats, 3
it 'should accept an option object', () ->
j = job.repeat { repeats: 3, until: new Date(new Date().valueOf() + 60000), wait: 5000 }
assert.equal j, job
assert.equal doc.repeats, 3
assert.ok(doc.repeatUntil > new Date())
assert.equal doc.repeatWait, 5000
it 'should accept an option object with later.js object', () ->
j = job.repeat { schedule: { schedules: [{h:[10]}], exceptions: [], other: () -> 0 }}
assert.equal j, job
assert.deepEqual doc.repeatWait, { schedules: [{h:[10]}], exceptions: [] }
it 'should throw when given a bad parameter', () ->
assert.throw (() -> job.repeat 'badness'), /bad parameter: accepts either an integer/
it 'should throw when given a negative integer', () ->
assert.throw (() -> job.repeat -1), /bad parameter: accepts either an integer/
it 'should throw when given a numeric non-integer', () ->
assert.throw (() -> job.repeat 3.14), /bad parameter: accepts either an integer/
it 'should throw when given bad options', () ->
assert.throw (() -> job.repeat { repeats: 'badness' }), /bad option: repeats must be an integer/
assert.throw (() -> job.repeat { repeats: -1 }), /bad option: repeats must be an integer/
assert.throw (() -> job.repeat { repeats: 3.14 }), /bad option: repeats must be an integer/
assert.throw (() -> job.repeat { wait: 'badness' }), /bad option: wait must be an integer/
assert.throw (() -> job.repeat { wait: -1 }), /bad option: wait must be an integer/
assert.throw (() -> job.repeat { wait: 3.14 }), /bad option: wait must be an integer/
assert.throw (() -> job.repeat { until: 'bogus' }), /bad option: until must be a Date object/
assert.throw (() -> job.repeat { wait: 5, schedule: {}}), /bad options: wait and schedule options are mutually exclusive/
assert.throw (() -> job.repeat { schedule: 'bogus' }), /bad option, schedule option must be an object/
assert.throw (() -> job.repeat { schedule: {}}), /bad option, schedule object requires a schedules attribute of type Array/
assert.throw (() -> job.repeat { schedule: { schedules: 5 }}), /bad option, schedule object requires a schedules attribute of type Array/
assert.throw (() -> job.repeat { schedule: { schedules: [], exceptions: 5 }}), /bad option, schedule object exceptions attribute must be an Array/
describe '.after()', () ->
it 'should accept a valid Date', () ->
d = new Date()
j = job.after d
assert.equal j, job
assert.equal doc.after, d
it 'should accept an undefined value', () ->
j = job.after()
assert.equal j, job
assert.instanceOf doc.after, Date
assert doc.after <= new Date()
it 'should throw if given a bad parameter', () ->
assert.throw (() -> job.after { foo: "bar" }), /Bad parameter, after requires a valid Date object/
assert.throw (() -> job.after 123), /Bad parameter, after requires a valid Date object/
assert.throw (() -> job.after false), /Bad parameter, after requires a valid Date object/
describe '.delay()', () ->
it 'should accept a valid delay', () ->
j = job.delay 5000
assert.equal j, job
assert.instanceOf doc.after, Date
assert.closeTo doc.after.valueOf(), new Date().valueOf() + 5000, 1000
it 'should accept an undefined parameter', () ->
j = job.delay()
assert.equal j, job
assert.instanceOf doc.after, Date
assert.closeTo doc.after.valueOf(), new Date().valueOf(), 1000
it 'should throw when given an invalid parameter', () ->
assert.throw (() -> job.delay -1.234), /Bad parameter, delay requires a non-negative integer/
assert.throw (() -> job.delay new Date()), /Bad parameter, delay requires a non-negative integer/
assert.throw (() -> job.delay false), /Bad parameter, delay requires a non-negative integer/
describe 'communicating', () ->
ddp = null
before () ->
ddp = new DDP()
Job.setDDP ddp
describe 'job status method', () ->
job = null
doc = null
beforeEach () ->
job = Job('root', 'work', {})
doc = job._doc
describe '.save()', () ->

  # Stub the DDP layer: accept only 'root_jobSave', throw if the
  # cancelRepeats option reaches the server (proves option pass-through),
  # and return a fake new id when handed a document object.
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_jobSave'
      doc = params[0]
      options = params[1]
      if options.cancelRepeats
        throw new Error 'cancelRepeats'
      if typeof doc is 'object'
        res = "newId"
      else
        res = null
      return [null, res]

  it 'should make valid DDP call when invoked', () ->
    res = job.save()
    assert.equal res, "newId"

  it 'should work with a callback', (done) ->
    job.save (err, res) ->
      assert.equal res, "newId"
      done()

  it 'should properly pass cancelRepeats option', () ->
    assert.throw (() -> job.save({ cancelRepeats: true })), /cancelRepeats/

  it 'should properly pass cancelRepeats option with callback', () ->
    assert.throw (() -> job.save({ cancelRepeats: true }, () -> )), /cancelRepeats/

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe '.refresh()', () ->

  # Stub 'root_getJob': return a replacement document only for id 'thisId',
  # and throw if the getLog option reaches the server (proves option pass-through)
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_getJob'
      id = params[0]
      options = params[1]
      if options.getLog
        throw new Error 'getLog'
      if id is 'thisId'
        res = { foo: 'bar' }
      else
        res = null
      return [null, res]

  it 'should make valid DDP call when invoked', () ->
    doc._id = 'thisId'
    res = job.refresh()
    # The job's local document is replaced by the server's copy
    assert.deepEqual job._doc, { foo: 'bar' }
    assert.equal res, job

  it 'should work with a callback', (done) ->
    doc._id = 'thisId'
    job.refresh (err, res) ->
      assert.deepEqual job._doc, { foo: 'bar' }
      assert.equal res, job
      done()

  it "shouldn't modify job when not found on server", () ->
    doc._id = 'thatId'
    res = job.refresh()
    assert.isFalse res
    assert.deepEqual job._doc, doc

  it 'should properly pass getLog option', () ->
    doc._id = 'thisId'
    assert.throw (() -> job.refresh({ getLog: true })), /getLog/

  it 'should throw when called on an unsaved job', () ->
    assert.throw (() -> job.refresh()), /on an unsaved job/

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe '.log()', () ->

  # Stub 'root_jobLog': echo back the log level for a valid call on the
  # running job. The 'gerinfo' fallback is not a member of Job.jobLogLevels,
  # so a call that fails to send a level yields false — presumably an
  # intentional sentinel; confirm against the Job library's defaulting.
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_jobLog'
      id = params[0]
      runId = params[1]
      msg = params[2]
      level = params[3]?.level ? 'gerinfo'
      if id is 'thisId' and runId is 'thatId' and msg is 'Hello' and level in Job.jobLogLevels
        res = level
      else
        res = false
      return [null, res]

  it 'should add a valid log entry to the local state when invoked before a job is saved', () ->
    j = job.log 'Hello', { level: 'success' }
    assert.equal j, job
    thisLog = doc.log[1] # [0] is the 'Created' log message
    assert.equal thisLog.message, 'Hello'
    assert.equal thisLog.level, 'success'
    assert.instanceOf thisLog.time, Date
    assert.closeTo thisLog.time.valueOf(), new Date().valueOf(), 1000

  it 'should make valid DDP call when invoked on a saved job', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    res = job.log 'Hello'
    # 'info' is the level the Job library sends when none is specified
    assert.equal res, 'info'

  it 'should correctly pass level option', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    res = job.log 'Hello', { level: 'danger' }
    assert.equal res, 'danger'

  it 'should work with a callback', (done) ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    job.log 'Hello', { level: 'success' }, (err, res) ->
      assert.equal res, 'success'
      done()

  it 'should throw when passed an invalid message', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    assert.throw (() -> job.log 43, { level: 'danger' }), /Log message must be a string/

  it 'should throw when passed an invalid level', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    assert.throw (() -> job.log 'Hello', { level: 'blargh' }), /Log level options must be one of Job.jobLogLevels/
    assert.throw (() -> job.log 'Hello', { level: [] }), /Log level options must be one of Job.jobLogLevels/

  describe 'echo option', () ->
    jobConsole = null

    # Swap in a console whose methods throw their level name, so the tests
    # can detect exactly which console method (if any) an echoed log hit
    before () ->
      jobConsole = Job.__get__ 'console'
      Job.__set__ 'console',
        info: (params...) -> throw new Error 'info'
        log: (params...) -> throw new Error 'success'
        warn: (params...) -> throw new Error 'warning'
        error: (params...) -> throw new Error 'danger'

    it 'should echo the log to the console at the level requested', () ->
      assert.doesNotThrow (() -> job.log 'Hello'), 'echo occurred without being requested'
      assert.doesNotThrow (() -> job.log 'Hello', { echo: false }), 'echo occurred when explicitly disabled'
      assert.throw (() -> job.log 'Hello', { echo: true }), /info/
      assert.throw (() -> job.log 'Hello', { echo: true, level: 'info' }), /info/
      assert.throw (() -> job.log 'Hello', { echo: true, level: 'success' }), /success/
      assert.throw (() -> job.log 'Hello', { echo: true, level: 'warning' }), /warning/
      assert.throw (() -> job.log 'Hello', { echo: true, level: 'danger' }), /danger/

    it "shouldn't echo the log to the console below the level requested", () ->
      assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning' })
      assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning', level: 'info' })
      assert.doesNotThrow (() -> job.log 'Hello', { echo: 'warning', level: 'success' })
      assert.throw (() -> job.log 'Hello', { echo: 'warning', level: 'warning' }), /warning/
      assert.throw (() -> job.log 'Hello', { echo: 'warning', level: 'danger' }), /danger/

    # Restore the real console for subsequent suites
    after () ->
      Job.__set__ 'console', jobConsole

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe '.progress()', () ->

  # Stub 'root_jobProgress': for the running job ('thisId'/'thatId') validate
  # the numeric completed/total pair and return the computed percentage;
  # anything else yields false.
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_jobProgress'
      id = params[0]
      runId = params[1]
      completed = params[2]
      total = params[3]
      if ( id is 'thisId' and
           runId is 'thatId' and
           typeof completed is 'number' and
           typeof total is 'number' and
           0 <= completed <= total and
           total > 0 )
        res = 100 * completed / total
      else
        res = false
      return [null, res]

  it 'should add a valid progress update to the local state when invoked before a job is saved', () ->
    j = job.progress 2.5, 10
    assert.equal j, job
    assert.deepEqual doc.progress, { completed: 2.5, total: 10, percent: 25 }

  it 'should make valid DDP call when invoked on a saved job', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    res = job.progress 5, 10
    assert.equal res, 50

  it 'should work with a callback', (done) ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    job.progress 7.5, 10, (err, res) ->
      assert.equal res, 75
      done()

  describe 'echo option', () ->
    jobConsole = null

    # Replace the console so any echo attempt throws the level name
    before () ->
      jobConsole = Job.__get__ 'console'
      Job.__set__ 'console',
        info: (params...) -> throw new Error 'info'

    # FIX: title was missing its verb ('should progress updates to the console')
    it 'should echo progress updates to the console when requested', () ->
      assert.doesNotThrow (() -> job.progress 0, 100)
      assert.doesNotThrow (() -> job.progress 0, 100, { echo: false })
      assert.throw (() -> job.progress 0, 100, { echo: true }), /info/

    after () ->
      Job.__set__ 'console', jobConsole

  # FIX: title typo 'paramters' -> 'parameters'
  it 'should throw when given invalid parameters', () ->
    assert.throw (() -> job.progress true, 100), /job.progress: something is wrong with progress params/
    assert.throw (() -> job.progress 0, "hundred"), /job.progress: something is wrong with progress params/
    assert.throw (() -> job.progress -1, 100), /job.progress: something is wrong with progress params/
    assert.throw (() -> job.progress 2, 1), /job.progress: something is wrong with progress params/
    assert.throw (() -> job.progress 0, 0), /job.progress: something is wrong with progress params/
    assert.throw (() -> job.progress 0, -1), /job.progress: something is wrong with progress params/
    assert.throw (() -> job.progress -2, -1), /job.progress: something is wrong with progress params/

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe '.done()', () ->

  # Stub 'root_jobDone'. For the running job ('thisId'/'thatId') echo the
  # result object back; otherwise, when the repeatId option is set, return
  # the result's repeatId so the option-passing test can observe it.
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_jobDone'
      id = params[0]
      runId = params[1]
      result = params[2]
      options = params[3]
      if ( id is 'thisId' and
           runId is 'thatId' and
           typeof result is 'object')
        res = result
      else if options.repeatId # FIX: was options.resultId, which no test ever sets
        res = result.repeatId
      else
        res = false
      return [null, res]

  it 'should make valid DDP call when invoked on a running job', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    res = job.done()
    assert.deepEqual res, {}

  it 'should properly handle a result object', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    result =
      foo: 'bar'
      status: 0
    res = job.done result
    assert.deepEqual res, result

  it 'should properly handle a non-object result', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    result = "Done!"
    # Non-object results are wrapped as { value: result }
    res = job.done result
    assert.deepEqual res, { value: result }

  it 'should work with a callback', (done) ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    job.done (err, res) ->
      assert.deepEqual res, {}
      done()

  it 'should throw when called on an unsaved job', () ->
    assert.throw (() -> job.done()), /an unsaved or non-running job/

  it 'should throw when called on a nonrunning job', () ->
    doc._id = 'thisId'
    assert.throw (() -> job.done()), /an unsaved or non-running job/

  # FIX: this test was declared synchronously (`() ->`) while calling done()
  # inside the async callback, so `done` was undefined and the assertion
  # never gated the test. Declare the mocha `done` parameter.
  it 'should properly pass the repeatId option', (done) ->
    doc._id = 'someId'
    doc.runId = 'otherId'
    job.done { repeatId: "testID" }, { repeatId: true }, (err, res) ->
      assert.deepEqual res, "testID"
      done()

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe '.fail()', () ->

  # Stub 'root_jobFail': echo the error object back for the running job;
  # throw if the fatal option reaches the server (proves option pass-through)
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_jobFail'
      id = params[0]
      runId = params[1]
      err = params[2]
      options = params[3]
      if ( id is 'thisId' and
           runId is 'thatId' and
           typeof err is 'object')
        if options.fatal
          throw new Error "Fatal Error!"
        res = err
      else
        res = false
      return [null, res]

  it 'should make valid DDP call when invoked on a running job', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    res = job.fail()
    # With no argument a default error value is sent
    assert.deepEqual res, { value: "No error information provided" }

  it 'should properly handle an error string', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    err = 'This is an error'
    # String errors are wrapped as { value: err }
    res = job.fail err
    assert.deepEqual res, { value: err }

  it 'should properly handle an error object', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    err = { message: 'This is an error' }
    res = job.fail err
    assert.equal res, err

  it 'should work with a callback', (done) ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    job.fail (err, res) ->
      assert.equal res.value, "No error information provided"
      done()

  it 'should properly handle the fatal option', () ->
    doc._id = 'thisId'
    doc.runId = 'thatId'
    assert.throw (() -> job.fail "Fatal error!", { fatal: true }), /Fatal Error!/

  it 'should throw when called on an unsaved job', () ->
    assert.throw (() -> job.fail()), /an unsaved or non-running job/

  it 'should throw when called on a nonrunning job', () ->
    doc._id = 'thisId'
    assert.throw (() -> job.fail()), /an unsaved or non-running job/

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe 'job control operation', () ->

  # Shared test generator for the per-job control methods. `op` is the Job
  # instance method name, `method` the server-side DDP method it must invoke.
  makeJobControl = (op, method) ->
    describe op, () ->

      # Stub the DDP method: succeed only for id 'thisId'
      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error "Bad method name: #{name}" unless name is "root_#{method}"
          id = params[0]
          if id is 'thisId'
            res = true
          else
            res = false
          return [null, res]

      it 'should properly invoke the DDP method', () ->
        assert.isFunction job[op]
        doc._id = 'thisId'
        res = job[op]()
        assert.isTrue res

      it 'should return false if the id is not on the server', () ->
        assert.isFunction job[op]
        doc._id = 'badId'
        res = job[op]()
        assert.isFalse res

      it 'should work with a callback', (done) ->
        assert.isFunction job[op]
        doc._id = 'thisId'
        res = job[op] (err, res) ->
          assert.isTrue res
          done()

      if op in ['pause', 'resume']
        # pause/resume are special: on an unsaved job they operate locally,
        # mutating doc.status instead of calling the server
        it 'should alter local state when called on an unsaved job', () ->
          bad = 'badStatus'
          doc.status = bad
          res = job[op]()
          assert.equal res, job
          assert.notEqual doc.status, bad

        it 'should alter local state when called on an unsaved job with callback', (done) ->
          bad = 'badStatus'
          doc.status = bad
          res = job[op] (err, res) ->
            assert.isTrue res
            assert.notEqual doc.status, bad
            done()
      else
        it 'should throw when called on an unsaved job', () ->
          assert.throw (() -> job[op]()), /on an unsaved job/

      afterEach () ->
        Job._ddp_apply.resetHistory()

      after () ->
        Job._ddp_apply.restore()

  makeJobControl 'pause', 'jobPause'
  makeJobControl 'resume', 'jobResume'
  makeJobControl 'ready', 'jobReady'
  makeJobControl 'cancel', 'jobCancel'
  makeJobControl 'restart', 'jobRestart'
  makeJobControl 'rerun', 'jobRerun'
  makeJobControl 'remove', 'jobRemove'
describe 'class method', () ->
describe 'getWork', () ->

  # Stub 'root_getWork': type 'work' yields up to maxJobs job documents,
  # type 'nowork' always yields an empty array
  before () ->
    sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
      throw new Error 'Bad method name' unless name is 'root_getWork'
      type = params[0][0]
      max = params[1]?.maxJobs ? 1
      res = switch type
        when 'work'
          ( Job('root', type, { i: 1 })._doc for i in [1..max] )
        when 'nowork'
          []
      return [null, res]

  it 'should make a DDP method call and return a Job by default without callback', () ->
    res = Job.getWork 'root', 'work', {}
    assert.instanceOf res, Job

  it 'should return undefined when no work is available without callback', () ->
    res = Job.getWork 'root', 'nowork', {}
    assert.isUndefined res

  it 'should return an array of Jobs when options.maxJobs > 1 without callback', () ->
    res = Job.getWork 'root', 'work', { maxJobs: 2 }
    assert.isArray res
    assert.lengthOf res, 2
    assert.instanceOf res[0], Job

  it 'should return an empty array when options.maxJobs > 1 and there is no work without callback', () ->
    res = Job.getWork 'root', 'nowork', { maxJobs: 2 }
    assert.isArray res
    assert.lengthOf res, 0

  # FIX: title typo 'given on invalid value' -> 'given an invalid value'
  it 'should throw when given an invalid value for the timeout option', () ->
    assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: "Bad" })), /must be a positive integer/
    assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: 0 })), /must be a positive integer/
    assert.throw (() -> Job.getWork('root', 'nowork', { workTimeout: -1 })), /must be a positive integer/

  afterEach () ->
    Job._ddp_apply.resetHistory()

  after () ->
    Job._ddp_apply.restore()
describe 'makeJob', () ->

  # Builds a plausible saved-job document (with a Mongo-style _id object)
  # for exercising the document-based Job constructor path
  jobDoc = () ->
    j = new Job('root', 'work', {})._doc
    j._id = { _str: 'skljfdf9s0ujfsdfl3' }
    return j

  it 'should return a valid job instance when called with a valid job document', () ->
    res = new Job 'root', jobDoc()
    assert.instanceOf res, Job

  it 'should throw when passed invalid params', () ->
    assert.throw (() -> new Job()), /bad parameter/
    assert.throw (() -> new Job(5, jobDoc())), /bad parameter/
    assert.throw (() -> new Job('work', {})), /bad parameter/
describe 'get Job(s) by ID', () ->

  # Shared 'root_getJob' stub: resolves 'goodID' to a job document and
  # anything else to undefined; handles both scalar and array id params
  getJobStub = (name, params) ->
    throw new Error 'Bad method name' unless name is 'root_getJob'
    ids = params[0]
    one = (id) ->
      j = switch id
        when 'goodID'
          Job('root', 'work', { i: 1 })._doc
        else
          undefined
      return j
    if ids instanceof Array
      res = (one(j) for j in ids when j is 'goodID')
    else
      res = one(ids)
    return [null, res]

  describe 'getJob', () ->

    before () ->
      sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub getJobStub

    it 'should return a valid job instance when called with a good id', () ->
      res = Job.getJob 'root', 'goodID'
      assert.instanceOf res, Job

    it 'should return undefined when called with a bad id', () ->
      res = Job.getJob 'root', 'badID'
      assert.isUndefined res

    afterEach () ->
      Job._ddp_apply.resetHistory()

    after () ->
      Job._ddp_apply.restore()

  describe 'getJobs', () ->

    before () ->
      sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub getJobStub

    it 'should return valid job instances for good IDs only', () ->
      res = Job.getJobs 'root', ['goodID', 'badID', 'goodID']
      # All ids must be fetched in a single batched DDP call
      assert Job._ddp_apply.calledOnce, 'getJob method called more than once'
      assert.isArray res
      assert.lengthOf res, 2
      assert.instanceOf res[0], Job
      assert.instanceOf res[1], Job

    it 'should return an empty array for all bad IDs', () ->
      res = Job.getJobs 'root', ['badID', 'badID', 'badID']
      assert Job._ddp_apply.calledOnce, 'getJob method called more than once'
      assert.isArray res
      assert.lengthOf res, 0

    afterEach () ->
      Job._ddp_apply.resetHistory()

    after () ->
      Job._ddp_apply.restore()
describe 'multijob operation', () ->

  # Shared test generator for the bulk Job class methods. Each bulk method
  # must batch all ids into one DDP call and report overall success.
  makeMulti = (op, method) ->
    describe op, () ->

      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error "Bad method name: #{name}" unless name is "root_#{method}"
          ids = params[0]
          # Succeed if at least one id is known to the server
          return [null, ids.indexOf('goodID') isnt -1]

      it 'should return true if there are any good IDs', () ->
        assert.isFunction Job[op]
        res = Job[op]('root', ['goodID', 'badID', 'goodID'])
        assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
        assert.isBoolean res
        assert.isTrue res

      it 'should return false if there are all bad IDs', () ->
        assert.isFunction Job[op]
        res = Job[op]('root', ['badID', 'badID'])
        assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
        assert.isBoolean res
        assert.isFalse res

      afterEach () ->
        Job._ddp_apply.resetHistory()

      after () ->
        Job._ddp_apply.restore()

  makeMulti 'pauseJobs', 'jobPause'
  makeMulti 'resumeJobs', 'jobResume'
  makeMulti 'cancelJobs', 'jobCancel'
  makeMulti 'restartJobs', 'jobRestart'
  makeMulti 'removeJobs', 'jobRemove'
describe 'control method', () ->

  # Shared test generator for the server-wide control methods; here the DDP
  # method name matches the Job class method name exactly
  makeControl = (op) ->
    describe op, () ->

      before () ->
        sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
          throw new Error "Bad method name: #{name}" unless name is "root_#{op}"
          return [null, true]

      it 'should return a boolean', () ->
        assert.isFunction Job[op]
        res = Job[op]('root')
        assert Job._ddp_apply.calledOnce, "#{op} method called more than once"
        assert.isBoolean res

      afterEach () ->
        Job._ddp_apply.resetHistory()

      after () ->
        Job._ddp_apply.restore()

  makeControl 'startJobs'
  makeControl 'stopJobs'
  makeControl 'startJobServer'
  makeControl 'shutdownJobServer'
###########################################
describe 'JobQueue', () ->
ddp = new DDP()
failCalls = 0
doneCalls = 0
numJobs = 5
before () ->
Job._ddp_apply = undefined
Job.setDDP ddp
sinon.stub(Job, "_ddp_apply").callsFake makeDdpStub (name, params) ->
err = null
res = null
makeJobDoc = (idx=0) ->
job = new Job('root', 'work', { idx: idx })
doc = job._doc
doc._id = 'thisId' + idx
doc.runId = 'thatId' + idx
doc.status = 'running'
return doc
switch name
when 'root_jobDone'
doneCalls++
res = true
when 'root_jobFail'
failCalls++
res = true
when 'root_getWork'
type = params[0][0]
max = params[1]?.maxJobs ? 1
if numJobs is 0
res = []
else
switch type
when 'noWork'
res = []
when 'work'
numJobs--
res = [ makeJobDoc() ]
when 'workMax'
if max < numJobs
max = numJobs
numJobs -= max
res = (makeJobDoc(i) for i in [1..max])
when 'returnError'
err = new Error "MongoError: connection n to w.x.y.z:27017 timed out"
else
throw new Error "Bad method name: #{name}"
return [err, res]
beforeEach () ->
failCalls = 0
doneCalls = 0
numJobs = 5
it 'should throw when an invalid options are used', (done) ->
assert.throws (() ->
Job.processJobs 42, 'noWork', { }, (job, cb) -> ),
/must be nonempty string/
assert.throws (() ->
Job.processJobs '', 'noWork', { }, (job, cb) -> ),
/must be nonempty string/
assert.throws (() ->
Job.processJobs 'root', 42, { }, (job, cb) -> ),
/must be nonempty string or array of nonempty strings/
assert.throws (() ->
Job.processJobs 'root', '', { }, (job, cb) -> ),
/must be nonempty string or array of nonempty strings/
assert.throws (() ->
Job.processJobs 'root', [], { }, (job, cb) -> ),
/must be nonempty string or array of nonempty strings/
assert.throws (() ->
Job.processJobs 'root', [''], { }, (job, cb) -> ),
/must be nonempty string or array of nonempty strings/
assert.throws (() ->
Job.processJobs 'root', ['noWork',''], { }, (job, cb) -> ),
/must be nonempty string or array of nonempty strings/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { pollInterval: -1 }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { concurrency: 'Bad' }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { concurrency: -1 }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { payload: 'Bad' }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { payload: -1 }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { prefetch: 'Bad' }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { prefetch: -1 }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { workTimeout: 'Bad' }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { workTimeout: -1 }, (job, cb) -> ),
/must be a positive integer/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { callbackStrict: 1 }, (job, cb) -> ),
/must be a boolean/
assert.throws (() ->
Job.processJobs 'root', 'noWork', { errorCallback: 1 }, (job, cb) -> ),
/must be a function/
done()
it 'should return a valid JobQueue when called', (done) ->
q = Job.processJobs 'root', 'noWork', { pollInterval: 100 }, (job, cb) ->
job.done()
cb null
assert.instanceOf q, Job.processJobs
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 0
assert.equal failCalls, 0
done()
it 'should return a valid JobQueue when called with array of job types', (done) ->
q = Job.processJobs 'root', ['noWork', 'noWork2'], { pollInterval: 100 }, (job, cb) ->
job.done()
cb null
assert.instanceOf q, Job.processJobs
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 0
assert.equal failCalls, 0
done()
it 'should send shutdown notice to console when quiet is false', (done) ->
jobConsole = Job.__get__ 'console'
revert = Job.__set__
console:
info: (params...) -> throw new Error 'info'
log: (params...) -> throw new Error 'success'
warn: (params...) -> throw new Error 'warning'
error: (params...) -> throw new Error 'danger'
q = Job.processJobs 'root', 'noWork', { pollInterval: 100 }, (job, cb) ->
job.done()
cb null
assert.instanceOf q, Job.processJobs
assert.throws (() -> (q.shutdown () -> done())), /warning/
revert()
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 0
assert.equal failCalls, 0
done()
it 'should invoke worker when work is returned', (done) ->
q = Job.processJobs 'root', 'work', { pollInterval: 100 }, (job, cb) ->
job.done()
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 1
assert.equal failCalls, 0
done()
cb null
it 'should invoke worker when work is returned from a manual trigger', (done) ->
q = Job.processJobs 'root', 'work', { pollInterval: 0 }, (job, cb) ->
job.done()
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 1
assert.equal failCalls, 0
done()
cb null
assert.equal q.pollInterval, Job.forever
assert.isNull q._interval
setTimeout(
() -> q.trigger()
20
)
it 'should successfully start in paused state and resume', (done) ->
flag = false
q = Job.processJobs('root', 'work', { pollInterval: 10 }, (job, cb) ->
assert.isTrue flag
job.done()
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 1
assert.equal failCalls, 0
done()
cb null
).pause()
setTimeout(
() ->
flag = true
q.resume()
20
)
it 'should successfully accept multiple jobs from getWork', (done) ->
count = 5
q = Job.processJobs('root', 'workMax', { pollInterval: 100, prefetch: 4 }, (job, cb) ->
assert.equal q.length(), count-1, 'q.length is incorrect'
assert.equal q.running(), 1, 'q.running is incorrect'
if count is 5
assert.isTrue q.full(), 'q.full should be true'
assert.isFalse q.idle(), 'q.idle should be false'
job.done()
count--
if count is 0
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 5, 'doneCalls is incorrect'
assert.equal failCalls, 0, 'failCalls is incorrect'
done()
cb null
)
it 'should successfully accept and process multiple simultaneous jobs concurrently', (done) ->
count = 0
q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 5 }, (job, cb) ->
count++
setTimeout(
() ->
assert.equal q.length(), 0
assert.equal q.running(), count
count--
job.done()
unless count > 0
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 5
assert.equal failCalls, 0
done()
cb null
25
)
)
it 'should successfully accept and process multiple simultaneous jobs in one worker', (done) ->
q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5 }, (jobs, cb) ->
assert.equal jobs.length, 5
assert.equal q.length(), 0
assert.equal q.running(), 1
j.done() for j in jobs
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 5
assert.equal failCalls, 0
done()
cb()
)
it 'should successfully accept and process multiple simultaneous jobs concurrently and within workers', (done) ->
count = 0
numJobs = 25
q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5, concurrency: 5 }, (jobs, cb) ->
count += jobs.length
setTimeout(
() ->
assert.equal q.length(), 0
assert.equal q.running(), count / 5
count -= jobs.length
j.done() for j in jobs
unless count > 0
q.shutdown { quiet: true }, () ->
assert.equal doneCalls, 25
assert.equal failCalls, 0
done()
cb null
25
)
)
it 'should successfully perform a soft shutdown', (done) ->
count = 5
q = Job.processJobs('root', 'workMax', { pollInterval: 100, prefetch: 4 }, (job, cb) ->
count--
assert.equal q.length(), count
assert.equal q.running(), 1
assert.isTrue q.full()
job.done()
if count is 4
q.shutdown { quiet: true, level: 'soft' }, () ->
assert count is 0
assert.equal q.length(), 0
assert.isFalse Job._ddp_apply.calledWith("root_jobFail")
assert.equal doneCalls, 5
assert.equal failCalls, 0
done()
cb null
)
it 'should successfully perform a normal shutdown', (done) ->
count = 5
q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 2, prefetch: 3 }, (job, cb) ->
setTimeout(
() ->
count--
job.done()
if count is 4
q.shutdown { quiet: true, level: 'normal' }, () ->
assert.equal count, 3
assert.equal q.length(), 0
assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
assert.equal doneCalls, 2
assert.equal failCalls, 3
done()
cb null
25
)
)
# Normal shutdown with batched workers: 2 concurrent workers x 5-job payload
# complete (10 done); the 15 prefetched jobs are failed back.
it 'should successfully perform a normal shutdown with both payload and concurrency', (done) ->
  count = 0
  numJobs = 25
  q = Job.processJobs('root', 'workMax', { pollInterval: 100, payload: 5, concurrency: 2, prefetch: 15 }, (jobs, cb) ->
    count += jobs.length
    setTimeout(
      () ->
        assert.equal q.running(), count / 5
        count -= jobs.length
        j.done() for j in jobs
        # Shut down while one 5-job batch is still in flight
        if count is 5
          q.shutdown { quiet: true }, () ->
            assert.equal q.length(), 0, 'jobs remain in task list'
            assert.equal count, 0, 'count is wrong value'
            assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
            assert.equal doneCalls, 10
            assert.equal failCalls, 15
            done()
        cb null
      25
    )
  )
# Hard shutdown: does not wait for running workers — everything not already
# done (including the second running job) is failed back immediately.
# Staggered setTimeout delays ensure the first worker finishes first.
it 'should successfully perform a hard shutdown', (done) ->
  count = 0
  time = 20
  q = Job.processJobs('root', 'workMax', { pollInterval: 100, concurrency: 2, prefetch: 3 }, (job, cb) ->
    setTimeout(
      () ->
        count++
        if count is 1
          job.done()
          q.shutdown { level: 'hard', quiet: true }, () ->
            assert.equal q.length(), 0
            assert.equal count, 1
            assert.isTrue Job._ddp_apply.calledWith("root_jobFail")
            assert.equal doneCalls, 1, 'wrong number of .done() calls'
            assert.equal failCalls, 4, 'wrong number of .fail() calls'
            done()
          cb null # Other workers will never call back
      time
    )
    time += 20
  )
# callbackStrict: invoking the worker callback a second time must throw.
it 'should throw when using callbackStrict option and multiple callback invokes happen', (done) ->
  q = Job.processJobs('root', 'work', { callbackStrict: true, pollInterval: 100, concurrency: 1, prefetch: 0 }, (job, cb) ->
    setTimeout(
      () ->
        job.done()
        cb()
        # Second invocation of cb is the violation under test
        assert.throws(cb, /callback was invoked multiple times/)
        q.shutdown { level: 'hard', quiet: true }, () ->
          assert.equal doneCalls, 1
          assert.equal failCalls, 0
          done()
      25
    )
  )
# Same as the previous callbackStrict test, but the duplicate invocation
# happens inside the job.done() completion callback (async ordering variant).
it 'should throw when using callbackStrict option and multiple callback invokes happen 2', (done) ->
  q = Job.processJobs('root', 'work', { callbackStrict: true, pollInterval: 100, concurrency: 1, prefetch: 0 }, (job, cb) ->
    setTimeout(
      () ->
        job.done () ->
          # cb() below has already run by the time done's callback fires
          assert.throws(cb, /callback was invoked multiple times/)
          q.shutdown { level: 'hard', quiet: true }, () ->
            assert.equal doneCalls, 1
            assert.equal failCalls, 0
            done()
        cb()
      25
    )
  )
# The 'returnError' job type makes getWork fail; the queue must route that
# Error to the configured errorCallback, and no job ever runs.
it 'should invoke errorCallback when an error is returned from getWork', (done) ->
  ecb = (err, res) ->
    assert.instanceOf err, Error
    q.shutdown { level: 'hard', quiet: true }, () ->
      assert.equal doneCalls, 0
      assert.equal failCalls, 0
      done()
  q = Job.processJobs('root', 'returnError', { pollInterval: 100, concurrency: 1, prefetch: 0, errorCallback: ecb }, (job, cb) -> )
# Reset the sinon stub's call history between tests, and restore the real
# Job._ddp_apply once this suite is finished.
afterEach () ->
  Job._ddp_apply.resetHistory()
after () ->
  Job._ddp_apply.restore()
|
[
{
"context": " <minutes> for specified services\n#\n# Authors:\n# Jesse Newland, Josh Nicols, Jacob Bednarz, Chris Lundquist, Chr",
"end": 3173,
"score": 0.9998573064804077,
"start": 3160,
"tag": "NAME",
"value": "Jesse Newland"
},
{
# Description:
# Interact with PagerDuty services, schedules, and incidents with Hubot. Schedules with "hidden" in the name will be ignored.
#
# Commands:
# hubot pager me as <email> - remember your pager email is <email>
# hubot pager forget me - forget your pager email
# hubot Am I on call - return if I'm currently on call or not
# hubot who's on call - return a list of services and who is on call for them
# hubot who's on call for <schedule> - return the username of who's on call for any schedule matching <search>
# hubot pager trigger <user> [<severity>] <msg> - create a new incident with <msg> and assign it to <user>. If specified, <severity> must be one of: critical, error, warning or info. If not specified, <severity> will default to 'critical'.
# hubot pager trigger <schedule> [<severity>] <msg> - create a new incident with <msg> and assign it the user currently on call for <schedule>. If specified, <severity> must be one of: critical, error, warning or info. If not specified, <severity> will default to 'critical'.
# hubot pager incidents - return the current incidents
# hubot pager sup - return the current incidents
# hubot pager sup --canary - return the current incidents, including Nines' canary incidents
# hubot pager incident <incident> - return the incident NNN
# hubot pager note <incident> <content> - add note to incident #<incident> with <content>
# hubot pager notes <incident> - show notes for incident #<incident>
# hubot pager problems - return all open incidents
# hubot pager ack <incident> - ack incident #<incident>
# hubot pager ack - ack triggered incidents assigned to you
# hubot pager ack! - ack all triggered incidents, not just yours
# hubot pager ack <incident1> <incident2> ... <incidentN> - ack all specified incidents
# hubot pager resolve <incident> - resolve incident #<incident>
# hubot pager resolve <incident1> <incident2> ... <incidentN> - resolve all specified incidents
# hubot pager resolve - resolve acknowledged incidents assigned to you
# hubot pager resolve! - resolve all acknowledged, not just yours
# hubot pager schedules - list schedules
# hubot pager schedules <search> - list schedules matching <search>
# hubot pager schedule <schedule> - show <schedule>'s shifts for the upcoming month
# hubot pager my schedule - show my on call shifts for the upcoming month in all schedules
# hubot pager me <schedule> <minutes> - take the pager for <minutes> minutes
#   hubot pager override <schedule> <start> - <end> [username] - Create a schedule override from <start> until <end>. If [username] is left off, defaults to you. start and end should date-parsable dates, like 2014-06-24T09:06:45-07:00, see http://momentjs.com/docs/#/parsing/string/ for examples.
# hubot pager overrides <schedule> - show upcoming overrides for the next month
# hubot pager override <schedule> delete <id> - delete an override by its ID
# hubot pager services - list services
# hubot pager maintenance <minutes> <service_id1> <service_id2> ... <service_idN> - schedule a maintenance window for <minutes> for specified services
#
# Authors:
# Jesse Newland, Josh Nicols, Jacob Bednarz, Chris Lundquist, Chris Streeter, Joseph Pierri, Greg Hoin, Michael Warkentin
pagerduty = require('../pagerduty')
async = require('async')
inspect = require('util').inspect
moment = require('moment-timezone')
request = require 'request'
Scrolls = require('../../../../lib/scrolls').context({script: 'pagerduty'})
pagerDutyUserEmail = process.env.HUBOT_PAGERDUTY_USERNAME
pagerDutyServiceApiKey = process.env.HUBOT_PAGERDUTY_SERVICE_API_KEY
pagerDutyEventsAPIURL = 'https://events.pagerduty.com/v2/enqueue'
module.exports = (robot) ->
# hubot pager / pager me - show the PagerDuty user linked to the speaker,
# then list every pager-related help command this script provides.
robot.respond /pager( me)?$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  campfireUserToPagerDutyUser msg, hubotUser, (user) ->
    # Explain which email we matched on (explicitly registered vs. profile)
    emailNote = if hubotUser.pagerdutyEmail
      "You've told me your PagerDuty email is #{hubotUser.pagerdutyEmail}"
    else if hubotUser.email_address
      "I'm assuming your PagerDuty email is #{hubotUser.email_address}. Change it with `#{robot.name} pager me as you@yourdomain.com`"
    if user
      msg.send "I found your PagerDuty user #{user.html_url}, #{emailNote}"
    else
      msg.send "I couldn't find your user :( #{emailNote}"
  # Filter hubot's full help listing down to this script's commands
  cmds = robot.helpCommands()
  cmds = (cmd for cmd in cmds when cmd.match(/hubot (pager |who's on call)/))
  msg.send cmds.join("\n")
# hubot pager me as <email> - remember your pager email is <email>
robot.respond /pager(?: me)? as (.*)$/i, (msg) ->
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  email = msg.match[1]
  hubotUser.pagerdutyEmail = email
  msg.send "Okay, I'll remember your PagerDuty email is #{email}"
# hubot pager forget me - forget your pager email
robot.respond /pager forget me$/i, (msg) ->
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  hubotUser.pagerdutyEmail = undefined
  msg.send "Okay, I've forgotten your PagerDuty email"
# hubot pager incident <incident> - look up and display a single incident
robot.respond /(pager|major)( me)? incident (.*)$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  pagerduty.getIncident msg.match[3], (err, incident) ->
    if err?
      robot.emit 'error', err, msg
      return
    msg.send formatIncident(incident)
# hubot pager incidents - return the current incidents
# hubot pager sup - return the current incidents
# hubot pager problems - return all open incidents
robot.respond /(pager|major)( me)? (inc|incidents|sup|problems)( --canary)?$/i, (msg) ->
  pagerduty.getIncidents "triggered,acknowledged", (err, incidents) ->
    if err?
      robot.emit 'error', err, msg
      return
    # Hide canary/synthetic incidents unless --canary was passed
    unless msg.match[4]
      incidents = incidents.filter (inc) ->
        !/(ninesapp\/|Prometheus )canary/.test(inc.title)
    if incidents.length == 0
      msg.send "No open incidents"
      return
    # Reverse ONCE up front. Array::reverse mutates in place, so the old
    # code's second `incidents.reverse()` undid the first, listing the
    # acknowledged section in the opposite order from the triggered one.
    incidents.reverse()
    buffer = "Triggered:\n----------\n"
    for incident in incidents
      if incident.status == 'triggered'
        buffer = buffer + formatIncident(incident)
    buffer = buffer + "\nAcknowledged:\n-------------\n"
    for incident in incidents
      if incident.status == 'acknowledged'
        buffer = buffer + formatIncident(incident)
    msg.send buffer
# hubot pager trigger (no user/schedule) - a bare target with no message;
# matched before the full trigger pattern so we can ask for more detail.
robot.respond /(pager|major)( me)? (?:trigger|page) ([\w\-]+)$/i, (msg) ->
  msg.reply "Please include a user or schedule to page, like 'hubot pager infrastructure everything is on fire'."
# hubot pager trigger <user> <severity> <msg> - create a new incident with <msg> and assign it to <user>. Severity must be one of: critical, error, warning or info.
# hubot pager trigger <schedule> <severity> <msg> - create a new incident with <msg> and assign it the user currently on call for <schedule>. Severity must be one of: critical, error, warning or info.
robot.respond /(pager|major)( me)? (?:trigger|page) ([\w\-]+)( (critical|error|warning|info) )?(.+)$/i, (msg) ->
  msg.finish()
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  fromUserName = hubotUser.name
  query = msg.match[3]
  severity = msg.match[5]
  reason = msg.match[6]
  description = "#{reason} - @#{fromUserName}"
  # Unrecognized/absent severity falls back to 'critical'
  supportedSeverities = ['critical', 'error', 'warning', 'info']
  if severity not in supportedSeverities
    severity = 'critical'
  # Figure out who we are
  campfireUserToPagerDutyUser msg, hubotUser, false, (triggeredByPagerDutyUser) ->
    triggeredByPagerDutyUserEmail = if triggeredByPagerDutyUser?
      emailForUser(triggeredByPagerDutyUser)
    else if pagerDutyUserEmail
      pagerDutyUserEmail
    unless triggeredByPagerDutyUserEmail
      msg.send "Sorry, I can't figure your PagerDuty account, and I don't have my own :( Can you tell me your PagerDuty email with `#{robot.name} pager me as you@yourdomain.com`?"
      return
    # Figure out what we're trying to page
    reassignmentParametersForUserOrScheduleOrEscalationPolicy msg, query, (err, results) ->
      if err?
        robot.emit 'error', err, msg
        # reassignmentParametersForUserOrScheduleOrEscalationPolicy constructs explicit,
        # human-consumable errors with good messages. Send it to the user if we got one.
        if err.message != ""
          msg.reply err.message
        return
      # Fire the event first, then reassign the resulting incident
      pagerDutyIntegrationAPI msg, "trigger", query, description, severity, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        msg.reply ":pager: triggered! now assigning it to the right user..."
        incidentKey = json.dedup_key
        setTimeout () ->
          pagerduty.get "/incidents", {incident_key: incidentKey}, (err, json) ->
            if err?
              robot.emit 'error', err, msg
              return
            if json?.incidents.length == 0
              msg.reply "Couldn't find the incident we just created to reassign. Please try again :/"
              return
            incident = json.incidents[0]
            data = {"type": "incident_reference"}
            if results.assigned_to_user?
              data['assignments'] = [{"assignee": {"id": results.assigned_to_user, "type": "user_reference"}}]
            if results.escalation_policy?
              data['escalation_policy'] = {"id": results.escalation_policy, "type": "escalation_policy_reference"}
            headers = {from: triggeredByPagerDutyUserEmail}
            pagerduty.put "/incidents/#{incident.id}", {'incident': data}, headers, (err, json) ->
              if err?
                robot.emit 'error', err, msg
                return
              if not json?.incident
                msg.reply "Problem reassigning the incident :/"
                return
              msg.reply ":pager: assigned to #{results.name}!"
        , 7000 # set timeout to 7s. sometimes PagerDuty needs a bit of time for events to propagate as incidents
# hubot pager ack <incident> - ack incident #<incident>
# hubot pager ack <incident1> <incident2> ... <incidentN> - ack all specified incidents
robot.respond /(?:pager|major)(?: me)? ack(?:nowledge)? (.+)$/i, (msg) ->
  msg.finish()
  if pagerduty.missingEnvironmentForApi(msg)
    return
  incidentNumbers = parseIncidentNumbers(msg.match[1])
  # only acknowledge triggered things, since it doesn't make sense to re-acknowledge if it's already in re-acknowledge
  # if it ever doesn't need acknowledge again, it means it's timed out and has become 'triggered' again anyways
  updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'acknowledged')
# hubot pager ack - ack triggered incidents assigned to you
# hubot pager ack! - ack all triggered incidents, not just yours
robot.respond /(pager|major)( me)? ack(nowledge)?(!)?$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  # Trailing "!" means act on everyone's incidents, not just the speaker's
  force = msg.match[4]?
  pagerduty.getIncidents 'triggered,acknowledged', (err, incidents) ->
    if err?
      robot.emit 'error', err, msg
      return
    email = emailForUser(hubotUser)
    incidentsForEmail incidents, email, (err, filteredIncidents) ->
      if err?
        msg.send err.message
        return
      if force
        filteredIncidents = incidents
      if filteredIncidents.length is 0
        # nothing assigned to the user, but there were others
        if incidents.length > 0 and not force
          msg.send "Nothing assigned to you to acknowledge. Acknowledge someone else's incident with `hubot pager ack <nnn>`"
        else
          msg.send "Nothing to acknowledge"
        return
      incidentNumbers = (incident.incident_number for incident in filteredIncidents)
      # only acknowledge triggered things
      updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'acknowledged')
# hubot pager resolve <incident> - resolve incident #<incident>
# hubot pager resolve <incident1> <incident2> ... <incidentN> - resolve all specified incidents
robot.respond /(?:pager|major)(?: me)? res(?:olve)?(?:d)? (.+)$/i, (msg) ->
  msg.finish()
  if pagerduty.missingEnvironmentForApi(msg)
    return
  incidentNumbers = parseIncidentNumbers(msg.match[1])
  # allow resolving of triggered and acknowledged, since being explicit
  updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'resolved')
# hubot pager resolve - resolve acknowledged incidents assigned to you
# hubot pager resolve! - resolve all acknowledged, not just yours
robot.respond /(pager|major)( me)? res(olve)?(d)?(!)?$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  # Trailing "!" means act on everyone's incidents, not just the speaker's
  force = msg.match[5]?
  pagerduty.getIncidents "acknowledged", (err, incidents) ->
    if err?
      robot.emit 'error', err, msg
      return
    email = emailForUser(hubotUser)
    incidentsForEmail incidents, email, (err, filteredIncidents) ->
      if err?
        robot.emit 'error', err, msg
        return
      if force
        filteredIncidents = incidents
      if filteredIncidents.length is 0
        # nothing assigned to the user, but there were others
        if incidents.length > 0 and not force
          # Fixed: this hint previously said `hubot pager ack <nnn>`, which is
          # the wrong command for the resolve flow.
          msg.send "Nothing assigned to you to resolve. Resolve someone else's incident with `hubot pager resolve <nnn>`"
        else
          msg.send "Nothing to resolve"
        return
      incidentNumbers = (incident.incident_number for incident in filteredIncidents)
      # only resolve things that are acknowledged
      updateIncidents(msg, incidentNumbers, 'acknowledged', 'resolved')
# hubot pager notes <incident> - show notes for incident #<incident>
robot.respond /(pager|major)( me)? notes (.+)$/i, (msg) ->
  msg.finish()
  if pagerduty.missingEnvironmentForApi(msg)
    return
  incidentId = msg.match[3]
  pagerduty.get "/incidents/#{incidentId}/notes", {}, (err, json) ->
    if err?
      robot.emit 'error', err, msg
      return
    buffer = ""
    for note in json.notes
      buffer += "#{note.created_at} #{note.user.summary}: #{note.content}\n"
    # Empty buffer means the incident has no notes at all
    if not buffer
      buffer = "No notes!"
    msg.send buffer
# hubot pager note <incident> <content> - add note to incident #<incident> with <content>
robot.respond /(pager|major)( me)? note ([\d\w]+) (.+)$/i, (msg) ->
  msg.finish()
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  if pagerduty.missingEnvironmentForApi(msg)
    return
  incidentId = msg.match[3]
  content = msg.match[4]
  campfireUserToPagerDutyUser msg, hubotUser, (user) ->
    userEmail = emailForUser(user)
    return unless userEmail
    data =
      note:
        content: content
    # PagerDuty attributes the note to the user named in the From header
    headers = {from: userEmail}
    pagerduty.post "/incidents/#{incidentId}/notes", data, headers, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      if json && json.note
        msg.send "Got it! Note created: #{json.note.content}"
      else
        msg.send "Sorry, I couldn't do it :("
# hubot pager schedules - list schedules
# hubot pager schedules <search> - list schedules matching <search>
robot.respond /(pager|major)( me)? schedules( (.+))?$/i, (msg) ->
  query = {}
  if msg.match[4]
    query['query'] = msg.match[4]
  if pagerduty.missingEnvironmentForApi(msg)
    return
  msg.send "Retrieving schedules. This may take a few seconds..."
  pagerduty.getSchedules query, (err, schedules) ->
    if err?
      robot.emit 'error', err, msg
      return
    if schedules.length == 0
      msg.send 'No schedules found!'
      return
    renderSchedule = (schedule, cb) ->
      cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}|#{schedule.name}>")
    async.map schedules, renderSchedule, (err, results) ->
      if err?
        robot.emit 'error', err, msg
        return
      # Split the output so each chat message stays under ~7000 characters
      for chunk in chunkMessageLines(results, 7000)
        msg.send chunk.join("\n")
# hubot pager schedule <schedule> - show <schedule>'s shifts for the upcoming month
# hubot pager overrides <schedule> - show upcoming overrides for the next month
# Optional trailing argument is a timezone name (defaults to UTC).
robot.respond /(pager|major)( me)? (schedule|overrides)( ([\w\-]+))?( ([^ ]+))?$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  # Both variants look 30 days ahead
  query = {
    since: moment().format(),
    until: moment().add(30, 'days').format()
  }
  if !msg.match[5]
    msg.reply "Please specify a schedule with 'pager #{msg.match[3]} <name>.'' Use 'pager schedules' to list all schedules."
    return
  if msg.match[7]
    timezone = msg.match[7]
  else
    timezone = 'UTC'
  msg.send "Retrieving schedules. This may take a few seconds..."
  withScheduleMatching msg, msg.match[5], (schedule) ->
    scheduleId = schedule.id
    return unless scheduleId
    # Choose endpoint + query params based on which command form matched
    if msg.match[3] && msg.match[3].match /overrides/
      url = "/schedules/#{scheduleId}/overrides"
      query['editable'] = 'true'
      query['overflow'] = 'true'
      key = "overrides"
    else
      url = "/oncalls"
      key = "oncalls"
      query['schedule_ids'] = [scheduleId]
      query['include'] = ['users']
    pagerduty.getAll url, query, key, (err, entries) ->
      if err?
        robot.emit 'error', err, msg
        return
      unless entries.length > 0
        msg.send "None found!"
        return
      # Chronological order, earliest shift first
      sortedEntries = entries.sort (a, b) ->
        moment(a.start).unix() - moment(b.start).unix()
      msg.send formatOncalls(sortedEntries, timezone)
# hubot pager my schedule - show my on call shifts for the upcoming month in all schedules
# Optional trailing argument is a timezone name (defaults to UTC).
robot.respond /(pager|major)( me)? my schedule( ([^ ]+))?$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  campfireUserToPagerDutyUser msg, hubotUser, (user) ->
    userId = user.id
    if msg.match[4]
      timezone = msg.match[4]
    else
      timezone = 'UTC'
    query = {
      since: moment().format(),
      until: moment().add(30, 'days').format(),
      user_ids: [user.id]
      include: ['users']
    }
    pagerduty.getAll "/oncalls", query, "oncalls", (err, oncalls) ->
      if err?
        robot.emit 'error', err, msg
        return
      if oncalls.length == 0
        msg.send 'You are not oncall!'
        return
      msg.send formatOncalls(oncalls, timezone)
# hubot pager override <schedule> <start> - <end> [username] - Create a schedule override from <start> until <end>. If [username] is left off, defaults to you. start and end should date-parsable dates, like 2014-06-24T09:06:45-07:00, see http://momentjs.com/docs/#/parsing/string/ for examples.
robot.respond /(pager|major)( me)? (override) ([\w\-]+) ([\w\-:\+]+) - ([\w\-:\+]+)( (.*))?$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  # Override for the named user if one was given, otherwise for the speaker
  if msg.match[8]
    overrideUser = robot.brain.userForName(msg.match[8])
    unless overrideUser
      msg.send "Sorry, I don't seem to know who that is. Are you sure they are in chat?"
      return
  else
    overrideUser = robot.getUserBySlackUser(msg.message.user)
  campfireUserToPagerDutyUser msg, overrideUser, (user) ->
    userId = user.id
    unless userId
      return
    withScheduleMatching msg, msg.match[4], (schedule) ->
      scheduleId = schedule.id
      unless scheduleId
        return
      unless moment(msg.match[5]).isValid() && moment(msg.match[6]).isValid()
        msg.send "Please use a http://momentjs.com/ compatible date!"
        return
      start_time = moment(msg.match[5]).format()
      end_time = moment(msg.match[6]).format()
      override = {
        'start': start_time,
        'end': end_time,
        'user': {
          'id': userId,
          "type": "user_reference"
        },
      }
      data = { 'override': override }
      pagerduty.post "/schedules/#{scheduleId}/overrides", data, {}, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        unless json && json.override
          msg.send "That didn't work. Check Hubot's logs for an error!"
          return
        start = moment(json.override.start)
        end = moment(json.override.end)
        msg.send "Override setup! #{json.override.user.summary} has the pager from #{start.format()} until #{end.format()}"
# hubot pager override <schedule> delete <id> - delete an override by its ID
robot.respond /(pager|major)( me)? (overrides?) ([\w\-]*) (delete) (.*)$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  withScheduleMatching msg, msg.match[4], (schedule) ->
    scheduleId = schedule.id
    unless scheduleId
      return
    pagerduty.delete "/schedules/#{scheduleId}/overrides/#{msg.match[6]}", (err, success) ->
      unless success
        msg.send "Something went weird."
        return
      msg.send ":boom:"
# hubot pager me <schedule> <minutes> - take the pager for <minutes> minutes
# Creates an override on the matched schedule starting now, then tells the
# previous oncall they have been relieved.
robot.respond /pager( me)? (.+) (\d+)$/i, (msg) ->
  msg.finish()
  # skip hubot pager incident NNN
  if msg.match[2] == 'incident'
    return
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  campfireUserToPagerDutyUser msg, hubotUser, (user) ->
    userId = user.id
    unless userId
      return
    if !msg.match[2] || msg.match[2] == 'me'
      msg.reply "Please specify a schedule with 'pager me infrastructure 60'. Use 'pager schedules' to list all schedules."
      return
    withScheduleMatching msg, msg.match[2], (matchingSchedule) ->
      unless matchingSchedule.id
        return
      start = moment().format()
      minutes = parseInt msg.match[3]
      end = moment().add(minutes, 'minutes').format()
      override = {
        'start': start,
        'end': end,
        'user': {
          'id': userId,
          "type": "user_reference",
        },
      }
      # Capture the current oncall's name before replacing them
      withCurrentOncall msg, matchingSchedule, (err, old_username, schedule) ->
        if err?
          robot.emit 'error', err, msg
          return
        data = { 'override': override }
        pagerduty.post "/schedules/#{schedule.id}/overrides", data, {}, (err, json) ->
          if err?
            robot.emit 'error', err, msg
            return
          unless json.override
            msg.send "Something went weird."
            return
          start = moment(json.override.start)
          end = moment(json.override.end)
          getPagerDutyUser userId, (err, user) ->
            if err?
              robot.emit 'error', err, msg
              return
            msg.send "Rejoice, @#{old_username}! @#{user.name} has the pager on #{schedule.name} until #{end.format()}"
# hubot Am I on call - return if I'm currently on call or not
# Checks every schedule the user is a member of and reports yes/no per schedule.
robot.respond /am i on (call|oncall|on-call)/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  msg.send "Finding schedules, this may take a few seconds..."
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  campfireUserToPagerDutyUser msg, hubotUser, (user) ->
    userId = user.id
    # Per-schedule check: skip schedules the user isn't in, otherwise
    # compare the current oncall's id against the user's id
    renderSchedule = (s, cb) ->
      if not memberOfSchedule(s, userId)
        cb(null, {member: false})
        return
      withCurrentOncallId msg, s, (err, oncallUserid, oncallUsername, schedule) ->
        if err?
          cb(err)
          return
        if userId == oncallUserid
          cb(null, {member: true, body: "* Yes, you are on call for #{schedule.name} - https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}"})
        else
          cb(null, {member: true, body: "* No, you are NOT on call for #{schedule.name} (but #{oncallUsername} is)- https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}"})
    unless userId?
      msg.send "Couldn't figure out the pagerduty user connected to your account."
      return
    pagerduty.getSchedules (err, schedules) ->
      if err?
        robot.emit 'error', err, msg
        return
      if schedules.length == 0
        msg.send 'No schedules found!'
        return
      if (schedules.every (s) -> not memberOfSchedule(s, userId))
        msg.send "You are not assigned to any schedules"
        return
      async.map schedules, renderSchedule, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          return
        # Keep only the schedules the user is actually a member of
        results = (r.body for r in results when r.member)
        unless results.length
          results = ["You are not oncall this month!"]
        msg.send results.join("\n")
# hubot who's on call - return a list of services and who is on call for them
# hubot who's on call for <schedule> - return the username of who's on call for any schedule matching <search>
robot.respond /who(’s|'s|s| is|se)? (on call|oncall|on-call)( (?:for )?(.+))?/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  msg.send "Retrieving schedules. This may take a few seconds..."
  scheduleName = msg.match[4]
  # Render one schedule along with its current oncall human
  renderSchedule = (s, cb) ->
    withCurrentOncallUser msg, s, (err, user, schedule) ->
      if err?
        cb(err)
        return
      Scrolls.log("info", {at: 'who-is-on-call/renderSchedule', schedule: schedule.name, username: user.name})
      if !pagerEnabledForScheduleOrEscalation(schedule) || user.name == "hubot" || user.name == undefined
        cb(null, "No human on call")
        return
      slackHandle = guessSlackHandleFromEmail(user)
      # Fixed: default to "" — previously slackString stayed undefined when no
      # handle was found and interpolated as the literal text "undefined".
      slackString = if slackHandle then " (#{slackHandle})" else ""
      cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}|#{schedule.name}'s> oncall is #{user.name}#{slackString}")
  # Render just the schedule link (no oncall lookup — avoids rate limiting)
  renderScheduleNoUser = (s, cb) ->
    Scrolls.log("info", {at: 'who-is-on-call/renderSchedule', schedule: s.name})
    if !pagerEnabledForScheduleOrEscalation(s)
      cb(null, undefined)
      return
    cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{s.id}|#{s.name}>")
  if scheduleName?
    withScheduleMatching msg, scheduleName, (s) ->
      renderSchedule s, (err, text) ->
        if err?
          # Fixed: previously emitted 'error' with no err/msg arguments, so the
          # error handler had nothing to log or reply with.
          robot.emit 'error', err, msg
          return
        msg.send text
      return
  else
    msg.send "Due to rate limiting please include the schedule name to also see who's on call. E.g. `.who's on call for <schedule>`. Schedule names are being retrieved..."
    pagerduty.getSchedules (err, schedules) ->
      if err?
        robot.emit 'error', err, msg
        return
      if schedules.length == 0
        msg.send 'No schedules found!'
        return
      async.map schedules, renderScheduleNoUser, (err, results) ->
        if err?
          Scrolls.log("error", {at: 'who-is-on-call/map-schedules/error', error: err})
          robot.emit 'error', err, msg
          return
        results = (result for result in results when result?)
        Scrolls.log("info", {at: 'who-is-on-call/map-schedules'})
        for chunk in chunkMessageLines(results, 7000)
          msg.send chunk.join("\n")
# hubot pager services - list services
robot.respond /(pager|major)( me)? services$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  pagerduty.getAll "/services", {}, "services", (err, services) ->
    if err?
      robot.emit 'error', err, msg
      return
    if services.length == 0
      msg.send 'No services found!'
      return
    renderService = (service, cb) ->
      cb(null, "* #{service.id}: #{service.name} (#{service.status}) - https://#{pagerduty.subdomain}.pagerduty.com/services/#{service.id}")
    async.map services, renderService, (err, results) ->
      if err?
        robot.emit 'error', err, msg
        return
      msg.send results.join("\n")
# hubot pager maintenance <minutes> <service_id1> <service_id2> ... <service_idN> - schedule a maintenance window for <minutes> for specified services
robot.respond /(pager|major)( me)? maintenance (\d+) (.+)$/i, (msg) ->
  if pagerduty.missingEnvironmentForApi(msg)
    return
  hubotUser = robot.getUserBySlackUser(msg.message.user)
  campfireUserToPagerDutyUser msg, hubotUser, (user) ->
    # PagerDuty attributes the window to the requester via the From header
    requesterEmail = emailForUser(user)
    unless requesterEmail
      return
    minutes = msg.match[3]
    service_ids = msg.match[4].split(' ')
    start_time = moment().format()
    end_time = moment().add(minutes, 'minutes').format()
    maintenance_window = {
      'start_time': start_time,
      'end_time': end_time,
      'type': 'maintenance_window',
      'services': service_ids.map (service_id) ->
        {
          'id': service_id,
          "type": "service_reference"
        }
    }
    data = { 'maintenance_window': maintenance_window }
    headers = {'from': requesterEmail}
    msg.send "Opening maintenance window for: #{service_ids}"
    pagerduty.post "/maintenance_windows", data, headers, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      unless json && json.maintenance_window
        msg.send "That didn't work. Check Hubot's logs for an error!"
        return
      msg.send "Maintenance window created! ID: #{json.maintenance_window.id} Ends: #{json.maintenance_window.end_time}"
# Determine whether a schedule's participants are available to be paged.
# Opting out is done by putting the literal tag "#nopage" anywhere in the
# schedule's (or escalation policy's) description.
#
# s :: Object
#   Decoded JSON from the Pagerduty Schedules or Escalation API.
#
# Returns a Boolean instance.
pagerEnabledForScheduleOrEscalation = (s) ->
  optOutTag = '#nopage'
  description = s.description or ""
  description.indexOf(optOutTag) is -1
# Split a space- and/or comma-separated list of incident numbers into an
# Array of integers, e.g. "12, 13 14" -> [12, 13, 14].
# Fixed: pass an explicit base-10 radix to parseInt — relying on the default
# radix is a classic footgun for strings with leading zeros.
parseIncidentNumbers = (match) ->
  match.split(/[ ,]+/).map (incidentNumber) ->
    parseInt(incidentNumber, 10)
# Best-known email address for a user record, checked in priority order:
# the address explicitly registered via `pager me as`, then the chat
# profile's address fields. Falls through to the last candidate (which may
# be undefined) when nothing is set.
emailForUser = (user) ->
  user.pagerdutyEmail or
    user.email_address or
    user.email or
    user.profile?.email
# Resolve a chat (hubot) user record to a PagerDuty user via the /users
# search API.
#
# msg      - hubot response object (used to nag/reply on failure)
# user     - hubot user record to resolve
# required - optional Boolean (default true); when false, a missing email or
#            zero API matches calls cb(null) instead of messaging the channel
# cb       - called with the PagerDuty user object (or null when not required)
campfireUserToPagerDutyUser = (msg, user, required, cb) ->
  # Support the 3-argument form: (msg, user, cb)
  if typeof required is 'function'
    cb = required
    required = true
  email = emailForUser(user) || process.env.HUBOT_PAGERDUTY_TEST_EMAIL
  speakerEmail = emailForUser(msg.message.user)
  if not email
    if not required
      cb null
      return
    else
      # Phrase the nag differently when the speaker asked about someone else
      possessive = if email is speakerEmail
        "your"
      else
        "#{user.name}'s"
      addressee = if email is speakerEmail
        "you"
      else
        "#{user.name}"
      msg.send "Sorry, I can't figure out #{possessive} email address :( Can #{addressee} tell me with `#{robot.name} pager me as you@yourdomain.com`?"
      return
  pagerduty.get "/users", {query: email}, (err, json) ->
    if err?
      robot.emit 'error', err, msg
      return
    if json.users.length isnt 1
      if json.users.length is 0 and not required
        cb null
        return
      else
        # The API query is a substring search, so several users can match;
        # fall back to an exact-email scan of the result list
        user = tryToFind(email, json.users)
        if !user
          msg.send "Sorry, I expected to get 1 user back for #{email}, but only found a list that didn't include the requested email :sweat:. Can you make sure that is actually a real user on PagerDuty?"
        else
          cb(user)
        return
    cb(json.users[0])
# Scan `users` for the record whose `email` field matches exactly.
# Returns the matching user object, or undefined when none matches.
tryToFind = (email, users) ->
  for candidate in users
    return candidate if candidate.email is email
  undefined
# Find exactly one schedule matching query string `q`.
# Calls cb(schedule); `schedule` is undefined when the API returns zero
# matches or several matches with no exact name hit (callers treat that as
# "couldn't determine which schedule you meant").
oneScheduleMatching = (msg, q, cb) ->
  query = {
    query: q
  }
  pagerduty.getSchedules query, (err, schedules) ->
    if err?
      robot.emit 'error', err, msg
      return
    # Single result returned
    if schedules?.length == 1
      schedule = schedules[0]
    # Multiple results returned and one is exact (case-insensitive)
    if schedules?.length > 1
      matchingExactly = schedules.filter (s) ->
        s.name.toLowerCase() == q.toLowerCase()
      if matchingExactly.length == 1
        schedule = matchingExactly[0]
    cb(schedule)
# Resolve `q` to exactly one schedule and hand it to `cb`; if the lookup is
# ambiguous or empty, tell the user instead of invoking the callback.
withScheduleMatching = (msg, q, cb) ->
  oneScheduleMatching msg, q, (schedule) ->
    unless schedule
      # maybe look for a specific name match here?
      msg.send "I couldn't determine exactly which schedule you meant by #{q}. Can you be more specific?"
      return
    cb(schedule)
# Work out how to assign an incident for the free-form target `string`.
# Tried in order: a chat user, an escalation policy, then a schedule's
# current oncall. Calls cb(err, params) where params is either
# {assigned_to_user, name} or {escalation_policy, name}. Targets whose
# description contains "#nopage" yield an explanatory Error.
reassignmentParametersForUserOrScheduleOrEscalationPolicy = (msg, string, cb) ->
  if campfireUser = robot.brain.userForName(string)
    campfireUserToPagerDutyUser msg, campfireUser, (user) ->
      cb(null, { assigned_to_user: user.id, name: user.name })
  else
    pagerduty.get "/escalation_policies", query: string, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      escalationPolicy = null
      if json?.escalation_policies?.length == 1
        escalationPolicy = json.escalation_policies[0]
      # Multiple results returned and one is exact (case-insensitive)
      else if json?.escalation_policies?.length > 1
        matchingExactly = json.escalation_policies.filter (es) ->
          es.name.toLowerCase() == string.toLowerCase()
        if matchingExactly.length == 1
          escalationPolicy = matchingExactly[0]
      if escalationPolicy?
        unless pagerEnabledForScheduleOrEscalation(escalationPolicy)
          error = new Error("Found the #{escalationPolicy.name} escalation policy but it is marked #nopage, see /who's on call for schedules you can page.")
          cb(error, null)
          return
        cb(null, { escalation_policy: escalationPolicy.id, name: escalationPolicy.name })
        return
      # No escalation policy matched — fall back to schedule lookup
      oneScheduleMatching msg, string, (schedule) ->
        if schedule
          unless pagerEnabledForScheduleOrEscalation(schedule)
            error = new Error("Found the #{schedule.name} schedule but it is marked #nopage, see /who's on call for schedules you can page.")
            cb(error, null)
            return
          withCurrentOncallUser msg, schedule, (err, user, schedule) ->
            if err?
              cb(err, null)
              return
            cb(null, { assigned_to_user: user.id, name: user.name })
          return
        error = new Error("Couldn't find a user, unique schedule or escalation policy matching #{string} to page, see /who's on call for schedules you can page.")
        cb(error, null)
withCurrentOncall = (msg, schedule, cb) ->
withCurrentOncallUser msg, schedule, (err, user, s) ->
if err?
cb(err, null, null)
return
cb(null, user.name, s)
withCurrentOncallId = (msg, schedule, cb) ->
withCurrentOncallUser msg, schedule, (err, user, s) ->
if err?
cb(err, null, null, null)
return
cb(null, user.id, user.name, s)
  # Look up who is on call for `schedule` right now, by querying the /oncalls
  # window [now, now + 1 hour).
  #
  # cb - callback(err, user, schedule); `user` is the string "nobody" when the
  #      window has no oncall entries.
  withCurrentOncallUser = (msg, schedule, cb) ->
    oneHour = moment().add(1, 'hours').format()
    now = moment().format()
    query = {
      since: now,
      until: oneHour,
      schedule_ids: [schedule.id]
    }
    pagerduty.getAll "/oncalls", query, "oncalls", (err, oncalls) ->
      if err?
        cb(err, null, null)
        return
      unless oncalls and oncalls.length > 0
        cb(null, "nobody", schedule)
        return
      # The first oncall entry in the window is the current one.
      userId = oncalls[0].user.id
      getPagerDutyUser userId, (err, user) ->
        if err?
          cb(err)
          return
        cb(null, user, schedule)
getPagerDutyUser = (userId, cb) ->
pagerduty.get "/users/#{userId}", (err, json) ->
if err?
cb(err)
return
if not json.user
cb(null, "nobody")
return
cb(null, json.user)
  # Send an event to the PagerDuty Events API using the service routing key.
  # Only the "trigger" action is implemented; any other `cmd` is silently a
  # no-op and the callback is never invoked.
  #
  # cb - callback(err, responseBody), forwarded to pagerDutyIntegrationPost
  pagerDutyIntegrationAPI = (msg, cmd, affected, description, severity, cb) ->
    unless pagerDutyServiceApiKey?
      msg.send "PagerDuty API service key is missing."
      msg.send "Ensure that HUBOT_PAGERDUTY_SERVICE_API_KEY is set."
      return
    data = null
    switch cmd
      when "trigger"
        # Events v2 payload: summary/source/severity.
        payload = {summary: description, source: affected, severity: severity}
        data = {routing_key: pagerDutyServiceApiKey, event_action: "trigger", payload: payload}
        pagerDutyIntegrationPost msg, data, cb
formatIncident = (inc) ->
# { pd_nagios_object: 'service',
# HOSTNAME: 'fs1a',
# SERVICEDESC: 'snapshot_repositories',
# SERVICESTATE: 'CRITICAL',
# HOSTSTATE: 'UP' },
summary = if inc.trigger_summary_data
if inc.trigger_summary_data.pd_nagios_object == 'service'
"#{inc.trigger_summary_data.HOSTNAME}/#{inc.trigger_summary_data.SERVICEDESC}"
else if inc.trigger_summary_data.pd_nagios_object == 'host'
"#{inc.trigger_summary_data.HOSTNAME}/#{inc.trigger_summary_data.HOSTSTATE}"
# email services
else if inc.trigger_summary_data.subject
inc.trigger_summary_data.subject
else if inc.trigger_summary_data.description
inc.trigger_summary_data.description
else
""
else
"#{inc.title} #{inc.summary}"
names = []
for assigned in inc.assignments
names.push assigned.assignee.summary
if names
assigned_to = "- assigned to #{names.join(",")}"
else
assigned_to = "- nobody currently assigned"
"#{inc.incident_number}: #{inc.created_at} #{summary} #{assigned_to}\n"
  # Transition the given incident numbers to `updatedStatus`, acting as the
  # requesting chat user (PagerDuty requires a From: header for writes).
  #
  # incidentNumbers - Array of Numbers to update
  # statusFilter    - comma-separated statuses to search within,
  #                   e.g. 'triggered,acknowledged'
  # updatedStatus   - target status, e.g. 'acknowledged' or 'resolved'
  updateIncidents = (msg, incidentNumbers, statusFilter, updatedStatus) ->
    campfireUserToPagerDutyUser msg, msg.message.user, (user) ->
      requesterEmail = emailForUser(user)
      unless requesterEmail
        return
      pagerduty.getIncidents statusFilter, (err, incidents) ->
        if err?
          robot.emit 'error', err, msg
          return
        foundIncidents = []
        for incident in incidents
          # FIXME this isn't working very consistently
          if incidentNumbers.indexOf(incident.incident_number) > -1
            foundIncidents.push(incident)
        if foundIncidents.length == 0
          msg.reply "Couldn't find incident(s) #{incidentNumbers.join(', ')}. Use `#{robot.name} pager incidents` for listing."
        else
          # loljson
          data = {
            incidents: foundIncidents.map (incident) ->
              {
                'id': incident.id,
                "type": "incident_reference",
                'status': updatedStatus
              }
          }
          headers = {from: requesterEmail}
          pagerduty.put "/incidents", data, headers, (err, json) ->
            if err?
              robot.emit 'error', err, msg
              return
            unless json?.incidents
              msg.reply "Problem updating incidents #{incidentNumbers.join(',')}"
              return
            # Confirm with the incident numbers PagerDuty says it updated.
            buffer = "Incident"
            buffer += "s" if json.incidents.length > 1
            buffer += " "
            buffer += (incident.incident_number for incident in json.incidents).join(", ")
            buffer += " #{updatedStatus}"
            msg.reply buffer
pagerDutyIntegrationPost = (msg, body, cb) ->
request.post {uri: pagerDutyEventsAPIURL, json: true, body: body}, (err, res, body) ->
if err?
cb(err)
return
switch res.statusCode
when 200, 201, 202
cb(null, body)
else
cb(new PagerDutyError("#{res.statusCode} back from #{path}"))
allUserEmails = (cb) ->
pagerduty.getAll "/users", {}, "users", (err, returnedUsers) ->
if err?
cb(err)
return
users = {}
for user in returnedUsers
users[user.id] = user.email
cb(null, users)
incidentsForEmail = (incidents, userEmail, cb) ->
allUserEmails (err, userEmails) ->
if err?
cb(err)
return
filtered = []
for incident in incidents
for assignment in incident.assignments
assignedEmail = userEmails[assignment.assignee.id]
if assignedEmail is userEmail
filtered.push incident
cb(null, filtered)
memberOfSchedule = (schedule, userId) ->
schedule.users.some (scheduleUser) ->
scheduleUser.id == userId
  # Render /oncalls entries as a bulleted, timezone-localized chat message.
  # Duplicate time windows for the same schedule are skipped; entries without
  # a schedule fall back to their escalation policy, or (for overrides) to
  # just the time and user.
  formatOncalls = (oncalls, timezone) ->
    buffer = ""
    schedules = {}
    for oncall in oncalls
      startTime = moment(oncall.start).tz(timezone).format()
      endTime = moment(oncall.end).tz(timezone).format()
      time = "#{startTime} - #{endTime}"
      username = guessSlackHandleFromEmail(oncall.user) || oncall.user.summary
      if oncall.schedule?
        scheduleId = oncall.schedule.id
        if scheduleId not of schedules
          schedules[scheduleId] = []
        # Only the first entry for a given schedule+window is shown.
        if time not in schedules[scheduleId]
          schedules[scheduleId].push time
          buffer += "• #{time} #{username} (<#{oncall.schedule.html_url}|#{oncall.schedule.summary}>)\n"
      else if oncall.escalation_policy?
        # no schedule embedded
        epSummary = oncall.escalation_policy.summary
        epURL = oncall.escalation_policy.html_url
        buffer += "• #{time} #{username} (<#{epURL}|#{epSummary}>)\n"
      else
        # override
        buffer += "• #{time} #{username}\n"
    buffer
chunkMessageLines = (messageLines, boundary) ->
allChunks = []
thisChunk = []
charCount = 0
for line in messageLines
if charCount >= boundary
allChunks.push(thisChunk)
charCount = 0
thisChunk = []
thisChunk.push(line)
charCount += line.length
allChunks.push(thisChunk)
allChunks
guessSlackHandleFromEmail = (user) ->
# Context: https://github.slack.com/archives/C0GNSSLUF/p1539181657000100
if user.email == "jp@github.com"
"`josh`"
else if user.email.search(/github\.com/)
user.email.replace(/(.+)\@github\.com/, '`$1`')
else
null
# Description:
# Interact with PagerDuty services, schedules, and incidents with Hubot. Schedules with "hidden" in the name will be ignored.
#
# Commands:
# hubot pager me as <email> - remember your pager email is <email>
# hubot pager forget me - forget your pager email
# hubot Am I on call - return if I'm currently on call or not
# hubot who's on call - return a list of services and who is on call for them
# hubot who's on call for <schedule> - return the username of who's on call for any schedule matching <search>
# hubot pager trigger <user> [<severity>] <msg> - create a new incident with <msg> and assign it to <user>. If specified, <severity> must be one of: critical, error, warning or info. If not specified, <severity> will default to 'critical'.
# hubot pager trigger <schedule> [<severity>] <msg> - create a new incident with <msg> and assign it the user currently on call for <schedule>. If specified, <severity> must be one of: critical, error, warning or info. If not specified, <severity> will default to 'critical'.
# hubot pager incidents - return the current incidents
# hubot pager sup - return the current incidents
# hubot pager sup --canary - return the current incidents, including Nines' canary incidents
# hubot pager incident <incident> - return the incident NNN
# hubot pager note <incident> <content> - add note to incident #<incident> with <content>
# hubot pager notes <incident> - show notes for incident #<incident>
# hubot pager problems - return all open incidents
# hubot pager ack <incident> - ack incident #<incident>
# hubot pager ack - ack triggered incidents assigned to you
# hubot pager ack! - ack all triggered incidents, not just yours
# hubot pager ack <incident1> <incident2> ... <incidentN> - ack all specified incidents
# hubot pager resolve <incident> - resolve incident #<incident>
# hubot pager resolve <incident1> <incident2> ... <incidentN> - resolve all specified incidents
# hubot pager resolve - resolve acknowledged incidents assigned to you
# hubot pager resolve! - resolve all acknowledged, not just yours
# hubot pager schedules - list schedules
# hubot pager schedules <search> - list schedules matching <search>
# hubot pager schedule <schedule> - show <schedule>'s shifts for the upcoming month
# hubot pager my schedule - show my on call shifts for the upcoming month in all schedules
# hubot pager me <schedule> <minutes> - take the pager for <minutes> minutes
#   hubot pager override <schedule> <start> - <end> [username] - Create a schedule override from <start> until <end>. If [username] is left off, defaults to you. start and end should be date-parsable dates, like 2014-06-24T09:06:45-07:00, see http://momentjs.com/docs/#/parsing/string/ for examples.
# hubot pager overrides <schedule> - show upcoming overrides for the next month
# hubot pager override <schedule> delete <id> - delete an override by its ID
# hubot pager services - list services
# hubot pager maintenance <minutes> <service_id1> <service_id2> ... <service_idN> - schedule a maintenance window for <minutes> for specified services
#
# Authors:
# <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
pagerduty = require('../pagerduty')
async = require('async')
inspect = require('util').inspect
moment = require('moment-timezone')
request = require 'request'
Scrolls = require('../../../../lib/scrolls').context({script: 'pagerduty'})
pagerDutyUserEmail = process.env.HUBOT_PAGERDUTY_USERNAME
pagerDutyServiceApiKey = process.env.HUBOT_PAGERDUTY_SERVICE_API_KEY
pagerDutyEventsAPIURL = 'https://events.pagerduty.com/v2/enqueue'
module.exports = (robot) ->
  # hubot pager / hubot pager me - report the caller's PagerDuty identity and
  # list the pager-related help commands.
  robot.respond /pager( me)?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      # NOTE(review): when neither pagerdutyEmail nor email_address is set,
      # emailNote is undefined and interpolates as "undefined" — confirm.
      emailNote = if hubotUser.pagerdutyEmail
        "You've told me your PagerDuty email is #{hubotUser.pagerdutyEmail}"
      else if hubotUser.email_address
        "I'm assuming your PagerDuty email is #{hubotUser.email_address}. Change it with `#{robot.name} pager me as <EMAIL>`"
      if user
        msg.send "I found your PagerDuty user #{user.html_url}, #{emailNote}"
      else
        msg.send "I couldn't find your user :( #{emailNote}"
      # Echo only pager-related commands from the full help list.
      cmds = robot.helpCommands()
      cmds = (cmd for cmd in cmds when cmd.match(/hubot (pager |who's on call)/))
      msg.send cmds.join("\n")
# hubot pager me as <email> - remember your pager email is <email>
robot.respond /pager(?: me)? as (.*)$/i, (msg) ->
hubotUser = robot.getUserBySlackUser(msg.message.user)
email = msg.match[1]
hubotUser.pagerdutyEmail = email
msg.send "Okay, I'll remember your PagerDuty email is #{email}"
# hubot pager forget me - forget your pager email
robot.respond /pager forget me$/i, (msg) ->
hubotUser = robot.getUserBySlackUser(msg.message.user)
hubotUser.pagerdutyEmail = undefined
msg.send "Okay, I've forgotten your PagerDuty email"
# hubot pager incident <incident> - return the incident NNN
robot.respond /(pager|major)( me)? incident (.*)$/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
pagerduty.getIncident msg.match[3], (err, incident) ->
if err?
robot.emit 'error', err, msg
return
msg.send formatIncident(incident)
# hubot pager incidents - return the current incidents
# hubot pager sup - return the current incidents
# hubot pager problems - return all open incidents
robot.respond /(pager|major)( me)? (inc|incidents|sup|problems)( --canary)?$/i, (msg) ->
pagerduty.getIncidents "triggered,acknowledged", (err, incidents) ->
if err?
robot.emit 'error', err, msg
return
unless msg.match[4]
incidents = incidents.filter (inc) ->
!/(ninesapp\/|Prometheus )canary/.test(inc.title)
if incidents.length == 0
msg.send "No open incidents"
return
buffer = "Triggered:\n----------\n"
for junk, incident of incidents.reverse()
if incident.status == 'triggered'
buffer = buffer + formatIncident(incident)
buffer = buffer + "\nAcknowledged:\n-------------\n"
for junk, incident of incidents.reverse()
if incident.status == 'acknowledged'
buffer = buffer + formatIncident(incident)
msg.send buffer
  # hubot pager trigger (no user/schedule)
  # Catch a bare "pager trigger <word>" with no message text and explain the
  # expected form instead of silently not matching anything.
  robot.respond /(pager|major)( me)? (?:trigger|page) ([\w\-]+)$/i, (msg) ->
    msg.reply "Please include a user or schedule to page, like 'hubot pager infrastructure everything is on fire'."
  # hubot pager trigger <user> <severity> <msg> - create a new incident with <msg> and assign it to <user>. Severity must be one of: critical, error, warning or info.
  # hubot pager trigger <schedule> <severity> <msg> - create a new incident with <msg> and assign it the user currently on call for <schedule>. Severity must be one of: critical, error, warning or info.
  #
  # Flow: trigger via the Events API, then (after a propagation delay) find
  # the resulting incident and reassign it to the resolved user/policy.
  robot.respond /(pager|major)( me)? (?:trigger|page) ([\w\-]+)( (critical|error|warning|info) )?(.+)$/i, (msg) ->
    msg.finish()
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    fromUserName = hubotUser.name
    query = msg.match[3]
    severity = msg.match[5]
    reason = msg.match[6]
    description = "#{reason} - @#{fromUserName}"
    # Unrecognized/absent severities default to critical.
    supportedSeverities = ['critical', 'error', 'warning', 'info']
    if severity not in supportedSeverities
      severity = 'critical'
    # Figure out who we are
    campfireUserToPagerDutyUser msg, hubotUser, false, (triggeredByPagerDutyUser) ->
      triggeredByPagerDutyUserEmail = if triggeredByPagerDutyUser?
        emailForUser(triggeredByPagerDutyUser)
      else if pagerDutyUserEmail
        pagerDutyUserEmail
      unless triggeredByPagerDutyUserEmail
        msg.send "Sorry, I can't figure your PagerDuty account, and I don't have my own :( Can you tell me your PagerDuty email with `#{robot.name} pager me as <EMAIL>`?"
        return
      # Figure out what we're trying to page
      reassignmentParametersForUserOrScheduleOrEscalationPolicy msg, query, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          # reassignmentParametersForUserOrScheduleOrEscalationPolicy constructs explicit,
          # human-consumable errors with good messages. Send it to the user if we got one.
          if err.message != ""
            msg.reply err.message
          return
        pagerDutyIntegrationAPI msg, "trigger", query, description, severity, (err, json) ->
          if err?
            robot.emit 'error', err, msg
            return
          msg.reply ":pager: triggered! now assigning it to the right user..."
          incidentKey = json.dedup_key
          setTimeout () ->
            # Look the new incident up by its dedup key so we can reassign it.
            pagerduty.get "/incidents", {incident_key: incidentKey}, (err, json) ->
              if err?
                robot.emit 'error', err, msg
                return
              if json?.incidents.length == 0
                msg.reply "Couldn't find the incident we just created to reassign. Please try again :/"
                return
              incident = json.incidents[0]
              data = {"type": "incident_reference"}
              if results.assigned_to_user?
                data['assignments'] = [{"assignee": {"id": results.assigned_to_user, "type": "user_reference"}}]
              if results.escalation_policy?
                data['escalation_policy'] = {"id": results.escalation_policy, "type": "escalation_policy_reference"}
              headers = {from: triggeredByPagerDutyUserEmail}
              pagerduty.put "/incidents/#{incident.id}", {'incident': data}, headers, (err, json) ->
                if err?
                  robot.emit 'error', err, msg
                  return
                if not json?.incident
                  msg.reply "Problem reassigning the incident :/"
                  return
                msg.reply ":pager: assigned to #{results.name}!"
          , 7000 # set timeout to 7s. sometimes PagerDuty needs a bit of time for events to propagate as incidents
# hubot pager ack <incident> - ack incident #<incident>
# hubot pager ack <incident1> <incident2> ... <incidentN> - ack all specified incidents
robot.respond /(?:pager|major)(?: me)? ack(?:nowledge)? (.+)$/i, (msg) ->
msg.finish()
if pagerduty.missingEnvironmentForApi(msg)
return
incidentNumbers = parseIncidentNumbers(msg.match[1])
# only acknowledge triggered things, since it doesn't make sense to re-acknowledge if it's already in re-acknowledge
# if it ever doesn't need acknowledge again, it means it's timed out and has become 'triggered' again anyways
updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'acknowledged')
  # hubot pager ack - ack triggered incidents assigned to you
  # hubot pager ack! - ack all triggered incidents, not just yours
  robot.respond /(pager|major)( me)? ack(nowledge)?(!)?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    # Trailing "!" means acknowledge everyone's incidents, not just the caller's.
    force = msg.match[4]?
    pagerduty.getIncidents 'triggered,acknowledged', (err, incidents) ->
      if err?
        robot.emit 'error', err, msg
        return
      email = emailForUser(hubotUser)
      incidentsForEmail incidents, email, (err, filteredIncidents) ->
        if err?
          msg.send err.message
          return
        if force
          filteredIncidents = incidents
        if filteredIncidents.length is 0
          # nothing assigned to the user, but there were others
          if incidents.length > 0 and not force
            msg.send "Nothing assigned to you to acknowledge. Acknowledge someone else's incident with `hubot pager ack <nnn>`"
          else
            msg.send "Nothing to acknowledge"
          return
        incidentNumbers = (incident.incident_number for incident in filteredIncidents)
        # only acknowledge triggered things
        updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'acknowledged')
# hubot pager resolve <incident> - resolve incident #<incident>
# hubot pager resolve <incident1> <incident2> ... <incidentN> - resolve all specified incidents
robot.respond /(?:pager|major)(?: me)? res(?:olve)?(?:d)? (.+)$/i, (msg) ->
msg.finish()
if pagerduty.missingEnvironmentForApi(msg)
return
incidentNumbers = parseIncidentNumbers(msg.match[1])
# allow resolving of triggered and acknowedlge, since being explicit
updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'resolved')
# hubot pager resolve - resolve acknowledged incidents assigned to you
# hubot pager resolve! - resolve all acknowledged, not just yours
robot.respond /(pager|major)( me)? res(olve)?(d)?(!)?$/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
hubotUser = robot.getUserBySlackUser(msg.message.user)
force = msg.match[5]?
pagerduty.getIncidents "acknowledged", (err, incidents) ->
if err?
robot.emit 'error', err, msg
return
email = emailForUser(hubotUser)
incidentsForEmail incidents, email, (err, filteredIncidents) ->
if err?
robot.emit 'error', err, msg
return
if force
filteredIncidents = incidents
if filteredIncidents.length is 0
# nothing assigned to the user, but there were others
if incidents.length > 0 and not force
msg.send "Nothing assigned to you to resolve. Resolve someone else's incident with `hubot pager ack <nnn>`"
else
msg.send "Nothing to resolve"
return
incidentNumbers = (incident.incident_number for incident in filteredIncidents)
# only resolve things that are acknowledged
updateIncidents(msg, incidentNumbers, 'acknowledged', 'resolved')
  # hubot pager notes <incident> - show notes for incident #<incident>
  robot.respond /(pager|major)( me)? notes (.+)$/i, (msg) ->
    msg.finish()
    if pagerduty.missingEnvironmentForApi(msg)
      return
    incidentId = msg.match[3]
    pagerduty.get "/incidents/#{incidentId}/notes", {}, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      # One line per note: timestamp, author, content.
      buffer = ""
      for note in json.notes
        buffer += "#{note.created_at} #{note.user.summary}: #{note.content}\n"
      if not buffer
        buffer = "No notes!"
      msg.send buffer
  # hubot pager note <incident> <content> - add note to incident #<incident> with <content>
  robot.respond /(pager|major)( me)? note ([\d\w]+) (.+)$/i, (msg) ->
    msg.finish()
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    if pagerduty.missingEnvironmentForApi(msg)
      return
    incidentId = msg.match[3]
    content = msg.match[4]
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      # Notes are attributed to the requester via the From: header.
      userEmail = emailForUser(user)
      return unless userEmail
      data =
        note:
          content: content
      headers = {from: userEmail}
      pagerduty.post "/incidents/#{incidentId}/notes", data, headers, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        if json && json.note
          msg.send "Got it! Note created: #{json.note.content}"
        else
          msg.send "Sorry, I couldn't do it :("
  # hubot pager schedules - list schedules
  # hubot pager schedules <search> - list schedules matching <search>
  robot.respond /(pager|major)( me)? schedules( (.+))?$/i, (msg) ->
    query = {}
    if msg.match[4]
      query['query'] = msg.match[4]
    if pagerduty.missingEnvironmentForApi(msg)
      return
    msg.send "Retrieving schedules. This may take a few seconds..."
    pagerduty.getSchedules query, (err, schedules) ->
      if err?
        robot.emit 'error', err, msg
        return
      if schedules.length == 0
        msg.send 'No schedules found!'
        return
      # Render each schedule as a Slack-formatted link.
      renderSchedule = (schedule, cb) ->
        cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}|#{schedule.name}>")
      async.map schedules, renderSchedule, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          return
        # Chunk output so each message stays under chat size limits.
        for chunk in chunkMessageLines(results, 7000)
          msg.send chunk.join("\n")
# hubot pager schedule <schedule> - show <schedule>'s shifts for the upcoming month
# hubot pager overrides <schedule> - show upcoming overrides for the next month
robot.respond /(pager|major)( me)? (schedule|overrides)( ([\w\-]+))?( ([^ ]+))?$/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
query = {
since: moment().format(),
until: moment().add(30, 'days').format()
}
if !msg.match[5]
msg.reply "Please specify a schedule with 'pager #{msg.match[3]} <name>.'' Use 'pager schedules' to list all schedules."
return
if msg.match[7]
timezone = msg.match[7]
else
timezone = 'UTC'
msg.send "Retrieving schedules. This may take a few seconds..."
withScheduleMatching msg, msg.match[5], (schedule) ->
scheduleId = schedule.id
return unless scheduleId
if msg.match[3] && msg.match[3].match /overrides/
url = "/schedules/#{scheduleId}/overrides"
query['editable'] = 'true'
query['overflow'] = 'true'
key = "overrides"
else
url = "/oncalls"
key = "<KEY>"
query['schedule_ids'] = [scheduleId]
query['include'] = ['users']
pagerduty.getAll url, query, key, (err, entries) ->
if err?
robot.emit 'error', err, msg
return
unless entries.length > 0
msg.send "None found!"
return
sortedEntries = entries.sort (a, b) ->
moment(a.start).unix() - moment(b.start).unix()
msg.send formatOncalls(sortedEntries, timezone)
  # hubot pager my schedule - show my on call shifts for the upcoming month in all schedules
  robot.respond /(pager|major)( me)? my schedule( ([^ ]+))?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userId = user.id
      # Optional trailing argument is an IANA timezone name.
      if msg.match[4]
        timezone = msg.match[4]
      else
        timezone = 'UTC'
      # All of this user's oncalls across every schedule, next 30 days.
      query = {
        since: moment().format(),
        until: moment().add(30, 'days').format(),
        user_ids: [user.id]
        include: ['users']
      }
      pagerduty.getAll "/oncalls", query, "oncalls", (err, oncalls) ->
        if err?
          robot.emit 'error', err, msg
          return
        if oncalls.length == 0
          msg.send 'You are not oncall!'
          return
        msg.send formatOncalls(oncalls, timezone)
  # hubot pager override <schedule> <start> - <end> [username] - Create a schedule override from <start> until <end>. If [username] is left off, defaults to you. start and end should be date-parsable dates, like 2014-06-24T09:06:45-07:00, see http://momentjs.com/docs/#/parsing/string/ for examples.
  robot.respond /(pager|major)( me)? (override) ([\w\-]+) ([\w\-:\+]+) - ([\w\-:\+]+)( (.*))?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    # Override for someone else when a username is given, otherwise the caller.
    if msg.match[8]
      overrideUser = robot.brain.userForName(msg.match[8])
      unless overrideUser
        msg.send "Sorry, I don't seem to know who that is. Are you sure they are in chat?"
        return
    else
      overrideUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, overrideUser, (user) ->
      userId = user.id
      unless userId
        return
      withScheduleMatching msg, msg.match[4], (schedule) ->
        scheduleId = schedule.id
        unless scheduleId
          return
        unless moment(msg.match[5]).isValid() && moment(msg.match[6]).isValid()
          msg.send "Please use a http://momentjs.com/ compatible date!"
          return
        start_time = moment(msg.match[5]).format()
        end_time = moment(msg.match[6]).format()
        override = {
          'start': start_time,
          'end': end_time,
          'user': {
            'id': userId,
            "type": "user_reference"
          },
        }
        data = { 'override': override }
        pagerduty.post "/schedules/#{scheduleId}/overrides", data, {}, (err, json) ->
          if err?
            robot.emit 'error', err, msg
            return
          unless json && json.override
            msg.send "That didn't work. Check Hubot's logs for an error!"
            return
          start = moment(json.override.start)
          end = moment(json.override.end)
          msg.send "Override setup! #{json.override.user.summary} has the pager from #{start.format()} until #{end.format()}"
  # hubot pager override <schedule> delete <id> - delete an override by its ID
  robot.respond /(pager|major)( me)? (overrides?) ([\w\-]*) (delete) (.*)$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    withScheduleMatching msg, msg.match[4], (schedule) ->
      scheduleId = schedule.id
      unless scheduleId
        return
      # NOTE(review): `err` is ignored here; failure is reported only via
      # `success` — confirm pagerduty.delete's callback contract.
      pagerduty.delete "/schedules/#{scheduleId}/overrides/#{msg.match[6]}", (err, success) ->
        unless success
          msg.send "Something went weird."
          return
        msg.send ":boom:"
  # hubot pager me <schedule> <minutes> - take the pager for <minutes> minutes
  # Creates an override on the matched schedule from now for <minutes>,
  # assigning the caller, and announces who is being relieved.
  robot.respond /pager( me)? (.+) (\d+)$/i, (msg) ->
    msg.finish()
    # skip hubot pager incident NNN
    if msg.match[2] == 'incident'
      return
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userId = user.id
      unless userId
        return
      if !msg.match[2] || msg.match[2] == 'me'
        msg.reply "Please specify a schedule with 'pager me infrastructure 60'. Use 'pager schedules' to list all schedules."
        return
      withScheduleMatching msg, msg.match[2], (matchingSchedule) ->
        unless matchingSchedule.id
          return
        start = moment().format()
        minutes = parseInt msg.match[3]
        end = moment().add(minutes, 'minutes').format()
        override = {
          'start': start,
          'end': end,
          'user': {
            'id': userId,
            "type": "user_reference",
          },
        }
        # Look up the current oncall first so we can name who is relieved.
        withCurrentOncall msg, matchingSchedule, (err, old_username, schedule) ->
          if err?
            robot.emit 'error', err, msg
            return
          data = { 'override': override }
          pagerduty.post "/schedules/#{schedule.id}/overrides", data, {}, (err, json) ->
            if err?
              robot.emit 'error', err, msg
              return
            unless json.override
              msg.send "Something went weird."
              return
            start = moment(json.override.start)
            end = moment(json.override.end)
            getPagerDutyUser userId, (err, user) ->
              if err?
                robot.emit 'error', err, msg
                return
              msg.send "Rejoice, @#{old_username}! @#{user.name} has the pager on #{schedule.name} until #{end.format()}"
  # hubot Am I on call - return if I'm currently on call or not
  # For each schedule the caller belongs to, reports whether they are the
  # current oncall (and who is, when they are not).
  robot.respond /am i on (call|oncall|on-call)/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    msg.send "Finding schedules, this may take a few seconds..."
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userId = user.id
      renderSchedule = (s, cb) ->
        # Skip schedules the caller is not even a member of.
        if not memberOfSchedule(s, userId)
          cb(null, {member: false})
          return
        withCurrentOncallId msg, s, (err, oncallUserid, oncallUsername, schedule) ->
          if err?
            cb(err)
            return
          if userId == oncallUserid
            cb(null, {member: true, body: "* Yes, you are on call for #{schedule.name} - https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}"})
          else
            cb(null, {member: true, body: "* No, you are NOT on call for #{schedule.name} (but #{oncallUsername} is)- https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}"})
      unless userId?
        msg.send "Couldn't figure out the pagerduty user connected to your account."
        return
      pagerduty.getSchedules (err, schedules) ->
        if err?
          robot.emit 'error', err, msg
          return
        if schedules.length == 0
          msg.send 'No schedules found!'
          return
        if (schedules.every (s) -> not memberOfSchedule(s, userId))
          msg.send "You are not assigned to any schedules"
          return
        async.map schedules, renderSchedule, (err, results) ->
          if err?
            robot.emit 'error', err, msg
            return
          # Keep only schedules where the caller is a member.
          results = (r.body for r in results when r.member)
          unless results.length
            results = ["You are not oncall this month!"]
          msg.send results.join("\n")
# hubot who's on call - return a list of services and who is on call for them
# hubot who's on call for <schedule> - return the username of who's on call for any schedule matching <search>
robot.respond /who(’s|'s|s| is|se)? (on call|oncall|on-call)( (?:for )?(.+))?/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
msg.send "Retrieving schedules. This may take a few seconds..."
scheduleName = msg.match[4]
renderSchedule = (s, cb) ->
withCurrentOncallUser msg, s, (err, user, schedule) ->
if err?
cb(err)
return
Scrolls.log("info", {at: 'who-is-on-call/renderSchedule', schedule: schedule.name, username: user.name})
if !pagerEnabledForScheduleOrEscalation(schedule) || user.name == "hubot" || user.name == undefined
cb(null, "No human on call")
return
slackHandle = guessSlackHandleFromEmail(user)
slackString = " (#{slackHandle})" if slackHandle
cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}|#{schedule.name}'s> oncall is #{user.name}#{slackString}")
renderScheduleNoUser = (s, cb) ->
Scrolls.log("info", {at: 'who-is-on-call/renderSchedule', schedule: s.name})
if !pagerEnabledForScheduleOrEscalation(s)
cb(null, undefined)
return
cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{s.id}|#{s.name}>")
if scheduleName?
withScheduleMatching msg, scheduleName, (s) ->
renderSchedule s, (err, text) ->
if err?
robot.emit 'error'
return
msg.send text
return
else
msg.send "Due to rate limiting please include the schedule name to also see who's on call. E.g. `.who's on call for <schedule>`. Schedule names are being retrieved..."
pagerduty.getSchedules (err, schedules) ->
if err?
robot.emit 'error', err, msg
return
if schedules.length == 0
msg.send 'No schedules found!'
return
async.map schedules, renderScheduleNoUser, (err, results) ->
if err?
Scrolls.log("error", {at: 'who-is-on-call/map-schedules/error', error: err})
robot.emit 'error', err, msg
return
results = (result for result in results when result?)
Scrolls.log("info", {at: 'who-is-on-call/map-schedules'})
for chunk in chunkMessageLines(results, 7000)
msg.send chunk.join("\n")
  # hubot pager services - list services
  robot.respond /(pager|major)( me)? services$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    pagerduty.getAll "/services", {}, "services", (err, services) ->
      if err?
        robot.emit 'error', err, msg
        return
      if services.length == 0
        msg.send 'No services found!'
        return
      # One line per service: id, name, status, and a direct link.
      renderService = (service, cb) ->
        cb(null, "* #{service.id}: #{service.name} (#{service.status}) - https://#{pagerduty.subdomain}.pagerduty.com/services/#{service.id}")
      async.map services, renderService, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          return
        msg.send results.join("\n")
  # hubot pager maintenance <minutes> <service_id1> <service_id2> ... <service_idN> - schedule a maintenance window for <minutes> for specified services
  robot.respond /(pager|major)( me)? maintenance (\d+) (.+)$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      # PagerDuty requires a From: header identifying the requester.
      requesterEmail = emailForUser(user)
      unless requesterEmail
        return
      minutes = msg.match[3]
      service_ids = msg.match[4].split(' ')
      start_time = moment().format()
      end_time = moment().add(minutes, 'minutes').format()
      maintenance_window = {
        'start_time': start_time,
        'end_time': end_time,
        'type': 'maintenance_window',
        'services': service_ids.map (service_id) ->
          {
            'id': service_id,
            "type": "service_reference"
          }
      }
      data = { 'maintenance_window': maintenance_window }
      headers = {'from': requesterEmail}
      msg.send "Opening maintenance window for: #{service_ids}"
      pagerduty.post "/maintenance_windows", data, headers, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        unless json && json.maintenance_window
          msg.send "That didn't work. Check Hubot's logs for an error!"
          return
        msg.send "Maintenance window created! ID: #{json.maintenance_window.id} Ends: #{json.maintenance_window.end_time}"
# Determine whether a schedule's participants are available to be paged.
#
# s :: Object
# Decoded JSON from the Pagerduty Schedules or Escalation API.
#
# Returns a Boolean instance.
pagerEnabledForScheduleOrEscalation = (s) ->
description = s.description or ""
return description.indexOf('#nopage') == -1
parseIncidentNumbers = (match) ->
match.split(/[ ,]+/).map (incidentNumber) ->
parseInt(incidentNumber)
emailForUser = (user) ->
user.pagerdutyEmail || user.email_address || user.email || user.profile?.email
  # Resolve a chat user to their PagerDuty user record.
  #
  # msg      - hubot response object (used for chat complaints)
  # user     - hubot user record
  # required - optional Boolean (default true); when false a missing
  #            user/email yields cb(null) instead of a chat complaint
  # cb       - called with the PagerDuty user object (or null, see above)
  campfireUserToPagerDutyUser = (msg, user, required, cb) ->
    # Support the 3-argument form: (msg, user, cb).
    if typeof required is 'function'
      cb = required
      required = true
    # HUBOT_PAGERDUTY_TEST_EMAIL lets tests pin a fixed address.
    email = emailForUser(user) || process.env.HUBOT_PAGERDUTY_TEST_EMAIL
    speakerEmail = emailForUser(msg.message.user)
    if not email
      if not required
        cb null
        return
      else
        # Tailor the complaint to whether we are talking about the speaker
        # or a third party. NOTE(review): email is falsy here, so the
        # comparison only says "your" when speakerEmail is also missing —
        # confirm that wording is intended.
        possessive = if email is speakerEmail
          "your"
        else
          "#{user.name}'s"
        addressee = if email is speakerEmail
          "you"
        else
          "#{user.name}"
        msg.send "Sorry, I can't figure out #{possessive} email address :( Can #{addressee} tell me with `#{robot.name} pager me as <EMAIL>`?"
        return
    pagerduty.get "/users", {query: email}, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      if json.users.length isnt 1
        if json.users.length is 0 and not required
          cb null
          return
        else
          # The API query is a substring match, so several users can come
          # back; fall back to an exact-email scan of the results.
          user = tryToFind(email, json.users)
          if !user
            msg.send "Sorry, I expected to get 1 user back for #{email}, but only found a list that didn't include the requested email :sweat:. Can you make sure that is actually a real user on PagerDuty?"
          else
            cb(user)
          return
      cb(json.users[0])
tryToFind = (email, users) ->
users.find (user) ->
user.email == email
  # Find at most one schedule whose name matches the query `q`.
  #
  # Accepts a unique API result; with several results, falls back to a
  # case-insensitive exact-name match. Calls cb(schedule) — `schedule` is
  # undefined when nothing (or nothing unambiguous) matched.
  oneScheduleMatching = (msg, q, cb) ->
    query = {
      query: q
    }
    pagerduty.getSchedules query, (err, schedules) ->
      if err?
        robot.emit 'error', err, msg
        return
      # Single result returned
      if schedules?.length == 1
        schedule = schedules[0]
      # Multiple results returned and one is exact (case-insensitive)
      if schedules?.length > 1
        matchingExactly = schedules.filter (s) ->
          s.name.toLowerCase() == q.toLowerCase()
        if matchingExactly.length == 1
          schedule = matchingExactly[0]
      cb(schedule)
withScheduleMatching = (msg, q, cb) ->
oneScheduleMatching msg, q, (schedule) ->
if schedule
cb(schedule)
else
# maybe look for a specific name match here?
msg.send "I couldn't determine exactly which schedule you meant by #{q}. Can you be more specific?"
return
  # Work out who/what to assign an incident to from a free-form string.
  #
  # Tries, in order: a chat user with that name, a unique escalation
  # policy, then a unique schedule (paging its current on-call user).
  # cb(err, params) — params is {assigned_to_user, name} or
  # {escalation_policy, name}. Targets tagged #nopage yield an error
  # carrying a user-facing message.
  reassignmentParametersForUserOrScheduleOrEscalationPolicy = (msg, string, cb) ->
    if campfireUser = robot.brain.userForName(string)
      campfireUserToPagerDutyUser msg, campfireUser, (user) ->
        cb(null, { assigned_to_user: user.id, name: user.name })
    else
      pagerduty.get "/escalation_policies", query: string, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        escalationPolicy = null
        if json?.escalation_policies?.length == 1
          escalationPolicy = json.escalation_policies[0]
        # Multiple results returned and one is exact (case-insensitive)
        else if json?.escalation_policies?.length > 1
          matchingExactly = json.escalation_policies.filter (es) ->
            es.name.toLowerCase() == string.toLowerCase()
          if matchingExactly.length == 1
            escalationPolicy = matchingExactly[0]
        if escalationPolicy?
          unless pagerEnabledForScheduleOrEscalation(escalationPolicy)
            error = new Error("Found the #{escalationPolicy.name} escalation policy but it is marked #nopage, see /who's on call for schedules you can page.")
            cb(error, null)
            return
          cb(null, { escalation_policy: escalationPolicy.id, name: escalationPolicy.name })
          return
        # No usable escalation policy — fall back to a schedule lookup.
        oneScheduleMatching msg, string, (schedule) ->
          if schedule
            unless pagerEnabledForScheduleOrEscalation(schedule)
              error = new Error("Found the #{schedule.name} schedule but it is marked #nopage, see /who's on call for schedules you can page.")
              cb(error, null)
              return
            withCurrentOncallUser msg, schedule, (err, user, schedule) ->
              if err?
                cb(err, null)
                return
              cb(null, { assigned_to_user: user.id, name: user.name })
            return
          error = new Error("Couldn't find a user, unique schedule or escalation policy matching #{string} to page, see /who's on call for schedules you can page.")
          cb(error, null)
withCurrentOncall = (msg, schedule, cb) ->
withCurrentOncallUser msg, schedule, (err, user, s) ->
if err?
cb(err, null, null)
return
cb(null, user.name, s)
withCurrentOncallId = (msg, schedule, cb) ->
withCurrentOncallUser msg, schedule, (err, user, s) ->
if err?
cb(err, null, null, null)
return
cb(null, user.id, user.name, s)
  # Look up who is on call right now for `schedule`.
  #
  # Queries /oncalls over the next one-hour window. Calls cb(err) on
  # failure, or cb(null, user, schedule) — note `user` is the literal
  # string "nobody" (not an object) when no one is currently on call.
  withCurrentOncallUser = (msg, schedule, cb) ->
    oneHour = moment().add(1, 'hours').format()
    now = moment().format()
    query = {
      since: now,
      until: oneHour,
      schedule_ids: [schedule.id]
    }
    pagerduty.getAll "/oncalls", query, "oncalls", (err, oncalls) ->
      if err?
        cb(err, null, null)
        return
      unless oncalls and oncalls.length > 0
        cb(null, "nobody", schedule)
        return
      # Only the first on-call entry is considered.
      userId = oncalls[0].user.id
      getPagerDutyUser userId, (err, user) ->
        if err?
          cb(err)
          return
        cb(null, user, schedule)
getPagerDutyUser = (userId, cb) ->
pagerduty.get "/users/#{userId}", (err, json) ->
if err?
cb(err)
return
if not json.user
cb(null, "nobody")
return
cb(null, json.user)
  # Send an event to the PagerDuty Events API via the service integration.
  #
  # Only cmd == "trigger" is implemented; any other value silently does
  # nothing. NOTE(review): confirm whether other cmds were ever intended.
  # cb is passed straight through to pagerDutyIntegrationPost.
  pagerDutyIntegrationAPI = (msg, cmd, affected, description, severity, cb) ->
    unless pagerDutyServiceApiKey?
      msg.send "PagerDuty API service key is missing."
      msg.send "Ensure that HUBOT_PAGERDUTY_SERVICE_API_KEY is set."
      return
    data = null
    switch cmd
      when "trigger"
        payload = {summary: description, source: affected, severity: severity}
        data = {routing_key: pagerDutyServiceApiKey, event_action: "trigger", payload: payload}
        pagerDutyIntegrationPost msg, data, cb
formatIncident = (inc) ->
# { pd_nagios_object: 'service',
# HOSTNAME: 'fs1a',
# SERVICEDESC: 'snapshot_repositories',
# SERVICESTATE: 'CRITICAL',
# HOSTSTATE: 'UP' },
summary = if inc.trigger_summary_data
if inc.trigger_summary_data.pd_nagios_object == 'service'
"#{inc.trigger_summary_data.HOSTNAME}/#{inc.trigger_summary_data.SERVICEDESC}"
else if inc.trigger_summary_data.pd_nagios_object == 'host'
"#{inc.trigger_summary_data.HOSTNAME}/#{inc.trigger_summary_data.HOSTSTATE}"
# email services
else if inc.trigger_summary_data.subject
inc.trigger_summary_data.subject
else if inc.trigger_summary_data.description
inc.trigger_summary_data.description
else
""
else
"#{inc.title} #{inc.summary}"
names = []
for assigned in inc.assignments
names.push assigned.assignee.summary
if names
assigned_to = "- assigned to #{names.join(",")}"
else
assigned_to = "- nobody currently assigned"
"#{inc.incident_number}: #{inc.created_at} #{summary} #{assigned_to}\n"
  # Move the given incident numbers to `updatedStatus`.
  #
  # Fetches incidents currently in `statusFilter`, matches them by
  # incident_number, and PUTs the new status with the requester's email in
  # the From header.
  # NOTE(review): other handlers wrap msg.message.user with
  # robot.getUserBySlackUser first — confirm this direct path also works.
  updateIncidents = (msg, incidentNumbers, statusFilter, updatedStatus) ->
    campfireUserToPagerDutyUser msg, msg.message.user, (user) ->
      requesterEmail = emailForUser(user)
      unless requesterEmail
        return
      pagerduty.getIncidents statusFilter, (err, incidents) ->
        if err?
          robot.emit 'error', err, msg
          return
        foundIncidents = []
        for incident in incidents
          # FIXME this isn't working very consistently
          if incidentNumbers.indexOf(incident.incident_number) > -1
            foundIncidents.push(incident)
        if foundIncidents.length == 0
          msg.reply "Couldn't find incident(s) #{incidentNumbers.join(', ')}. Use `#{robot.name} pager incidents` for listing."
        else
          # loljson
          data = {
            incidents: foundIncidents.map (incident) ->
              {
                'id': incident.id,
                "type": "incident_reference",
                'status': updatedStatus
              }
          }
          headers = {from: requesterEmail}
          pagerduty.put "/incidents", data, headers, (err, json) ->
            if err?
              robot.emit 'error', err, msg
              return
            unless json?.incidents
              msg.reply "Problem updating incidents #{incidentNumbers.join(',')}"
              return
            # e.g. "Incidents 12, 34 resolved"
            buffer = "Incident"
            buffer += "s" if json.incidents.length > 1
            buffer += " "
            buffer += (incident.incident_number for incident in json.incidents).join(", ")
            buffer += " #{updatedStatus}"
            msg.reply buffer
pagerDutyIntegrationPost = (msg, body, cb) ->
request.post {uri: pagerDutyEventsAPIURL, json: true, body: body}, (err, res, body) ->
if err?
cb(err)
return
switch res.statusCode
when 200, 201, 202
cb(null, body)
else
cb(new PagerDutyError("#{res.statusCode} back from #{path}"))
allUserEmails = (cb) ->
pagerduty.getAll "/users", {}, "users", (err, returnedUsers) ->
if err?
cb(err)
return
users = {}
for user in returnedUsers
users[user.id] = user.email
cb(null, users)
incidentsForEmail = (incidents, userEmail, cb) ->
allUserEmails (err, userEmails) ->
if err?
cb(err)
return
filtered = []
for incident in incidents
for assignment in incident.assignments
assignedEmail = userEmails[assignment.assignee.id]
if assignedEmail is userEmail
filtered.push incident
cb(null, filtered)
memberOfSchedule = (schedule, userId) ->
schedule.users.some (scheduleUser) ->
scheduleUser.id == userId
  # Render /oncalls entries as chat bullet lines in `timezone`.
  #
  # Schedule-backed entries are de-duplicated so the same schedule+time
  # window only prints once; escalation-policy entries and bare overrides
  # always print.
  formatOncalls = (oncalls, timezone) ->
    buffer = ""
    schedules = {}
    for oncall in oncalls
      startTime = moment(oncall.start).tz(timezone).format()
      endTime = moment(oncall.end).tz(timezone).format()
      time = "#{startTime} - #{endTime}"
      username = guessSlackHandleFromEmail(oncall.user) || oncall.user.summary
      if oncall.schedule?
        scheduleId = oncall.schedule.id
        if scheduleId not of schedules
          schedules[scheduleId] = []
        # Skip duplicate windows for the same schedule.
        if time not in schedules[scheduleId]
          schedules[scheduleId].push time
          buffer += "• #{time} #{username} (<#{oncall.schedule.html_url}|#{oncall.schedule.summary}>)\n"
      else if oncall.escalation_policy?
        # no schedule embedded
        epSummary = oncall.escalation_policy.summary
        epURL = oncall.escalation_policy.html_url
        buffer += "• #{time} #{username} (<#{epURL}|#{epSummary}>)\n"
      else
        # override
        buffer += "• #{time} #{username}\n"
    buffer
chunkMessageLines = (messageLines, boundary) ->
allChunks = []
thisChunk = []
charCount = 0
for line in messageLines
if charCount >= boundary
allChunks.push(thisChunk)
charCount = 0
thisChunk = []
thisChunk.push(line)
charCount += line.length
allChunks.push(thisChunk)
allChunks
guessSlackHandleFromEmail = (user) ->
# Context: https://github.slack.com/archives/C0GNSSLUF/p1539181657000100
if user.email == "<EMAIL>"
"`josh`"
else if user.email.search(/github\.com/)
user.email.replace(/(.+)\@github\.com/, '`$1`')
else
null
# Description:
# Interact with PagerDuty services, schedules, and incidents with Hubot. Schedules with "hidden" in the name will be ignored.
#
# Commands:
# hubot pager me as <email> - remember your pager email is <email>
# hubot pager forget me - forget your pager email
# hubot Am I on call - return if I'm currently on call or not
# hubot who's on call - return a list of services and who is on call for them
# hubot who's on call for <schedule> - return the username of who's on call for any schedule matching <search>
# hubot pager trigger <user> [<severity>] <msg> - create a new incident with <msg> and assign it to <user>. If specified, <severity> must be one of: critical, error, warning or info. If not specified, <severity> will default to 'critical'.
# hubot pager trigger <schedule> [<severity>] <msg> - create a new incident with <msg> and assign it the user currently on call for <schedule>. If specified, <severity> must be one of: critical, error, warning or info. If not specified, <severity> will default to 'critical'.
# hubot pager incidents - return the current incidents
# hubot pager sup - return the current incidents
# hubot pager sup --canary - return the current incidents, including Nines' canary incidents
# hubot pager incident <incident> - return the incident NNN
# hubot pager note <incident> <content> - add note to incident #<incident> with <content>
# hubot pager notes <incident> - show notes for incident #<incident>
# hubot pager problems - return all open incidents
# hubot pager ack <incident> - ack incident #<incident>
# hubot pager ack - ack triggered incidents assigned to you
# hubot pager ack! - ack all triggered incidents, not just yours
# hubot pager ack <incident1> <incident2> ... <incidentN> - ack all specified incidents
# hubot pager resolve <incident> - resolve incident #<incident>
# hubot pager resolve <incident1> <incident2> ... <incidentN> - resolve all specified incidents
# hubot pager resolve - resolve acknowledged incidents assigned to you
# hubot pager resolve! - resolve all acknowledged, not just yours
# hubot pager schedules - list schedules
# hubot pager schedules <search> - list schedules matching <search>
# hubot pager schedule <schedule> - show <schedule>'s shifts for the upcoming month
# hubot pager my schedule - show my on call shifts for the upcoming month in all schedules
# hubot pager me <schedule> <minutes> - take the pager for <minutes> minutes
#   hubot pager override <schedule> <start> - <end> [username] - Create a schedule override from <start> until <end>. If [username] is left off, defaults to you. start and end should date-parsable dates, like 2014-06-24T09:06:45-07:00, see http://momentjs.com/docs/#/parsing/string/ for examples.
# hubot pager overrides <schedule> - show upcoming overrides for the next month
# hubot pager override <schedule> delete <id> - delete an override by its ID
# hubot pager services - list services
# hubot pager maintenance <minutes> <service_id1> <service_id2> ... <service_idN> - schedule a maintenance window for <minutes> for specified services
#
# Authors:
# PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
pagerduty = require('../pagerduty')
async = require('async')
inspect = require('util').inspect
moment = require('moment-timezone')
request = require 'request'
Scrolls = require('../../../../lib/scrolls').context({script: 'pagerduty'})
pagerDutyUserEmail = process.env.HUBOT_PAGERDUTY_USERNAME
pagerDutyServiceApiKey = process.env.HUBOT_PAGERDUTY_SERVICE_API_KEY
pagerDutyEventsAPIURL = 'https://events.pagerduty.com/v2/enqueue'
module.exports = (robot) ->
  # hubot pager / hubot pager me - show your PagerDuty identity plus help.
  robot.respond /pager( me)?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      # NOTE(review): emailNote is undefined when the user has neither a
      # stored pagerduty email nor an email_address — confirm that renders
      # acceptably.
      emailNote = if hubotUser.pagerdutyEmail
        "You've told me your PagerDuty email is #{hubotUser.pagerdutyEmail}"
      else if hubotUser.email_address
        "I'm assuming your PagerDuty email is #{hubotUser.email_address}. Change it with `#{robot.name} pager me as PI:EMAIL:<EMAIL>END_PI`"
      if user
        msg.send "I found your PagerDuty user #{user.html_url}, #{emailNote}"
      else
        msg.send "I couldn't find your user :( #{emailNote}"
    # Dump every pager-related help command.
    cmds = robot.helpCommands()
    cmds = (cmd for cmd in cmds when cmd.match(/hubot (pager |who's on call)/))
    msg.send cmds.join("\n")
# hubot pager me as <email> - remember your pager email is <email>
robot.respond /pager(?: me)? as (.*)$/i, (msg) ->
hubotUser = robot.getUserBySlackUser(msg.message.user)
email = msg.match[1]
hubotUser.pagerdutyEmail = email
msg.send "Okay, I'll remember your PagerDuty email is #{email}"
# hubot pager forget me - forget your pager email
robot.respond /pager forget me$/i, (msg) ->
hubotUser = robot.getUserBySlackUser(msg.message.user)
hubotUser.pagerdutyEmail = undefined
msg.send "Okay, I've forgotten your PagerDuty email"
  # hubot pager incident <incident> - return the incident NNN
  robot.respond /(pager|major)( me)? incident (.*)$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    # Look up a single incident by number and render it one-line style.
    pagerduty.getIncident msg.match[3], (err, incident) ->
      if err?
        robot.emit 'error', err, msg
        return
      msg.send formatIncident(incident)
# hubot pager incidents - return the current incidents
# hubot pager sup - return the current incidents
# hubot pager problems - return all open incidents
robot.respond /(pager|major)( me)? (inc|incidents|sup|problems)( --canary)?$/i, (msg) ->
pagerduty.getIncidents "triggered,acknowledged", (err, incidents) ->
if err?
robot.emit 'error', err, msg
return
unless msg.match[4]
incidents = incidents.filter (inc) ->
!/(ninesapp\/|Prometheus )canary/.test(inc.title)
if incidents.length == 0
msg.send "No open incidents"
return
buffer = "Triggered:\n----------\n"
for junk, incident of incidents.reverse()
if incident.status == 'triggered'
buffer = buffer + formatIncident(incident)
buffer = buffer + "\nAcknowledged:\n-------------\n"
for junk, incident of incidents.reverse()
if incident.status == 'acknowledged'
buffer = buffer + formatIncident(incident)
msg.send buffer
  # hubot pager trigger (no user/schedule)
  #
  # Catches a bare `pager trigger <word>` with no message text and asks
  # for the full form instead of silently matching nothing.
  robot.respond /(pager|major)( me)? (?:trigger|page) ([\w\-]+)$/i, (msg) ->
    msg.reply "Please include a user or schedule to page, like 'hubot pager infrastructure everything is on fire'."
  # hubot pager trigger <user> <severity> <msg> - create a new incident with <msg> and assign it to <user>. Severity must be one of: critical, error, warning or info.
  # hubot pager trigger <schedule> <severity> <msg> - create a new incident with <msg> and assign it the user currently on call for <schedule>. Severity must be one of: critical, error, warning or info.
  #
  # Triggers an event via the Events API, waits for it to become an
  # incident, then reassigns that incident to the resolved user, schedule
  # on-call, or escalation policy.
  robot.respond /(pager|major)( me)? (?:trigger|page) ([\w\-]+)( (critical|error|warning|info) )?(.+)$/i, (msg) ->
    msg.finish()
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    fromUserName = hubotUser.name
    query = msg.match[3]
    severity = msg.match[5]
    reason = msg.match[6]
    description = "#{reason} - @#{fromUserName}"
    # Unrecognized/missing severity falls back to critical.
    supportedSeverities = ['critical', 'error', 'warning', 'info']
    if severity not in supportedSeverities
      severity = 'critical'
    # Figure out who we are
    campfireUserToPagerDutyUser msg, hubotUser, false, (triggeredByPagerDutyUser) ->
      triggeredByPagerDutyUserEmail = if triggeredByPagerDutyUser?
        emailForUser(triggeredByPagerDutyUser)
      else if pagerDutyUserEmail
        pagerDutyUserEmail
      unless triggeredByPagerDutyUserEmail
        msg.send "Sorry, I can't figure your PagerDuty account, and I don't have my own :( Can you tell me your PagerDuty email with `#{robot.name} pager me as PI:EMAIL:<EMAIL>END_PI`?"
        return
      # Figure out what we're trying to page
      reassignmentParametersForUserOrScheduleOrEscalationPolicy msg, query, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          # reassignmentParametersForUserOrScheduleOrEscalationPolicy constructs explicit,
          # human-consumable errors with good messages. Send it to the user if we got one.
          if err.message != ""
            msg.reply err.message
          return
        pagerDutyIntegrationAPI msg, "trigger", query, description, severity, (err, json) ->
          if err?
            robot.emit 'error', err, msg
            return
          msg.reply ":pager: triggered! now assigning it to the right user..."
          incidentKey = json.dedup_key
          # The event needs time to become an incident before it can be
          # reassigned — hence the delay below.
          setTimeout () ->
            pagerduty.get "/incidents", {incident_key: incidentKey}, (err, json) ->
              if err?
                robot.emit 'error', err, msg
                return
              if json?.incidents.length == 0
                msg.reply "Couldn't find the incident we just created to reassign. Please try again :/"
                return
              incident = json.incidents[0]
              data = {"type": "incident_reference"}
              if results.assigned_to_user?
                data['assignments'] = [{"assignee": {"id": results.assigned_to_user, "type": "user_reference"}}]
              if results.escalation_policy?
                data['escalation_policy'] = {"id": results.escalation_policy, "type": "escalation_policy_reference"}
              headers = {from: triggeredByPagerDutyUserEmail}
              pagerduty.put "/incidents/#{incident.id}", {'incident': data}, headers, (err, json) ->
                if err?
                  robot.emit 'error', err, msg
                  return
                if not json?.incident
                  msg.reply "Problem reassigning the incident :/"
                  return
                msg.reply ":pager: assigned to #{results.name}!"
          , 7000 # set timeout to 7s. sometimes PagerDuty needs a bit of time for events to propagate as incidents
  # hubot pager ack <incident> - ack incident #<incident>
  # hubot pager ack <incident1> <incident2> ... <incidentN> - ack all specified incidents
  robot.respond /(?:pager|major)(?: me)? ack(?:nowledge)? (.+)$/i, (msg) ->
    msg.finish()
    if pagerduty.missingEnvironmentForApi(msg)
      return
    incidentNumbers = parseIncidentNumbers(msg.match[1])
    # Filter on 'triggered,acknowledged': re-acking an acknowledged
    # incident is harmless, and an incident that timed out will have
    # become 'triggered' again anyway.
    updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'acknowledged')
  # hubot pager ack - ack triggered incidents assigned to you
  # hubot pager ack! - ack all triggered incidents, not just yours
  robot.respond /(pager|major)( me)? ack(nowledge)?(!)?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    # Trailing "!" acknowledges everyone's incidents, not just yours.
    force = msg.match[4]?
    pagerduty.getIncidents 'triggered,acknowledged', (err, incidents) ->
      if err?
        robot.emit 'error', err, msg
        return
      email = emailForUser(hubotUser)
      incidentsForEmail incidents, email, (err, filteredIncidents) ->
        if err?
          msg.send err.message
          return
        if force
          filteredIncidents = incidents
        if filteredIncidents.length is 0
          # nothing assigned to the user, but there were others
          if incidents.length > 0 and not force
            msg.send "Nothing assigned to you to acknowledge. Acknowledge someone else's incident with `hubot pager ack <nnn>`"
          else
            msg.send "Nothing to acknowledge"
          return
        incidentNumbers = (incident.incident_number for incident in filteredIncidents)
        # only acknowledge triggered things
        updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'acknowledged')
  # hubot pager resolve <incident> - resolve incident #<incident>
  # hubot pager resolve <incident1> <incident2> ... <incidentN> - resolve all specified incidents
  robot.respond /(?:pager|major)(?: me)? res(?:olve)?(?:d)? (.+)$/i, (msg) ->
    msg.finish()
    if pagerduty.missingEnvironmentForApi(msg)
      return
    incidentNumbers = parseIncidentNumbers(msg.match[1])
    # Allow resolving both triggered and acknowledged incidents, since the
    # user named them explicitly.
    updateIncidents(msg, incidentNumbers, 'triggered,acknowledged', 'resolved')
# hubot pager resolve - resolve acknowledged incidents assigned to you
# hubot pager resolve! - resolve all acknowledged, not just yours
robot.respond /(pager|major)( me)? res(olve)?(d)?(!)?$/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
hubotUser = robot.getUserBySlackUser(msg.message.user)
force = msg.match[5]?
pagerduty.getIncidents "acknowledged", (err, incidents) ->
if err?
robot.emit 'error', err, msg
return
email = emailForUser(hubotUser)
incidentsForEmail incidents, email, (err, filteredIncidents) ->
if err?
robot.emit 'error', err, msg
return
if force
filteredIncidents = incidents
if filteredIncidents.length is 0
# nothing assigned to the user, but there were others
if incidents.length > 0 and not force
msg.send "Nothing assigned to you to resolve. Resolve someone else's incident with `hubot pager ack <nnn>`"
else
msg.send "Nothing to resolve"
return
incidentNumbers = (incident.incident_number for incident in filteredIncidents)
# only resolve things that are acknowledged
updateIncidents(msg, incidentNumbers, 'acknowledged', 'resolved')
  # hubot pager notes <incident> - show notes for incident #<incident>
  robot.respond /(pager|major)( me)? notes (.+)$/i, (msg) ->
    msg.finish()
    if pagerduty.missingEnvironmentForApi(msg)
      return
    incidentId = msg.match[3]
    pagerduty.get "/incidents/#{incidentId}/notes", {}, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      # One line per note: timestamp, author, content.
      buffer = ""
      for note in json.notes
        buffer += "#{note.created_at} #{note.user.summary}: #{note.content}\n"
      if not buffer
        buffer = "No notes!"
      msg.send buffer
  # hubot pager note <incident> <content> - add note to incident #<incident> with <content>
  robot.respond /(pager|major)( me)? note ([\d\w]+) (.+)$/i, (msg) ->
    msg.finish()
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    if pagerduty.missingEnvironmentForApi(msg)
      return
    incidentId = msg.match[3]
    content = msg.match[4]
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userEmail = emailForUser(user)
      # The note is attributed via the From header, so an email is required.
      return unless userEmail
      data =
        note:
          content: content
      headers = {from: userEmail}
      pagerduty.post "/incidents/#{incidentId}/notes", data, headers, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        if json && json.note
          msg.send "Got it! Note created: #{json.note.content}"
        else
          msg.send "Sorry, I couldn't do it :("
  # hubot pager schedules - list schedules
  # hubot pager schedules <search> - list schedules matching <search>
  robot.respond /(pager|major)( me)? schedules( (.+))?$/i, (msg) ->
    query = {}
    if msg.match[4]
      query['query'] = msg.match[4]
    if pagerduty.missingEnvironmentForApi(msg)
      return
    msg.send "Retrieving schedules. This may take a few seconds..."
    pagerduty.getSchedules query, (err, schedules) ->
      if err?
        robot.emit 'error', err, msg
        return
      if schedules.length == 0
        msg.send 'No schedules found!'
        return
      # One bullet per schedule, linked to its dashboard page.
      renderSchedule = (schedule, cb) ->
        cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}|#{schedule.name}>")
      async.map schedules, renderSchedule, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          return
        # Chunk output so each message stays under the transport limit.
        for chunk in chunkMessageLines(results, 7000)
          msg.send chunk.join("\n")
  # hubot pager schedule <schedule> - show <schedule>'s shifts for the upcoming month
  # hubot pager overrides <schedule> - show upcoming overrides for the next month
  robot.respond /(pager|major)( me)? (schedule|overrides)( ([\w\-]+))?( ([^ ]+))?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    # Shifts/overrides for the next 30 days.
    query = {
      since: moment().format(),
      until: moment().add(30, 'days').format()
    }
    if !msg.match[5]
      msg.reply "Please specify a schedule with 'pager #{msg.match[3]} <name>.'' Use 'pager schedules' to list all schedules."
      return
    # Optional trailing argument is a display timezone.
    if msg.match[7]
      timezone = msg.match[7]
    else
      timezone = 'UTC'
    msg.send "Retrieving schedules. This may take a few seconds..."
    withScheduleMatching msg, msg.match[5], (schedule) ->
      scheduleId = schedule.id
      return unless scheduleId
      if msg.match[3] && msg.match[3].match /overrides/
        url = "/schedules/#{scheduleId}/overrides"
        query['editable'] = 'true'
        query['overflow'] = 'true'
        key = "overrides"
      else
        url = "/oncalls"
        # NOTE(review): this literal looks like a redaction artifact; the
        # /oncalls listing elsewhere in this file uses the "oncalls"
        # collection key — confirm against the pagerduty client.
        key = "PI:KEY:<KEY>END_PI"
        query['schedule_ids'] = [scheduleId]
        query['include'] = ['users']
      pagerduty.getAll url, query, key, (err, entries) ->
        if err?
          robot.emit 'error', err, msg
          return
        unless entries.length > 0
          msg.send "None found!"
          return
        # Display in chronological order.
        sortedEntries = entries.sort (a, b) ->
          moment(a.start).unix() - moment(b.start).unix()
        msg.send formatOncalls(sortedEntries, timezone)
  # hubot pager my schedule - show my on call shifts for the upcoming month in all schedules
  robot.respond /(pager|major)( me)? my schedule( ([^ ]+))?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userId = user.id
      # Optional trailing argument is a display timezone.
      if msg.match[4]
        timezone = msg.match[4]
      else
        timezone = 'UTC'
      # All of this user's on-call entries for the next 30 days.
      query = {
        since: moment().format(),
        until: moment().add(30, 'days').format(),
        user_ids: [user.id]
        include: ['users']
      }
      pagerduty.getAll "/oncalls", query, "oncalls", (err, oncalls) ->
        if err?
          robot.emit 'error', err, msg
          return
        if oncalls.length == 0
          msg.send 'You are not oncall!'
          return
        msg.send formatOncalls(oncalls, timezone)
  # hubot pager override <schedule> <start> - <end> [username] - Create a schedule override from <start> until <end>. If [username] is left off, defaults to you. start and end should date-parsable dates, like 2014-06-24T09:06:45-07:00, see http://momentjs.com/docs/#/parsing/string/ for examples.
  robot.respond /(pager|major)( me)? (override) ([\w\-]+) ([\w\-:\+]+) - ([\w\-:\+]+)( (.*))?$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    # Optional trailing argument names someone else to take the override.
    if msg.match[8]
      overrideUser = robot.brain.userForName(msg.match[8])
      unless overrideUser
        msg.send "Sorry, I don't seem to know who that is. Are you sure they are in chat?"
        return
    else
      overrideUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, overrideUser, (user) ->
      userId = user.id
      unless userId
        return
      withScheduleMatching msg, msg.match[4], (schedule) ->
        scheduleId = schedule.id
        unless scheduleId
          return
        unless moment(msg.match[5]).isValid() && moment(msg.match[6]).isValid()
          msg.send "Please use a http://momentjs.com/ compatible date!"
          return
        start_time = moment(msg.match[5]).format()
        end_time = moment(msg.match[6]).format()
        override = {
          'start': start_time,
          'end': end_time,
          'user': {
            'id': userId,
            "type": "user_reference"
          },
        }
        data = { 'override': override }
        pagerduty.post "/schedules/#{scheduleId}/overrides", data, {}, (err, json) ->
          if err?
            robot.emit 'error', err, msg
            return
          unless json && json.override
            msg.send "That didn't work. Check Hubot's logs for an error!"
            return
          start = moment(json.override.start)
          end = moment(json.override.end)
          msg.send "Override setup! #{json.override.user.summary} has the pager from #{start.format()} until #{end.format()}"
# hubot pager override <schedule> delete <id> - delete an override by its ID
robot.respond /(pager|major)( me)? (overrides?) ([\w\-]*) (delete) (.*)$/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
withScheduleMatching msg, msg.match[4], (schedule) ->
scheduleId = schedule.id
unless scheduleId
return
pagerduty.delete "/schedules/#{scheduleId}/overrides/#{msg.match[6]}", (err, success) ->
unless success
msg.send "Something went weird."
return
msg.send ":boom:"
  # hubot pager me <schedule> <minutes> - take the pager for <minutes> minutes
  robot.respond /pager( me)? (.+) (\d+)$/i, (msg) ->
    msg.finish()
    # skip hubot pager incident NNN
    if msg.match[2] == 'incident'
      return
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userId = user.id
      unless userId
        return
      if !msg.match[2] || msg.match[2] == 'me'
        msg.reply "Please specify a schedule with 'pager me infrastructure 60'. Use 'pager schedules' to list all schedules."
        return
      withScheduleMatching msg, msg.match[2], (matchingSchedule) ->
        unless matchingSchedule.id
          return
        # Override the schedule from now until now + <minutes>.
        start = moment().format()
        minutes = parseInt msg.match[3]
        end = moment().add(minutes, 'minutes').format()
        override = {
          'start': start,
          'end': end,
          'user': {
            'id': userId,
            "type": "user_reference",
          },
        }
        # Look up the current pager holder first so we can name them in
        # the confirmation message.
        withCurrentOncall msg, matchingSchedule, (err, old_username, schedule) ->
          if err?
            robot.emit 'error', err, msg
            return
          data = { 'override': override }
          pagerduty.post "/schedules/#{schedule.id}/overrides", data, {}, (err, json) ->
            if err?
              robot.emit 'error', err, msg
              return
            unless json.override
              msg.send "Something went weird."
              return
            start = moment(json.override.start)
            end = moment(json.override.end)
            getPagerDutyUser userId, (err, user) ->
              if err?
                robot.emit 'error', err, msg
                return
              msg.send "Rejoice, @#{old_username}! @#{user.name} has the pager on #{schedule.name} until #{end.format()}"
  # hubot Am I on call - return if I'm currently on call or not
  robot.respond /am i on (call|oncall|on-call)/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    msg.send "Finding schedules, this may take a few seconds..."
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      userId = user.id
      # Report whether `userId` is the current on-call for schedule `s`;
      # non-member schedules short-circuit without an API call.
      renderSchedule = (s, cb) ->
        if not memberOfSchedule(s, userId)
          cb(null, {member: false})
          return
        withCurrentOncallId msg, s, (err, oncallUserid, oncallUsername, schedule) ->
          if err?
            cb(err)
            return
          if userId == oncallUserid
            cb(null, {member: true, body: "* Yes, you are on call for #{schedule.name} - https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}"})
          else
            cb(null, {member: true, body: "* No, you are NOT on call for #{schedule.name} (but #{oncallUsername} is)- https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}"})
      unless userId?
        msg.send "Couldn't figure out the pagerduty user connected to your account."
        return
      pagerduty.getSchedules (err, schedules) ->
        if err?
          robot.emit 'error', err, msg
          return
        if schedules.length == 0
          msg.send 'No schedules found!'
          return
        if (schedules.every (s) -> not memberOfSchedule(s, userId))
          msg.send "You are not assigned to any schedules"
          return
        async.map schedules, renderSchedule, (err, results) ->
          if err?
            robot.emit 'error', err, msg
            return
          # Keep only the schedules the user belongs to.
          results = (r.body for r in results when r.member)
          unless results.length
            results = ["You are not oncall this month!"]
          msg.send results.join("\n")
# hubot who's on call - return a list of services and who is on call for them
# hubot who's on call for <schedule> - return the username of who's on call for any schedule matching <search>
robot.respond /who(’s|'s|s| is|se)? (on call|oncall|on-call)( (?:for )?(.+))?/i, (msg) ->
if pagerduty.missingEnvironmentForApi(msg)
return
msg.send "Retrieving schedules. This may take a few seconds..."
scheduleName = msg.match[4]
renderSchedule = (s, cb) ->
withCurrentOncallUser msg, s, (err, user, schedule) ->
if err?
cb(err)
return
Scrolls.log("info", {at: 'who-is-on-call/renderSchedule', schedule: schedule.name, username: user.name})
if !pagerEnabledForScheduleOrEscalation(schedule) || user.name == "hubot" || user.name == undefined
cb(null, "No human on call")
return
slackHandle = guessSlackHandleFromEmail(user)
slackString = " (#{slackHandle})" if slackHandle
cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{schedule.id}|#{schedule.name}'s> oncall is #{user.name}#{slackString}")
renderScheduleNoUser = (s, cb) ->
Scrolls.log("info", {at: 'who-is-on-call/renderSchedule', schedule: s.name})
if !pagerEnabledForScheduleOrEscalation(s)
cb(null, undefined)
return
cb(null, "• <https://#{pagerduty.subdomain}.pagerduty.com/schedules##{s.id}|#{s.name}>")
if scheduleName?
withScheduleMatching msg, scheduleName, (s) ->
renderSchedule s, (err, text) ->
if err?
robot.emit 'error'
return
msg.send text
return
else
msg.send "Due to rate limiting please include the schedule name to also see who's on call. E.g. `.who's on call for <schedule>`. Schedule names are being retrieved..."
pagerduty.getSchedules (err, schedules) ->
if err?
robot.emit 'error', err, msg
return
if schedules.length == 0
msg.send 'No schedules found!'
return
async.map schedules, renderScheduleNoUser, (err, results) ->
if err?
Scrolls.log("error", {at: 'who-is-on-call/map-schedules/error', error: err})
robot.emit 'error', err, msg
return
results = (result for result in results when result?)
Scrolls.log("info", {at: 'who-is-on-call/map-schedules'})
for chunk in chunkMessageLines(results, 7000)
msg.send chunk.join("\n")
  # hubot pager services - list all PagerDuty services with status and URL.
  robot.respond /(pager|major)( me)? services$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    pagerduty.getAll "/services", {}, "services", (err, services) ->
      if err?
        robot.emit 'error', err, msg
        return
      if services.length == 0
        msg.send 'No services found!'
        return
      # One line per service: id, name, status, dashboard link.
      renderService = (service, cb) ->
        cb(null, "* #{service.id}: #{service.name} (#{service.status}) - https://#{pagerduty.subdomain}.pagerduty.com/services/#{service.id}")
      async.map services, renderService, (err, results) ->
        if err?
          robot.emit 'error', err, msg
          return
        msg.send results.join("\n")
  # hubot pager maintenance <minutes> <service_id1> <service_id2> ... <service_idN> - schedule a maintenance window for <minutes> for specified services
  robot.respond /(pager|major)( me)? maintenance (\d+) (.+)$/i, (msg) ->
    if pagerduty.missingEnvironmentForApi(msg)
      return
    hubotUser = robot.getUserBySlackUser(msg.message.user)
    campfireUserToPagerDutyUser msg, hubotUser, (user) ->
      # PagerDuty requires the requester's email in the From header.
      requesterEmail = emailForUser(user)
      unless requesterEmail
        return
      minutes = msg.match[3]
      service_ids = msg.match[4].split(' ')
      # Window runs from now until now + <minutes>.
      start_time = moment().format()
      end_time = moment().add(minutes, 'minutes').format()
      maintenance_window = {
        'start_time': start_time,
        'end_time': end_time,
        'type': 'maintenance_window',
        'services': service_ids.map (service_id) ->
          {
            'id': service_id,
            "type": "service_reference"
          }
      }
      data = { 'maintenance_window': maintenance_window }
      headers = {'from': requesterEmail}
      msg.send "Opening maintenance window for: #{service_ids}"
      pagerduty.post "/maintenance_windows", data, headers, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        unless json && json.maintenance_window
          msg.send "That didn't work. Check Hubot's logs for an error!"
          return
        msg.send "Maintenance window created! ID: #{json.maintenance_window.id} Ends: #{json.maintenance_window.end_time}"
# Determine whether a schedule's participants are available to be paged.
#
# s :: Object
# Decoded JSON from the Pagerduty Schedules or Escalation API.
#
# Returns a Boolean instance.
pagerEnabledForScheduleOrEscalation = (s) ->
description = s.description or ""
return description.indexOf('#nopage') == -1
parseIncidentNumbers = (match) ->
match.split(/[ ,]+/).map (incidentNumber) ->
parseInt(incidentNumber)
emailForUser = (user) ->
user.pagerdutyEmail || user.email_address || user.email || user.profile?.email
  # Resolve a chat user to the matching PagerDuty user record (by email).
  # `required` defaults to true; when false a missing mapping calls cb(null)
  # silently instead of complaining in the channel. On success: cb(pdUser).
  campfireUserToPagerDutyUser = (msg, user, required, cb) ->
    # Allow the 3-argument form (msg, user, cb).
    if typeof required is 'function'
      cb = required
      required = true
    email = emailForUser(user) || process.env.HUBOT_PAGERDUTY_TEST_EMAIL
    speakerEmail = emailForUser(msg.message.user)
    if not email
      if not required
        cb null
        return
      else
        # Word the complaint from the speaker's point of view.
        possessive = if email is speakerEmail
          "your"
        else
          "#{user.name}'s"
        addressee = if email is speakerEmail
          "you"
        else
          "#{user.name}"
        msg.send "Sorry, I can't figure out #{possessive} email address :( Can #{addressee} tell me with `#{robot.name} pager me as PI:EMAIL:<EMAIL>END_PI`?"
        return
    pagerduty.get "/users", {query: email}, (err, json) ->
      if err?
        robot.emit 'error', err, msg
        return
      if json.users.length isnt 1
        if json.users.length is 0 and not required
          cb null
          return
        else
          # The query is fuzzy; fall back to an exact-email scan of the results.
          user = tryToFind(email, json.users)
          if !user
            msg.send "Sorry, I expected to get 1 user back for #{email}, but only found a list that didn't include the requested email :sweat:. Can you make sure that is actually a real user on PagerDuty?"
          else
            cb(user)
        return
      cb(json.users[0])
tryToFind = (email, users) ->
users.find (user) ->
user.email == email
  # Search schedules by `q` and call cb with a single unambiguous match:
  # either the only result, or the unique case-insensitive exact name match.
  # cb receives undefined when the query is ambiguous or matches nothing.
  oneScheduleMatching = (msg, q, cb) ->
    query = {
      query: q
    }
    pagerduty.getSchedules query, (err, schedules) ->
      if err?
        robot.emit 'error', err, msg
        return
      # Single result returned
      if schedules?.length == 1
        schedule = schedules[0]
      # Multiple results returned and one is exact (case-insensitive)
      if schedules?.length > 1
        matchingExactly = schedules.filter (s) ->
          s.name.toLowerCase() == q.toLowerCase()
        if matchingExactly.length == 1
          schedule = matchingExactly[0]
      cb(schedule)
  # Like oneScheduleMatching, but complains in the channel instead of
  # invoking cb when no unambiguous schedule is found.
  withScheduleMatching = (msg, q, cb) ->
    oneScheduleMatching msg, q, (schedule) ->
      if schedule
        cb(schedule)
      else
        # maybe look for a specific name match here?
        msg.send "I couldn't determine exactly which schedule you meant by #{q}. Can you be more specific?"
        return
  # Resolve `string` to incident-reassignment parameters, trying in order:
  # a chat user -> {assigned_to_user}; an escalation policy ->
  # {escalation_policy}; a schedule's current on-call -> {assigned_to_user}.
  # cb(error, null) when nothing pageable matches or the target is #nopage.
  reassignmentParametersForUserOrScheduleOrEscalationPolicy = (msg, string, cb) ->
    if campfireUser = robot.brain.userForName(string)
      campfireUserToPagerDutyUser msg, campfireUser, (user) ->
        cb(null, { assigned_to_user: user.id, name: user.name })
    else
      pagerduty.get "/escalation_policies", query: string, (err, json) ->
        if err?
          robot.emit 'error', err, msg
          return
        escalationPolicy = null
        if json?.escalation_policies?.length == 1
          escalationPolicy = json.escalation_policies[0]
        # Multiple results returned and one is exact (case-insensitive)
        else if json?.escalation_policies?.length > 1
          matchingExactly = json.escalation_policies.filter (es) ->
            es.name.toLowerCase() == string.toLowerCase()
          if matchingExactly.length == 1
            escalationPolicy = matchingExactly[0]
        if escalationPolicy?
          # Refuse to page policies explicitly marked #nopage.
          unless pagerEnabledForScheduleOrEscalation(escalationPolicy)
            error = new Error("Found the #{escalationPolicy.name} escalation policy but it is marked #nopage, see /who's on call for schedules you can page.")
            cb(error, null)
            return
          cb(null, { escalation_policy: escalationPolicy.id, name: escalationPolicy.name })
          return
        # No escalation policy matched; fall back to schedules.
        oneScheduleMatching msg, string, (schedule) ->
          if schedule
            unless pagerEnabledForScheduleOrEscalation(schedule)
              error = new Error("Found the #{schedule.name} schedule but it is marked #nopage, see /who's on call for schedules you can page.")
              cb(error, null)
              return
            withCurrentOncallUser msg, schedule, (err, user, schedule) ->
              if err?
                cb(err, null)
                return
              cb(null, { assigned_to_user: user.id, name: user.name })
            return
          error = new Error("Couldn't find a user, unique schedule or escalation policy matching #{string} to page, see /who's on call for schedules you can page.")
          cb(error, null)
  # Call cb(err, username, schedule) for the current on-call of `schedule`.
  withCurrentOncall = (msg, schedule, cb) ->
    withCurrentOncallUser msg, schedule, (err, user, s) ->
      if err?
        cb(err, null, null)
        return
      cb(null, user.name, s)
  # Call cb(err, userId, username, schedule) for the current on-call of `schedule`.
  withCurrentOncallId = (msg, schedule, cb) ->
    withCurrentOncallUser msg, schedule, (err, user, s) ->
      if err?
        cb(err, null, null, null)
        return
      cb(null, user.id, user.name, s)
  # Look up who is on call for `schedule` over the next hour and call
  # cb(err, user, schedule).
  # NOTE(review): when nobody is on call, cb receives the string "nobody"
  # instead of a user object, so callers reading user.id/user.name get
  # undefined — confirm downstream handling is intended.
  withCurrentOncallUser = (msg, schedule, cb) ->
    oneHour = moment().add(1, 'hours').format()
    now = moment().format()
    query = {
      since: now,
      until: oneHour,
      schedule_ids: [schedule.id]
    }
    pagerduty.getAll "/oncalls", query, "oncalls", (err, oncalls) ->
      if err?
        cb(err, null, null)
        return
      unless oncalls and oncalls.length > 0
        cb(null, "nobody", schedule)
        return
      # Resolve the first oncall entry to a full user record.
      userId = oncalls[0].user.id
      getPagerDutyUser userId, (err, user) ->
        if err?
          cb(err)
          return
        cb(null, user, schedule)
  # Fetch a full PagerDuty user record by id.
  # NOTE(review): calls cb(null, "nobody") — a string, not a user object —
  # when the API response has no user payload; see withCurrentOncallUser.
  getPagerDutyUser = (userId, cb) ->
    pagerduty.get "/users/#{userId}", (err, json) ->
      if err?
        cb(err)
        return
      if not json.user
        cb(null, "nobody")
        return
      cb(null, json.user)
  # Send an event to the PagerDuty Events API via the integration key.
  # Only the "trigger" command builds a payload; any other cmd posts null data.
  pagerDutyIntegrationAPI = (msg, cmd, affected, description, severity, cb) ->
    unless pagerDutyServiceApiKey?
      msg.send "PagerDuty API service key is missing."
      msg.send "Ensure that HUBOT_PAGERDUTY_SERVICE_API_KEY is set."
      return
    data = null
    switch cmd
      when "trigger"
        payload = {summary: description, source: affected, severity: severity}
        data = {routing_key: pagerDutyServiceApiKey, event_action: "trigger", payload: payload}
    pagerDutyIntegrationPost msg, data, cb
formatIncident = (inc) ->
# { pd_nagios_object: 'service',
# HOSTNAME: 'fs1a',
# SERVICEDESC: 'snapshot_repositories',
# SERVICESTATE: 'CRITICAL',
# HOSTSTATE: 'UP' },
summary = if inc.trigger_summary_data
if inc.trigger_summary_data.pd_nagios_object == 'service'
"#{inc.trigger_summary_data.HOSTNAME}/#{inc.trigger_summary_data.SERVICEDESC}"
else if inc.trigger_summary_data.pd_nagios_object == 'host'
"#{inc.trigger_summary_data.HOSTNAME}/#{inc.trigger_summary_data.HOSTSTATE}"
# email services
else if inc.trigger_summary_data.subject
inc.trigger_summary_data.subject
else if inc.trigger_summary_data.description
inc.trigger_summary_data.description
else
""
else
"#{inc.title} #{inc.summary}"
names = []
for assigned in inc.assignments
names.push assigned.assignee.summary
if names
assigned_to = "- assigned to #{names.join(",")}"
else
assigned_to = "- nobody currently assigned"
"#{inc.incident_number}: #{inc.created_at} #{summary} #{assigned_to}\n"
  # Bulk-update the status of the listed incident numbers (e.g. acknowledge
  # or resolve). Only incidents currently matching `statusFilter` are
  # considered; the requester's email goes into the From header.
  updateIncidents = (msg, incidentNumbers, statusFilter, updatedStatus) ->
    campfireUserToPagerDutyUser msg, msg.message.user, (user) ->
      requesterEmail = emailForUser(user)
      unless requesterEmail
        return
      pagerduty.getIncidents statusFilter, (err, incidents) ->
        if err?
          robot.emit 'error', err, msg
          return
        foundIncidents = []
        for incident in incidents
          # FIXME this isn't working very consistently
          if incidentNumbers.indexOf(incident.incident_number) > -1
            foundIncidents.push(incident)
        if foundIncidents.length == 0
          msg.reply "Couldn't find incident(s) #{incidentNumbers.join(', ')}. Use `#{robot.name} pager incidents` for listing."
        else
          # loljson
          data = {
            incidents: foundIncidents.map (incident) ->
              {
                'id': incident.id,
                "type": "incident_reference",
                'status': updatedStatus
              }
          }
          headers = {from: requesterEmail}
          pagerduty.put "/incidents", data, headers, (err, json) ->
            if err?
              robot.emit 'error', err, msg
              return
            unless json?.incidents
              msg.reply "Problem updating incidents #{incidentNumbers.join(',')}"
              return
            # e.g. "Incidents 12, 13 resolved"
            buffer = "Incident"
            buffer += "s" if json.incidents.length > 1
            buffer += " "
            buffer += (incident.incident_number for incident in json.incidents).join(", ")
            buffer += " #{updatedStatus}"
            msg.reply buffer
pagerDutyIntegrationPost = (msg, body, cb) ->
request.post {uri: pagerDutyEventsAPIURL, json: true, body: body}, (err, res, body) ->
if err?
cb(err)
return
switch res.statusCode
when 200, 201, 202
cb(null, body)
else
cb(new PagerDutyError("#{res.statusCode} back from #{path}"))
allUserEmails = (cb) ->
pagerduty.getAll "/users", {}, "users", (err, returnedUsers) ->
if err?
cb(err)
return
users = {}
for user in returnedUsers
users[user.id] = user.email
cb(null, users)
incidentsForEmail = (incidents, userEmail, cb) ->
allUserEmails (err, userEmails) ->
if err?
cb(err)
return
filtered = []
for incident in incidents
for assignment in incident.assignments
assignedEmail = userEmails[assignment.assignee.id]
if assignedEmail is userEmail
filtered.push incident
cb(null, filtered)
memberOfSchedule = (schedule, userId) ->
schedule.users.some (scheduleUser) ->
scheduleUser.id == userId
  # Render oncall entries as a bulleted string in the given timezone,
  # de-duplicating repeated time spans within the same schedule.
  formatOncalls = (oncalls, timezone) ->
    buffer = ""
    schedules = {}
    for oncall in oncalls
      startTime = moment(oncall.start).tz(timezone).format()
      endTime = moment(oncall.end).tz(timezone).format()
      time = "#{startTime} - #{endTime}"
      username = guessSlackHandleFromEmail(oncall.user) || oncall.user.summary
      if oncall.schedule?
        scheduleId = oncall.schedule.id
        if scheduleId not of schedules
          schedules[scheduleId] = []
        # Print each (schedule, time span) pair only once.
        if time not in schedules[scheduleId]
          schedules[scheduleId].push time
          buffer += "• #{time} #{username} (<#{oncall.schedule.html_url}|#{oncall.schedule.summary}>)\n"
      else if oncall.escalation_policy?
        # no schedule embedded
        epSummary = oncall.escalation_policy.summary
        epURL = oncall.escalation_policy.html_url
        buffer += "• #{time} #{username} (<#{epURL}|#{epSummary}>)\n"
      else
        # override
        buffer += "• #{time} #{username}\n"
    buffer
chunkMessageLines = (messageLines, boundary) ->
allChunks = []
thisChunk = []
charCount = 0
for line in messageLines
if charCount >= boundary
allChunks.push(thisChunk)
charCount = 0
thisChunk = []
thisChunk.push(line)
charCount += line.length
allChunks.push(thisChunk)
allChunks
guessSlackHandleFromEmail = (user) ->
# Context: https://github.slack.com/archives/C0GNSSLUF/p1539181657000100
if user.email == "PI:EMAIL:<EMAIL>END_PI"
"`josh`"
else if user.email.search(/github\.com/)
user.email.replace(/(.+)\@github\.com/, '`$1`')
else
null
|
[
{
"context": "window.photos = [\n {key: 'adelson', width: 1078, height: 838},\n {key: '176', width",
"end": 34,
"score": 0.896380603313446,
"start": 27,
"tag": "KEY",
"value": "adelson"
},
{
"context": "y: 'adelson', width: 1078, height: 838},\n {key: '176', width: 2048, height: 1... | server/intrinsic/static/js/intrinsic/experiments/intrinsic_compare_tut.coffee | paulu/opensurfaces | 137 | window.photos = [
{key: 'adelson', width: 1078, height: 838},
{key: '176', width: 2048, height: 1360},
{key: '835', width: 2048, height: 1362},
{key: '103703', width: 2048, height: 1371},
{key: '98082', width: 2048, height: 1360},
{key: '66', width: 2048, height: 1356}
{key: '104550', width: 2048, height: 1365}
{key: '108505', width: 2048, height: 3072}
]
# Expand a tutorial-content description into a render-ready object:
# resolves image_key to its preloaded photo record and converts pixel
# coordinates (x1,y1)/(x2,y2) into normalized, labeled points.
window.build_content = (c) ->
  ret = $.extend({}, c)
  if c.image_key?
    ret.photo = window.photos_by_key[c.image_key]
  # BUG FIX: the original condition tested c.x1? twice and never c.y1?.
  if c.x1? and c.y1? and c.x2? and c.y2?
    ret.points = [
      {
        x_orig: c.x1 / ret.photo.width,
        y_orig: c.y1 / ret.photo.height,
        label: "1",
        color:'#700'
      }, {
        x_orig: c.x2 / ret.photo.width,
        y_orig: c.y2 / ret.photo.height,
        label: "2",
        color:'#007'
      }
    ]
  return ret
# Page bootstrap: preload every tutorial photo, index photos by key, then
# start the tutorial at start_idx (read from the URL, defaulting to 0).
$( ->
  window.show_modal_loading("Loading...", 250)
  # load images
  for p in window.photos
    p.url = "/static/img/intrinsic/experiments/#{p.key}.jpg"
  image_urls = (p.url for p in window.photos)
  image_objs = load_images(image_urls, ->
    # store photo object
    for p, i in window.photos
      p.obj = image_objs[i]
    # index by key
    window.photos_by_key = {}
    for p in window.photos
      window.photos_by_key[p.key] = p
    # start the tutorial
    window.hide_modal_loading()
    tutorial = new IntrinsicCompareTutorial()
    start_idx = window.getURLParameter('start_idx')
    if start_idx?
      start_idx = parseInt(start_idx)
    else
      start_idx = 0
    tutorial.show_page(start_idx)
  )
)
class IntrinsicCompareTutorial
constructor: ->
@animating = true
@set_submit_enabled(false)
@ui = new IntrinsicCompareUI()
@ui.extra_margin = 100
@tutorial_content = [
window.build_content(
image_key: 'adelson',
x1: 300, y1: 838/2, x2: 900, y2: 838/2,
message: [
'This short tutorial will teach you how to do this HIT. You get as',
'many tries as you like, and you don\'t have to do it again once',
'you pass. Click "Next" to continue to the next screen.'
]
),
window.build_content(
image_key: 'adelson',
x1: 359, y1: 334, x2: 426, y2: 259,
message: [
'On this checkerboard pattern, there are two colors.',
'Point 1 is on a dark square, and point 2 is on a light square.'
'You can drag the photo to see more or scroll to zoom in/out.'
]
),
window.build_content(
image_key: 'adelson',
x1: 359, y1: 334, x2: 621, y2: 454,
message: [
'Now, point 2 is in shadow. However, we understand that the square',
'at point 2 is still lighter than point 1.',
'(Note that we are referring to the center of the circle)' ]
),
window.build_content(
image_key: '176',
x1: 922, y1: 56, x2: 1041, y2: 279,
message: [
'In this photo, all points on the ceiling have the same surface',
'color. The surface is white even though shadows make some parts',
'appear darker.' ]
),
window.build_content(
image_key: '176',
x1: 602, y1: 550, x2: 1254, y2: 652,
message: [
'Similarly, these two points on the wall have the same color.',
'The surface color is what matters, not the lighting or shadows.', ]
),
window.build_content(
image_key: '176',
x1: 775, y1: 991, x2: 1002, y2: 906,
message: [
'The surface color at point 2 is darker than the surface color at',
'point 1 even though it is under a highlight.' ]
),
window.build_content(
image_key: '176',
x1: 1273, y1: 614, x2: 1368, y2: 641,
expected_darker: ['E'],
message: [
'Now it is your turn. Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Indicate which point has a darker surface color.' ]
message_success: [
'Correct! Both points are on the same colored wall.' ]
),
window.build_content(
image_key: '66',
x1: 211, y1: 862, x2: 390, y2: 827,
message: [
'Sometimes you cannot see enough and need to zoom out.' ]
),
window.build_content(
image_key: '66',
x1: 211, y1: 862, x2: 390, y2: 827,
zoom_out: true,
message: [
'You can do this with the scroll wheel. You can also drag',
'with the mouse.' ]
),
window.build_content(
image_key: '108505',
x1: 1920, y1: 846, x2: 1440, y2: 732,
expected_darker: ['E'],
message: [
'Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Remember that the point of this task is to ignore',
'shading effects. Imagine what it would look like if every',
'surface was receiving the same amount of lighting.'
]
message_success: [
'Correct! Both points are on a white colored wall.' ]
),
window.build_content(
image_key: '108505',
x1: 1086, y1: 240, x2: 1077, y2: 573,
expected_darker: ['E'],
message: [
'Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Remember that the point of this task is to ignore',
'shading effects. Imagine what it would look like if every',
'surface was receiving the same amount of lighting.'
]
message_success: [
'Correct! Both points are on a white colored wall.' ]
),
window.build_content(
image_key: '108505',
x1: 1722, y1: 3030, x2: 1755, y2: 2670,
expected_darker: ['1'],
message: [
'Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Remember that the point of this task is to ignore',
'shading effects.' ]
message_success: [
'Correct! The glossy floor is darker than the white wall.' ]
),
window.build_content(
image_key: '176',
x1: 968, y1: 1100, x2: 1076, y2: 1163,
expected_darker: ['2'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.'
]
message_darker_error: [
'Try again. They are close, but the strips of wood are not all',
'the same color.' ]
message_confidence_error: [
'You should be able to tell with higher confidence. Try zooming',
'out (with the scroll wheel) or dragging the image around to see',
'more.' ]
message_success: [
'Correct! The two points are on slightly different colored',
'strips of wood.' ]
),
window.build_content(
image_key: '103703',
x1: 1637, y1: 587, x2: 1684, y2: 1123,
message: [
'Sometimes the image is too dark or bright and you cannot tell which',
'point is darker. In these cases, you can guess and then tell us',
'that you are guessing.' ]
),
window.build_content(
image_key: '176',
x1: 204, y1: 291, x2: 374, y2: 602,
expected_darker: ['1', '2', 'E'],
expected_confidence: ['0'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'This example is unclear, so click on any button.' ]
message_confidence_error: [
'This part of the image is too bright and washed out, so you cannot',
'tell what the answer should be. Therefore, you are only guessing'
'and should tell us this.' ]
message_success: [
'Correct! When the pixels are too bright or too dark to tell,',
'please tell us that you are guessing.']
)
window.build_content(
image_key: '835',
x1: 446, y1: 1179, x2: 649, y2: 1112,
expected_darker: ['1'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'Try again. Remember that you should try and ignore highlights.' ]
message_confidence_error: [
'You should be able to tell that the floor is darker',
'than the carpet. You can zoom out to better see.'
]
message_success: [
'Correct! We care about intrinsic surface color, not highlights',
'or shadows.'
]
)
window.build_content(
image_key: '835',
x1: 428, y1: 176, x2: 499, y2: 140,
expected_darker: ['E'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'Try again. Remember that you should try and ignore highlights.' ]
message_confidence_error: [
'You should be able to tell that the two points are on the same wall.'
]
message_success: [
'Correct! If two points have the same surface color, but one',
'appears brighter because of a highlight, you should still consider',
'them to have the same brightness.'
]
)
window.build_content(
image_key: '66',
x1: 331, y1: 1170, x2: 449, y2: 1203,
expected_darker: ['E'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'Try again. Remember that you should try and ignore shadows.' ]
message_confidence_error: [
'You should be able to tell that the two points are on the same wall.'
]
message_success: [
'Correct! If two points have the same surface color, but one',
'appears darker because of a shadow, you should still consider',
'them to have the same brightness.'
]
)
window.build_content(
image_key: '835',
x1: 1730, y1: 769, x2: 1629, y2: 965,
expected_darker: ['1'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.',
'(Remember that you can drag or scroll to see more)',
]
message_darker_error: [
'Try again. They are close, but one is slightly darker.',
'You can see more of the carpet if you zoom out.'
]
message_confidence_error: [
'You should be able to tell with more accuracy than just guessing.',
'Try zooming out or dragging the image to see more areas.' ]
message_success: [
'Correct! The carpet is lighter even though it is in shadow.' ]
)
#window.build_content(
#image_key: '98082',
#x1: 867, y1: 564, x2: 1475, y2: 751,
#expected_darker: ['2'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. They are close, but one is slightly darker.' ]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! It is a single material, but point 2 is on a darker'
#'portion.' ]
#),
#window.build_content(
#image_key: '98082',
#x1: 1475, y1: 751, x2: 687, y2: 753,
#expected_darker: ['E'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Notice that the pattern is repeating.' ]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! The two points are on the same portion of a repeating',
#'pattern.' ]
#),
window.build_content(
image_key: '98082',
x1: 129, y1: 476, x2: 508, y2: 349,
expected_darker: ['E'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.',
]
message_darker_error: [
'Try again. You should ignore glossy highlights.' ]
message_confidence_error: [
'You should be able to tell with more accuracy than just guessing.',
'Try zooming out or dragging the image to see more areas.' ]
message_success: [
'Correct! The two points are on the same portion of a glossy',
'surface.' ]
)
# BELOW: pages teaching about mirrors. Mirrors are filtered out by the
# first task, so we aren't showing them.
#window.build_content(
#image_key: '176',
#x1: 1204, y1: 1030, x2: 1374, y2: 957,
#message: [
#'If you see a transparent or reflective object, you should judge the',
#'closest object, and not the things behind it. These two points',
#'have the same surface color.'
#]
#)
#window.build_content(
#image_key: '104550',
#x1: 911, y1: 423, x2: 1520, y2: 689,
#message: [
#'Similarly for mirrors, judge the surface of the mirror,'
#'not what you see inside the reflection. These two points'
#'are both on the mirror so they have the same surface color.'
#]
#)
#window.build_content(
#image_key: '104550',
#x1: 763, y1: 111, x2: 1019, y2: 239,
#message: [
#'Finally, since mirrors perfectly reflect light, they have a',
#'lighter surface color than non-mirror objects. Therefore, point 1',
#'is darker than point 2 in this example.'
#]
#)
#window.build_content(
#image_key: '66',
#x1: 845, y1: 932, x2: 998, y2: 919,
#expected_darker: ['E'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.'
#]
#message_darker_error: [
#'Try again. We care about the surface, not what is behind it.' ]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! The two points are on the same portion of a glass',
#'surface.' ]
#)
#window.build_content(
#image_key: '104550',
#x1: 1182, y1: 183, x2: 721, y2: 343,
#expected_darker: ['2'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Remember that mirrors reflect the most light, so they',
#'have the lightest natural surface color.'
#]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! Even though both points look like they are on the',
#'curtains, one of them is on a mirror.',
#]
#)
#window.build_content(
#image_key: '104550',
#x1: 978, y1: 404, x2: 1092, y2: 264,
#expected_darker: ['E'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Remember that the points are both inside a mirror',
#'so they have the same natural surface color.'
#]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! Always check if the points are inside a mirror.' ]
#)
#window.build_content(
#image_key: '104550',
#x1: 919, y1: 811, x2: 1046, y2: 748,
#expected_darker: ['1'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Remember that mirrors reflect the most light, so they.',
#'have the lightest natural surface color.'
#]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! Even though both points look like they are on the tissue',
#'box, one of them is on a mirror.' ]
#)
]
$(window).on('resize', debounce(@on_resize))
$('.response-darker').on('click', @btn_response_darker)
$('.response-confidence').on('click', @btn_response_confidence)
$('#btn-next').on('click', @btn_next)
$('#btn-back').on('click', @btn_back)
$('#btn-submit').on('click', @btn_submit)
  # Re-render the current page on window resize (skipped on the done screen).
  on_resize: =>
    if not @submit_enabled
      @show_page(@idx)
  # Complete the HIT; only valid once the final screen enabled submission.
  btn_submit: =>
    if @submit_enabled
      window.mt_tutorial_complete()
  # Advance to the next tutorial page, or — after the last page — switch to
  # the "done" screen and enable submission.
  btn_next: =>
    if not @animating
      if @idx < @tutorial_content.length - 1
        @show_page(@idx + 1)
      else
        @idx += 1
        $('#tut-confidence').hide()
        set_btn_enabled('button.response', false)
        set_btn_enabled('#btn-next', false)
        set_btn_enabled('#btn-back', true)
        @ui.clear_ui()
        $('#mt-done').show()
        @set_submit_enabled(true)
  # Go back one page (ignored during animations and on the first page).
  btn_back: =>
    if not @animating and @idx > 0
      @show_page(@idx - 1)
  # Handle a "which point is darker" button click: on an expected answer,
  # either move on to the confidence question (when the page has one) or
  # show the success message; otherwise show the error hint.
  btn_response_darker: (event) =>
    if not @animating
      content = @tutorial_content[@idx]
      darker = $(event.target).attr('data-darker')
      if darker in content.expected_darker
        if content.expected_confidence?
          @ui.set_message(content.message)
          set_btn_enabled('button.response-confidence', true)
        else
          @ui.set_message(content.message_success)
          @show_navigation_buttons()
      else
        set_btn_enabled('button.response-confidence', false)
        @ui.set_message(content.message_darker_error)
  # Handle a confidence button click; success only when the chosen level is
  # among the expected confidence values for this page.
  btn_response_confidence: (event) =>
    if not @animating
      content = @tutorial_content[@idx]
      confidence = $(event.target).attr('data-confidence')
      if confidence in content.expected_confidence
        @ui.set_message(content.message_success)
        @show_navigation_buttons()
      else
        @ui.set_message(content.message_confidence_error)
  # Render tutorial page `idx`: disable all input, animate the UI to the
  # page's content, then re-enable the appropriate buttons once the
  # animation finishes.
  show_page: (idx) ->
    console.log 'show_page', idx
    @idx = idx
    $('#mt-done').hide()
    set_btn_enabled('button.response', false)
    set_btn_enabled('button.controls', false)
    @set_submit_enabled(false)
    @animating = true
    content = @tutorial_content[idx]
    on_end = ( =>
      set_btn_enabled('#btn-back', idx > 0)
      # Pages with an expected answer show response buttons; others just
      # show navigation.
      if content.expected_darker?
        @show_response_buttons()
      else
        @show_navigation_buttons()
      @animating = false
    )
    if content.zoom_out
      # Zoom-out pages additionally animate to double the current zoom.
      @ui.update_ui(content, ( =>
        target = @ui.compute_current_zoom()
        target[2] *= 2
        @ui.zoom_to_target(target, on_end)
      ))
    else
      @ui.update_ui(content, on_end)
  # Enable the darker-point buttons (confidence stays disabled until the
  # darker answer is correct) and show the confidence row only for pages
  # that ask for a confidence level.
  show_response_buttons: ->
    set_btn_enabled('button.response-darker', true)
    set_btn_enabled('button.response-confidence', false)
    $('button.response-darker').removeClass('active')
    if @tutorial_content[@idx].expected_confidence?
      $('#tut-confidence').show()
    else
      $('#tut-confidence').hide()
    $('#tut-buttons').show()
  # Hide the response buttons and enable "Next" (still enabled on the last
  # page so btn_next can advance to the done screen).
  show_navigation_buttons: ->
    $('#tut-buttons').hide()
    set_btn_enabled('#btn-next', @idx < @tutorial_content.length)
  # Show and enable the submit button when `b` is truthy, hide it otherwise.
  set_submit_enabled: (b) ->
    @submit_enabled = b
    if b
      $('#btn-submit').show()
      set_btn_enabled('#btn-submit', true)
    else
      $('#btn-submit').hide()
# Metadata for the tutorial photos. `key` is the (redacted) asset identifier
# used to build the image URL; width/height are the original pixel sizes,
# used by build_content to normalize point coordinates into [0, 1].
# (CoffeeScript inserts the missing commas between one-per-line elements.)
window.photos = [
  {key: '<KEY>', width: 1078, height: 838},
  {key: '<KEY>', width: 2048, height: 1360},
  {key: '<KEY>', width: 2048, height: 1362},
  {key: '<KEY>', width: 2048, height: 1371},
  {key: '<KEY>', width: 2048, height: 1360},
  {key: '<KEY>', width: 2048, height: 1356}
  {key: '<KEY>', width: 2048, height: 1365}
  {key: '<KEY>', width: 2048, height: 3072}
]
# Build a tutorial-page descriptor from a raw config object `c`.
# Returns a shallow copy of `c`, augmented with:
#   photo  - the photo record looked up from window.photos_by_key (if
#            c.image_key is given; photos_by_key must already be populated)
#   points - two labeled point markers with coordinates normalized by the
#            photo's pixel dimensions (if all of x1/y1/x2/y2 are given)
window.build_content = (c) ->
  ret = $.extend({}, c)
  if c.image_key?
    ret.photo = window.photos_by_key[c.image_key]
  # BUG FIX: the original guard tested c.x1? twice and never checked c.y1?,
  # so a config missing y1 would produce NaN y_orig instead of no points.
  if c.x1? and c.y1? and c.x2? and c.y2?
    ret.points = [
      {
        x_orig: c.x1 / ret.photo.width,
        y_orig: c.y1 / ret.photo.height,
        label: "1",
        color:'#700'
      }, {
        x_orig: c.x2 / ret.photo.width,
        y_orig: c.y2 / ret.photo.height,
        label: "2",
        color:'#007'
      }
    ]
  return ret
# Entry point: preload every tutorial image, then construct the tutorial and
# show the starting page (from the ?start_idx= URL parameter, default 0).
$( ->
  window.show_modal_loading("Loading...", 250)
  # load images
  for p in window.photos
    p.url = "/static/img/intrinsic/experiments/#{p.key}.jpg"
  image_urls = (p.url for p in window.photos)
  # NOTE: the callback fires after image loading completes, so `image_objs`
  # has already been assigned by the time the callback reads it.
  image_objs = load_images(image_urls, ->
    # store photo object
    for p, i in window.photos
      p.obj = image_objs[i]
    # index by key
    window.photos_by_key = {}
    for p in window.photos
      window.photos_by_key[p.key] = p
    # start the tutorial
    window.hide_modal_loading()
    tutorial = new IntrinsicCompareTutorial()
    start_idx = window.getURLParameter('start_idx')
    if start_idx?
      start_idx = parseInt(start_idx)
    else
      start_idx = 0
    tutorial.show_page(start_idx)
  )
)
# Interactive tutorial for the intrinsic-image comparison HIT. Walks the
# worker through instructional pages, then practice questions with feedback;
# the submit button only unlocks after advancing past the last page.
class IntrinsicCompareTutorial
  constructor: ->
    @animating = true
    @set_submit_enabled(false)
    @ui = new IntrinsicCompareUI()
    @ui.extra_margin = 100
    # Ordered tutorial pages (built via window.build_content). Pages with
    # expected_darker are practice questions; pages with expected_confidence
    # additionally require a confidence rating. CoffeeScript implicit commas
    # separate the one-per-line elements where explicit commas are absent.
    @tutorial_content = [
      window.build_content(
        image_key: '<KEY>',
        x1: 300, y1: 838/2, x2: 900, y2: 838/2,
        message: [
          'This short tutorial will teach you how to do this HIT. You get as',
          'many tries as you like, and you don\'t have to do it again once',
          'you pass. Click "Next" to continue to the next screen.'
        ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 359, y1: 334, x2: 426, y2: 259,
        message: [
          'On this checkerboard pattern, there are two colors.',
          'Point 1 is on a dark square, and point 2 is on a light square.'
          'You can drag the photo to see more or scroll to zoom in/out.'
        ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 359, y1: 334, x2: 621, y2: 454,
        message: [
          'Now, point 2 is in shadow. However, we understand that the square',
          'at point 2 is still lighter than point 1.',
          '(Note that we are referring to the center of the circle)' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 922, y1: 56, x2: 1041, y2: 279,
        message: [
          'In this photo, all points on the ceiling have the same surface',
          'color. The surface is white even though shadows make some parts',
          'appear darker.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 602, y1: 550, x2: 1254, y2: 652,
        message: [
          'Similarly, these two points on the wall have the same color.',
          'The surface color is what matters, not the lighting or shadows.', ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 775, y1: 991, x2: 1002, y2: 906,
        message: [
          'The surface color at point 2 is darker than the surface color at',
          'point 1 even though it is under a highlight.' ]
      ),
      window.build_content(
        image_key: '1<KEY>',
        x1: 1273, y1: 614, x2: 1368, y2: 641,
        expected_darker: ['E'],
        message: [
          'Now it is your turn. Using the buttons at the top, indicate which',
          'point has a darker surface color.' ]
        message_darker_error: [
          'Try again. Indicate which point has a darker surface color.' ]
        message_success: [
          'Correct! Both points are on the same colored wall.' ]
      ),
      window.build_content(
        image_key: '6<KEY>',
        x1: 211, y1: 862, x2: 390, y2: 827,
        message: [
          'Sometimes you cannot see enough and need to zoom out.' ]
      ),
      window.build_content(
        image_key: '6<KEY>',
        x1: 211, y1: 862, x2: 390, y2: 827,
        zoom_out: true,
        message: [
          'You can do this with the scroll wheel. You can also drag',
          'with the mouse.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 1920, y1: 846, x2: 1440, y2: 732,
        expected_darker: ['E'],
        message: [
          'Using the buttons at the top, indicate which',
          'point has a darker surface color.' ]
        message_darker_error: [
          'Try again. Remember that the point of this task is to ignore',
          'shading effects. Imagine what it would look like if every',
          'surface was receiving the same amount of lighting.'
        ]
        message_success: [
          'Correct! Both points are on a white colored wall.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 1086, y1: 240, x2: 1077, y2: 573,
        expected_darker: ['E'],
        message: [
          'Using the buttons at the top, indicate which',
          'point has a darker surface color.' ]
        message_darker_error: [
          'Try again. Remember that the point of this task is to ignore',
          'shading effects. Imagine what it would look like if every',
          'surface was receiving the same amount of lighting.'
        ]
        message_success: [
          'Correct! Both points are on a white colored wall.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 1722, y1: 3030, x2: 1755, y2: 2670,
        expected_darker: ['1'],
        message: [
          'Using the buttons at the top, indicate which',
          'point has a darker surface color.' ]
        message_darker_error: [
          'Try again. Remember that the point of this task is to ignore',
          'shading effects.' ]
        message_success: [
          'Correct! The glossy floor is darker than the white wall.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 968, y1: 1100, x2: 1076, y2: 1163,
        expected_darker: ['2'],
        expected_confidence: ['1', '2'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.'
        ]
        message_darker_error: [
          'Try again. They are close, but the strips of wood are not all',
          'the same color.' ]
        message_confidence_error: [
          'You should be able to tell with higher confidence. Try zooming',
          'out (with the scroll wheel) or dragging the image around to see',
          'more.' ]
        message_success: [
          'Correct! The two points are on slightly different colored',
          'strips of wood.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 1637, y1: 587, x2: 1684, y2: 1123,
        message: [
          'Sometimes the image is too dark or bright and you cannot tell which',
          'point is darker. In these cases, you can guess and then tell us',
          'that you are guessing.' ]
      ),
      window.build_content(
        image_key: '<KEY>',
        x1: 204, y1: 291, x2: 374, y2: 602,
        expected_darker: ['1', '2', 'E'],
        expected_confidence: ['0'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.' ]
        message_darker_error: [
          'This example is unclear, so click on any button.' ]
        message_confidence_error: [
          'This part of the image is too bright and washed out, so you cannot',
          'tell what the answer should be. Therefore, you are only guessing'
          'and should tell us this.' ]
        message_success: [
          'Correct! When the pixels are too bright or too dark to tell,',
          'please tell us that you are guessing.']
      )
      window.build_content(
        image_key: '<KEY>',
        x1: 446, y1: 1179, x2: 649, y2: 1112,
        expected_darker: ['1'],
        expected_confidence: ['1', '2'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.' ]
        message_darker_error: [
          'Try again. Remember that you should try and ignore highlights.' ]
        message_confidence_error: [
          'You should be able to tell that the floor is darker',
          'than the carpet. You can zoom out to better see.'
        ]
        message_success: [
          'Correct! We care about intrinsic surface color, not highlights',
          'or shadows.'
        ]
      )
      window.build_content(
        image_key: '<KEY>',
        x1: 428, y1: 176, x2: 499, y2: 140,
        expected_darker: ['E'],
        expected_confidence: ['1', '2'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.' ]
        message_darker_error: [
          'Try again. Remember that you should try and ignore highlights.' ]
        message_confidence_error: [
          'You should be able to tell that the two points are on the same wall.'
        ]
        message_success: [
          'Correct! If two points have the same surface color, but one',
          'appears brighter because of a highlight, you should still consider',
          'them to have the same brightness.'
        ]
      )
      window.build_content(
        image_key: '<KEY>6',
        x1: 331, y1: 1170, x2: 449, y2: 1203,
        expected_darker: ['E'],
        expected_confidence: ['1', '2'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.' ]
        message_darker_error: [
          'Try again. Remember that you should try and ignore shadows.' ]
        message_confidence_error: [
          'You should be able to tell that the two points are on the same wall.'
        ]
        message_success: [
          'Correct! If two points have the same surface color, but one',
          'appears darker because of a shadow, you should still consider',
          'them to have the same brightness.'
        ]
      )
      window.build_content(
        image_key: '<KEY>',
        x1: 1730, y1: 769, x2: 1629, y2: 965,
        expected_darker: ['1'],
        expected_confidence: ['1', '2'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.',
          '(Remember that you can drag or scroll to see more)',
        ]
        message_darker_error: [
          'Try again. They are close, but one is slightly darker.',
          'You can see more of the carpet if you zoom out.'
        ]
        message_confidence_error: [
          'You should be able to tell with more accuracy than just guessing.',
          'Try zooming out or dragging the image to see more areas.' ]
        message_success: [
          'Correct! The carpet is lighter even though it is in shadow.' ]
      )
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 867, y1: 564, x2: 1475, y2: 751,
        #expected_darker: ['2'],
        #expected_confidence: ['1', '2'],
        #message: [
        #'Using the buttons at the top, indicate which point has a darker',
        #'surface color. Also indicate how confident you are.',
        #]
        #message_darker_error: [
        #'Try again. They are close, but one is slightly darker.' ]
        #message_confidence_error: [
        #'You should be able to tell with more accuracy than just guessing.',
        #'Try zooming out or dragging the image to see more areas.' ]
        #message_success: [
        #'Correct! It is a single material, but point 2 is on a darker'
        #'portion.' ]
      #),
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 1475, y1: 751, x2: 687, y2: 753,
        #expected_darker: ['E'],
        #expected_confidence: ['1', '2'],
        #message: [
        #'Using the buttons at the top, indicate which point has a darker',
        #'surface color. Also indicate how confident you are.',
        #]
        #message_darker_error: [
        #'Try again. Notice that the pattern is repeating.' ]
        #message_confidence_error: [
        #'You should be able to tell with more accuracy than just guessing.',
        #'Try zooming out or dragging the image to see more areas.' ]
        #message_success: [
        #'Correct! The two points are on the same portion of a repeating',
        #'pattern.' ]
      #),
      window.build_content(
        image_key: '<KEY>',
        x1: 129, y1: 476, x2: 508, y2: 349,
        expected_darker: ['E'],
        expected_confidence: ['1', '2'],
        message: [
          'Using the buttons at the top, indicate which point has a darker',
          'surface color. Also indicate how confident you are.',
        ]
        message_darker_error: [
          'Try again. You should ignore glossy highlights.' ]
        message_confidence_error: [
          'You should be able to tell with more accuracy than just guessing.',
          'Try zooming out or dragging the image to see more areas.' ]
        message_success: [
          'Correct! The two points are on the same portion of a glossy',
          'surface.' ]
      )
      # BELOW: pages teaching about mirrors. Mirrors are filtered out by the
      # first task, so we aren't showing them.
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 1204, y1: 1030, x2: 1374, y2: 957,
        #message: [
        #'If you see a transparent or reflective object, you should judge the',
        #'closest object, and not the things behind it. These two points',
        #'have the same surface color.'
        #]
      #)
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 911, y1: 423, x2: 1520, y2: 689,
        #message: [
        #'Similarly for mirrors, judge the surface of the mirror,'
        #'not what you see inside the reflection. These two points'
        #'are both on the mirror so they have the same surface color.'
        #]
      #)
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 763, y1: 111, x2: 1019, y2: 239,
        #message: [
        #'Finally, since mirrors perfectly reflect light, they have a',
        #'lighter surface color than non-mirror objects. Therefore, point 1',
        #'is darker than point 2 in this example.'
        #]
      #)
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 845, y1: 932, x2: 998, y2: 919,
        #expected_darker: ['E'],
        #expected_confidence: ['1', '2'],
        #message: [
        #'Using the buttons at the top, indicate which point has a darker',
        #'surface color. Also indicate how confident you are.'
        #]
        #message_darker_error: [
        #'Try again. We care about the surface, not what is behind it.' ]
        #message_confidence_error: [
        #'You should be able to tell with more accuracy than just guessing.',
        #'Try zooming out or dragging the image to see more areas.' ]
        #message_success: [
        #'Correct! The two points are on the same portion of a glass',
        #'surface.' ]
      #)
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 1182, y1: 183, x2: 721, y2: 343,
        #expected_darker: ['2'],
        #expected_confidence: ['1', '2'],
        #message: [
        #'Using the buttons at the top, indicate which point has a darker',
        #'surface color. Also indicate how confident you are.',
        #]
        #message_darker_error: [
        #'Try again. Remember that mirrors reflect the most light, so they',
        #'have the lightest natural surface color.'
        #]
        #message_confidence_error: [
        #'You should be able to tell with more accuracy than just guessing.',
        #'Try zooming out or dragging the image to see more areas.' ]
        #message_success: [
        #'Correct! Even though both points look like they are on the',
        #'curtains, one of them is on a mirror.',
        #]
      #)
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 978, y1: 404, x2: 1092, y2: 264,
        #expected_darker: ['E'],
        #expected_confidence: ['1', '2'],
        #message: [
        #'Using the buttons at the top, indicate which point has a darker',
        #'surface color. Also indicate how confident you are.',
        #]
        #message_darker_error: [
        #'Try again. Remember that the points are both inside a mirror',
        #'so they have the same natural surface color.'
        #]
        #message_confidence_error: [
        #'You should be able to tell with more accuracy than just guessing.',
        #'Try zooming out or dragging the image to see more areas.' ]
        #message_success: [
        #'Correct! Always check if the points are inside a mirror.' ]
      #)
      #window.build_content(
        #image_key: '<KEY>',
        #x1: 919, y1: 811, x2: 1046, y2: 748,
        #expected_darker: ['1'],
        #expected_confidence: ['1', '2'],
        #message: [
        #'Using the buttons at the top, indicate which point has a darker',
        #'surface color. Also indicate how confident you are.',
        #]
        #message_darker_error: [
        #'Try again. Remember that mirrors reflect the most light, so they.',
        #'have the lightest natural surface color.'
        #]
        #message_confidence_error: [
        #'You should be able to tell with more accuracy than just guessing.',
        #'Try zooming out or dragging the image to see more areas.' ]
        #message_success: [
        #'Correct! Even though both points look like they are on the tissue',
        #'box, one of them is on a mirror.' ]
      #)
    ]
    # Wire up UI events; handlers are bound (=>) so `this` survives jQuery.
    $(window).on('resize', debounce(@on_resize))
    $('.response-darker').on('click', @btn_response_darker)
    $('.response-confidence').on('click', @btn_response_confidence)
    $('#btn-next').on('click', @btn_next)
    $('#btn-back').on('click', @btn_back)
    $('#btn-submit').on('click', @btn_submit)
  # Re-render the current page on window resize (unless already finished).
  on_resize: =>
    if not @submit_enabled
      @show_page(@idx)
  # Complete the HIT once the submit button has been unlocked.
  btn_submit: =>
    if @submit_enabled
      window.mt_tutorial_complete()
  # Advance one page; past the last page, show the "done" screen and
  # unlock the submit button.
  btn_next: =>
    if not @animating
      if @idx < @tutorial_content.length - 1
        @show_page(@idx + 1)
      else
        @idx += 1
        $('#tut-confidence').hide()
        set_btn_enabled('button.response', false)
        set_btn_enabled('#btn-next', false)
        set_btn_enabled('#btn-back', true)
        @ui.clear_ui()
        $('#mt-done').show()
        @set_submit_enabled(true)
  # Go back one page (no-op during animation or on the first page).
  btn_back: =>
    if not @animating and @idx > 0
      @show_page(@idx - 1)
  # Handle a "which point is darker?" answer; see page's expected_darker.
  btn_response_darker: (event) =>
    if not @animating
      content = @tutorial_content[@idx]
      darker = $(event.target).attr('data-darker')
      if darker in content.expected_darker
        if content.expected_confidence?
          @ui.set_message(content.message)
          set_btn_enabled('button.response-confidence', true)
        else
          @ui.set_message(content.message_success)
          @show_navigation_buttons()
      else
        set_btn_enabled('button.response-confidence', false)
        @ui.set_message(content.message_darker_error)
  # Handle a confidence-rating answer; see page's expected_confidence.
  btn_response_confidence: (event) =>
    if not @animating
      content = @tutorial_content[@idx]
      confidence = $(event.target).attr('data-confidence')
      if confidence in content.expected_confidence
        @ui.set_message(content.message_success)
        @show_navigation_buttons()
      else
        @ui.set_message(content.message_confidence_error)
  # Render page `idx`: disable buttons, animate the UI, then re-enable via
  # the on_end callback once the transition (and optional zoom) completes.
  show_page: (idx) ->
    console.log 'show_page', idx
    @idx = idx
    $('#mt-done').hide()
    set_btn_enabled('button.response', false)
    set_btn_enabled('button.controls', false)
    @set_submit_enabled(false)
    @animating = true
    content = @tutorial_content[idx]
    on_end = ( =>
      set_btn_enabled('#btn-back', idx > 0)
      if content.expected_darker?
        @show_response_buttons()
      else
        @show_navigation_buttons()
      @animating = false
    )
    if content.zoom_out
      @ui.update_ui(content, ( =>
        target = @ui.compute_current_zoom()
        target[2] *= 2
        @ui.zoom_to_target(target, on_end)
      ))
    else
      @ui.update_ui(content, on_end)
  # Show the answer buttons for the current (question) page.
  show_response_buttons: ->
    set_btn_enabled('button.response-darker', true)
    set_btn_enabled('button.response-confidence', false)
    $('button.response-darker').removeClass('active')
    if @tutorial_content[@idx].expected_confidence?
      $('#tut-confidence').show()
    else
      $('#tut-confidence').hide()
    $('#tut-buttons').show()
  # Hide the answer buttons and enable "Next" when pages remain.
  show_navigation_buttons: ->
    $('#tut-buttons').hide()
    set_btn_enabled('#btn-next', @idx < @tutorial_content.length)
  # Toggle availability/visibility of the final submit button.
  set_submit_enabled: (b) ->
    @submit_enabled = b
    if b
      $('#btn-submit').show()
      set_btn_enabled('#btn-submit', true)
    else
      $('#btn-submit').hide()
# Metadata for the tutorial photos (duplicate copy; keys are redacted asset
# identifiers). Dimensions are original pixel sizes, used by build_content
# to normalize point coordinates into [0, 1].
window.photos = [
  {key: 'PI:KEY:<KEY>END_PI', width: 1078, height: 838},
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 1360},
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 1362},
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 1371},
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 1360},
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 1356}
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 1365}
  {key: 'PI:KEY:<KEY>END_PI', width: 2048, height: 3072}
]
# Build a tutorial-page descriptor from a raw config object `c`.
# Returns a shallow copy of `c`, augmented with:
#   photo  - the photo record looked up from window.photos_by_key (if
#            c.image_key is given; photos_by_key must already be populated)
#   points - two labeled point markers with coordinates normalized by the
#            photo's pixel dimensions (if all of x1/y1/x2/y2 are given)
window.build_content = (c) ->
  ret = $.extend({}, c)
  if c.image_key?
    ret.photo = window.photos_by_key[c.image_key]
  # BUG FIX: the original guard tested c.x1? twice and never checked c.y1?,
  # so a config missing y1 would produce NaN y_orig instead of no points.
  if c.x1? and c.y1? and c.x2? and c.y2?
    ret.points = [
      {
        x_orig: c.x1 / ret.photo.width,
        y_orig: c.y1 / ret.photo.height,
        label: "1",
        color:'#700'
      }, {
        x_orig: c.x2 / ret.photo.width,
        y_orig: c.y2 / ret.photo.height,
        label: "2",
        color:'#007'
      }
    ]
  return ret
# Entry point: preload every tutorial image, then construct the tutorial and
# show the starting page (from the ?start_idx= URL parameter, default 0).
$( ->
  window.show_modal_loading("Loading...", 250)
  # load images
  for p in window.photos
    p.url = "/static/img/intrinsic/experiments/#{p.key}.jpg"
  image_urls = (p.url for p in window.photos)
  # NOTE: the callback fires after image loading completes, so `image_objs`
  # has already been assigned by the time the callback reads it.
  image_objs = load_images(image_urls, ->
    # store photo object
    for p, i in window.photos
      p.obj = image_objs[i]
    # index by key
    window.photos_by_key = {}
    for p in window.photos
      window.photos_by_key[p.key] = p
    # start the tutorial
    window.hide_modal_loading()
    tutorial = new IntrinsicCompareTutorial()
    start_idx = window.getURLParameter('start_idx')
    if start_idx?
      start_idx = parseInt(start_idx)
    else
      start_idx = 0
    tutorial.show_page(start_idx)
  )
)
class IntrinsicCompareTutorial
constructor: ->
@animating = true
@set_submit_enabled(false)
@ui = new IntrinsicCompareUI()
@ui.extra_margin = 100
@tutorial_content = [
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 300, y1: 838/2, x2: 900, y2: 838/2,
message: [
'This short tutorial will teach you how to do this HIT. You get as',
'many tries as you like, and you don\'t have to do it again once',
'you pass. Click "Next" to continue to the next screen.'
]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 359, y1: 334, x2: 426, y2: 259,
message: [
'On this checkerboard pattern, there are two colors.',
'Point 1 is on a dark square, and point 2 is on a light square.'
'You can drag the photo to see more or scroll to zoom in/out.'
]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 359, y1: 334, x2: 621, y2: 454,
message: [
'Now, point 2 is in shadow. However, we understand that the square',
'at point 2 is still lighter than point 1.',
'(Note that we are referring to the center of the circle)' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 922, y1: 56, x2: 1041, y2: 279,
message: [
'In this photo, all points on the ceiling have the same surface',
'color. The surface is white even though shadows make some parts',
'appear darker.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 602, y1: 550, x2: 1254, y2: 652,
message: [
'Similarly, these two points on the wall have the same color.',
'The surface color is what matters, not the lighting or shadows.', ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 775, y1: 991, x2: 1002, y2: 906,
message: [
'The surface color at point 2 is darker than the surface color at',
'point 1 even though it is under a highlight.' ]
),
window.build_content(
image_key: '1PI:KEY:<KEY>END_PI',
x1: 1273, y1: 614, x2: 1368, y2: 641,
expected_darker: ['E'],
message: [
'Now it is your turn. Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Indicate which point has a darker surface color.' ]
message_success: [
'Correct! Both points are on the same colored wall.' ]
),
window.build_content(
image_key: '6PI:KEY:<KEY>END_PI',
x1: 211, y1: 862, x2: 390, y2: 827,
message: [
'Sometimes you cannot see enough and need to zoom out.' ]
),
window.build_content(
image_key: '6PI:KEY:<KEY>END_PI',
x1: 211, y1: 862, x2: 390, y2: 827,
zoom_out: true,
message: [
'You can do this with the scroll wheel. You can also drag',
'with the mouse.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 1920, y1: 846, x2: 1440, y2: 732,
expected_darker: ['E'],
message: [
'Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Remember that the point of this task is to ignore',
'shading effects. Imagine what it would look like if every',
'surface was receiving the same amount of lighting.'
]
message_success: [
'Correct! Both points are on a white colored wall.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 1086, y1: 240, x2: 1077, y2: 573,
expected_darker: ['E'],
message: [
'Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Remember that the point of this task is to ignore',
'shading effects. Imagine what it would look like if every',
'surface was receiving the same amount of lighting.'
]
message_success: [
'Correct! Both points are on a white colored wall.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 1722, y1: 3030, x2: 1755, y2: 2670,
expected_darker: ['1'],
message: [
'Using the buttons at the top, indicate which',
'point has a darker surface color.' ]
message_darker_error: [
'Try again. Remember that the point of this task is to ignore',
'shading effects.' ]
message_success: [
'Correct! The glossy floor is darker than the white wall.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 968, y1: 1100, x2: 1076, y2: 1163,
expected_darker: ['2'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.'
]
message_darker_error: [
'Try again. They are close, but the strips of wood are not all',
'the same color.' ]
message_confidence_error: [
'You should be able to tell with higher confidence. Try zooming',
'out (with the scroll wheel) or dragging the image around to see',
'more.' ]
message_success: [
'Correct! The two points are on slightly different colored',
'strips of wood.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 1637, y1: 587, x2: 1684, y2: 1123,
message: [
'Sometimes the image is too dark or bright and you cannot tell which',
'point is darker. In these cases, you can guess and then tell us',
'that you are guessing.' ]
),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 204, y1: 291, x2: 374, y2: 602,
expected_darker: ['1', '2', 'E'],
expected_confidence: ['0'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'This example is unclear, so click on any button.' ]
message_confidence_error: [
'This part of the image is too bright and washed out, so you cannot',
'tell what the answer should be. Therefore, you are only guessing'
'and should tell us this.' ]
message_success: [
'Correct! When the pixels are too bright or too dark to tell,',
'please tell us that you are guessing.']
)
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 446, y1: 1179, x2: 649, y2: 1112,
expected_darker: ['1'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'Try again. Remember that you should try and ignore highlights.' ]
message_confidence_error: [
'You should be able to tell that the floor is darker',
'than the carpet. You can zoom out to better see.'
]
message_success: [
'Correct! We care about intrinsic surface color, not highlights',
'or shadows.'
]
)
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 428, y1: 176, x2: 499, y2: 140,
expected_darker: ['E'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'Try again. Remember that you should try and ignore highlights.' ]
message_confidence_error: [
'You should be able to tell that the two points are on the same wall.'
]
message_success: [
'Correct! If two points have the same surface color, but one',
'appears brighter because of a highlight, you should still consider',
'them to have the same brightness.'
]
)
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI6',
x1: 331, y1: 1170, x2: 449, y2: 1203,
expected_darker: ['E'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.' ]
message_darker_error: [
'Try again. Remember that you should try and ignore shadows.' ]
message_confidence_error: [
'You should be able to tell that the two points are on the same wall.'
]
message_success: [
'Correct! If two points have the same surface color, but one',
'appears darker because of a shadow, you should still consider',
'them to have the same brightness.'
]
)
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 1730, y1: 769, x2: 1629, y2: 965,
expected_darker: ['1'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.',
'(Remember that you can drag or scroll to see more)',
]
message_darker_error: [
'Try again. They are close, but one is slightly darker.',
'You can see more of the carpet if you zoom out.'
]
message_confidence_error: [
'You should be able to tell with more accuracy than just guessing.',
'Try zooming out or dragging the image to see more areas.' ]
message_success: [
'Correct! The carpet is lighter even though it is in shadow.' ]
)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 867, y1: 564, x2: 1475, y2: 751,
#expected_darker: ['2'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. They are close, but one is slightly darker.' ]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! It is a single material, but point 2 is on a darker'
#'portion.' ]
#),
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 1475, y1: 751, x2: 687, y2: 753,
#expected_darker: ['E'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Notice that the pattern is repeating.' ]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! The two points are on the same portion of a repeating',
#'pattern.' ]
#),
window.build_content(
image_key: 'PI:KEY:<KEY>END_PI',
x1: 129, y1: 476, x2: 508, y2: 349,
expected_darker: ['E'],
expected_confidence: ['1', '2'],
message: [
'Using the buttons at the top, indicate which point has a darker',
'surface color. Also indicate how confident you are.',
]
message_darker_error: [
'Try again. You should ignore glossy highlights.' ]
message_confidence_error: [
'You should be able to tell with more accuracy than just guessing.',
'Try zooming out or dragging the image to see more areas.' ]
message_success: [
'Correct! The two points are on the same portion of a glossy',
'surface.' ]
)
# BELOW: pages teaching about mirrors. Mirrors are filtered out by the
# first task, so we aren't showing them.
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 1204, y1: 1030, x2: 1374, y2: 957,
#message: [
#'If you see a transparent or reflective object, you should judge the',
#'closest object, and not the things behind it. These two points',
#'have the same surface color.'
#]
#)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 911, y1: 423, x2: 1520, y2: 689,
#message: [
#'Similarly for mirrors, judge the surface of the mirror,'
#'not what you see inside the reflection. These two points'
#'are both on the mirror so they have the same surface color.'
#]
#)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 763, y1: 111, x2: 1019, y2: 239,
#message: [
#'Finally, since mirrors perfectly reflect light, they have a',
#'lighter surface color than non-mirror objects. Therefore, point 1',
#'is darker than point 2 in this example.'
#]
#)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 845, y1: 932, x2: 998, y2: 919,
#expected_darker: ['E'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.'
#]
#message_darker_error: [
#'Try again. We care about the surface, not what is behind it.' ]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! The two points are on the same portion of a glass',
#'surface.' ]
#)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 1182, y1: 183, x2: 721, y2: 343,
#expected_darker: ['2'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Remember that mirrors reflect the most light, so they',
#'have the lightest natural surface color.'
#]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! Even though both points look like they are on the',
#'curtains, one of them is on a mirror.',
#]
#)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 978, y1: 404, x2: 1092, y2: 264,
#expected_darker: ['E'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Remember that the points are both inside a mirror',
#'so they have the same natural surface color.'
#]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! Always check if the points are inside a mirror.' ]
#)
#window.build_content(
#image_key: 'PI:KEY:<KEY>END_PI',
#x1: 919, y1: 811, x2: 1046, y2: 748,
#expected_darker: ['1'],
#expected_confidence: ['1', '2'],
#message: [
#'Using the buttons at the top, indicate which point has a darker',
#'surface color. Also indicate how confident you are.',
#]
#message_darker_error: [
#'Try again. Remember that mirrors reflect the most light, so they.',
#'have the lightest natural surface color.'
#]
#message_confidence_error: [
#'You should be able to tell with more accuracy than just guessing.',
#'Try zooming out or dragging the image to see more areas.' ]
#message_success: [
#'Correct! Even though both points look like they are on the tissue',
#'box, one of them is on a mirror.' ]
#)
]
$(window).on('resize', debounce(@on_resize))
$('.response-darker').on('click', @btn_response_darker)
$('.response-confidence').on('click', @btn_response_confidence)
$('#btn-next').on('click', @btn_next)
$('#btn-back').on('click', @btn_back)
$('#btn-submit').on('click', @btn_submit)
on_resize: =>
if not @submit_enabled
@show_page(@idx)
btn_submit: =>
if @submit_enabled
window.mt_tutorial_complete()
btn_next: =>
if not @animating
if @idx < @tutorial_content.length - 1
@show_page(@idx + 1)
else
@idx += 1
$('#tut-confidence').hide()
set_btn_enabled('button.response', false)
set_btn_enabled('#btn-next', false)
set_btn_enabled('#btn-back', true)
@ui.clear_ui()
$('#mt-done').show()
@set_submit_enabled(true)
btn_back: =>
if not @animating and @idx > 0
@show_page(@idx - 1)
btn_response_darker: (event) =>
if not @animating
content = @tutorial_content[@idx]
darker = $(event.target).attr('data-darker')
if darker in content.expected_darker
if content.expected_confidence?
@ui.set_message(content.message)
set_btn_enabled('button.response-confidence', true)
else
@ui.set_message(content.message_success)
@show_navigation_buttons()
else
set_btn_enabled('button.response-confidence', false)
@ui.set_message(content.message_darker_error)
btn_response_confidence: (event) =>
if not @animating
content = @tutorial_content[@idx]
confidence = $(event.target).attr('data-confidence')
if confidence in content.expected_confidence
@ui.set_message(content.message_success)
@show_navigation_buttons()
else
@ui.set_message(content.message_confidence_error)
show_page: (idx) ->
console.log 'show_page', idx
@idx = idx
$('#mt-done').hide()
set_btn_enabled('button.response', false)
set_btn_enabled('button.controls', false)
@set_submit_enabled(false)
@animating = true
content = @tutorial_content[idx]
on_end = ( =>
set_btn_enabled('#btn-back', idx > 0)
if content.expected_darker?
@show_response_buttons()
else
@show_navigation_buttons()
@animating = false
)
if content.zoom_out
@ui.update_ui(content, ( =>
target = @ui.compute_current_zoom()
target[2] *= 2
@ui.zoom_to_target(target, on_end)
))
else
@ui.update_ui(content, on_end)
show_response_buttons: ->
set_btn_enabled('button.response-darker', true)
set_btn_enabled('button.response-confidence', false)
$('button.response-darker').removeClass('active')
if @tutorial_content[@idx].expected_confidence?
$('#tut-confidence').show()
else
$('#tut-confidence').hide()
$('#tut-buttons').show()
show_navigation_buttons: ->
$('#tut-buttons').hide()
set_btn_enabled('#btn-next', @idx < @tutorial_content.length)
set_submit_enabled: (b) ->
@submit_enabled = b
if b
$('#btn-submit').show()
set_btn_enabled('#btn-submit', true)
else
$('#btn-submit').hide()
|
[
{
"context": "0, 0, 1\n 1, 1, 0\n]\n\nkids =\n brother:\n name: \"Max\"\n age: 11\n sister:\n name: \"Ida\"\n age: ",
"end": 716,
"score": 0.9998762011528015,
"start": 713,
"tag": "NAME",
"value": "Max"
},
{
"context": " name: \"Max\"\n age: 11\n sister:\n ... | Application Support/BBEdit/Lint/test.coffee | bhdicaire/bbeditSetup | 0 | # Assignment:
number = 42
opposite = true
# Conditions:
number = -42 if opposite
# Functions:
square = (x) -> x * x
# Arrays:
list = [1, 2, 3, 4, 5]
# Objects:
math =
root: Math.sqrt
square: square
cube: (x) -> x * square x
# Splats:
race = (winner, runners...) ->
print winner, runners
# Existence:
alert "I knew it!" if elvis?
# Array comprehensions:
cubes = (math.cube num for num in list)
square = (x) -> x * x
cube = (x) -> square(x) * x
fill = (container, liquid = "coffee") ->
"Filling the #{container} with #{liquid}..."
song = ["do", "re", "mi", "fa", "so"]
singers = {Jagger: "Rock", Elvis: "Roll"}
bitlist = [
1, 0, 1
0, 0, 1
1, 1, 0
]
kids =
brother:
name: "Max"
age: 11
sister:
name: "Ida"
age: 9
$('.account').attr class: 'active'
log object.class
outer = 1
changeNumbers = ->
inner = -1
outer = 10
inner = changeNumbers()
mood = greatlyImproved if singing
if happy and knowsIt
clapsHands()
chaChaCha()
else
showIt()
date = if friday then sue else jill
gold = silver = rest = "unknown"
awardMedals = (first, second, others...) ->
gold = first
silver = second
rest = others
contenders = [
"Michael Phelps"
"Liu Xiang"
"Yao Ming"
"Allyson Felix"
"Shawn Johnson"
"Roman Sebrle"
"Guo Jingjing"
"Tyson Gay"
"Asafa Powell"
"Usain Bolt"
]
awardMedals contenders...
alert "Gold: " + gold
alert "Silver: " + silver
alert "The Field: " + rest
# Eat lunch.
eat food for food in ['toast', 'cheese', 'wine']
# Fine five course dining.
courses = ['greens', 'caviar', 'truffles', 'roast', 'cake']
menu i + 1, dish for dish, i in courses
# Health conscious meal.
foods = ['broccoli', 'spinach', 'chocolate']
eat food for food in foods when food isnt 'chocolate'
countdown = (num for num in [10..1])
yearsOld = max: 10, ida: 9, tim: 11
ages = for child, age of yearsOld
"#{child} is #{age}"
# Econ 101
if this.studyingEconomics
buy() while supply > demand
sell() until supply > demand
# Nursery Rhyme
num = 6
lyrics = while num -= 1
"#{num} little monkeys, jumping on the bed.
One fell out and bumped his head."
for filename in list
do (filename) ->
fs.readFile filename, (err, contents) ->
compile filename, contents.toString()
numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]
start = numbers[0..2]
middle = numbers[3...-2]
end = numbers[-2..]
copy = numbers[..]
numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
numbers[3..6] = [-3, -4, -5, -6]
grade = (student) ->
if student.excellentWork
"A+"
else if student.okayStuff
if student.triedHard then "B" else "B-"
else
"C"
eldest = if 24 > 21 then "Liz" else "Ike"
six = (one = 1) + (two = 2) + (three = 3)
# The first ten global properties.
globals = (name for name of window)[0...10]
alert(
try
nonexistent / undefined
catch error
"And the error is ... #{error}"
)
launch() if ignition is on
volume = 10 if band isnt SpinalTap
letTheWildRumpusBegin() unless answer is no
if car.speed < limit then accelerate()
winner = yes if pick in [47, 92, 13]
print inspect "My name is #{@name}"
solipsism = true if mind? and not world?
speed = 0
speed ?= 15
footprints = yeti ? "bear"
zip = lottery.drawWinner?().address?.zipcode
class Animal
constructor: (@name) ->
move: (meters) ->
alert @name + " moved #{meters}m."
class Snake extends Animal
move: ->
alert "Slithering..."
super 5
class Horse extends Animal
move: ->
alert "Galloping..."
super 45
sam = new Snake "Sammy the Python"
tom = new Horse "Tommy the Palomino"
sam.move()
tom.move()
String::dasherize = ->
this.replace /_/g, "-"
theBait = 1000
theSwitch = 0
[theBait, theSwitch] = [theSwitch, theBait]
weatherReport = (location) ->
# Make an Ajax request to fetch the weather...
[location, 72, "Mostly Sunny"]
[city, temp, forecast] = weatherReport "Berkeley, CA"
futurists =
sculptor: "Umberto Boccioni"
painter: "Vladimir Burliuk"
poet:
name: "F.T. Marinetti"
address: [
"Via Roma 42R"
"Bellagio, Italy 22021"
]
{poet: {name, address: [street, city]}} = futurists
tag = "<impossible>"
[open, contents..., close] = tag.split("")
text = "Every literary critic believes he will
outwit history and have the last word"
[first, ..., last] = text.split " "
class Person
constructor: (options) ->
{@name, @age, @height} = options
tim = new Person age: 4
Account = (customer, cart) ->
@customer = customer
@cart = cart
$('.shopping_cart').bind 'click', (event) =>
@customer.purchase @cart
hi = `function() {
return [document.title, "Hello JavaScript"].join(": ");
}`
switch day
when "Mon" then go work
when "Tue" then go relax
when "Thu" then go iceFishing
when "Fri", "Sat"
if day is bingoDay
go bingo
go dancing
when "Sun" then go church
else go work
score = 76
grade = switch
when score < 60 then 'F'
when score < 70 then 'D'
when score < 80 then 'C'
when score < 90 then 'B'
else 'A'
# grade == 'C'
try
allHellBreaksLoose()
catsAndDogsLivingTogether()
catch error
print error
finally
cleanUp()
cholesterol = 127
healthy = 200 > cholesterol > 60
author = "Wittgenstein"
quote = "A picture is a fact. -- #{ author }"
sentence = "#{ 22 / 7 } is a decent approximation of π"
mobyDick = "Call me Ishmael. Some years ago --
never mind how long precisely -- having little
or no money in my purse, and nothing particular
to interest me on shore, I thought I would sail
about a little and see the watery part of the
world..."
html = """
<strong>
cup of coffeescript
</strong>
"""
stringWithQuotes = """This string has "embedded quotes" which confuses bbedit!"""
###
SkinnyMochaHalfCaffScript Compiler v1.0
Released under the MIT License
###
OPERATOR = /// ^ (
?: [-=]> # function
| [-+*/%<>&|^!?=]= # compound assign / compare
| >>>=? # zero-fill right shift
| ([-+:])\1 # doubles
| ([&|<>])\2=? # logic / shift
| \?\. # soak access
| \.{2,3} # range or splat
) ///
fs = require 'fs'
option '-o', '--output [DIR]', 'directory for compiled code'
task 'build:parser', 'rebuild the Jison parser', (options) ->
require 'jison'
code = require('./lib/grammar').parser.generate()
dir = options.output or 'lib'
fs.writeFile "#{dir}/parser.js", code
| 178040 | # Assignment:
number = 42
opposite = true
# Conditions:
number = -42 if opposite
# Functions:
square = (x) -> x * x
# Arrays:
list = [1, 2, 3, 4, 5]
# Objects:
math =
root: Math.sqrt
square: square
cube: (x) -> x * square x
# Splats:
race = (winner, runners...) ->
print winner, runners
# Existence:
alert "I knew it!" if elvis?
# Array comprehensions:
cubes = (math.cube num for num in list)
square = (x) -> x * x
cube = (x) -> square(x) * x
fill = (container, liquid = "coffee") ->
"Filling the #{container} with #{liquid}..."
song = ["do", "re", "mi", "fa", "so"]
singers = {Jagger: "Rock", Elvis: "Roll"}
bitlist = [
1, 0, 1
0, 0, 1
1, 1, 0
]
kids =
brother:
name: "<NAME>"
age: 11
sister:
name: "<NAME>"
age: 9
$('.account').attr class: 'active'
log object.class
outer = 1
changeNumbers = ->
inner = -1
outer = 10
inner = changeNumbers()
mood = greatlyImproved if singing
if happy and knowsIt
clapsHands()
chaChaCha()
else
showIt()
date = if friday then sue else jill
gold = silver = rest = "unknown"
awardMedals = (first, second, others...) ->
gold = first
silver = second
rest = others
contenders = [
"<NAME>"
"<NAME>"
"<NAME>"
"<NAME>"
"<NAME>"
"<NAME>"
"<NAME>"
"<NAME>ay"
"<NAME>"
"<NAME>"
]
awardMedals contenders...
alert "Gold: " + gold
alert "Silver: " + silver
alert "The Field: " + rest
# Eat lunch.
eat food for food in ['toast', 'cheese', 'wine']
# Fine five course dining.
courses = ['greens', 'caviar', 'truffles', 'roast', 'cake']
menu i + 1, dish for dish, i in courses
# Health conscious meal.
foods = ['broccoli', 'spinach', 'chocolate']
eat food for food in foods when food isnt 'chocolate'
countdown = (num for num in [10..1])
yearsOld = max: 10, ida: 9, tim: 11
ages = for child, age of yearsOld
"#{child} is #{age}"
# Econ 101
if this.studyingEconomics
buy() while supply > demand
sell() until supply > demand
# Nursery Rhyme
num = 6
lyrics = while num -= 1
"#{num} little monkeys, jumping on the bed.
One fell out and bumped his head."
for filename in list
do (filename) ->
fs.readFile filename, (err, contents) ->
compile filename, contents.toString()
numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]
start = numbers[0..2]
middle = numbers[3...-2]
end = numbers[-2..]
copy = numbers[..]
numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
numbers[3..6] = [-3, -4, -5, -6]
grade = (student) ->
if student.excellentWork
"A+"
else if student.okayStuff
if student.triedHard then "B" else "B-"
else
"C"
eldest = if 24 > 21 then "Liz" else "Ike"
six = (one = 1) + (two = 2) + (three = 3)
# The first ten global properties.
globals = (name for name of window)[0...10]
alert(
try
nonexistent / undefined
catch error
"And the error is ... #{error}"
)
launch() if ignition is on
volume = 10 if band isnt SpinalTap
letTheWildRumpusBegin() unless answer is no
if car.speed < limit then accelerate()
winner = yes if pick in [47, 92, 13]
print inspect "My name is<NAME> #{@name}"
solipsism = true if mind? and not world?
speed = 0
speed ?= 15
footprints = yeti ? "bear"
zip = lottery.drawWinner?().address?.zipcode
class Animal
constructor: (@name) ->
move: (meters) ->
alert @name + " moved #{meters}m."
class Snake extends Animal
move: ->
alert "Slithering..."
super 5
class Horse extends Animal
move: ->
alert "Galloping..."
super 45
sam = new Snake "<NAME>"
tom = new Horse "<NAME>"
sam.move()
tom.move()
String::dasherize = ->
this.replace /_/g, "-"
theBait = 1000
theSwitch = 0
[theBait, theSwitch] = [theSwitch, theBait]
weatherReport = (location) ->
# Make an Ajax request to fetch the weather...
[location, 72, "Mostly Sunny"]
[city, temp, forecast] = weatherReport "Berkeley, CA"
futurists =
sculptor: "<NAME>"
painter: "<NAME>"
poet:
name: "<NAME>"
address: [
"Via Roma 42R"
"Bellagio, Italy 22021"
]
{poet: {name, address: [street, city]}} = futurists
tag = "<impossible>"
[open, contents..., close] = tag.split("")
text = "Every literary critic believes he will
outwit history and have the last word"
[first, ..., last] = text.split " "
class Person
constructor: (options) ->
{@name, @age, @height} = options
tim = new Person age: 4
Account = (customer, cart) ->
@customer = customer
@cart = cart
$('.shopping_cart').bind 'click', (event) =>
@customer.purchase @cart
hi = `function() {
return [document.title, "Hello JavaScript"].join(": ");
}`
switch day
when "Mon" then go work
when "Tue" then go relax
when "Thu" then go iceFishing
when "Fri", "Sat"
if day is bingoDay
go bingo
go dancing
when "Sun" then go church
else go work
score = 76
grade = switch
when score < 60 then 'F'
when score < 70 then 'D'
when score < 80 then 'C'
when score < 90 then 'B'
else 'A'
# grade == 'C'
try
allHellBreaksLoose()
catsAndDogsLivingTogether()
catch error
print error
finally
cleanUp()
cholesterol = 127
healthy = 200 > cholesterol > 60
author = "<NAME>"
quote = "A picture is a fact. -- #{ author }"
sentence = "#{ 22 / 7 } is a decent approximation of π"
mobyDick = "Call me <NAME>. Some years ago --
never mind how long precisely -- having little
or no money in my purse, and nothing particular
to interest me on shore, I thought I would sail
about a little and see the watery part of the
world..."
html = """
<strong>
cup of coffeescript
</strong>
"""
stringWithQuotes = """This string has "embedded quotes" which confuses bbedit!"""
###
SkinnyMochaHalfCaffScript Compiler v1.0
Released under the MIT License
###
OPERATOR = /// ^ (
?: [-=]> # function
| [-+*/%<>&|^!?=]= # compound assign / compare
| >>>=? # zero-fill right shift
| ([-+:])\1 # doubles
| ([&|<>])\2=? # logic / shift
| \?\. # soak access
| \.{2,3} # range or splat
) ///
fs = require 'fs'
option '-o', '--output [DIR]', 'directory for compiled code'
task 'build:parser', 'rebuild the Jison parser', (options) ->
require 'jison'
code = require('./lib/grammar').parser.generate()
dir = options.output or 'lib'
fs.writeFile "#{dir}/parser.js", code
| true | # Assignment:
number = 42
opposite = true
# Conditions:
number = -42 if opposite
# Functions:
square = (x) -> x * x
# Arrays:
list = [1, 2, 3, 4, 5]
# Objects:
math =
root: Math.sqrt
square: square
cube: (x) -> x * square x
# Splats:
race = (winner, runners...) ->
print winner, runners
# Existence:
alert "I knew it!" if elvis?
# Array comprehensions:
cubes = (math.cube num for num in list)
square = (x) -> x * x
cube = (x) -> square(x) * x
fill = (container, liquid = "coffee") ->
"Filling the #{container} with #{liquid}..."
song = ["do", "re", "mi", "fa", "so"]
singers = {Jagger: "Rock", Elvis: "Roll"}
bitlist = [
1, 0, 1
0, 0, 1
1, 1, 0
]
kids =
brother:
name: "PI:NAME:<NAME>END_PI"
age: 11
sister:
name: "PI:NAME:<NAME>END_PI"
age: 9
$('.account').attr class: 'active'
log object.class
outer = 1
changeNumbers = ->
inner = -1
outer = 10
inner = changeNumbers()
mood = greatlyImproved if singing
if happy and knowsIt
clapsHands()
chaChaCha()
else
showIt()
date = if friday then sue else jill
gold = silver = rest = "unknown"
awardMedals = (first, second, others...) ->
gold = first
silver = second
rest = others
contenders = [
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PIay"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
]
awardMedals contenders...
alert "Gold: " + gold
alert "Silver: " + silver
alert "The Field: " + rest
# Eat lunch.
eat food for food in ['toast', 'cheese', 'wine']
# Fine five course dining.
courses = ['greens', 'caviar', 'truffles', 'roast', 'cake']
menu i + 1, dish for dish, i in courses
# Health conscious meal.
foods = ['broccoli', 'spinach', 'chocolate']
eat food for food in foods when food isnt 'chocolate'
countdown = (num for num in [10..1])
yearsOld = max: 10, ida: 9, tim: 11
ages = for child, age of yearsOld
"#{child} is #{age}"
# Econ 101
if this.studyingEconomics
buy() while supply > demand
sell() until supply > demand
# Nursery Rhyme
num = 6
lyrics = while num -= 1
"#{num} little monkeys, jumping on the bed.
One fell out and bumped his head."
for filename in list
do (filename) ->
fs.readFile filename, (err, contents) ->
compile filename, contents.toString()
numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]
start = numbers[0..2]
middle = numbers[3...-2]
end = numbers[-2..]
copy = numbers[..]
numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
numbers[3..6] = [-3, -4, -5, -6]
grade = (student) ->
if student.excellentWork
"A+"
else if student.okayStuff
if student.triedHard then "B" else "B-"
else
"C"
eldest = if 24 > 21 then "Liz" else "Ike"
six = (one = 1) + (two = 2) + (three = 3)
# The first ten global properties.
globals = (name for name of window)[0...10]
alert(
try
nonexistent / undefined
catch error
"And the error is ... #{error}"
)
launch() if ignition is on
volume = 10 if band isnt SpinalTap
letTheWildRumpusBegin() unless answer is no
if car.speed < limit then accelerate()
winner = yes if pick in [47, 92, 13]
print inspect "My name isPI:NAME:<NAME>END_PI #{@name}"
solipsism = true if mind? and not world?
speed = 0
speed ?= 15
footprints = yeti ? "bear"
zip = lottery.drawWinner?().address?.zipcode
class Animal
constructor: (@name) ->
move: (meters) ->
alert @name + " moved #{meters}m."
class Snake extends Animal
move: ->
alert "Slithering..."
super 5
class Horse extends Animal
move: ->
alert "Galloping..."
super 45
sam = new Snake "PI:NAME:<NAME>END_PI"
tom = new Horse "PI:NAME:<NAME>END_PI"
sam.move()
tom.move()
String::dasherize = ->
this.replace /_/g, "-"
theBait = 1000
theSwitch = 0
[theBait, theSwitch] = [theSwitch, theBait]
weatherReport = (location) ->
# Make an Ajax request to fetch the weather...
[location, 72, "Mostly Sunny"]
[city, temp, forecast] = weatherReport "Berkeley, CA"
futurists =
sculptor: "PI:NAME:<NAME>END_PI"
painter: "PI:NAME:<NAME>END_PI"
poet:
name: "PI:NAME:<NAME>END_PI"
address: [
"Via Roma 42R"
"Bellagio, Italy 22021"
]
{poet: {name, address: [street, city]}} = futurists
tag = "<impossible>"
[open, contents..., close] = tag.split("")
text = "Every literary critic believes he will
outwit history and have the last word"
[first, ..., last] = text.split " "
class Person
constructor: (options) ->
{@name, @age, @height} = options
tim = new Person age: 4
Account = (customer, cart) ->
@customer = customer
@cart = cart
$('.shopping_cart').bind 'click', (event) =>
@customer.purchase @cart
hi = `function() {
return [document.title, "Hello JavaScript"].join(": ");
}`
switch day
when "Mon" then go work
when "Tue" then go relax
when "Thu" then go iceFishing
when "Fri", "Sat"
if day is bingoDay
go bingo
go dancing
when "Sun" then go church
else go work
score = 76
grade = switch
when score < 60 then 'F'
when score < 70 then 'D'
when score < 80 then 'C'
when score < 90 then 'B'
else 'A'
# grade == 'C'
try
allHellBreaksLoose()
catsAndDogsLivingTogether()
catch error
print error
finally
cleanUp()
cholesterol = 127
healthy = 200 > cholesterol > 60
author = "PI:NAME:<NAME>END_PI"
quote = "A picture is a fact. -- #{ author }"
sentence = "#{ 22 / 7 } is a decent approximation of π"
mobyDick = "Call me PI:NAME:<NAME>END_PI. Some years ago --
never mind how long precisely -- having little
or no money in my purse, and nothing particular
to interest me on shore, I thought I would sail
about a little and see the watery part of the
world..."
html = """
<strong>
cup of coffeescript
</strong>
"""
stringWithQuotes = """This string has "embedded quotes" which confuses bbedit!"""
###
SkinnyMochaHalfCaffScript Compiler v1.0
Released under the MIT License
###
OPERATOR = /// ^ (
?: [-=]> # function
| [-+*/%<>&|^!?=]= # compound assign / compare
| >>>=? # zero-fill right shift
| ([-+:])\1 # doubles
| ([&|<>])\2=? # logic / shift
| \?\. # soak access
| \.{2,3} # range or splat
) ///
fs = require 'fs'
option '-o', '--output [DIR]', 'directory for compiled code'
task 'build:parser', 'rebuild the Jison parser', (options) ->
require 'jison'
code = require('./lib/grammar').parser.generate()
dir = options.output or 'lib'
fs.writeFile "#{dir}/parser.js", code
|
[
{
"context": "cons/bitbucket@2x.png'\n\n @_fields.push\n key: 'webhookUrl'\n type: 'text'\n readonly: true\n descript",
"end": 4201,
"score": 0.9816253185272217,
"start": 4191,
"tag": "KEY",
"value": "webhookUrl"
}
] | src/services/bitbucket.coffee | jianliaoim/talk-services | 40 | util = require '../util'
_receiveWebhook = ({body, headers}) ->
try
[type, action] = headers['x-event-key'].split(":")
catch
throw new Error('Invalid event format')
throw new Error('Invalid event type') unless type in ['repo', 'issue', 'pullrequest']
message = {}
attachment = category: 'quote', data: {}
switch type
when 'repo'
throw new Error('Unsupported action') unless action in ['push', 'commit_comment_created']
if action is 'push'
attachment.data.title = "A new push for project #{body.repository.name}"
attachment.data.text = "Committer: #{body.actor.display_name}"
attachment.data.redirectUrl = body.repository.links.html.href
else if action is 'commit_comment_created'
attachment.data.title = "A new comment for #{body.repository.name}"
attachment.data.text = body.comment.content.raw
attachment.data.redirectUrl = body.comment.links.html.href
when 'issue'
throw new Error('Unsupported action') unless action in ['created', 'updated', 'comment_created']
if action is 'created'
attachment.data.title = "#{body.actor.display_name} created an issue for project #{body.repository.full_name}"
attachment.data.text = body.issue.content.raw
attachment.data.redirectUrl = body.issue.links.html.href
else if action is 'updated'
attachment.data.title = "#{body.actor.display_name} updated an issue for project #{body.repository.full_name}"
attachment.data.text = body.changes.content.new
attachment.data.redirectUrl = body.issue.links.html.href
else if action is 'comment_created'
attachment.data.title = "#{body.actor.display_name} created a comment for project #{body.repository.full_name}"
attachment.data.text = body.comment.content.raw
attachment.data.redirectUrl = body.comment.links.html.href
when 'pullrequest'
throw new Error('Unsupported action') unless action in ['created', 'updated', 'comment_created', 'comment_deleted', 'fulfilled', 'rejected']
if action is 'created'
attachment.data.title = "#{body.actor.display_name} created a pull request for #{body.repository.name}"
attachment.data.text = body.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'updated'
attachment.data.title = "#{body.actor.display_name} updated a pull request for #{body.repository.name}"
attachment.data.text = body.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'comment_created'
attachment.data.title = "#{body.actor.display_name} created a comment for pull request #{body.pullrequest.title}"
attachment.data.text = body.comment.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'comment_deleted'
attachment.data.title = "#{body.actor.display_name} deleted a comment for pull request #{body.pullrequest.title}"
attachment.data.text = body.comment.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'fulfilled'
attachment.data.title = "#{body.actor.display_name} fulfilled the pull request #{body.pullrequest.title}"
attachment.data.text = ""
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'rejected'
attachment.data.title = "#{body.actor.display_name} rejected the pull request #{body.pullrequest.title}"
attachment.data.text = ""
attachment.data.redirectUrl = body.pullrequest.links.html.href
message.attachments = [attachment]
message
module.exports = ->
@title = 'Bitbucket'
@template = 'webhook'
@summary = util.i18n
zh: '免费的代码托管服务'
en: 'Free code management service.'
@description = util.i18n
zh: 'BitBucket 是一家采用Mercurial和Git作为分布式版本控制系统源代码托管云服务'
en: 'Bitbucket is a Git and Mercurial based source code management and collaboration solution in the cloud.'
@iconUrl = util.static 'images/icons/bitbucket@2x.png'
@_fields.push
key: 'webhookUrl'
type: 'text'
readonly: true
description: util.i18n
zh: '复制 webhook 地址到 bitbucket.org 中使用'
en: 'Copy this webhook to your bitbucket.org to use it.'
@registerEvent 'service.webhook', _receiveWebhook
| 46915 | util = require '../util'
_receiveWebhook = ({body, headers}) ->
try
[type, action] = headers['x-event-key'].split(":")
catch
throw new Error('Invalid event format')
throw new Error('Invalid event type') unless type in ['repo', 'issue', 'pullrequest']
message = {}
attachment = category: 'quote', data: {}
switch type
when 'repo'
throw new Error('Unsupported action') unless action in ['push', 'commit_comment_created']
if action is 'push'
attachment.data.title = "A new push for project #{body.repository.name}"
attachment.data.text = "Committer: #{body.actor.display_name}"
attachment.data.redirectUrl = body.repository.links.html.href
else if action is 'commit_comment_created'
attachment.data.title = "A new comment for #{body.repository.name}"
attachment.data.text = body.comment.content.raw
attachment.data.redirectUrl = body.comment.links.html.href
when 'issue'
throw new Error('Unsupported action') unless action in ['created', 'updated', 'comment_created']
if action is 'created'
attachment.data.title = "#{body.actor.display_name} created an issue for project #{body.repository.full_name}"
attachment.data.text = body.issue.content.raw
attachment.data.redirectUrl = body.issue.links.html.href
else if action is 'updated'
attachment.data.title = "#{body.actor.display_name} updated an issue for project #{body.repository.full_name}"
attachment.data.text = body.changes.content.new
attachment.data.redirectUrl = body.issue.links.html.href
else if action is 'comment_created'
attachment.data.title = "#{body.actor.display_name} created a comment for project #{body.repository.full_name}"
attachment.data.text = body.comment.content.raw
attachment.data.redirectUrl = body.comment.links.html.href
when 'pullrequest'
throw new Error('Unsupported action') unless action in ['created', 'updated', 'comment_created', 'comment_deleted', 'fulfilled', 'rejected']
if action is 'created'
attachment.data.title = "#{body.actor.display_name} created a pull request for #{body.repository.name}"
attachment.data.text = body.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'updated'
attachment.data.title = "#{body.actor.display_name} updated a pull request for #{body.repository.name}"
attachment.data.text = body.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'comment_created'
attachment.data.title = "#{body.actor.display_name} created a comment for pull request #{body.pullrequest.title}"
attachment.data.text = body.comment.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'comment_deleted'
attachment.data.title = "#{body.actor.display_name} deleted a comment for pull request #{body.pullrequest.title}"
attachment.data.text = body.comment.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'fulfilled'
attachment.data.title = "#{body.actor.display_name} fulfilled the pull request #{body.pullrequest.title}"
attachment.data.text = ""
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'rejected'
attachment.data.title = "#{body.actor.display_name} rejected the pull request #{body.pullrequest.title}"
attachment.data.text = ""
attachment.data.redirectUrl = body.pullrequest.links.html.href
message.attachments = [attachment]
message
module.exports = ->
@title = 'Bitbucket'
@template = 'webhook'
@summary = util.i18n
zh: '免费的代码托管服务'
en: 'Free code management service.'
@description = util.i18n
zh: 'BitBucket 是一家采用Mercurial和Git作为分布式版本控制系统源代码托管云服务'
en: 'Bitbucket is a Git and Mercurial based source code management and collaboration solution in the cloud.'
@iconUrl = util.static 'images/icons/bitbucket@2x.png'
@_fields.push
key: '<KEY>'
type: 'text'
readonly: true
description: util.i18n
zh: '复制 webhook 地址到 bitbucket.org 中使用'
en: 'Copy this webhook to your bitbucket.org to use it.'
@registerEvent 'service.webhook', _receiveWebhook
| true | util = require '../util'
_receiveWebhook = ({body, headers}) ->
try
[type, action] = headers['x-event-key'].split(":")
catch
throw new Error('Invalid event format')
throw new Error('Invalid event type') unless type in ['repo', 'issue', 'pullrequest']
message = {}
attachment = category: 'quote', data: {}
switch type
when 'repo'
throw new Error('Unsupported action') unless action in ['push', 'commit_comment_created']
if action is 'push'
attachment.data.title = "A new push for project #{body.repository.name}"
attachment.data.text = "Committer: #{body.actor.display_name}"
attachment.data.redirectUrl = body.repository.links.html.href
else if action is 'commit_comment_created'
attachment.data.title = "A new comment for #{body.repository.name}"
attachment.data.text = body.comment.content.raw
attachment.data.redirectUrl = body.comment.links.html.href
when 'issue'
throw new Error('Unsupported action') unless action in ['created', 'updated', 'comment_created']
if action is 'created'
attachment.data.title = "#{body.actor.display_name} created an issue for project #{body.repository.full_name}"
attachment.data.text = body.issue.content.raw
attachment.data.redirectUrl = body.issue.links.html.href
else if action is 'updated'
attachment.data.title = "#{body.actor.display_name} updated an issue for project #{body.repository.full_name}"
attachment.data.text = body.changes.content.new
attachment.data.redirectUrl = body.issue.links.html.href
else if action is 'comment_created'
attachment.data.title = "#{body.actor.display_name} created a comment for project #{body.repository.full_name}"
attachment.data.text = body.comment.content.raw
attachment.data.redirectUrl = body.comment.links.html.href
when 'pullrequest'
throw new Error('Unsupported action') unless action in ['created', 'updated', 'comment_created', 'comment_deleted', 'fulfilled', 'rejected']
if action is 'created'
attachment.data.title = "#{body.actor.display_name} created a pull request for #{body.repository.name}"
attachment.data.text = body.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'updated'
attachment.data.title = "#{body.actor.display_name} updated a pull request for #{body.repository.name}"
attachment.data.text = body.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'comment_created'
attachment.data.title = "#{body.actor.display_name} created a comment for pull request #{body.pullrequest.title}"
attachment.data.text = body.comment.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'comment_deleted'
attachment.data.title = "#{body.actor.display_name} deleted a comment for pull request #{body.pullrequest.title}"
attachment.data.text = body.comment.pullrequest.title
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'fulfilled'
attachment.data.title = "#{body.actor.display_name} fulfilled the pull request #{body.pullrequest.title}"
attachment.data.text = ""
attachment.data.redirectUrl = body.pullrequest.links.html.href
else if action is 'rejected'
attachment.data.title = "#{body.actor.display_name} rejected the pull request #{body.pullrequest.title}"
attachment.data.text = ""
attachment.data.redirectUrl = body.pullrequest.links.html.href
message.attachments = [attachment]
message
module.exports = ->
@title = 'Bitbucket'
@template = 'webhook'
@summary = util.i18n
zh: '免费的代码托管服务'
en: 'Free code management service.'
@description = util.i18n
zh: 'BitBucket 是一家采用Mercurial和Git作为分布式版本控制系统源代码托管云服务'
en: 'Bitbucket is a Git and Mercurial based source code management and collaboration solution in the cloud.'
@iconUrl = util.static 'images/icons/bitbucket@2x.png'
@_fields.push
key: 'PI:KEY:<KEY>END_PI'
type: 'text'
readonly: true
description: util.i18n
zh: '复制 webhook 地址到 bitbucket.org 中使用'
en: 'Copy this webhook to your bitbucket.org to use it.'
@registerEvent 'service.webhook', _receiveWebhook
|
[
{
"context": " Plugin _.extend {}, @props,\n key: \"#{@props.category}-#{plugin.name}\"\n plugin:",
"end": 542,
"score": 0.6393367052078247,
"start": 539,
"tag": "KEY",
"value": "\"#{"
},
{
"context": "tend {}, @props,\n key: \"#{@props.category}-#{p... | src/components/category.coffee | brianshaler/kerplunk-plugin-manager | 0 | _ = require 'lodash'
React = require 'react'
Bootstrap = require 'react-bootstrap'
Plugin = require './plugin'
Panel = React.createFactory Bootstrap.Panel
module.exports = React.createFactory React.createClass
render: ->
permissions = @props.permissions ? {}
Panel
header: "#{@props.category} (#{@props.plugins.length})"
eventKey: @props.eventKey
collapsible: true
defaultExpanded: @props.eventKey == '0'
,
_.map @props.plugins, (plugin) =>
Plugin _.extend {}, @props,
key: "#{@props.category}-#{plugin.name}"
plugin: plugin
permissions: permissions[plugin.name] ? {}
| 41079 | _ = require 'lodash'
React = require 'react'
Bootstrap = require 'react-bootstrap'
Plugin = require './plugin'
Panel = React.createFactory Bootstrap.Panel
module.exports = React.createFactory React.createClass
render: ->
permissions = @props.permissions ? {}
Panel
header: "#{@props.category} (#{@props.plugins.length})"
eventKey: @props.eventKey
collapsible: true
defaultExpanded: @props.eventKey == '0'
,
_.map @props.plugins, (plugin) =>
Plugin _.extend {}, @props,
key: <KEY>@props.category}-<KEY>plugin.<KEY>
plugin: plugin
permissions: permissions[plugin.name] ? {}
| true | _ = require 'lodash'
React = require 'react'
Bootstrap = require 'react-bootstrap'
Plugin = require './plugin'
Panel = React.createFactory Bootstrap.Panel
module.exports = React.createFactory React.createClass
render: ->
permissions = @props.permissions ? {}
Panel
header: "#{@props.category} (#{@props.plugins.length})"
eventKey: @props.eventKey
collapsible: true
defaultExpanded: @props.eventKey == '0'
,
_.map @props.plugins, (plugin) =>
Plugin _.extend {}, @props,
key: PI:KEY:<KEY>END_PI@props.category}-PI:KEY:<KEY>END_PIplugin.PI:KEY:<KEY>END_PI
plugin: plugin
permissions: permissions[plugin.name] ? {}
|
[
{
"context": "os: 0\n username: options.uuid\n password: options.token\n reconnectPeriod: 5000\n @options = _.defa",
"end": 605,
"score": 0.9961769580841064,
"start": 592,
"tag": "PASSWORD",
"value": "options.token"
}
] | src/meshblu.coffee | octoblu/mqtt-client | 1 | _ = require 'lodash'
url = require 'url'
nodeUuid = require 'node-uuid'
{EventEmitter2} = require 'eventemitter2'
debug = require('debug')('meshblu-mqtt')
PROXY_EVENTS = ['close', 'error', 'reconnect', 'offline', 'pong', 'open', 'config', 'data']
class Meshblu extends EventEmitter2
constructor: (options={}, dependencies={})->
super wildcard: true
@mqtt = dependencies.mqtt ? require 'mqtt'
defaults =
keepalive: 10
protocolId: 'MQIsdp'
protocolVersion: 4
qos: 0
username: options.uuid
password: options.token
reconnectPeriod: 5000
@options = _.defaults options, defaults
@messageCallbacks = {}
connect: (callback=->) =>
callback = _.once callback
uri = @_buildUri()
@client = @mqtt.connect uri, @options
@client.once 'connect', =>
response = _.pick @options, 'uuid', 'token'
@client.subscribe @options.uuid, qos: @options.qos
callback null, response
@client.once 'error', callback
@client.on 'message', @_messageHandler
_.each PROXY_EVENTS, (event) => @_proxy event
publish: (topic, data, fn=->) =>
throw new Error 'No Active Connection' unless @client?
if !data
dataString = {}
else if _.isString data
dataString = data
else
data.callbackId = nodeUuid.v1();
@messageCallbacks[data.callbackId] = fn;
dataString = JSON.stringify(data)
debug 'publish', topic, dataString
@client.publish topic, dataString
close: (callback) =>
@client.once 'close', callback
@client.end()
# API Functions
message: (params) =>
@publish 'message', params
subscribe: (params) =>
@client.subscribe params
unsubscribe: (params) =>
@client.unsubscribe params
update: (data, fn=->) =>
@publish 'update', data, fn
resetToken: (data, fn=->) =>
@publish 'resetToken', data, fn
getPublicKey: (data, fn=->) =>
@publish 'getPublicKey', data, fn
generateAndStoreToken: (data, fn=->) =>
@publish 'generateAndStoreToken', data, fn
whoami: (fn=->) =>
@publish 'whoami', {}, fn
# Private Functions
_buildUri: =>
defaults =
protocol: 'mqtt'
hostname: 'meshblu.octoblu.com'
port: 1883
uriOptions = _.defaults {}, @options, defaults
url.format uriOptions
_messageHandler: (uuid, message) =>
message = message.toString()
try
message = JSON.parse message
catch error
debug 'unable to parse message', message
debug '_messageHandler', message.topic, message.data
return if @handleCallbackResponse message
return @emit message.topic, message.data
handleCallbackResponse: (message) =>
id = message._request?.callbackId
return false unless id?
callback = @messageCallbacks[id] ? ->
callback message.data if message.topic == 'error'
callback null, message.data if message.topic != 'error'
delete @messageCallbacks[id]
return true
_proxy: (event) =>
@client.on event, =>
debug 'proxy ' + event, _.first arguments
@emit event, arguments...
_uuidOrObject: (data) =>
return uuid: data if _.isString data
return data
module.exports = Meshblu
| 66842 | _ = require 'lodash'
url = require 'url'
nodeUuid = require 'node-uuid'
{EventEmitter2} = require 'eventemitter2'
debug = require('debug')('meshblu-mqtt')
PROXY_EVENTS = ['close', 'error', 'reconnect', 'offline', 'pong', 'open', 'config', 'data']
class Meshblu extends EventEmitter2
constructor: (options={}, dependencies={})->
super wildcard: true
@mqtt = dependencies.mqtt ? require 'mqtt'
defaults =
keepalive: 10
protocolId: 'MQIsdp'
protocolVersion: 4
qos: 0
username: options.uuid
password: <PASSWORD>
reconnectPeriod: 5000
@options = _.defaults options, defaults
@messageCallbacks = {}
connect: (callback=->) =>
callback = _.once callback
uri = @_buildUri()
@client = @mqtt.connect uri, @options
@client.once 'connect', =>
response = _.pick @options, 'uuid', 'token'
@client.subscribe @options.uuid, qos: @options.qos
callback null, response
@client.once 'error', callback
@client.on 'message', @_messageHandler
_.each PROXY_EVENTS, (event) => @_proxy event
publish: (topic, data, fn=->) =>
throw new Error 'No Active Connection' unless @client?
if !data
dataString = {}
else if _.isString data
dataString = data
else
data.callbackId = nodeUuid.v1();
@messageCallbacks[data.callbackId] = fn;
dataString = JSON.stringify(data)
debug 'publish', topic, dataString
@client.publish topic, dataString
close: (callback) =>
@client.once 'close', callback
@client.end()
# API Functions
message: (params) =>
@publish 'message', params
subscribe: (params) =>
@client.subscribe params
unsubscribe: (params) =>
@client.unsubscribe params
update: (data, fn=->) =>
@publish 'update', data, fn
resetToken: (data, fn=->) =>
@publish 'resetToken', data, fn
getPublicKey: (data, fn=->) =>
@publish 'getPublicKey', data, fn
generateAndStoreToken: (data, fn=->) =>
@publish 'generateAndStoreToken', data, fn
whoami: (fn=->) =>
@publish 'whoami', {}, fn
# Private Functions
_buildUri: =>
defaults =
protocol: 'mqtt'
hostname: 'meshblu.octoblu.com'
port: 1883
uriOptions = _.defaults {}, @options, defaults
url.format uriOptions
_messageHandler: (uuid, message) =>
message = message.toString()
try
message = JSON.parse message
catch error
debug 'unable to parse message', message
debug '_messageHandler', message.topic, message.data
return if @handleCallbackResponse message
return @emit message.topic, message.data
handleCallbackResponse: (message) =>
id = message._request?.callbackId
return false unless id?
callback = @messageCallbacks[id] ? ->
callback message.data if message.topic == 'error'
callback null, message.data if message.topic != 'error'
delete @messageCallbacks[id]
return true
_proxy: (event) =>
@client.on event, =>
debug 'proxy ' + event, _.first arguments
@emit event, arguments...
_uuidOrObject: (data) =>
return uuid: data if _.isString data
return data
module.exports = Meshblu
| true | _ = require 'lodash'
url = require 'url'
nodeUuid = require 'node-uuid'
{EventEmitter2} = require 'eventemitter2'
debug = require('debug')('meshblu-mqtt')
PROXY_EVENTS = ['close', 'error', 'reconnect', 'offline', 'pong', 'open', 'config', 'data']
class Meshblu extends EventEmitter2
constructor: (options={}, dependencies={})->
super wildcard: true
@mqtt = dependencies.mqtt ? require 'mqtt'
defaults =
keepalive: 10
protocolId: 'MQIsdp'
protocolVersion: 4
qos: 0
username: options.uuid
password: PI:PASSWORD:<PASSWORD>END_PI
reconnectPeriod: 5000
@options = _.defaults options, defaults
@messageCallbacks = {}
connect: (callback=->) =>
callback = _.once callback
uri = @_buildUri()
@client = @mqtt.connect uri, @options
@client.once 'connect', =>
response = _.pick @options, 'uuid', 'token'
@client.subscribe @options.uuid, qos: @options.qos
callback null, response
@client.once 'error', callback
@client.on 'message', @_messageHandler
_.each PROXY_EVENTS, (event) => @_proxy event
publish: (topic, data, fn=->) =>
throw new Error 'No Active Connection' unless @client?
if !data
dataString = {}
else if _.isString data
dataString = data
else
data.callbackId = nodeUuid.v1();
@messageCallbacks[data.callbackId] = fn;
dataString = JSON.stringify(data)
debug 'publish', topic, dataString
@client.publish topic, dataString
close: (callback) =>
@client.once 'close', callback
@client.end()
# API Functions
message: (params) =>
@publish 'message', params
subscribe: (params) =>
@client.subscribe params
unsubscribe: (params) =>
@client.unsubscribe params
update: (data, fn=->) =>
@publish 'update', data, fn
resetToken: (data, fn=->) =>
@publish 'resetToken', data, fn
getPublicKey: (data, fn=->) =>
@publish 'getPublicKey', data, fn
generateAndStoreToken: (data, fn=->) =>
@publish 'generateAndStoreToken', data, fn
whoami: (fn=->) =>
@publish 'whoami', {}, fn
# Private Functions
_buildUri: =>
defaults =
protocol: 'mqtt'
hostname: 'meshblu.octoblu.com'
port: 1883
uriOptions = _.defaults {}, @options, defaults
url.format uriOptions
_messageHandler: (uuid, message) =>
message = message.toString()
try
message = JSON.parse message
catch error
debug 'unable to parse message', message
debug '_messageHandler', message.topic, message.data
return if @handleCallbackResponse message
return @emit message.topic, message.data
handleCallbackResponse: (message) =>
id = message._request?.callbackId
return false unless id?
callback = @messageCallbacks[id] ? ->
callback message.data if message.topic == 'error'
callback null, message.data if message.topic != 'error'
delete @messageCallbacks[id]
return true
_proxy: (event) =>
@client.on event, =>
debug 'proxy ' + event, _.first arguments
@emit event, arguments...
_uuidOrObject: (data) =>
return uuid: data if _.isString data
return data
module.exports = Meshblu
|
[
{
"context": " switch req.data.identification\n when 'duplicate@gmail.com'\n `{\"response\": {\n ",
"end": 201,
"score": 0.999911904335022,
"start": 182,
"tag": "EMAIL",
"value": "duplicate@gmail.com"
},
{
"context": "\": \"false\"\n ... | models/user/fixture/email_uniqueness.coffee | signonsridhar/sridhar_hbs | 0 | define(['can_fixture'], (can)->
can.fixture('GET /bss/authentication?action=checkcredentialavailability', (req, resp)->
switch req.data.identification
when 'duplicate@gmail.com'
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "false"
}}`
when 'unique@gmail.com'
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "true"
}}`
else
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "true"
}}`
)
) | 152592 | define(['can_fixture'], (can)->
can.fixture('GET /bss/authentication?action=checkcredentialavailability', (req, resp)->
switch req.data.identification
when '<EMAIL>'
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "false"
}}`
when '<EMAIL>'
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "true"
}}`
else
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "true"
}}`
)
) | true | define(['can_fixture'], (can)->
can.fixture('GET /bss/authentication?action=checkcredentialavailability', (req, resp)->
switch req.data.identification
when 'PI:EMAIL:<EMAIL>END_PI'
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "false"
}}`
when 'PI:EMAIL:<EMAIL>END_PI'
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "true"
}}`
else
`{"response": {
"service": "checkcredentialavailability",
"response_code": 100,
"error_message": "Error",
"execution_time": 7,
"timestamp": "2013-10-04T21:01:17+0000",
"version": "1.0",
"response_data": "true"
}}`
)
) |
[
{
"context": "ss Fri Jan 16 08:02:30 EST 2015\"\n # gravatar: \"5b7145b0f10f7c09be842e9e4e58826d\"\n # timestamp: 1423667274803\n # username: \"",
"end": 506,
"score": 0.9396958351135254,
"start": 474,
"tag": "PASSWORD",
"value": "5b7145b0f10f7c09be842e9e4e58826d"
},
{
"cont... | app/scripts/views/modified-by-user-field-display.coffee | OpenSourceFieldlinguistics/dative | 7 | define [
'./field-display'
'./modified-by-user-representation-set'
], (FieldDisplayView, ModifiedByUserRepresentationSetView) ->
# Modified By User Field Display View
# -----------------------------------
#
# A view for displaying a FieldDB `modifiedByUser` array, i.e., an array of
# objects with `username` `timestamp`, `gravatar`, and `appVersion`
# attributes, e.g.,
#
# appVersion: "2.38.16.07.59ss Fri Jan 16 08:02:30 EST 2015"
# gravatar: "5b7145b0f10f7c09be842e9e4e58826d"
# timestamp: 1423667274803
# username: "jdoe"
#
# NOTE: @cesine: I ignore the first modifier object because it is different
# than the rest: it has no timestamp. I think it just redundantly records
# the enterer. Am I right about that?
class ModifiedByUserFieldDisplayView extends FieldDisplayView
getRepresentationView: ->
new ModifiedByUserRepresentationSetView @context
getContext: ->
_.extend(super, subattribute: 'username')
# If the `modifiedByUser` is an array with 1 or fewer elements, we don't
# display anything.
shouldBeHidden: ->
modifiersArray = @context.value or []
# if modifiersArray.length < 2 then true else false
if modifiersArray.length < 1 then true else false
| 77066 | define [
'./field-display'
'./modified-by-user-representation-set'
], (FieldDisplayView, ModifiedByUserRepresentationSetView) ->
# Modified By User Field Display View
# -----------------------------------
#
# A view for displaying a FieldDB `modifiedByUser` array, i.e., an array of
# objects with `username` `timestamp`, `gravatar`, and `appVersion`
# attributes, e.g.,
#
# appVersion: "2.38.16.07.59ss Fri Jan 16 08:02:30 EST 2015"
# gravatar: "<PASSWORD>"
# timestamp: 1423667274803
# username: "jdoe"
#
# NOTE: @cesine: I ignore the first modifier object because it is different
# than the rest: it has no timestamp. I think it just redundantly records
# the enterer. Am I right about that?
class ModifiedByUserFieldDisplayView extends FieldDisplayView
getRepresentationView: ->
new ModifiedByUserRepresentationSetView @context
getContext: ->
_.extend(super, subattribute: 'username')
# If the `modifiedByUser` is an array with 1 or fewer elements, we don't
# display anything.
shouldBeHidden: ->
modifiersArray = @context.value or []
# if modifiersArray.length < 2 then true else false
if modifiersArray.length < 1 then true else false
| true | define [
'./field-display'
'./modified-by-user-representation-set'
], (FieldDisplayView, ModifiedByUserRepresentationSetView) ->
# Modified By User Field Display View
# -----------------------------------
#
# A view for displaying a FieldDB `modifiedByUser` array, i.e., an array of
# objects with `username` `timestamp`, `gravatar`, and `appVersion`
# attributes, e.g.,
#
# appVersion: "2.38.16.07.59ss Fri Jan 16 08:02:30 EST 2015"
# gravatar: "PI:PASSWORD:<PASSWORD>END_PI"
# timestamp: 1423667274803
# username: "jdoe"
#
# NOTE: @cesine: I ignore the first modifier object because it is different
# than the rest: it has no timestamp. I think it just redundantly records
# the enterer. Am I right about that?
class ModifiedByUserFieldDisplayView extends FieldDisplayView
getRepresentationView: ->
new ModifiedByUserRepresentationSetView @context
getContext: ->
_.extend(super, subattribute: 'username')
# If the `modifiedByUser` is an array with 1 or fewer elements, we don't
# display anything.
shouldBeHidden: ->
modifiersArray = @context.value or []
# if modifiersArray.length < 2 then true else false
if modifiersArray.length < 1 then true else false
|
[
{
"context": "\n\n user = parser.username\n password = parser.password\n\n parser.username = parser.password = \"\"\n url =",
"end": 156,
"score": 0.6893993020057678,
"start": 148,
"tag": "PASSWORD",
"value": "password"
}
] | src/components/socket.coffee | UniversityRadioYork/webcast.js | 0 | Webcast.Socket = ({url, mime, info}) ->
parser = document.createElement "a"
parser.href = url
user = parser.username
password = parser.password
parser.username = parser.password = ""
url = parser.href
socket = new WebSocket url, "webcast"
socket.mime = mime
socket.info = info
hello =
mime: mime
if user? && user != ""
hello.user = socket.user = user
if password? && password != ""
hello.password = socket.password = password
for key, value of info
hello[key] = value
send = socket.send
socket.send = null
socket.addEventListener "open", ->
send.call socket, JSON.stringify(
type: "hello"
data: hello
)
# This method takes ArrayBuffer or any TypedArray
socket.sendData = (data) ->
return unless socket.isOpen()
return unless data?.length > 0
unless data instanceof ArrayBuffer
data = data.buffer.slice data.byteOffset, data.length*data.BYTES_PER_ELEMENT
send.call socket, data
socket.sendMetadata = (metadata) ->
return unless socket.isOpen()
send.call socket, JSON.stringify(
type: "metadata"
data: metadata
)
socket.isOpen = ->
socket.readyState == WebSocket.OPEN
socket
| 117373 | Webcast.Socket = ({url, mime, info}) ->
parser = document.createElement "a"
parser.href = url
user = parser.username
password = parser.<PASSWORD>
parser.username = parser.password = ""
url = parser.href
socket = new WebSocket url, "webcast"
socket.mime = mime
socket.info = info
hello =
mime: mime
if user? && user != ""
hello.user = socket.user = user
if password? && password != ""
hello.password = socket.password = password
for key, value of info
hello[key] = value
send = socket.send
socket.send = null
socket.addEventListener "open", ->
send.call socket, JSON.stringify(
type: "hello"
data: hello
)
# This method takes ArrayBuffer or any TypedArray
socket.sendData = (data) ->
return unless socket.isOpen()
return unless data?.length > 0
unless data instanceof ArrayBuffer
data = data.buffer.slice data.byteOffset, data.length*data.BYTES_PER_ELEMENT
send.call socket, data
socket.sendMetadata = (metadata) ->
return unless socket.isOpen()
send.call socket, JSON.stringify(
type: "metadata"
data: metadata
)
socket.isOpen = ->
socket.readyState == WebSocket.OPEN
socket
| true | Webcast.Socket = ({url, mime, info}) ->
parser = document.createElement "a"
parser.href = url
user = parser.username
password = parser.PI:PASSWORD:<PASSWORD>END_PI
parser.username = parser.password = ""
url = parser.href
socket = new WebSocket url, "webcast"
socket.mime = mime
socket.info = info
hello =
mime: mime
if user? && user != ""
hello.user = socket.user = user
if password? && password != ""
hello.password = socket.password = password
for key, value of info
hello[key] = value
send = socket.send
socket.send = null
socket.addEventListener "open", ->
send.call socket, JSON.stringify(
type: "hello"
data: hello
)
# This method takes ArrayBuffer or any TypedArray
socket.sendData = (data) ->
return unless socket.isOpen()
return unless data?.length > 0
unless data instanceof ArrayBuffer
data = data.buffer.slice data.byteOffset, data.length*data.BYTES_PER_ELEMENT
send.call socket, data
socket.sendMetadata = (metadata) ->
return unless socket.isOpen()
send.call socket, JSON.stringify(
type: "metadata"
data: metadata
)
socket.isOpen = ->
socket.readyState == WebSocket.OPEN
socket
|
[
{
"context": "# Copyright 2012 Joshua Carver \n# \n# Licensed under the Apache License, Versio",
"end": 30,
"score": 0.9998670816421509,
"start": 17,
"tag": "NAME",
"value": "Joshua Carver"
}
] | src/coffeescript/charts/line_chart.coffee | jcarver989/raphy-charts | 5 | # Copyright 2012 Joshua Carver
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# @import point.coffee
# @import bezier.coffee
# @import scaling.coffee
# @import tooltip.coffee
# @import dot.coffee
# @import line_chart_options.coffee
# @import line.coffee
# @import line_bar.coffee
# @import grid.coffee
# @import base_chart.coffee
class LineChart extends BaseChart
constructor: (dom_id, options = {}) ->
super dom_id, new LineChartOptions(options)
@padding = 26
@all_points = []
@line_indices = []
@line_options = []
add_line: (args) ->
data = args.data
return if data.length < 1
points = []
for item in data
if item.length == 3 # has options
points.push new Point(item[0], item[1], item[2])
else
points.push new Point(item[0], item[1])
points_count = @all_points.length
@line_indices.push [points_count, points_count + points.length-1]
@all_points.push.apply(@all_points, points)
@line_options.push LineChartOptions.merge(@options, args.options)
return
draw_grid: (x_coordinates = [], y_coordinates = []) ->
stroke = (path, color, width) ->
path.attr({
stroke: color
"stroke-width" : width
})
x_offset = if @options.multi_axis then @options.x_padding * 2 else @options.x_padding
height = @height - @options.y_padding
width = @width - x_offset
paths = @r.set()
for val in x_coordinates
paths.push @r.path("M #{val}, #{@options.y_padding} L #{val}, #{height} Z")
for val in y_coordinates
paths.push @r.path("M #{@options.x_padding}, #{val} L #{width}, #{val} Z")
# color the axis for easier reading
if @options.multi_axis == true && @line_options.length == 2
left_side = @options.x_padding
left_stroke = @r.path("M #{left_side}, #{@options.y_padding} L #{left_side}, #{height} Z")
right_side = @width - @options.x_padding * 2
right_stroke = @r.path("M #{right_side}, #{@options.y_padding} L #{right_side}, #{height} Z")
stroke(left_stroke, @line_options[0].line_color, 2).toBack()
stroke(right_stroke, @line_options[1].line_color, 2).toBack()
# do this last to avoid overwriting the multi axis colors
stroke(paths, "#ddd", 1).toBack()
create_scalers: (points) ->
y = undefined
max_x = undefined
min_x = undefined
max_y = undefined
min_y = undefined
if @options.scale == 'log'
log = new LogScaler()
log_points = (new Point(p.x, log(p.y)) for p in points)
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(log_points)
else
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(points)
if @options.y_axis_scale.length == 2
[min_y, max_y] = @options.y_axis_scale
x_offset = if @options.multi_axis then @options.x_padding * 2 else @options.x_padding
x = new Scaler()
.domain([min_x, max_x])
.range([@options.x_padding, @width - x_offset])
y_scaler = new Scaler()
.domain([min_y, max_y])
.range([@options.y_padding, @height - @options.y_padding])
# top of chart is 0,0 so need to reflect y axis
linear = (i) => @height - y_scaler(i)
if @options.scale == 'log'
y = (i) -> linear(log(i))
else
y = linear
[x, y]
create_scalers_for_single_point: () ->
y = (i) => 0.5 * (@height - @options.y_padding)
x = (i) => 0.5 * (@width - @options.x_padding)
[x, y]
_draw_y_labels: (labels, x_offset = 0) ->
fmt = @options.label_format
size = @options.y_label_size
font_family = @options.font_family
color = @options.label_color || '#333'
padding = size + 5
# How far from the left are the labels going to be?
offset = if @options.multi_axis && x_offset > 0 then x_offset else x_offset + padding
# If there's a label for this axis then they need to be a little more to the
# right to make space for the label.
if @options.y_axis_name
offset += (size * 1.75)
# Create the label
label_color = @options.axis_name_color || '#333'
label_size = @options.axis_name_size || size
label = new Label(
@r,
5,
@height / 2,
@options.y_axis_name,
fmt,
label_size,
font_family,
label_color
).draw()
# Rotate the label so you read it upwards - this will knock out the
# X position
label.transform("T0,0R270S1")
# Fix the X position by taking the bounding box's X position and
# translating to 0
label.transform("...t0," + (label.getBBox()['x'] * -1))
[x, y] = @create_scalers(labels)
label_coordinates = []
axis = new LabelSet(@r, fmt)
.x((i) -> offset)
.y((i) -> y(labels[i].y))
.size(size)
.color(color)
for label in labels
axis.draw(label.y)
label_coordinates.push y(label.y)
label_coordinates
calc_y_label_step_size: (min_y, max_y, max_labels = @options.max_y_labels) ->
step_size = (max_y - min_y)/(max_labels-1)
# round to nearest int
if max_y > 1
step_size = Math.round(step_size)
step_size = 1 if step_size == 0
step_size
draw_y_labels: (points, x_offset = 0) ->
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(points)
if @options.y_axis_scale.length == 2
[min_y, max_y] = @options.y_axis_scale
# draw 1 label if all values are the same
return @_draw_y_labels([new Point(0, max_y)], x_offset) if max_y == min_y
labels = []
if @options.scale == 'log'
log = new LogScaler()
start = log(min_y)
end = log(max_y)
step_size = (end - start)/(@options.max_y_labels-1)
label = min_y
n = 0
while label <= max_y && n < @options.max_y_labels
label = Math.pow(10, start + step_size * n)
labels.push new Point(0, label)
n += 1
else
y = min_y
step_size = @calc_y_label_step_size(min_y, max_y)
while y <= max_y
labels.push new Point(0, y)
y += step_size
labels[labels.length-1].y = Math.round(max_y) if max_y > 1
return @_draw_y_labels(labels, x_offset)
draw_x_label: (raw_point, point) ->
fmt = @options.label_format
size = @options.x_label_size
font_family = @options.font_family
color = @options.label_color || '#333'
# If the x label is named make room for it by nudging the labels a little
# higher towards the graph
if @options.x_axis_name
y = @height - (size * 2)
else
y = @height - size
label = if raw_point.is_date_type == true then new Date(raw_point.x) else Math.round(raw_point.x)
new Label(
@r,
point.x,
y,
label,
fmt,
size,
font_family,
color
).draw()
draw_x_labels: (raw_points, points) ->
label_coordinates = []
max_labels = @options.max_x_labels
# Is this axis named? If so, add the label name to the axis
if @options.x_axis_name
color = @options.axis_name_color || '#333'
label_size = @options.axis_name_size || @options.x_label_size
label = new Label(
@r,
(@width / 2),
@height - (@options.x_label_size / 2),
@options.x_axis_name,
@options.label_format,
label_size,
@options.font_family,
color
).draw()
# draw first
@draw_x_label(raw_points[0], points[0])
label_coordinates.push points[0].x
return if max_labels < 2
# draw last
last = points.length-1
@draw_x_label(raw_points[last], points[last])
label_coordinates.push points[last].x
return if max_labels < 3
len = points.length-1
step_size = len / (max_labels-1)
# when irrational
rounded_step_size = Math.round(step_size)
step_size = rounded_step_size+1 if step_size != rounded_step_size
# draw labels in between first and last
i = step_size
while i < len
raw_point = raw_points[i]
point = points[i]
@draw_x_label(raw_point, point)
label_coordinates.push point.x
i += step_size
label_coordinates
draw_line: (raw_points, points, options) ->
if @options.render == "bar"
new LineBar(
@r,
raw_points,
points,
@height,
@width,
options
).draw()
else
new Line(
@r,
raw_points,
points,
@height,
@width,
options
).draw()
clear: () ->
super()
@all_points = []
@line_indices = []
@line_options = []
draw: () ->
return if @all_points.length < 1
@r.clear()
[x, y] = if @all_points.length > 1 then @create_scalers(@all_points) else @create_scalers_for_single_point()
for line_indices, i in @line_indices
[begin, end] = line_indices
raw_points = @all_points[begin..end]
# scale points on their own axis if multi axis is set
if @options.multi_axis
[line_x, line_y] = if @all_points.length > 2 then @create_scalers(raw_points) else @create_scalers_for_single_point()
else
line_x = x
line_y = y
points = (new Point(line_x(point.x), line_y(point.y)) for point in raw_points)
options = @line_options[i]
@draw_line(raw_points, points, options)
if i == 0
@x_label_coordinates = @draw_x_labels(raw_points, points) if @options.show_x_labels == true
if @options.multi_axis && @options.show_y_labels == true
@y_label_coordinates = @draw_y_labels(raw_points)
else if @options.show_y_labels == true
@y_label_coordinates = @draw_y_labels(@all_points)
@draw_grid(@x_label_coordinates, @y_label_coordinates) if @options.show_grid == true
else if i == 1 && @options.multi_axis
@draw_y_labels(raw_points, @width - @options.x_padding) if @options.show_y_labels == true
return
exports.LineChart = LineChart
| 215462 | # Copyright 2012 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# @import point.coffee
# @import bezier.coffee
# @import scaling.coffee
# @import tooltip.coffee
# @import dot.coffee
# @import line_chart_options.coffee
# @import line.coffee
# @import line_bar.coffee
# @import grid.coffee
# @import base_chart.coffee
class LineChart extends BaseChart
constructor: (dom_id, options = {}) ->
super dom_id, new LineChartOptions(options)
@padding = 26
@all_points = []
@line_indices = []
@line_options = []
add_line: (args) ->
data = args.data
return if data.length < 1
points = []
for item in data
if item.length == 3 # has options
points.push new Point(item[0], item[1], item[2])
else
points.push new Point(item[0], item[1])
points_count = @all_points.length
@line_indices.push [points_count, points_count + points.length-1]
@all_points.push.apply(@all_points, points)
@line_options.push LineChartOptions.merge(@options, args.options)
return
draw_grid: (x_coordinates = [], y_coordinates = []) ->
stroke = (path, color, width) ->
path.attr({
stroke: color
"stroke-width" : width
})
x_offset = if @options.multi_axis then @options.x_padding * 2 else @options.x_padding
height = @height - @options.y_padding
width = @width - x_offset
paths = @r.set()
for val in x_coordinates
paths.push @r.path("M #{val}, #{@options.y_padding} L #{val}, #{height} Z")
for val in y_coordinates
paths.push @r.path("M #{@options.x_padding}, #{val} L #{width}, #{val} Z")
# color the axis for easier reading
if @options.multi_axis == true && @line_options.length == 2
left_side = @options.x_padding
left_stroke = @r.path("M #{left_side}, #{@options.y_padding} L #{left_side}, #{height} Z")
right_side = @width - @options.x_padding * 2
right_stroke = @r.path("M #{right_side}, #{@options.y_padding} L #{right_side}, #{height} Z")
stroke(left_stroke, @line_options[0].line_color, 2).toBack()
stroke(right_stroke, @line_options[1].line_color, 2).toBack()
# do this last to avoid overwriting the multi axis colors
stroke(paths, "#ddd", 1).toBack()
create_scalers: (points) ->
y = undefined
max_x = undefined
min_x = undefined
max_y = undefined
min_y = undefined
if @options.scale == 'log'
log = new LogScaler()
log_points = (new Point(p.x, log(p.y)) for p in points)
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(log_points)
else
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(points)
if @options.y_axis_scale.length == 2
[min_y, max_y] = @options.y_axis_scale
x_offset = if @options.multi_axis then @options.x_padding * 2 else @options.x_padding
x = new Scaler()
.domain([min_x, max_x])
.range([@options.x_padding, @width - x_offset])
y_scaler = new Scaler()
.domain([min_y, max_y])
.range([@options.y_padding, @height - @options.y_padding])
# top of chart is 0,0 so need to reflect y axis
linear = (i) => @height - y_scaler(i)
if @options.scale == 'log'
y = (i) -> linear(log(i))
else
y = linear
[x, y]
create_scalers_for_single_point: () ->
y = (i) => 0.5 * (@height - @options.y_padding)
x = (i) => 0.5 * (@width - @options.x_padding)
[x, y]
_draw_y_labels: (labels, x_offset = 0) ->
fmt = @options.label_format
size = @options.y_label_size
font_family = @options.font_family
color = @options.label_color || '#333'
padding = size + 5
# How far from the left are the labels going to be?
offset = if @options.multi_axis && x_offset > 0 then x_offset else x_offset + padding
# If there's a label for this axis then they need to be a little more to the
# right to make space for the label.
if @options.y_axis_name
offset += (size * 1.75)
# Create the label
label_color = @options.axis_name_color || '#333'
label_size = @options.axis_name_size || size
label = new Label(
@r,
5,
@height / 2,
@options.y_axis_name,
fmt,
label_size,
font_family,
label_color
).draw()
# Rotate the label so you read it upwards - this will knock out the
# X position
label.transform("T0,0R270S1")
# Fix the X position by taking the bounding box's X position and
# translating to 0
label.transform("...t0," + (label.getBBox()['x'] * -1))
[x, y] = @create_scalers(labels)
label_coordinates = []
axis = new LabelSet(@r, fmt)
.x((i) -> offset)
.y((i) -> y(labels[i].y))
.size(size)
.color(color)
for label in labels
axis.draw(label.y)
label_coordinates.push y(label.y)
label_coordinates
calc_y_label_step_size: (min_y, max_y, max_labels = @options.max_y_labels) ->
step_size = (max_y - min_y)/(max_labels-1)
# round to nearest int
if max_y > 1
step_size = Math.round(step_size)
step_size = 1 if step_size == 0
step_size
draw_y_labels: (points, x_offset = 0) ->
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(points)
if @options.y_axis_scale.length == 2
[min_y, max_y] = @options.y_axis_scale
# draw 1 label if all values are the same
return @_draw_y_labels([new Point(0, max_y)], x_offset) if max_y == min_y
labels = []
if @options.scale == 'log'
log = new LogScaler()
start = log(min_y)
end = log(max_y)
step_size = (end - start)/(@options.max_y_labels-1)
label = min_y
n = 0
while label <= max_y && n < @options.max_y_labels
label = Math.pow(10, start + step_size * n)
labels.push new Point(0, label)
n += 1
else
y = min_y
step_size = @calc_y_label_step_size(min_y, max_y)
while y <= max_y
labels.push new Point(0, y)
y += step_size
labels[labels.length-1].y = Math.round(max_y) if max_y > 1
return @_draw_y_labels(labels, x_offset)
draw_x_label: (raw_point, point) ->
fmt = @options.label_format
size = @options.x_label_size
font_family = @options.font_family
color = @options.label_color || '#333'
# If the x label is named make room for it by nudging the labels a little
# higher towards the graph
if @options.x_axis_name
y = @height - (size * 2)
else
y = @height - size
label = if raw_point.is_date_type == true then new Date(raw_point.x) else Math.round(raw_point.x)
new Label(
@r,
point.x,
y,
label,
fmt,
size,
font_family,
color
).draw()
draw_x_labels: (raw_points, points) ->
label_coordinates = []
max_labels = @options.max_x_labels
# Is this axis named? If so, add the label name to the axis
if @options.x_axis_name
color = @options.axis_name_color || '#333'
label_size = @options.axis_name_size || @options.x_label_size
label = new Label(
@r,
(@width / 2),
@height - (@options.x_label_size / 2),
@options.x_axis_name,
@options.label_format,
label_size,
@options.font_family,
color
).draw()
# draw first
@draw_x_label(raw_points[0], points[0])
label_coordinates.push points[0].x
return if max_labels < 2
# draw last
last = points.length-1
@draw_x_label(raw_points[last], points[last])
label_coordinates.push points[last].x
return if max_labels < 3
len = points.length-1
step_size = len / (max_labels-1)
# when irrational
rounded_step_size = Math.round(step_size)
step_size = rounded_step_size+1 if step_size != rounded_step_size
# draw labels in between first and last
i = step_size
while i < len
raw_point = raw_points[i]
point = points[i]
@draw_x_label(raw_point, point)
label_coordinates.push point.x
i += step_size
label_coordinates
draw_line: (raw_points, points, options) ->
if @options.render == "bar"
new LineBar(
@r,
raw_points,
points,
@height,
@width,
options
).draw()
else
new Line(
@r,
raw_points,
points,
@height,
@width,
options
).draw()
clear: () ->
super()
@all_points = []
@line_indices = []
@line_options = []
draw: () ->
return if @all_points.length < 1
@r.clear()
[x, y] = if @all_points.length > 1 then @create_scalers(@all_points) else @create_scalers_for_single_point()
for line_indices, i in @line_indices
[begin, end] = line_indices
raw_points = @all_points[begin..end]
# scale points on their own axis if multi axis is set
if @options.multi_axis
[line_x, line_y] = if @all_points.length > 2 then @create_scalers(raw_points) else @create_scalers_for_single_point()
else
line_x = x
line_y = y
points = (new Point(line_x(point.x), line_y(point.y)) for point in raw_points)
options = @line_options[i]
@draw_line(raw_points, points, options)
if i == 0
@x_label_coordinates = @draw_x_labels(raw_points, points) if @options.show_x_labels == true
if @options.multi_axis && @options.show_y_labels == true
@y_label_coordinates = @draw_y_labels(raw_points)
else if @options.show_y_labels == true
@y_label_coordinates = @draw_y_labels(@all_points)
@draw_grid(@x_label_coordinates, @y_label_coordinates) if @options.show_grid == true
else if i == 1 && @options.multi_axis
@draw_y_labels(raw_points, @width - @options.x_padding) if @options.show_y_labels == true
return
exports.LineChart = LineChart
| true | # Copyright 2012 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# @import point.coffee
# @import bezier.coffee
# @import scaling.coffee
# @import tooltip.coffee
# @import dot.coffee
# @import line_chart_options.coffee
# @import line.coffee
# @import line_bar.coffee
# @import grid.coffee
# @import base_chart.coffee
class LineChart extends BaseChart
constructor: (dom_id, options = {}) ->
super dom_id, new LineChartOptions(options)
@padding = 26
@all_points = []
@line_indices = []
@line_options = []
add_line: (args) ->
data = args.data
return if data.length < 1
points = []
for item in data
if item.length == 3 # has options
points.push new Point(item[0], item[1], item[2])
else
points.push new Point(item[0], item[1])
points_count = @all_points.length
@line_indices.push [points_count, points_count + points.length-1]
@all_points.push.apply(@all_points, points)
@line_options.push LineChartOptions.merge(@options, args.options)
return
draw_grid: (x_coordinates = [], y_coordinates = []) ->
stroke = (path, color, width) ->
path.attr({
stroke: color
"stroke-width" : width
})
x_offset = if @options.multi_axis then @options.x_padding * 2 else @options.x_padding
height = @height - @options.y_padding
width = @width - x_offset
paths = @r.set()
for val in x_coordinates
paths.push @r.path("M #{val}, #{@options.y_padding} L #{val}, #{height} Z")
for val in y_coordinates
paths.push @r.path("M #{@options.x_padding}, #{val} L #{width}, #{val} Z")
# color the axis for easier reading
if @options.multi_axis == true && @line_options.length == 2
left_side = @options.x_padding
left_stroke = @r.path("M #{left_side}, #{@options.y_padding} L #{left_side}, #{height} Z")
right_side = @width - @options.x_padding * 2
right_stroke = @r.path("M #{right_side}, #{@options.y_padding} L #{right_side}, #{height} Z")
stroke(left_stroke, @line_options[0].line_color, 2).toBack()
stroke(right_stroke, @line_options[1].line_color, 2).toBack()
# do this last to avoid overwriting the multi axis colors
stroke(paths, "#ddd", 1).toBack()
create_scalers: (points) ->
y = undefined
max_x = undefined
min_x = undefined
max_y = undefined
min_y = undefined
if @options.scale == 'log'
log = new LogScaler()
log_points = (new Point(p.x, log(p.y)) for p in points)
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(log_points)
else
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(points)
if @options.y_axis_scale.length == 2
[min_y, max_y] = @options.y_axis_scale
x_offset = if @options.multi_axis then @options.x_padding * 2 else @options.x_padding
x = new Scaler()
.domain([min_x, max_x])
.range([@options.x_padding, @width - x_offset])
y_scaler = new Scaler()
.domain([min_y, max_y])
.range([@options.y_padding, @height - @options.y_padding])
# top of chart is 0,0 so need to reflect y axis
linear = (i) => @height - y_scaler(i)
if @options.scale == 'log'
y = (i) -> linear(log(i))
else
y = linear
[x, y]
create_scalers_for_single_point: () ->
y = (i) => 0.5 * (@height - @options.y_padding)
x = (i) => 0.5 * (@width - @options.x_padding)
[x, y]
_draw_y_labels: (labels, x_offset = 0) ->
fmt = @options.label_format
size = @options.y_label_size
font_family = @options.font_family
color = @options.label_color || '#333'
padding = size + 5
# How far from the left are the labels going to be?
offset = if @options.multi_axis && x_offset > 0 then x_offset else x_offset + padding
# If there's a label for this axis then they need to be a little more to the
# right to make space for the label.
if @options.y_axis_name
offset += (size * 1.75)
# Create the label
label_color = @options.axis_name_color || '#333'
label_size = @options.axis_name_size || size
label = new Label(
@r,
5,
@height / 2,
@options.y_axis_name,
fmt,
label_size,
font_family,
label_color
).draw()
# Rotate the label so you read it upwards - this will knock out the
# X position
label.transform("T0,0R270S1")
# Fix the X position by taking the bounding box's X position and
# translating to 0
label.transform("...t0," + (label.getBBox()['x'] * -1))
[x, y] = @create_scalers(labels)
label_coordinates = []
axis = new LabelSet(@r, fmt)
.x((i) -> offset)
.y((i) -> y(labels[i].y))
.size(size)
.color(color)
for label in labels
axis.draw(label.y)
label_coordinates.push y(label.y)
label_coordinates
calc_y_label_step_size: (min_y, max_y, max_labels = @options.max_y_labels) ->
step_size = (max_y - min_y)/(max_labels-1)
# round to nearest int
if max_y > 1
step_size = Math.round(step_size)
step_size = 1 if step_size == 0
step_size
draw_y_labels: (points, x_offset = 0) ->
[max_x, min_x, max_y, min_y] = Scaling.get_ranges_for_points(points)
if @options.y_axis_scale.length == 2
[min_y, max_y] = @options.y_axis_scale
# draw 1 label if all values are the same
return @_draw_y_labels([new Point(0, max_y)], x_offset) if max_y == min_y
labels = []
if @options.scale == 'log'
log = new LogScaler()
start = log(min_y)
end = log(max_y)
step_size = (end - start)/(@options.max_y_labels-1)
label = min_y
n = 0
while label <= max_y && n < @options.max_y_labels
label = Math.pow(10, start + step_size * n)
labels.push new Point(0, label)
n += 1
else
y = min_y
step_size = @calc_y_label_step_size(min_y, max_y)
while y <= max_y
labels.push new Point(0, y)
y += step_size
labels[labels.length-1].y = Math.round(max_y) if max_y > 1
return @_draw_y_labels(labels, x_offset)
draw_x_label: (raw_point, point) ->
fmt = @options.label_format
size = @options.x_label_size
font_family = @options.font_family
color = @options.label_color || '#333'
# If the x label is named make room for it by nudging the labels a little
# higher towards the graph
if @options.x_axis_name
y = @height - (size * 2)
else
y = @height - size
label = if raw_point.is_date_type == true then new Date(raw_point.x) else Math.round(raw_point.x)
new Label(
@r,
point.x,
y,
label,
fmt,
size,
font_family,
color
).draw()
draw_x_labels: (raw_points, points) ->
label_coordinates = []
max_labels = @options.max_x_labels
# Is this axis named? If so, add the label name to the axis
if @options.x_axis_name
color = @options.axis_name_color || '#333'
label_size = @options.axis_name_size || @options.x_label_size
label = new Label(
@r,
(@width / 2),
@height - (@options.x_label_size / 2),
@options.x_axis_name,
@options.label_format,
label_size,
@options.font_family,
color
).draw()
# draw first
@draw_x_label(raw_points[0], points[0])
label_coordinates.push points[0].x
return if max_labels < 2
# draw last
last = points.length-1
@draw_x_label(raw_points[last], points[last])
label_coordinates.push points[last].x
return if max_labels < 3
len = points.length-1
step_size = len / (max_labels-1)
# when irrational
rounded_step_size = Math.round(step_size)
step_size = rounded_step_size+1 if step_size != rounded_step_size
# draw labels in between first and last
i = step_size
while i < len
raw_point = raw_points[i]
point = points[i]
@draw_x_label(raw_point, point)
label_coordinates.push point.x
i += step_size
label_coordinates
draw_line: (raw_points, points, options) ->
if @options.render == "bar"
new LineBar(
@r,
raw_points,
points,
@height,
@width,
options
).draw()
else
new Line(
@r,
raw_points,
points,
@height,
@width,
options
).draw()
clear: () ->
super()
@all_points = []
@line_indices = []
@line_options = []
draw: () ->
return if @all_points.length < 1
@r.clear()
[x, y] = if @all_points.length > 1 then @create_scalers(@all_points) else @create_scalers_for_single_point()
for line_indices, i in @line_indices
[begin, end] = line_indices
raw_points = @all_points[begin..end]
# scale points on their own axis if multi axis is set
if @options.multi_axis
[line_x, line_y] = if @all_points.length > 2 then @create_scalers(raw_points) else @create_scalers_for_single_point()
else
line_x = x
line_y = y
points = (new Point(line_x(point.x), line_y(point.y)) for point in raw_points)
options = @line_options[i]
@draw_line(raw_points, points, options)
if i == 0
@x_label_coordinates = @draw_x_labels(raw_points, points) if @options.show_x_labels == true
if @options.multi_axis && @options.show_y_labels == true
@y_label_coordinates = @draw_y_labels(raw_points)
else if @options.show_y_labels == true
@y_label_coordinates = @draw_y_labels(@all_points)
@draw_grid(@x_label_coordinates, @y_label_coordinates) if @options.show_grid == true
else if i == 1 && @options.multi_axis
@draw_y_labels(raw_points, @width - @options.x_padding) if @options.show_y_labels == true
return
exports.LineChart = LineChart
|
[
{
"context": "text onto HTML canvas elements\n\nWritten in 2013 by Karl Naylor <kpn103@yahoo.com>\n\nTo the extent possible under ",
"end": 106,
"score": 0.9998908638954163,
"start": 95,
"tag": "NAME",
"value": "Karl Naylor"
},
{
"context": " canvas elements\n\nWritten in 2013 by Kar... | src/content/coffee/handywriteOnCanvas/graphemes/misc.coffee | karlorg/phonetify | 1 | ###
handywriteOnCanvas - renders handywrite text onto HTML canvas elements
Written in 2013 by Karl Naylor <kpn103@yahoo.com>
To the extent possible under law, the author(s) have dedicated all
copyright and related and neighboring rights to this software to the
public domain worldwide. This software is distributed without any
warranty.
You should have received a copy of the CC0 Public Domain Dedication
along with this software. If not, see
<http://creativecommons.org/publicdomain/zero/1.0/>.
###
define ['../grapheme', '../boxes', '../geometry'], (Grapheme, boxes, geometry) ->
'use strict'
graphemes = {}
graphemes.classes = {}
TAU = 2 * Math.PI # TAU is one full turn in radians
lWidth = 1
iRadius = eRadius = 0.05
graphemes.classes.i = class I extends Grapheme
constructor: ->
super()
@_entryAngle = @_exitAngle = 0
@_determined = false
return
getKeywords: ->
obj = super()
obj.circle = true
return obj
_withinHalfTurnOf: (angle, target) ->
halfTurn = TAU * 0.5
angle += TAU while angle < target - halfTurn
angle -= TAU while angle >= target + halfTurn
return angle
decide: (force=false, fromLeft=false, fromRight=false) ->
return if @_determined and not force
# `angles` will contain the angles at which our neighbours enter
# and leave us. If one or both neighbours are missing or
# undecided, we will have less than two angles.
angles = []
if @left and not fromLeft
@left.decide(force, false, true)
angles.push(@left.getExitAngle())
if @right and not fromRight
@right.decide(force, true, false)
angles.push(@right.getEntryAngle())
switch angles.length
when 0
@_entryAngle = @_exitAngle = 0
@_determined = true
return
when 1
@_entryAngle = @_exitAngle = angles[0]
@_determined = true
return
when 2
angles[1] = @_withinHalfTurnOf(angles[1], angles[0])
# if angle between a0 and a1 is obtuse, flip a1 to make it acute
theta = Math.min(
Math.abs(angles[1] - angles[0]),
Math.abs(angles[0] - angles[1]))
if theta > TAU / 4
angles[1] = @_withinHalfTurnOf(angles[1] + TAU / 2, angles[0])
# average the two angles to get our entry/exit angle
@_entryAngle = @_exitAngle = (angles[0] + angles[1]) * 0.5
@_determined = true
return
getBoundingBox: ->
r = iRadius
theta = @_entryAngle
sinTheta = Math.sin(theta)
cosTheta = Math.cos(theta)
boxC = new boxes.BoundingBox(
- r - r * sinTheta,
r * cosTheta - r,
r - r * sinTheta,
r * cosTheta + r )
boxCc = new boxes.BoundingBox(
- r - r * sinTheta,
- r * cosTheta - r,
r - r * sinTheta,
r * cosTheta - r )
return boxes.combineBoundingBoxes [boxC, boxCc]
getEntryAngle: -> @_entryAngle
getExitAngle: -> @_exitAngle
getFinishPoint: -> { x: 0, y: 0 }
render: (ctx) ->
ctx.save()
ctx.rotate(@_entryAngle)
ctx.beginPath()
ctx.arc(
0, iRadius,
iRadius,
0, TAU,
0) # anticlockwise
ctx.stroke()
ctx.beginPath()
ctx.arc(
0, - iRadius,
iRadius,
0, TAU,
1) # anticlockwise
ctx.stroke()
ctx.restore()
return
graphemes.classes.aw = class AW extends Grapheme
constructor: ->
super()
@_midlineAngle = 0
@_determined = false
return
getKeywords: ->
obj = super()
obj.circle = true # for now, OO is considered circle-like
return obj
decide: (force=false, fromLeft=false, fromRight=false) ->
return if @_determined and not force
# `angles` will contain the angles at which our neighbours enter
# us. If one or both neighbours are missing or undecided, we
# will have less than two angles.
angles = []
if @left and not fromLeft
@left.decide(force, false, true)
angles.push @left.getExitAngle()
if @right and not fromRight
@right.decide(force, true, false)
angles.push @right.getEntryAngle() + TAU / 2
switch angles.length
when 0
@_midlineAngle = TAU / 4
@_determined = true
return
when 1
@_midlineAngle = angles[0] - TAU / 4
@_determined = true
return
when 2
points = (geometry.vectorFromAngle(angle).p1 for angle in angles)
midlineVector = new geometry.Vector(
{ x: 0, y: 0 },
geometry.pointSum(points)
)
@_midlineAngle = midlineVector.angle()
@_determined = true
return
_getControlPoints: ->
return [
{ x: 0, y: 0 }
geometry.rotatePoint(
{ x: lWidth / 3, y: - (lWidth / 6) }, @_midlineAngle)
geometry.rotatePoint(
{ x: lWidth / 3, y: (lWidth / 6) }, @_midlineAngle)
{ x: 0, y: 0 }
]
getBoundingBox: ->
return boxes.boxFromPoints(@_getControlPoints())
getEntryAngle: -> @_midLineAngle
getExitAngle: -> @_midLineAngle
getFinishPoint: -> { x: 0, y: 0 }
render: (ctx) ->
cp = @_getControlPoints()
ctx.beginPath()
ctx.moveTo(0,0)
ctx.bezierCurveTo(
cp[1].x, cp[1].y,
cp[2].x, cp[2].y,
cp[3].x, cp[3].y)
ctx.stroke()
return
return graphemes
| 4855 | ###
handywriteOnCanvas - renders handywrite text onto HTML canvas elements
Written in 2013 by <NAME> <<EMAIL>>
To the extent possible under law, the author(s) have dedicated all
copyright and related and neighboring rights to this software to the
public domain worldwide. This software is distributed without any
warranty.
You should have received a copy of the CC0 Public Domain Dedication
along with this software. If not, see
<http://creativecommons.org/publicdomain/zero/1.0/>.
###
define ['../grapheme', '../boxes', '../geometry'], (Grapheme, boxes, geometry) ->
'use strict'
graphemes = {}
graphemes.classes = {}
TAU = 2 * Math.PI # TAU is one full turn in radians
lWidth = 1
iRadius = eRadius = 0.05
graphemes.classes.i = class I extends Grapheme
constructor: ->
super()
@_entryAngle = @_exitAngle = 0
@_determined = false
return
getKeywords: ->
obj = super()
obj.circle = true
return obj
_withinHalfTurnOf: (angle, target) ->
halfTurn = TAU * 0.5
angle += TAU while angle < target - halfTurn
angle -= TAU while angle >= target + halfTurn
return angle
decide: (force=false, fromLeft=false, fromRight=false) ->
return if @_determined and not force
# `angles` will contain the angles at which our neighbours enter
# and leave us. If one or both neighbours are missing or
# undecided, we will have less than two angles.
angles = []
if @left and not fromLeft
@left.decide(force, false, true)
angles.push(@left.getExitAngle())
if @right and not fromRight
@right.decide(force, true, false)
angles.push(@right.getEntryAngle())
switch angles.length
when 0
@_entryAngle = @_exitAngle = 0
@_determined = true
return
when 1
@_entryAngle = @_exitAngle = angles[0]
@_determined = true
return
when 2
angles[1] = @_withinHalfTurnOf(angles[1], angles[0])
# if angle between a0 and a1 is obtuse, flip a1 to make it acute
theta = Math.min(
Math.abs(angles[1] - angles[0]),
Math.abs(angles[0] - angles[1]))
if theta > TAU / 4
angles[1] = @_withinHalfTurnOf(angles[1] + TAU / 2, angles[0])
# average the two angles to get our entry/exit angle
@_entryAngle = @_exitAngle = (angles[0] + angles[1]) * 0.5
@_determined = true
return
getBoundingBox: ->
r = iRadius
theta = @_entryAngle
sinTheta = Math.sin(theta)
cosTheta = Math.cos(theta)
boxC = new boxes.BoundingBox(
- r - r * sinTheta,
r * cosTheta - r,
r - r * sinTheta,
r * cosTheta + r )
boxCc = new boxes.BoundingBox(
- r - r * sinTheta,
- r * cosTheta - r,
r - r * sinTheta,
r * cosTheta - r )
return boxes.combineBoundingBoxes [boxC, boxCc]
getEntryAngle: -> @_entryAngle
getExitAngle: -> @_exitAngle
getFinishPoint: -> { x: 0, y: 0 }
render: (ctx) ->
ctx.save()
ctx.rotate(@_entryAngle)
ctx.beginPath()
ctx.arc(
0, iRadius,
iRadius,
0, TAU,
0) # anticlockwise
ctx.stroke()
ctx.beginPath()
ctx.arc(
0, - iRadius,
iRadius,
0, TAU,
1) # anticlockwise
ctx.stroke()
ctx.restore()
return
graphemes.classes.aw = class AW extends Grapheme
constructor: ->
super()
@_midlineAngle = 0
@_determined = false
return
getKeywords: ->
obj = super()
obj.circle = true # for now, OO is considered circle-like
return obj
decide: (force=false, fromLeft=false, fromRight=false) ->
return if @_determined and not force
# `angles` will contain the angles at which our neighbours enter
# us. If one or both neighbours are missing or undecided, we
# will have less than two angles.
angles = []
if @left and not fromLeft
@left.decide(force, false, true)
angles.push @left.getExitAngle()
if @right and not fromRight
@right.decide(force, true, false)
angles.push @right.getEntryAngle() + TAU / 2
switch angles.length
when 0
@_midlineAngle = TAU / 4
@_determined = true
return
when 1
@_midlineAngle = angles[0] - TAU / 4
@_determined = true
return
when 2
points = (geometry.vectorFromAngle(angle).p1 for angle in angles)
midlineVector = new geometry.Vector(
{ x: 0, y: 0 },
geometry.pointSum(points)
)
@_midlineAngle = midlineVector.angle()
@_determined = true
return
_getControlPoints: ->
return [
{ x: 0, y: 0 }
geometry.rotatePoint(
{ x: lWidth / 3, y: - (lWidth / 6) }, @_midlineAngle)
geometry.rotatePoint(
{ x: lWidth / 3, y: (lWidth / 6) }, @_midlineAngle)
{ x: 0, y: 0 }
]
getBoundingBox: ->
return boxes.boxFromPoints(@_getControlPoints())
getEntryAngle: -> @_midLineAngle
getExitAngle: -> @_midLineAngle
getFinishPoint: -> { x: 0, y: 0 }
render: (ctx) ->
cp = @_getControlPoints()
ctx.beginPath()
ctx.moveTo(0,0)
ctx.bezierCurveTo(
cp[1].x, cp[1].y,
cp[2].x, cp[2].y,
cp[3].x, cp[3].y)
ctx.stroke()
return
return graphemes
| true | ###
handywriteOnCanvas - renders handywrite text onto HTML canvas elements
Written in 2013 by PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
To the extent possible under law, the author(s) have dedicated all
copyright and related and neighboring rights to this software to the
public domain worldwide. This software is distributed without any
warranty.
You should have received a copy of the CC0 Public Domain Dedication
along with this software. If not, see
<http://creativecommons.org/publicdomain/zero/1.0/>.
###
define ['../grapheme', '../boxes', '../geometry'], (Grapheme, boxes, geometry) ->
'use strict'
graphemes = {}
graphemes.classes = {}
TAU = 2 * Math.PI # TAU is one full turn in radians
lWidth = 1
iRadius = eRadius = 0.05
graphemes.classes.i = class I extends Grapheme
constructor: ->
super()
@_entryAngle = @_exitAngle = 0
@_determined = false
return
getKeywords: ->
obj = super()
obj.circle = true
return obj
_withinHalfTurnOf: (angle, target) ->
halfTurn = TAU * 0.5
angle += TAU while angle < target - halfTurn
angle -= TAU while angle >= target + halfTurn
return angle
decide: (force=false, fromLeft=false, fromRight=false) ->
return if @_determined and not force
# `angles` will contain the angles at which our neighbours enter
# and leave us. If one or both neighbours are missing or
# undecided, we will have less than two angles.
angles = []
if @left and not fromLeft
@left.decide(force, false, true)
angles.push(@left.getExitAngle())
if @right and not fromRight
@right.decide(force, true, false)
angles.push(@right.getEntryAngle())
switch angles.length
when 0
@_entryAngle = @_exitAngle = 0
@_determined = true
return
when 1
@_entryAngle = @_exitAngle = angles[0]
@_determined = true
return
when 2
angles[1] = @_withinHalfTurnOf(angles[1], angles[0])
# if angle between a0 and a1 is obtuse, flip a1 to make it acute
theta = Math.min(
Math.abs(angles[1] - angles[0]),
Math.abs(angles[0] - angles[1]))
if theta > TAU / 4
angles[1] = @_withinHalfTurnOf(angles[1] + TAU / 2, angles[0])
# average the two angles to get our entry/exit angle
@_entryAngle = @_exitAngle = (angles[0] + angles[1]) * 0.5
@_determined = true
return
getBoundingBox: ->
r = iRadius
theta = @_entryAngle
sinTheta = Math.sin(theta)
cosTheta = Math.cos(theta)
boxC = new boxes.BoundingBox(
- r - r * sinTheta,
r * cosTheta - r,
r - r * sinTheta,
r * cosTheta + r )
boxCc = new boxes.BoundingBox(
- r - r * sinTheta,
- r * cosTheta - r,
r - r * sinTheta,
r * cosTheta - r )
return boxes.combineBoundingBoxes [boxC, boxCc]
getEntryAngle: -> @_entryAngle
getExitAngle: -> @_exitAngle
getFinishPoint: -> { x: 0, y: 0 }
render: (ctx) ->
ctx.save()
ctx.rotate(@_entryAngle)
ctx.beginPath()
ctx.arc(
0, iRadius,
iRadius,
0, TAU,
0) # anticlockwise
ctx.stroke()
ctx.beginPath()
ctx.arc(
0, - iRadius,
iRadius,
0, TAU,
1) # anticlockwise
ctx.stroke()
ctx.restore()
return
# Grapheme "aw": a small closed bezier loop whose midline orientation is
# derived from the neighbouring graphemes' entry/exit angles.
graphemes.classes.aw = class AW extends Grapheme
  constructor: ->
    super()
    @_midlineAngle = 0
    @_determined = false
    return

  getKeywords: ->
    obj = super()
    obj.circle = true # for now, treated as circle-like (NOTE(review): comment matched OO's; confirm intent)
    return obj

  # Decide the midline angle from the neighbours. `fromLeft`/`fromRight`
  # break recursion when a neighbour is the one asking us to decide.
  decide: (force=false, fromLeft=false, fromRight=false) ->
    return if @_determined and not force
    # `angles` will contain the angles at which our neighbours enter
    # us. If one or both neighbours are missing or undecided, we
    # will have less than two angles.
    angles = []
    if @left and not fromLeft
      @left.decide(force, false, true)
      angles.push @left.getExitAngle()
    if @right and not fromRight
      @right.decide(force, true, false)
      angles.push @right.getEntryAngle() + TAU / 2
    switch angles.length
      when 0
        # No decided neighbours: default to a quarter turn.
        @_midlineAngle = TAU / 4
        @_determined = true
        return
      when 1
        # One neighbour: offset its angle by a quarter turn.
        @_midlineAngle = angles[0] - TAU / 4
        @_determined = true
        return
      when 2
        # Two neighbours: the midline points along the sum of the two
        # unit vectors, i.e. it bisects the neighbour angles.
        points = (geometry.vectorFromAngle(angle).p1 for angle in angles)
        midlineVector = new geometry.Vector(
          { x: 0, y: 0 },
          geometry.pointSum(points)
        )
        @_midlineAngle = midlineVector.angle()
        @_determined = true
        return

  # Control points of the closed bezier loop: start and end at the
  # origin, two control points rotated onto the midline.
  _getControlPoints: ->
    return [
      { x: 0, y: 0 }
      geometry.rotatePoint(
        { x: lWidth / 3, y: - (lWidth / 6) }, @_midlineAngle)
      geometry.rotatePoint(
        { x: lWidth / 3, y: (lWidth / 6) }, @_midlineAngle)
      { x: 0, y: 0 }
    ]

  getBoundingBox: ->
    return boxes.boxFromPoints(@_getControlPoints())

  # BUG FIX: these getters previously read `@_midLineAngle` (capital L),
  # a property that is never assigned — the field set by the constructor
  # and by `decide` is `@_midlineAngle`. The old code therefore returned
  # `undefined`, corrupting neighbours' angle arithmetic in `decide`.
  getEntryAngle: -> @_midlineAngle
  getExitAngle: -> @_midlineAngle

  getFinishPoint: -> { x: 0, y: 0 }

  # Draw the loop as a single cubic bezier from the origin back to it.
  render: (ctx) ->
    cp = @_getControlPoints()
    ctx.beginPath()
    ctx.moveTo(0,0)
    ctx.bezierCurveTo(
      cp[1].x, cp[1].y,
      cp[2].x, cp[2].y,
      cp[3].x, cp[3].y)
    ctx.stroke()
    return
return graphemes
|
[
{
"context": "###\nThe MIT License\n\nCopyright (c) 2015 Juan Cruz Viotti. https://jviotti.github.io.\n\nPermission is hereby",
"end": 56,
"score": 0.9998037815093994,
"start": 40,
"tag": "NAME",
"value": "Juan Cruz Viotti"
}
] | lib/unit.coffee | jviotti/wary | 1 | ###
The MIT License
Copyright (c) 2015 Juan Cruz Viotti. https://jviotti.github.io.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
Promise = require('bluebird')
fs = Promise.promisifyAll(require('fs-extra'))
_ = require('lodash')
utils = require('./utils')
temporal = require('./temporal')
###*
# @summary Run a test case unit
# @function
# @protected
#
# @param {String} name - unit test name
# @param {Object} images - images hash
# @param {Function} action - test action
#
# @example
# unit.run 'a simple test',
# lorem: 'path/to/lorem.txt'
# , (files) ->
# fs.readFileAsync(files.lorem, encoding: 'utf8')
###
exports.run = (name, files, action) ->
utils.promiseMapValues(files, temporal.fromFile).then (temporals) ->
Promise.try ->
action(temporals)
.finally ->
unlink = _.map _.values(temporals), (file) ->
return fs.unlinkAsync(file)
return Promise.settle(unlink)
| 103869 | ###
The MIT License
Copyright (c) 2015 <NAME>. https://jviotti.github.io.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
Promise = require('bluebird')
fs = Promise.promisifyAll(require('fs-extra'))
_ = require('lodash')
utils = require('./utils')
temporal = require('./temporal')
###*
# @summary Run a test case unit
# @function
# @protected
#
# @param {String} name - unit test name
# @param {Object} images - images hash
# @param {Function} action - test action
#
# @example
# unit.run 'a simple test',
# lorem: 'path/to/lorem.txt'
# , (files) ->
# fs.readFileAsync(files.lorem, encoding: 'utf8')
###
exports.run = (name, files, action) ->
utils.promiseMapValues(files, temporal.fromFile).then (temporals) ->
Promise.try ->
action(temporals)
.finally ->
unlink = _.map _.values(temporals), (file) ->
return fs.unlinkAsync(file)
return Promise.settle(unlink)
| true | ###
The MIT License
Copyright (c) 2015 PI:NAME:<NAME>END_PI. https://jviotti.github.io.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
Promise = require('bluebird')
fs = Promise.promisifyAll(require('fs-extra'))
_ = require('lodash')
utils = require('./utils')
temporal = require('./temporal')
###*
# @summary Run a test case unit
# @function
# @protected
#
# @param {String} name - unit test name
# @param {Object} images - images hash
# @param {Function} action - test action
#
# @example
# unit.run 'a simple test',
# lorem: 'path/to/lorem.txt'
# , (files) ->
# fs.readFileAsync(files.lorem, encoding: 'utf8')
###
exports.run = (name, files, action) ->
utils.promiseMapValues(files, temporal.fromFile).then (temporals) ->
Promise.try ->
action(temporals)
.finally ->
unlink = _.map _.values(temporals), (file) ->
return fs.unlinkAsync(file)
return Promise.settle(unlink)
|
[
{
"context": " Test\n constructor: ->\n @obj = { name: \"Nathan\" }\n\n append: ->\n Template(@obj).appendT",
"end": 96,
"score": 0.9996191263198853,
"start": 90,
"tag": "NAME",
"value": "Nathan"
}
] | Source/Stitch.Tests.Web/scripts/app/controllers/test.coffee | nathanpalmer/stitch-aspnet | 0 | Template = require("views/view")
class Test
constructor: ->
@obj = { name: "Nathan" }
append: ->
Template(@obj).appendTo('#content')
module.exports = Test | 3105 | Template = require("views/view")
class Test
constructor: ->
@obj = { name: "<NAME>" }
append: ->
Template(@obj).appendTo('#content')
module.exports = Test | true | Template = require("views/view")
class Test
constructor: ->
@obj = { name: "PI:NAME:<NAME>END_PI" }
append: ->
Template(@obj).appendTo('#content')
module.exports = Test |
[
{
"context": "me', ->\n element(By.model('name')).sendKeys 'William'\n expect(element(By.css('.hello')).getText()",
"end": 203,
"score": 0.999394416809082,
"start": 196,
"tag": "NAME",
"value": "William"
},
{
"context": "lement(By.css('.hello')).getText()).toEqual 'Hello Wi... | test/e2e/index.coffee | githistory/web-widget | 0 | describe 'index page', ->
describe 'hello widget', ->
beforeEach ->
browser.get '/'
it 'should say hello to user with correct name', ->
element(By.model('name')).sendKeys 'William'
expect(element(By.css('.hello')).getText()).toEqual 'Hello William'
| 194298 | describe 'index page', ->
describe 'hello widget', ->
beforeEach ->
browser.get '/'
it 'should say hello to user with correct name', ->
element(By.model('name')).sendKeys '<NAME>'
expect(element(By.css('.hello')).getText()).toEqual 'Hello <NAME>'
| true | describe 'index page', ->
describe 'hello widget', ->
beforeEach ->
browser.get '/'
it 'should say hello to user with correct name', ->
element(By.model('name')).sendKeys 'PI:NAME:<NAME>END_PI'
expect(element(By.css('.hello')).getText()).toEqual 'Hello PI:NAME:<NAME>END_PI'
|
[
{
"context": " new DotLedger.Models.Transaction\n search: 'Some Name'\n amount: '10.00'\n posted_at: '2013-01-",
"end": 526,
"score": 0.8515821695327759,
"start": 517,
"tag": "NAME",
"value": "Some Name"
},
{
"context": "().render()\n expect(view.$el).toContainTe... | spec/javascripts/dot_ledger/views/transactions/table_row_spec.js.coffee | timobleeker/dotledger | 0 | describe "DotLedger.Views.Transactions.TableRow", ->
sortedTransaction = ->
model = createModel()
model.set
sorted_transaction:
category_name: 'Test Category'
name: 'Test Sorted Transaction'
model
sortedForReviewTransaction = ->
model = createModel()
model.set
sorted_transaction:
review: true
category_name: 'Test Category'
name: 'Test Sorted Transaction'
model
createModel = ->
new DotLedger.Models.Transaction
search: 'Some Name'
amount: '10.00'
posted_at: '2013-01-01'
id: 1
createView = (model = createModel()) ->
new DotLedger.Views.Transactions.TableRow
model: model
it "should be defined", ->
expect(DotLedger.Views.Transactions.TableRow).toBeDefined()
it "should use the correct template", ->
expect(DotLedger.Views.Transactions.TableRow).toUseTemplate('transactions/table_row')
it "can be rendered", ->
view = createView()
expect(view.render).not.toThrow()
it "renders the amount", ->
view = createView().render()
expect(view.$el).toContainText('$10.00')
it "renders the posted at date", ->
view = createView().render()
expect(view.$el).toContainElement('time[datetime="2013-01-01"]')
describe "transaction unsorted", ->
it "renders sort button", ->
view = createView().render()
expect(view.$el).toContainElement('a.sort-transaction')
it "renders the search", ->
view = createView().render()
expect(view.$el).toContainText('Some Name')
it "renders unsorted", ->
view = createView().render()
expect(view.$el).toContainText('Unsorted')
describe "transaction flagged for review", ->
it "renders sort button", ->
view = createView(sortedForReviewTransaction()).render()
expect(view.$el).toContainElement('a.sort-transaction')
expect(view.$el.find('a.sort-transaction')).toHaveText('Edit')
it "renders ok button", ->
view = createView(sortedForReviewTransaction()).render()
expect(view.$el).toContainElement('a.review-okay-transaction')
expect(view.$el.find('a.review-okay-transaction')).toHaveText('Ok')
describe "transaction sorted", ->
it "renders sort button", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainElement('a.edit-transaction')
expect(view.$el.find('a.edit-transaction')).toHaveText('Edit')
it "renders category name", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainText('Test Category')
it "renders sorted transaction name", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainText('Test Sorted Transaction')
| 194251 | describe "DotLedger.Views.Transactions.TableRow", ->
sortedTransaction = ->
model = createModel()
model.set
sorted_transaction:
category_name: 'Test Category'
name: 'Test Sorted Transaction'
model
sortedForReviewTransaction = ->
model = createModel()
model.set
sorted_transaction:
review: true
category_name: 'Test Category'
name: 'Test Sorted Transaction'
model
createModel = ->
new DotLedger.Models.Transaction
search: '<NAME>'
amount: '10.00'
posted_at: '2013-01-01'
id: 1
createView = (model = createModel()) ->
new DotLedger.Views.Transactions.TableRow
model: model
it "should be defined", ->
expect(DotLedger.Views.Transactions.TableRow).toBeDefined()
it "should use the correct template", ->
expect(DotLedger.Views.Transactions.TableRow).toUseTemplate('transactions/table_row')
it "can be rendered", ->
view = createView()
expect(view.render).not.toThrow()
it "renders the amount", ->
view = createView().render()
expect(view.$el).toContainText('$10.00')
it "renders the posted at date", ->
view = createView().render()
expect(view.$el).toContainElement('time[datetime="2013-01-01"]')
describe "transaction unsorted", ->
it "renders sort button", ->
view = createView().render()
expect(view.$el).toContainElement('a.sort-transaction')
it "renders the search", ->
view = createView().render()
expect(view.$el).toContainText('<NAME> Name')
it "renders unsorted", ->
view = createView().render()
expect(view.$el).toContainText('Unsorted')
describe "transaction flagged for review", ->
it "renders sort button", ->
view = createView(sortedForReviewTransaction()).render()
expect(view.$el).toContainElement('a.sort-transaction')
expect(view.$el.find('a.sort-transaction')).toHaveText('Edit')
it "renders ok button", ->
view = createView(sortedForReviewTransaction()).render()
expect(view.$el).toContainElement('a.review-okay-transaction')
expect(view.$el.find('a.review-okay-transaction')).toHaveText('Ok')
describe "transaction sorted", ->
it "renders sort button", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainElement('a.edit-transaction')
expect(view.$el.find('a.edit-transaction')).toHaveText('Edit')
it "renders category name", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainText('Test Category')
it "renders sorted transaction name", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainText('Test Sorted Transaction')
| true | describe "DotLedger.Views.Transactions.TableRow", ->
sortedTransaction = ->
model = createModel()
model.set
sorted_transaction:
category_name: 'Test Category'
name: 'Test Sorted Transaction'
model
sortedForReviewTransaction = ->
model = createModel()
model.set
sorted_transaction:
review: true
category_name: 'Test Category'
name: 'Test Sorted Transaction'
model
createModel = ->
new DotLedger.Models.Transaction
search: 'PI:NAME:<NAME>END_PI'
amount: '10.00'
posted_at: '2013-01-01'
id: 1
createView = (model = createModel()) ->
new DotLedger.Views.Transactions.TableRow
model: model
it "should be defined", ->
expect(DotLedger.Views.Transactions.TableRow).toBeDefined()
it "should use the correct template", ->
expect(DotLedger.Views.Transactions.TableRow).toUseTemplate('transactions/table_row')
it "can be rendered", ->
view = createView()
expect(view.render).not.toThrow()
it "renders the amount", ->
view = createView().render()
expect(view.$el).toContainText('$10.00')
it "renders the posted at date", ->
view = createView().render()
expect(view.$el).toContainElement('time[datetime="2013-01-01"]')
describe "transaction unsorted", ->
it "renders sort button", ->
view = createView().render()
expect(view.$el).toContainElement('a.sort-transaction')
it "renders the search", ->
view = createView().render()
expect(view.$el).toContainText('PI:NAME:<NAME>END_PI Name')
it "renders unsorted", ->
view = createView().render()
expect(view.$el).toContainText('Unsorted')
describe "transaction flagged for review", ->
it "renders sort button", ->
view = createView(sortedForReviewTransaction()).render()
expect(view.$el).toContainElement('a.sort-transaction')
expect(view.$el.find('a.sort-transaction')).toHaveText('Edit')
it "renders ok button", ->
view = createView(sortedForReviewTransaction()).render()
expect(view.$el).toContainElement('a.review-okay-transaction')
expect(view.$el.find('a.review-okay-transaction')).toHaveText('Ok')
describe "transaction sorted", ->
it "renders sort button", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainElement('a.edit-transaction')
expect(view.$el.find('a.edit-transaction')).toHaveText('Edit')
it "renders category name", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainText('Test Category')
it "renders sorted transaction name", ->
view = createView(sortedTransaction()).render()
expect(view.$el).toContainText('Test Sorted Transaction')
|
[
{
"context": "###*\n@author Mat Groves http://matgroves.com/ @Doormat23\n###\n\ndefine 'Cof",
"end": 23,
"score": 0.9998841285705566,
"start": 13,
"tag": "NAME",
"value": "Mat Groves"
},
{
"context": "###*\n@author Mat Groves http://matgroves.com/ @Doormat23\n###\n\ndefine 'Coffixi/t... | src/Coffixi/text/Text.coffee | namuol/Coffixi | 1 | ###*
@author Mat Groves http://matgroves.com/ @Doormat23
###
define 'Coffixi/text/Text', [
'Coffixi/display/Sprite'
'Coffixi/textures/Texture'
'Coffixi/textures/BaseTexture'
'Coffixi/core/Point'
], (
Sprite
Texture
BaseTexture
Point
) ->
# LOU TODO: Abstract out all the canvas rendering stuff. Native implementation will use SDL_ttf.
###*
A Text Object will create a line(s) of text to split a line you can use "\n"
@class Text
@extends Sprite
@constructor
@param text {String} The copy that you would like the text to display
@param [style] {Object} The style parameters
@param [style.font] {String} default "bold 20pt Arial" The style and size of the font
@param [style.fill="black"] {Object} A canvas fillstyle that will be used on the text eg "red", "#00FF00"
@param [style.align="left"] {String} An alignment of the multiline text ("left", "center" or "right")
@param [style.stroke] {String} A canvas fillstyle that will be used on the text stroke eg "blue", "#FCFF00"
@param [style.strokeThickness=0] {Number} A number that represents the thickness of the stroke. Default is 0 (no stroke)
@param [style.wordWrap=false] {Boolean} Indicates if word wrap should be used
@param [style.wordWrapWidth=100] {Number} The width at which text will wrap
###
class Text extends Sprite
@heightCache: {}
constructor: (text, style) ->
canvas = document.createElement("canvas")
canvas.getContext("2d")
super Texture.fromCanvas(canvas)
@canvas = @texture.baseTexture.source
@context = @texture.baseTexture._ctx
@setText text
@setStyle style
@updateText()
@dirty = false
###*
Set the style of the text
@method setStyle
@param [style] {Object} The style parameters
@param [style.font="bold 20pt Arial"] {String} The style and size of the font
@param [style.fill="black"] {Object} A canvas fillstyle that will be used on the text eg "red", "#00FF00"
@param [style.align="left"] {String} An alignment of the multiline text ("left", "center" or "right")
@param [style.stroke="black"] {String} A canvas fillstyle that will be used on the text stroke eg "blue", "#FCFF00"
@param [style.strokeThickness=0] {Number} A number that represents the thickness of the stroke. Default is 0 (no stroke)
@param [style.wordWrap=false] {Boolean} Indicates if word wrap should be used
@param [style.wordWrapWidth=100] {Number} The width at which text will wrap
###
setStyle: (style) ->
style = style or {}
style.font = style.font or "bold 20pt Arial"
style.fill = style.fill or "black"
style.align = style.align or "left"
style.stroke = style.stroke or "black" #provide a default, see: https://github.com/GoodBoyDigital/pixi.js/issues/136
style.strokeThickness = style.strokeThickness or 0
style.wordWrap = style.wordWrap or false
style.wordWrapWidth = style.wordWrapWidth or 100
@style = style
@dirty = true
###*
Set the copy for the text object. To split a line you can use "\n"
@methos setText
@param {String} text The copy that you would like the text to display
###
setText: (text) ->
@text = text.toString() or " "
@dirty = true
###*
Renders text
@method updateText
@private
###
updateText: ->
@context.font = @style.font
outputText = @text
# word wrap
# preserve original text
outputText = @wordWrap(@text) if @style.wordWrap
#split text into lines
lines = outputText.split(/(?:\r\n|\r|\n)/)
#calculate text width
lineWidths = []
maxLineWidth = 0
i = 0
while i < lines.length
lineWidth = @context.measureText(lines[i]).width
lineWidths[i] = lineWidth
maxLineWidth = Math.max(maxLineWidth, lineWidth)
i++
@width = @canvas.width = maxLineWidth + @style.strokeThickness
#calculate text height
lineHeight = @determineFontHeight("font: " + @style.font + ";") + @style.strokeThickness
@height = @canvas.height = lineHeight * lines.length
#set canvas text styles
@context.fillStyle = @style.fill
@context.font = @style.font
@context.strokeStyle = @style.stroke
@context.lineWidth = @style.strokeThickness
@context.textBaseline = "top"
#draw lines line by line
i = 0
while i < lines.length
linePosition = new Point(@style.strokeThickness / 2, @style.strokeThickness / 2 + i * lineHeight)
if @style.align is "right"
linePosition.x += maxLineWidth - lineWidths[i]
else linePosition.x += (maxLineWidth - lineWidths[i]) / 2 if @style.align is "center"
@context.strokeText lines[i], linePosition.x, linePosition.y if @style.stroke and @style.strokeThickness
@context.fillText lines[i], linePosition.x, linePosition.y if @style.fill
i++
@updateTexture()
###*
Updates texture size based on canvas size
@method updateTexture
@private
###
updateTexture: ->
@texture.baseTexture.width = @canvas.width
@texture.baseTexture.height = @canvas.height
@texture.frame.width = @canvas.width
@texture.frame.height = @canvas.height
@width = @canvas.width
@height = @canvas.height
BaseTexture.texturesToUpdate.push @texture.baseTexture
###*
Updates the transfor of this object
@method updateTransform
@private
###
updateTransform: ->
if @dirty
@updateText()
@dirty = false
super
###*
http://stackoverflow.com/users/34441/ellisbben
great solution to the problem!
@method determineFontHeight
@param fontStyle {Object}
@private
###
determineFontHeight: (fontStyle) ->
# build a little reference dictionary so if the font style has been used return a
# cached version...
result = Text.heightCache[fontStyle]
unless result
body = document.getElementsByTagName("body")[0]
dummy = document.createElement("div")
dummyText = document.createTextNode("M")
dummy.appendChild dummyText
dummy.setAttribute "style", fontStyle + ";position:absolute;top:0;left:0"
body.appendChild dummy
result = dummy.offsetHeight
Text.heightCache[fontStyle] = result
body.removeChild dummy
result
###*
A Text Object will apply wordwrap
@method wordWrap
@param text {String}
@private
###
wordWrap: (text) ->
# search good wrap position
searchWrapPos = (ctx, text, start, end, wrapWidth) ->
p = Math.floor((end - start) / 2) + start
return 1 if p is start
if ctx.measureText(text.substring(0, p)).width <= wrapWidth
if ctx.measureText(text.substring(0, p + 1)).width > wrapWidth
p
else
arguments.callee ctx, text, p, end, wrapWidth
else
arguments.callee ctx, text, start, p, wrapWidth
lineWrap = (ctx, text, wrapWidth) ->
return text if ctx.measureText(text).width <= wrapWidth or text.length < 1
pos = searchWrapPos(ctx, text, 0, text.length, wrapWidth)
text.substring(0, pos) + "\n" + arguments.callee(ctx, text.substring(pos), wrapWidth)
result = ""
lines = text.split("\n")
i = 0
while i < lines.length
result += lineWrap(@context, lines[i], @style.wordWrapWidth) + "\n"
i++
result
###*
Destroys this text object
@method destroy
@param destroyTexture {Boolean}
###
destroy: (destroyTexture) ->
@texture.destroy() if destroyTexture
| 116790 | ###*
@author <NAME> http://matgroves.com/ @Doormat23
###
define 'Coffixi/text/Text', [
'Coffixi/display/Sprite'
'Coffixi/textures/Texture'
'Coffixi/textures/BaseTexture'
'Coffixi/core/Point'
], (
Sprite
Texture
BaseTexture
Point
) ->
# LOU TODO: Abstract out all the canvas rendering stuff. Native implementation will use SDL_ttf.
###*
A Text Object will create a line(s) of text to split a line you can use "\n"
@class Text
@extends Sprite
@constructor
@param text {String} The copy that you would like the text to display
@param [style] {Object} The style parameters
@param [style.font] {String} default "bold 20pt Arial" The style and size of the font
@param [style.fill="black"] {Object} A canvas fillstyle that will be used on the text eg "red", "#00FF00"
@param [style.align="left"] {String} An alignment of the multiline text ("left", "center" or "right")
@param [style.stroke] {String} A canvas fillstyle that will be used on the text stroke eg "blue", "#FCFF00"
@param [style.strokeThickness=0] {Number} A number that represents the thickness of the stroke. Default is 0 (no stroke)
@param [style.wordWrap=false] {Boolean} Indicates if word wrap should be used
@param [style.wordWrapWidth=100] {Number} The width at which text will wrap
###
class Text extends Sprite
@heightCache: {}
constructor: (text, style) ->
canvas = document.createElement("canvas")
canvas.getContext("2d")
super Texture.fromCanvas(canvas)
@canvas = @texture.baseTexture.source
@context = @texture.baseTexture._ctx
@setText text
@setStyle style
@updateText()
@dirty = false
###*
Set the style of the text
@method setStyle
@param [style] {Object} The style parameters
@param [style.font="bold 20pt Arial"] {String} The style and size of the font
@param [style.fill="black"] {Object} A canvas fillstyle that will be used on the text eg "red", "#00FF00"
@param [style.align="left"] {String} An alignment of the multiline text ("left", "center" or "right")
@param [style.stroke="black"] {String} A canvas fillstyle that will be used on the text stroke eg "blue", "#FCFF00"
@param [style.strokeThickness=0] {Number} A number that represents the thickness of the stroke. Default is 0 (no stroke)
@param [style.wordWrap=false] {Boolean} Indicates if word wrap should be used
@param [style.wordWrapWidth=100] {Number} The width at which text will wrap
###
setStyle: (style) ->
style = style or {}
style.font = style.font or "bold 20pt Arial"
style.fill = style.fill or "black"
style.align = style.align or "left"
style.stroke = style.stroke or "black" #provide a default, see: https://github.com/GoodBoyDigital/pixi.js/issues/136
style.strokeThickness = style.strokeThickness or 0
style.wordWrap = style.wordWrap or false
style.wordWrapWidth = style.wordWrapWidth or 100
@style = style
@dirty = true
###*
Set the copy for the text object. To split a line you can use "\n"
@methos setText
@param {String} text The copy that you would like the text to display
###
setText: (text) ->
@text = text.toString() or " "
@dirty = true
###*
Renders text
@method updateText
@private
###
updateText: ->
@context.font = @style.font
outputText = @text
# word wrap
# preserve original text
outputText = @wordWrap(@text) if @style.wordWrap
#split text into lines
lines = outputText.split(/(?:\r\n|\r|\n)/)
#calculate text width
lineWidths = []
maxLineWidth = 0
i = 0
while i < lines.length
lineWidth = @context.measureText(lines[i]).width
lineWidths[i] = lineWidth
maxLineWidth = Math.max(maxLineWidth, lineWidth)
i++
@width = @canvas.width = maxLineWidth + @style.strokeThickness
#calculate text height
lineHeight = @determineFontHeight("font: " + @style.font + ";") + @style.strokeThickness
@height = @canvas.height = lineHeight * lines.length
#set canvas text styles
@context.fillStyle = @style.fill
@context.font = @style.font
@context.strokeStyle = @style.stroke
@context.lineWidth = @style.strokeThickness
@context.textBaseline = "top"
#draw lines line by line
i = 0
while i < lines.length
linePosition = new Point(@style.strokeThickness / 2, @style.strokeThickness / 2 + i * lineHeight)
if @style.align is "right"
linePosition.x += maxLineWidth - lineWidths[i]
else linePosition.x += (maxLineWidth - lineWidths[i]) / 2 if @style.align is "center"
@context.strokeText lines[i], linePosition.x, linePosition.y if @style.stroke and @style.strokeThickness
@context.fillText lines[i], linePosition.x, linePosition.y if @style.fill
i++
@updateTexture()
###*
Updates texture size based on canvas size
@method updateTexture
@private
###
updateTexture: ->
@texture.baseTexture.width = @canvas.width
@texture.baseTexture.height = @canvas.height
@texture.frame.width = @canvas.width
@texture.frame.height = @canvas.height
@width = @canvas.width
@height = @canvas.height
BaseTexture.texturesToUpdate.push @texture.baseTexture
###*
Updates the transfor of this object
@method updateTransform
@private
###
updateTransform: ->
if @dirty
@updateText()
@dirty = false
super
###*
http://stackoverflow.com/users/34441/ellisbben
great solution to the problem!
@method determineFontHeight
@param fontStyle {Object}
@private
###
determineFontHeight: (fontStyle) ->
# build a little reference dictionary so if the font style has been used return a
# cached version...
result = Text.heightCache[fontStyle]
unless result
body = document.getElementsByTagName("body")[0]
dummy = document.createElement("div")
dummyText = document.createTextNode("M")
dummy.appendChild dummyText
dummy.setAttribute "style", fontStyle + ";position:absolute;top:0;left:0"
body.appendChild dummy
result = dummy.offsetHeight
Text.heightCache[fontStyle] = result
body.removeChild dummy
result
###*
A Text Object will apply wordwrap
@method wordWrap
@param text {String}
@private
###
wordWrap: (text) ->
# search good wrap position
searchWrapPos = (ctx, text, start, end, wrapWidth) ->
p = Math.floor((end - start) / 2) + start
return 1 if p is start
if ctx.measureText(text.substring(0, p)).width <= wrapWidth
if ctx.measureText(text.substring(0, p + 1)).width > wrapWidth
p
else
arguments.callee ctx, text, p, end, wrapWidth
else
arguments.callee ctx, text, start, p, wrapWidth
lineWrap = (ctx, text, wrapWidth) ->
return text if ctx.measureText(text).width <= wrapWidth or text.length < 1
pos = searchWrapPos(ctx, text, 0, text.length, wrapWidth)
text.substring(0, pos) + "\n" + arguments.callee(ctx, text.substring(pos), wrapWidth)
result = ""
lines = text.split("\n")
i = 0
while i < lines.length
result += lineWrap(@context, lines[i], @style.wordWrapWidth) + "\n"
i++
result
###*
Destroys this text object
@method destroy
@param destroyTexture {Boolean}
###
destroy: (destroyTexture) ->
@texture.destroy() if destroyTexture
| true | ###*
@author PI:NAME:<NAME>END_PI http://matgroves.com/ @Doormat23
###
define 'Coffixi/text/Text', [
'Coffixi/display/Sprite'
'Coffixi/textures/Texture'
'Coffixi/textures/BaseTexture'
'Coffixi/core/Point'
], (
Sprite
Texture
BaseTexture
Point
) ->
# LOU TODO: Abstract out all the canvas rendering stuff. Native implementation will use SDL_ttf.
###*
A Text Object will create a line(s) of text to split a line you can use "\n"
@class Text
@extends Sprite
@constructor
@param text {String} The copy that you would like the text to display
@param [style] {Object} The style parameters
@param [style.font] {String} default "bold 20pt Arial" The style and size of the font
@param [style.fill="black"] {Object} A canvas fillstyle that will be used on the text eg "red", "#00FF00"
@param [style.align="left"] {String} An alignment of the multiline text ("left", "center" or "right")
@param [style.stroke] {String} A canvas fillstyle that will be used on the text stroke eg "blue", "#FCFF00"
@param [style.strokeThickness=0] {Number} A number that represents the thickness of the stroke. Default is 0 (no stroke)
@param [style.wordWrap=false] {Boolean} Indicates if word wrap should be used
@param [style.wordWrapWidth=100] {Number} The width at which text will wrap
###
class Text extends Sprite
  # Cache of measured line heights keyed by font style string, so the
  # DOM-based measurement in determineFontHeight runs once per font.
  @heightCache: {}

  constructor: (text, style) ->
    canvas = document.createElement("canvas")
    canvas.getContext("2d")
    super Texture.fromCanvas(canvas)
    @canvas = @texture.baseTexture.source
    @context = @texture.baseTexture._ctx
    @setText text
    @setStyle style
    @updateText()
    @dirty = false

  ###*
  Set the style of the text
  @method setStyle
  @param [style] {Object} The style parameters
  @param [style.font="bold 20pt Arial"] {String} The style and size of the font
  @param [style.fill="black"] {Object} A canvas fillstyle that will be used on the text eg "red", "#00FF00"
  @param [style.align="left"] {String} An alignment of the multiline text ("left", "center" or "right")
  @param [style.stroke="black"] {String} A canvas fillstyle that will be used on the text stroke eg "blue", "#FCFF00"
  @param [style.strokeThickness=0] {Number} A number that represents the thickness of the stroke. Default is 0 (no stroke)
  @param [style.wordWrap=false] {Boolean} Indicates if word wrap should be used
  @param [style.wordWrapWidth=100] {Number} The width at which text will wrap
  ###
  setStyle: (style) ->
    style = style or {}
    style.font = style.font or "bold 20pt Arial"
    style.fill = style.fill or "black"
    style.align = style.align or "left"
    style.stroke = style.stroke or "black" # provide a default, see: https://github.com/GoodBoyDigital/pixi.js/issues/136
    style.strokeThickness = style.strokeThickness or 0
    style.wordWrap = style.wordWrap or false
    style.wordWrapWidth = style.wordWrapWidth or 100
    @style = style
    @dirty = true

  ###*
  Set the copy for the text object. To split a line you can use "\n"
  @method setText
  @param {String} text The copy that you would like the text to display
  ###
  setText: (text) ->
    @text = text.toString() or " "
    @dirty = true

  ###*
  Renders the text onto the backing canvas and syncs the texture
  @method updateText
  @private
  ###
  updateText: ->
    @context.font = @style.font
    outputText = @text
    # word wrap: preserve the original text and wrap a copy
    outputText = @wordWrap(@text) if @style.wordWrap
    # split text into lines
    lines = outputText.split(/(?:\r\n|\r|\n)/)
    # calculate the width of each line and track the widest
    lineWidths = []
    maxLineWidth = 0
    i = 0
    while i < lines.length
      lineWidth = @context.measureText(lines[i]).width
      lineWidths[i] = lineWidth
      maxLineWidth = Math.max(maxLineWidth, lineWidth)
      i++
    @width = @canvas.width = maxLineWidth + @style.strokeThickness
    # calculate text height from the measured font height plus the stroke
    lineHeight = @determineFontHeight("font: " + @style.font + ";") + @style.strokeThickness
    @height = @canvas.height = lineHeight * lines.length
    # set canvas text styles (resizing the canvas above resets its state)
    @context.fillStyle = @style.fill
    @context.font = @style.font
    @context.strokeStyle = @style.stroke
    @context.lineWidth = @style.strokeThickness
    @context.textBaseline = "top"
    # draw the text line by line, honouring the requested alignment
    i = 0
    while i < lines.length
      linePosition = new Point(@style.strokeThickness / 2, @style.strokeThickness / 2 + i * lineHeight)
      if @style.align is "right"
        linePosition.x += maxLineWidth - lineWidths[i]
      else linePosition.x += (maxLineWidth - lineWidths[i]) / 2 if @style.align is "center"
      @context.strokeText lines[i], linePosition.x, linePosition.y if @style.stroke and @style.strokeThickness
      @context.fillText lines[i], linePosition.x, linePosition.y if @style.fill
      i++
    @updateTexture()

  ###*
  Updates texture size based on canvas size
  @method updateTexture
  @private
  ###
  updateTexture: ->
    @texture.baseTexture.width = @canvas.width
    @texture.baseTexture.height = @canvas.height
    @texture.frame.width = @canvas.width
    @texture.frame.height = @canvas.height
    @width = @canvas.width
    @height = @canvas.height
    BaseTexture.texturesToUpdate.push @texture.baseTexture

  ###*
  Updates the transform of this object, re-rendering the text first when dirty
  @method updateTransform
  @private
  ###
  updateTransform: ->
    if @dirty
      @updateText()
      @dirty = false
    super

  ###*
  Measures the height of a font by appending a hidden "M" to the document body.
  http://stackoverflow.com/users/34441/ellisbben
  great solution to the problem!
  @method determineFontHeight
  @param fontStyle {Object}
  @private
  ###
  determineFontHeight: (fontStyle) ->
    # return the cached height if this font style has been measured before
    result = Text.heightCache[fontStyle]
    unless result
      body = document.getElementsByTagName("body")[0]
      dummy = document.createElement("div")
      dummyText = document.createTextNode("M")
      dummy.appendChild dummyText
      dummy.setAttribute "style", fontStyle + ";position:absolute;top:0;left:0"
      body.appendChild dummy
      result = dummy.offsetHeight
      Text.heightCache[fontStyle] = result
      body.removeChild dummy
    result

  ###*
  Applies word wrap to the given text, inserting "\n" wherever a line
  would exceed the configured wordWrapWidth
  @method wordWrap
  @param text {String}
  @private
  ###
  wordWrap: (text) ->
    # Binary-search for the largest prefix of `text` that fits in wrapWidth.
    # Both helpers recurse by their own names instead of the deprecated
    # `arguments.callee`, which throws a TypeError in strict mode.
    searchWrapPos = (ctx, text, start, end, wrapWidth) ->
      p = Math.floor((end - start) / 2) + start
      return 1 if p is start
      if ctx.measureText(text.substring(0, p)).width <= wrapWidth
        if ctx.measureText(text.substring(0, p + 1)).width > wrapWidth
          p
        else
          searchWrapPos ctx, text, p, end, wrapWidth
      else
        searchWrapPos ctx, text, start, p, wrapWidth
    # wrap a single line, breaking it repeatedly until every piece fits
    lineWrap = (ctx, text, wrapWidth) ->
      return text if ctx.measureText(text).width <= wrapWidth or text.length < 1
      pos = searchWrapPos(ctx, text, 0, text.length, wrapWidth)
      text.substring(0, pos) + "\n" + lineWrap(ctx, text.substring(pos), wrapWidth)
    result = ""
    lines = text.split("\n")
    i = 0
    while i < lines.length
      result += lineWrap(@context, lines[i], @style.wordWrapWidth) + "\n"
      i++
    result

  ###*
  Destroys this text object
  @method destroy
  @param destroyTexture {Boolean} Whether the underlying texture should also be destroyed
  ###
  destroy: (destroyTexture) ->
    @texture.destroy() if destroyTexture
|
[
{
"context": "###*\n# \n# @date 2016-02-02 17:44:28\n# @author vfasky <vfasky@gmail.com>\n# @link http://vfasky.com\n###\n",
"end": 52,
"score": 0.9984913468360901,
"start": 46,
"tag": "USERNAME",
"value": "vfasky"
},
{
"context": "\n# \n# @date 2016-02-02 17:44:28\n# @author vfasky ... | test/test.coffee | vfasky/h2svd-loader | 0 | ###*
#
# @date 2016-02-02 17:44:28
# @author vfasky <vfasky@gmail.com>
# @link http://vfasky.com
###
'use strict'
h2v = require '../h2v'
fs = require 'fs'
t1 = fs.readFileSync './tpl/t2.html', 'utf8'
fs.writeFileSync './tpl/t1.js', h2v(t1), 'utf8'
| 166530 | ###*
#
# @date 2016-02-02 17:44:28
# @author vfasky <<EMAIL>>
# @link http://vfasky.com
###
'use strict'
h2v = require '../h2v'
fs = require 'fs'
t1 = fs.readFileSync './tpl/t2.html', 'utf8'
fs.writeFileSync './tpl/t1.js', h2v(t1), 'utf8'
| true | ###*
#
# @date 2016-02-02 17:44:28
# @author vfasky <PI:EMAIL:<EMAIL>END_PI>
# @link http://vfasky.com
###
'use strict'
h2v = require '../h2v'
fs = require 'fs'
t1 = fs.readFileSync './tpl/t2.html', 'utf8'
fs.writeFileSync './tpl/t1.js', h2v(t1), 'utf8'
|
[
{
"context": "pt arrays and objects\"\n\t\t\t,\n\t\t\t\tid: 2\n\t\t\t\ttitle: \"Joe\"\n\t\t\t\ttags: [\"testing\", \"node.js\"]\n\t\t\t\tdescription",
"end": 2480,
"score": 0.7120204567909241,
"start": 2477,
"tag": "NAME",
"value": "Joe"
}
] | src/demo/visual-search.coffee | ADstruc/query-engine | 83 | # Fetch the globals
CoffeeScript = window.CoffeeScript
queryEngine = window.queryEngine
Js2coffee = window.Js2coffee
# Prepare the editors glboal
editors = window.editors = {}
# Load in the coffescript Ace editor mode
coffeeMode = require('ace/mode/coffee').Mode
coffeeModeInstance = new coffeeMode()
# Set pad widths to half of the screen
$(window)
.resize ->
padWidth = $(window).width()/2 - 20
padHeight = $(window).height() - $('.header:first').height() - 80
$('.pad,.editor').width(padWidth).height(padHeight)
.trigger('resize')
# Disable backspace redirect as it happens often
$(document).keydown (e) ->
isInput = $(document.activeElement).is(':input')
e.preventDefault() if e.keyCode is 8 and not isInput
# Create our two code editors
for key in ['code','result']
# Create our editor
editor = ace.edit(key)
# Apply settings
editor.setTheme 'ace/theme/textmate'
editor.setShowPrintMargin(false)
editor.getSession().setMode(coffeeModeInstance)
editor.setHighlightActiveLine(true)
editor.getSession().setTabSize(4)
editor.getSession().setUseSoftTabs(false)
# Assign to the global
editors[key] = editor
# Run our code snippet and output the result
# We wrap in a try, as perhaps they have invalid syntax - in which case, we want to output the error to result instead
codeChanged = ->
try
codeCoffeeScript = editors.code.getSession().getValue()
codeJavaScript = CoffeeScript.compile(codeCoffeeScript)
collection = eval(codeJavaScript)
window.updateResults(collection)
catch err
errMessage = err.stack.toString()
console.log(errMessage)
editors.result.getSession().setValue(errMessage)
window.updateResults = (collection) ->
resultArray = collection?.toJSON()
resultJavaScript = JSON.stringify(resultArray)
resultCoffee = Js2coffee.build("var result = #{resultJavaScript}")
editors.result.getSession().setValue(resultCoffee)
# Bind our change event to the code input
editors.code.getSession().on('change', codeChanged)
# Set the example code value
editors.code.getSession().setValue """
# Create our project collection from an array of models
# and set several pills that we can use for searching
projectCollection = window.queryEngine.createLiveCollection([
id: 1
title: "Query Engine"
tags: ["backbone", "node.js"]
description: "Query-Engine provides extensive Querying, Filtering, and Searching abilities for Backbone.js Collections as well as JavaScript arrays and objects"
,
id: 2
title: "Joe"
tags: ["testing", "node.js"]
description: "Node.js asynchronous testing framework, runner and reporter"
])
projectSearchCollection = projectCollection.createLiveChildCollection()
.setPill('id', {
prefixes: ['id:']
callback: (model,value) ->
pass = model.get('id') is parseInt(value,10)
return pass
})
.setPill('tag', {
logicalOperator: 'AND'
prefixes: ['tag:']
callback: (model,value) ->
pass = value in model.get('tags')
return pass
})
.setPill('title', {
prefixes: ['title:']
callback: (model,value) ->
pass = model.get('title') is value
return pass
})
.setFilter('search', (model,searchString) ->
return true unless searchString?
searchRegex = queryEngine.createSafeRegex(searchString)
pass = searchRegex.test(model.get('description'))
return pass
)
.query()
# Setup Visual Search
$searchbar = $('#searchbar').empty()
$visualsearch = $('<div>').appendTo($searchbar)
visualsearch = window.VS.init({
container: $visualsearch
callbacks:
search: (searchString, searchCollection) ->
searchString = ""
searchCollection.forEach (pill) ->
category = pill.get("category")
value = pill.get("value")
if category isnt "text"
searchString += " " + category + ":\\"" + value + "\\""
else
searchString += " " + value
window.updateResults projectSearchCollection.setSearchString(searchString).query()
facetMatches: (callback) ->
pills = projectSearchCollection.getPills()
pillNames = _.keys(pills)
callback(pillNames)
valueMatches: (facet, searchTerm, callback) ->
switch facet
when 'id'
ids = []
ids.push(String(model.id)) for model in projectCollection.models
callback(ids)
when 'tag'
callback _.uniq _.flatten projectCollection.pluck('tags')
when 'title'
callback projectCollection.pluck('title')
})
visualsearch.searchBox.value('tag:"node.js"');
# Return our project collection
return projectSearchCollection
""" | 195481 | # Fetch the globals
CoffeeScript = window.CoffeeScript
queryEngine = window.queryEngine
Js2coffee = window.Js2coffee
# Prepare the editors glboal
editors = window.editors = {}
# Load in the coffescript Ace editor mode
coffeeMode = require('ace/mode/coffee').Mode
coffeeModeInstance = new coffeeMode()
# Set pad widths to half of the screen
$(window)
.resize ->
padWidth = $(window).width()/2 - 20
padHeight = $(window).height() - $('.header:first').height() - 80
$('.pad,.editor').width(padWidth).height(padHeight)
.trigger('resize')
# Disable backspace redirect as it happens often
$(document).keydown (e) ->
isInput = $(document.activeElement).is(':input')
e.preventDefault() if e.keyCode is 8 and not isInput
# Create our two code editors
for key in ['code','result']
# Create our editor
editor = ace.edit(key)
# Apply settings
editor.setTheme 'ace/theme/textmate'
editor.setShowPrintMargin(false)
editor.getSession().setMode(coffeeModeInstance)
editor.setHighlightActiveLine(true)
editor.getSession().setTabSize(4)
editor.getSession().setUseSoftTabs(false)
# Assign to the global
editors[key] = editor
# Run our code snippet and output the result
# We wrap in a try, as perhaps they have invalid syntax - in which case, we want to output the error to result instead
codeChanged = ->
try
codeCoffeeScript = editors.code.getSession().getValue()
codeJavaScript = CoffeeScript.compile(codeCoffeeScript)
collection = eval(codeJavaScript)
window.updateResults(collection)
catch err
errMessage = err.stack.toString()
console.log(errMessage)
editors.result.getSession().setValue(errMessage)
window.updateResults = (collection) ->
resultArray = collection?.toJSON()
resultJavaScript = JSON.stringify(resultArray)
resultCoffee = Js2coffee.build("var result = #{resultJavaScript}")
editors.result.getSession().setValue(resultCoffee)
# Bind our change event to the code input
editors.code.getSession().on('change', codeChanged)
# Set the example code value
editors.code.getSession().setValue """
# Create our project collection from an array of models
# and set several pills that we can use for searching
projectCollection = window.queryEngine.createLiveCollection([
id: 1
title: "Query Engine"
tags: ["backbone", "node.js"]
description: "Query-Engine provides extensive Querying, Filtering, and Searching abilities for Backbone.js Collections as well as JavaScript arrays and objects"
,
id: 2
title: "<NAME>"
tags: ["testing", "node.js"]
description: "Node.js asynchronous testing framework, runner and reporter"
])
projectSearchCollection = projectCollection.createLiveChildCollection()
.setPill('id', {
prefixes: ['id:']
callback: (model,value) ->
pass = model.get('id') is parseInt(value,10)
return pass
})
.setPill('tag', {
logicalOperator: 'AND'
prefixes: ['tag:']
callback: (model,value) ->
pass = value in model.get('tags')
return pass
})
.setPill('title', {
prefixes: ['title:']
callback: (model,value) ->
pass = model.get('title') is value
return pass
})
.setFilter('search', (model,searchString) ->
return true unless searchString?
searchRegex = queryEngine.createSafeRegex(searchString)
pass = searchRegex.test(model.get('description'))
return pass
)
.query()
# Setup Visual Search
$searchbar = $('#searchbar').empty()
$visualsearch = $('<div>').appendTo($searchbar)
visualsearch = window.VS.init({
container: $visualsearch
callbacks:
search: (searchString, searchCollection) ->
searchString = ""
searchCollection.forEach (pill) ->
category = pill.get("category")
value = pill.get("value")
if category isnt "text"
searchString += " " + category + ":\\"" + value + "\\""
else
searchString += " " + value
window.updateResults projectSearchCollection.setSearchString(searchString).query()
facetMatches: (callback) ->
pills = projectSearchCollection.getPills()
pillNames = _.keys(pills)
callback(pillNames)
valueMatches: (facet, searchTerm, callback) ->
switch facet
when 'id'
ids = []
ids.push(String(model.id)) for model in projectCollection.models
callback(ids)
when 'tag'
callback _.uniq _.flatten projectCollection.pluck('tags')
when 'title'
callback projectCollection.pluck('title')
})
visualsearch.searchBox.value('tag:"node.js"');
# Return our project collection
return projectSearchCollection
""" | true | # Fetch the globals
CoffeeScript = window.CoffeeScript
queryEngine = window.queryEngine
Js2coffee = window.Js2coffee
# Prepare the editors glboal
editors = window.editors = {}
# Load in the coffescript Ace editor mode
coffeeMode = require('ace/mode/coffee').Mode
coffeeModeInstance = new coffeeMode()
# Set pad widths to half of the screen
$(window)
.resize ->
padWidth = $(window).width()/2 - 20
padHeight = $(window).height() - $('.header:first').height() - 80
$('.pad,.editor').width(padWidth).height(padHeight)
.trigger('resize')
# Disable backspace redirect as it happens often
$(document).keydown (e) ->
isInput = $(document.activeElement).is(':input')
e.preventDefault() if e.keyCode is 8 and not isInput
# Create our two code editors
for key in ['code','result']
# Create our editor
editor = ace.edit(key)
# Apply settings
editor.setTheme 'ace/theme/textmate'
editor.setShowPrintMargin(false)
editor.getSession().setMode(coffeeModeInstance)
editor.setHighlightActiveLine(true)
editor.getSession().setTabSize(4)
editor.getSession().setUseSoftTabs(false)
# Assign to the global
editors[key] = editor
# Run our code snippet and output the result
# We wrap in a try, as perhaps they have invalid syntax - in which case, we want to output the error to result instead
codeChanged = ->
try
codeCoffeeScript = editors.code.getSession().getValue()
codeJavaScript = CoffeeScript.compile(codeCoffeeScript)
collection = eval(codeJavaScript)
window.updateResults(collection)
catch err
errMessage = err.stack.toString()
console.log(errMessage)
editors.result.getSession().setValue(errMessage)
window.updateResults = (collection) ->
resultArray = collection?.toJSON()
resultJavaScript = JSON.stringify(resultArray)
resultCoffee = Js2coffee.build("var result = #{resultJavaScript}")
editors.result.getSession().setValue(resultCoffee)
# Bind our change event to the code input
editors.code.getSession().on('change', codeChanged)
# Set the example code value
editors.code.getSession().setValue """
# Create our project collection from an array of models
# and set several pills that we can use for searching
projectCollection = window.queryEngine.createLiveCollection([
id: 1
title: "Query Engine"
tags: ["backbone", "node.js"]
description: "Query-Engine provides extensive Querying, Filtering, and Searching abilities for Backbone.js Collections as well as JavaScript arrays and objects"
,
id: 2
title: "PI:NAME:<NAME>END_PI"
tags: ["testing", "node.js"]
description: "Node.js asynchronous testing framework, runner and reporter"
])
projectSearchCollection = projectCollection.createLiveChildCollection()
.setPill('id', {
prefixes: ['id:']
callback: (model,value) ->
pass = model.get('id') is parseInt(value,10)
return pass
})
.setPill('tag', {
logicalOperator: 'AND'
prefixes: ['tag:']
callback: (model,value) ->
pass = value in model.get('tags')
return pass
})
.setPill('title', {
prefixes: ['title:']
callback: (model,value) ->
pass = model.get('title') is value
return pass
})
.setFilter('search', (model,searchString) ->
return true unless searchString?
searchRegex = queryEngine.createSafeRegex(searchString)
pass = searchRegex.test(model.get('description'))
return pass
)
.query()
# Setup Visual Search
$searchbar = $('#searchbar').empty()
$visualsearch = $('<div>').appendTo($searchbar)
visualsearch = window.VS.init({
container: $visualsearch
callbacks:
search: (searchString, searchCollection) ->
searchString = ""
searchCollection.forEach (pill) ->
category = pill.get("category")
value = pill.get("value")
if category isnt "text"
searchString += " " + category + ":\\"" + value + "\\""
else
searchString += " " + value
window.updateResults projectSearchCollection.setSearchString(searchString).query()
facetMatches: (callback) ->
pills = projectSearchCollection.getPills()
pillNames = _.keys(pills)
callback(pillNames)
valueMatches: (facet, searchTerm, callback) ->
switch facet
when 'id'
ids = []
ids.push(String(model.id)) for model in projectCollection.models
callback(ids)
when 'tag'
callback _.uniq _.flatten projectCollection.pluck('tags')
when 'title'
callback projectCollection.pluck('title')
})
visualsearch.searchBox.value('tag:"node.js"');
# Return our project collection
return projectSearchCollection
""" |
[
{
"context": "-er-animating\n\n# requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel\n\n# MIT lic",
"end": 256,
"score": 0.9999001622200012,
"start": 245,
"tag": "NAME",
"value": "Erik Möller"
},
{
"context": "AnimationFrame polyfill by Erik Möller. f... | slideshow.coffee | olmokramer/slideshow.js | 5 | 'use strict'
# requestAnimationFrame polyfill
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel
# MIT license
do (root = window ? this) ->
  lastTime = 0
  # adopt the first vendor-prefixed implementation that exists
  for prefix in ['ms', 'moz', 'webkit', 'o']
    break if root.requestAnimationFrame
    root.requestAnimationFrame = root["#{prefix}RequestAnimationFrame"]
    root.cancelAnimationFrame = root["#{prefix}CancelAnimationFrame"] ? root["#{prefix}CancelRequestAnimationFrame"]
  # fall back to a ~60fps setTimeout loop when no native version exists
  unless root.requestAnimationFrame?
    root.requestAnimationFrame = (callback) ->
      currentTime = new Date().getTime()
      delay = Math.max 0, 16 - (currentTime - lastTime)
      timerId = root.setTimeout (-> callback currentTime + delay), delay
      lastTime = currentTime + delay
      timerId
  unless root.cancelAnimationFrame?
    root.cancelAnimationFrame = (id) ->
      clearTimeout id
# end requestAnimationFrame polyfill

# indexOf(array, match) is equivalent to array.indexOf(match);
# returns undefined when no array is given, -1 when not found
indexOf = (array, match) ->
  return unless array?
  # prefer the native implementation when available
  return Array::indexOf.call Array::slice.call(array), match if Array::indexOf?
  for item, idx in array
    return idx if item is match
  -1

# copy own enumerable properties of each source object onto target
extend = (target, objects...) ->
  return unless typeof target is 'object'
  for object in objects
    target[key] = value for own key, value of object
  target

# shallow clone object
clone = (object) -> extend {}, object

# bind(fn, context) returns fn permanently bound to context
bind = (fn, context) -> (args...) -> fn.apply context, args

# millisecond timestamp, with a fallback where Date.now is missing
now = Date.now ? -> new Date().getTime()
class Slideshow
constructor: (element, options = {}) ->
# test if element is a valid html element or maybe
# a jQuery object or Backbone View
unless element.nodeType is 1
if element[0]? then element = element[0] # jQuery
if element.el? then element = element.el # Backbone
if element.nodeType isnt 1
throw new Error 'No valid element provided'
@configure options
@el = element
# and go!
init.call @
configure: (options) ->
@options = extend {}, defaults, options
if typeof @options.effect is 'string' and effects[@options.effect]?
@options.effect = clone effects[@options.effect]
@options.effect.conditions ?= effects.default.conditions.concat()
# private API
defaults =
touchEventsEnabled: true
mouseEventsEnabled: true
swipeThreshold: 0
animationDuration: 400
animationDirection: 'x'
effect: 'default'
effects =
default: do ->
transformCSSProperty = do ->
style = document.createElement('div').style
return 'transform' if style['transform']?
for vendor in ['moz', 'webkit', 'khtml', 'o', 'ms']
prefixed = "#{vendor}Transform"
return prefixed if style[prefixed]?
false
conditions: [
progress: .1
time: 250
durationModifier: .5
,
progress: .3
time: 500
,
progress: .5
]
before: (slideState, slideElement) ->
slideElement.style.display = 'block'
###
slideState is either -1, 0 or 1
if slideState === 0 then this is the current slide and we want to show it, so set translateX(0)
if slideState === -1 then this is the previous slide (to the left) so translateX(-100%)
if slideState === 1 then this is the next slide (to the right) so translateX(100%)
###
X = -slideState * 100
if transformCSSProperty
slideElement.style[transformCSSProperty] = "translateX(#{X}%)"
else
slideElement.style.left = "#{X}%"
progress: (slideState, progress, slideElement) ->
###
slideState = either 0 or 1
0 <= Math.abs(progress) <= 1, but progress can also be negative.
progress < 0 indicates movement to the left
progress > 0 indicates movement to the right
if slideState === 0 then this is the current slide and we want it to move away as progress increases:
X1 = 100 * p where p = progress
if slideState === 1 then this is the target slide and we want it to move in from the left/right as progress increases:
X2 = 100 * (-p / |p|) * (|p| - 1) where |p| = Math.abs(progress)
X = (1 - S) * X1 + S * X2 where S = slideState
X is the translateX value that should be set on this slide
X = (1 - S) * 100 * p + S * 100 * (-p / |p|) * (1 - |p|)
X = 100 * p * ( (1 - S) - S * (1 / |p|) * (1 - |p|) )
X = 100 * p * ( 1 - S - S * ( (1 / |p|) - 1 ) )
X = 100 * p * ( 1 - S + S * (1 - (1 / |p|) ) )
X = 100 * p * ( 1 - S + S - (S / |p|) )
X = 100 * p * ( 1 - (S / |p|) )
###
X = 100 * progress * (1 - slideState / Math.abs progress)
if transformCSSProperty
slideElement.style[transformCSSProperty] = "translateX(#{X}%)"
else
slideElement.style.left = "#{X}%"
after: (slideState, slideElement) ->
###
slideState is either 0 or 1
if slideState === 0 then this is the previously visible slide and it must be hidden
if slideState === 1 then this is the currently visible slide and it must be visible
###
slideElement.style.display = if slideState > 0 then 'block' else 'none'
init = ->
initSlides.call @
initEvents.call @
initSlides = ->
# we don't want the slides to be visible outside their container
effectBefore = @options.effect.before ? Function.prototype
effectAfter = @options.effect.after ? Function.prototype
# el.children may behave weird in IE8
@slides = @el.children ? @el.childNodes
@current = 0
for slide, i in @slides when i isnt @current
# call the before and after functions once on all slides, so all slides
# are positioned properly
if i is @current
effectBefore.call @, 0, @slides[@current]
effectAfter.call @, 1, @slides[@current]
else
effectBefore.call @, 1, slide
effectAfter.call @, 0, slide
initEvents = ->
@eventStart = bind eventStart, @
@eventProgress = bind eventProgress, @
@eventEnd = bind eventEnd, @
# check for TouchEvent support and if enabled in options
if TouchEvent? and @options.touchEventsEnabled
@el.addEventListener 'touchstart', @eventStart
@el.addEventListener 'touchmove', @eventProgress
@el.addEventListener 'touchend', @eventEnd
# check for MouseEvent support and if enabled in options
if MouseEvent? and @options.mouseEventsEnabled
@el.addEventListener 'mousedown', @eventStart
@el.addEventListener 'mousemove', @eventProgress
@el.addEventListener 'mouseup', @eventEnd
@el.addEventListener 'mouseleave', @eventEnd
for slide in @slides
slide.addEventListener 'mousedown', preventDefault
slide.addEventListener 'mousemove', preventDefault
slide.addEventListener 'mouseup', preventDefault
setCurrentSlide = (slide) ->
# set @current to slide's index in @slides
@current = indexOf @slides, slide
animateSlides = (currentSlide, targetSlide, {direction, initialProgress, durationMod}, callback) ->
# return if an animation is in progress
return if @currentAnimation?
# call onWillChange
unless @currentEvent? and @currentEvent.cancelOnWillChange
@options.onWillChange?.call @, currentSlide, targetSlide, (@current - direction / Math.abs(direction)) %% @slides.length
# progress and durationMod are only passed from a touch event
progress = initialProgress ? 0
durationMod ?= 1
# alter the duration of the animation after a touch event
duration = Math.max 1, @options.animationDuration * (1 - progress) * durationMod
# slides shouldn't be prepared if this is called from a touch event
# because this has already happened in touchStart
unless @currentEvent?
effectBefore = @options.effect.before ? Function.prototype
effectBefore.call @, 0, currentSlide
effectBefore.call @, (if direction < 0 then 1 else -1), targetSlide
# cache the animation state
@currentAnimation = {start: now(), currentSlide, targetSlide, direction, duration, progress, callback}
# and finally start animating
requestAnimationFrame bind nextFrame, @
nextFrame = (timestamp) ->
# immediately call the next requestAnimationFrame
id = requestAnimationFrame bind nextFrame, @
anim = @currentAnimation
{start, progress, duration, direction, currentSlide, targetSlide, callback} = @currentAnimation
# calculate the actual progress (fraction of the animationDuration)
progress = progress + (now() - start) / duration * (1 - progress)
if progress >= 1
progress = 1
# the animation has ended
@currentAnimation = null
cancelAnimationFrame id
# call the after and callback functions
effectAfter = @options.effect.after ? Function.prototype
effectAfter.call @, 0, currentSlide
effectAfter.call @, 1, targetSlide
# set the new currentSlide
setCurrentSlide.call @, targetSlide
if typeof callback == 'function'
callback.call @, currentSlide, targetSlide, @current
@options.onDidChange?.call @, currentSlide, targetSlide, @current
setCurrentSlide.call @, targetSlide
# call the progress functions
effectProgress = @options.effect.progress ? Function.prototype
effectProgress.call @, 0, progress * direction, currentSlide
effectProgress.call @, 1, progress * direction, targetSlide
eventStart = (event) ->
# do nothing if an animation or touch event is currently in progress
return if @currentAnimation? or @currentEvent?
# get the relevant slides
currentSlide = @getCurrentSlide()
prevSlide = @getPrevSlide()
nextSlide = @getNextSlide()
# prepare the slides to be animated
effectBefore = @options.effect.before ? Function.prototype
effectBefore.call @, 0, currentSlide
effectBefore.call @, -1, prevSlide
effectBefore.call @, 1, nextSlide
# cache the touch event state
{timeStamp} = event
{pageX: startX, pageY: startY} = event.touches?[0] ? event
@currentEvent = {currentSlide, prevSlide, nextSlide, timeStamp, startX, startY}
eventProgress = (event) ->
# do nothing if an animation is in progress, or there's no touch event in progress yet (which souldn't happen)
return if @currentAnimation or not @currentEvent?
{pageX, pageY} = event.touches?[0] ? event
# calculate the progress based on the distance touched
# progress = switch @options.animationDirection
# when 'x' then (pageX - @currentEvent.startX) / @el.clientWidth
# when 'y' then (pageY - @currentEvent.startY) / @el.clientHeight
progress =
x: (pageX - @currentEvent.startX) / @el.clientWidth
y: (pageY - @currentEvent.startY) / @el.clientHeight
progress = switch @options.animationDirection
when 'x'
if Math.abs(progress.x) > Math.abs(progress.y) then progress.x
when 'y'
if Math.abs(progress.y) > Math.abs(progress.x) then progress.y
@currentEvent.shouldCancel = !progress
return unless progress?
# get the target slide
targetSlide = if progress < 0 then @currentEvent.nextSlide else @currentEvent.prevSlide
if targetSlide isnt @currentEvent.targetSlide
@currentEvent.cancelOnWillChange = false
@currentEvent.targetSlide = targetSlide
# trigger onWillChange event
unless @currentEvent.cancelOnWillChange and progress isnt 0
@currentEvent.cancelOnWillChange = true
nextIndex = (@current - progress / Math.abs progress) %% @slides.length
@options.onWillChange?.call @, @currentEvent.currentSlide, targetSlide, nextIndex
@currentEvent.targetSlide = targetSlide
# animate the slide
requestAnimationFrame =>
effectProgress = @options.effect.progress ? Function.prototype
effectProgress.call @, 0, progress, @currentEvent.currentSlide
effectProgress.call @, 1, progress, targetSlide
eventEnd = (event) ->
# do nothing if an animation is in progress, or there's no touch event in progress yet (which souldn't happen)
return if @currentAnimation or not @currentEvent?
{timeStamp} = event
{pageX, pageY} = event.changedTouches?[0] ? event
# calculate the final progress that has been made
progress = switch @options.animationDirection
when 'x' then (pageX - @currentEvent.startX) / @el.clientWidth
when 'y' then (pageY - @currentEvent.startY) / @el.clientHeight
if @currentEvent.shouldCancel
currentSlide = if progress > 0 then @currentEvent.nextSlide else @currentEvent.prevSlide
direction = progress / Math.abs progress
initialProgress = 1 - Math.abs progress
animateSlides.call @, currentSlide, @currentEvent.currentSlide, {direction, initialProgress}
@currentEvent = null
return
if progress is 0
@currentEvent = null
return
# calculate the time passed
timePassed = timeStamp - @currentEvent.timeStamp
progressAbs = Math.abs progress
# check progress and timePassed against the conditions
for condition in @options.effect.conditions
if progressAbs > condition.progress and timePassed < (condition.time ? Infinity)
# one condition passed so set durationMod from that condition
durationMod = condition.durationModifier ? 1
break
# at this point, durationMod is only set if we matched a condition
# so slide to the next slide
if durationMod?
# we matched a condition, so slide away the currentSlide and slide in
# the targetSlide. if we slided to the left, the nextSlide will be the
# targetSlide, else the prevSlide will be.
currentSlide = @currentEvent.currentSlide
direction = progress / progressAbs
if direction is 1
targetSlide = @currentEvent.prevSlide
else
targetSlide = @currentEvent.nextSlide
initialProgress = progressAbs
else
# we didn't match a condition, so slide the currentSlide back into
# position and slide targetSlide (nextSlide or prevSlide, depending on
# slide direction) away
targetSlide = @currentEvent.currentSlide
direction = -progress / progressAbs
if direction is 1
currentSlide = @currentEvent.nextSlide
else
currentSlide = @currentEvent.prevSlide
initialProgress = 1 - progressAbs
# call the animateSlides function with the parameters
animateSlides.call @, currentSlide, targetSlide, {direction, initialProgress, durationMod}, =>
@currentEvent = null
  # shared listener installed on slide elements to block native drag/text
  # selection while the user swipes with the mouse
  preventDefault = (event) ->
    event.preventDefault()
# end private API
# public API
# get*Slide all return an HTMLElement
# get the slide at index i
# getSlide(-1) === getSlide(slides.length - 1)
# and getSlide(slides.length) === getSlide(0)
getSlide: (i) ->
@slides[i %% @slides.length]
# get the currently visible slide
getCurrentSlide: -> @slides[@current]
getCurrentIndex: -> @current
# get the slide after the currently visible one
getNextSlide: -> @getSlide @current + 1
# get the slide before the currently visible one
getPrevSlide: -> @getSlide @current - 1
# get the first slide
getFirstSlide: -> @slides[0]
# get the last slide
getLastSlide: -> @slides[@slides.length - 1]
# goTo* initiates an animation
# go to the slide at index i
goTo: (i, cb) ->
return if i is @current
currentSlide = @getCurrentSlide()
targetSlide = @getSlide i
# slide to left if i < @current, else slide to right
direction = if i < @current then 1 else -1
animateSlides.call @, currentSlide, targetSlide, {direction}, cb
# go to the next slide
goToNext: (cb) -> @goTo @current + 1, cb
# go to the previous slide
goToPrev: (cb) -> @goTo @current - 1, cb
# go to first slide
goToFirst: (cb) -> @goTo 0, cb
# go to last slide
goToLast: (cb) -> @goTo @slides.length - 1, cb
# destroy this instance
destroy: ->
@el.removeEventListener 'touchstart', @eventStart
@el.removeEventListener 'touchmove', @eventProgress
@el.removeEventListener 'touchend', @eventEnd
@el.removeEventListener 'mousedown', @eventStart
@el.removeEventListener 'mousemove', @eventProgress
@el.removeEventListener 'mouseup', @eventEnd
@el.removeEventListener 'mouseleave', @eventEnd
for slide in @slides
slide.removeEventListener 'mousedown', preventDefault
slide.removeEventListener 'mousemove', preventDefault
slide.removeEventListener 'mouseup', preventDefault
{@el, @slides, @eventStart, @eventProgress, @eventEnd, @options} = {}
# class methods
@registerAsJQueryPlugin: (jQuery, methodName = 'Slideshow') ->
jQuery.fn[methodName] = (options) -> (new Slideshow container, options for container in @)
  # register a named effect; missing conditions are filled from a copy of the
  # default effect's conditions. an already-registered name is not overwritten
  # (note: `effect.conditions` is still assigned even in that case).
  @registerEffect: (name, effect) ->
    effect.conditions ?= effects.default.conditions.concat()
    effects[name] ?= effect
# amd, commonjs and browser environment support
# (checked in that order; in a plain browser script `this` is the global
# object, so Slideshow becomes a global there)
do (root = this) ->
  # amd
  if typeof define is 'function' and define.amd
    define [], -> Slideshow
  # commonjs
  else if typeof exports isnt 'undefined'
    module.exports = Slideshow
  # browser
  else
    root.Slideshow = Slideshow
'use strict'
# requestAnimationFrame polyfill
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel
# MIT license
do (root = window ? this) ->
  lastFrameTime = 0
  # prefer a vendor-prefixed native implementation when available
  for prefix in ['ms', 'moz', 'webkit', 'o']
    break if root.requestAnimationFrame
    root.requestAnimationFrame = root["#{prefix}RequestAnimationFrame"]
    root.cancelAnimationFrame =
      root["#{prefix}CancelAnimationFrame"] ? root["#{prefix}CancelRequestAnimationFrame"]
  unless root.requestAnimationFrame?
    # setTimeout fallback aiming at ~60fps (16ms between frames)
    root.requestAnimationFrame = (callback) ->
      currentTime = new Date().getTime()
      delay = Math.max 0, 16 - (currentTime - lastFrameTime)
      timerId = root.setTimeout (-> callback currentTime + delay), delay
      lastFrameTime = currentTime + delay
      timerId
  unless root.cancelAnimationFrame?
    root.cancelAnimationFrame = (timerId) -> clearTimeout timerId
# end requestAnimationFrame polyfill
# indexOf(array, match) mirrors Array::indexOf for array-likes;
# returns undefined when array is missing, -1 when match is absent
indexOf = (array, match) ->
  return unless array?
  if Array::indexOf?
    copy = Array::slice.call array
    return Array::indexOf.call copy, match
  i = 0
  for element in array
    return i if element is match
    i++
  -1
# copy own enumerable properties of each source onto target (left to right,
# later sources win); returns target, or undefined for a non-object target
extend = (target, sources...) ->
  return unless typeof target is 'object'
  for source in sources
    target[key] = value for own key, value of source
  target
# shallow copy of an object's own enumerable properties
clone = (source) -> extend {}, source
# bind(fn, context) returns a wrapper that invokes fn with `this` = context
bind = (fn, context) ->
  -> fn.apply context, Array::slice.call arguments
now = Date.now ? -> new Date().getTime()
class Slideshow
  # build a slideshow inside `element`; accepts a raw HTMLElement, a jQuery
  # object or a Backbone view and unwraps accordingly. throws when no usable
  # element can be derived.
  constructor: (element, options = {}) ->
    # test if element is a valid html element or maybe
    # a jQuery object or Backbone View
    unless element.nodeType is 1
      if element[0]? then element = element[0] # jQuery
      if element.el? then element = element.el # Backbone
      if element.nodeType isnt 1
        throw new Error 'No valid element provided'
    @configure options
    @el = element
    # and go!
    init.call @
  # merge `options` over the defaults; a string `effect` name is resolved
  # against the registered effects and cloned so later mutation is safe
  configure: (options) ->
    @options = extend {}, defaults, options
    if typeof @options.effect is 'string' and effects[@options.effect]?
      @options.effect = clone effects[@options.effect]
    @options.effect.conditions ?= effects.default.conditions.concat()
  # private API
  defaults =
    touchEventsEnabled: true
    mouseEventsEnabled: true
    swipeThreshold: 0
    animationDuration: 400
    animationDirection: 'x'
    effect: 'default'
  effects =
    default: do ->
      # feature-detect a (possibly vendor prefixed) CSS transform property;
      # a falsy result means we animate `left` instead
      transformCSSProperty = do ->
        style = document.createElement('div').style
        return 'transform' if style['transform']?
        for vendor in ['moz', 'webkit', 'khtml', 'o', 'ms']
          prefixed = "#{vendor}Transform"
          return prefixed if style[prefixed]?
        false
      conditions: [
        progress: .1
        time: 250
        durationModifier: .5
      ,
        progress: .3
        time: 500
      ,
        progress: .5
      ]
      before: (slideState, slideElement) ->
        slideElement.style.display = 'block'
        ###
        slideState is either -1, 0 or 1
        if slideState === 0 then this is the current slide and we want to show it, so set translateX(0)
        if slideState === -1 then this is the previous slide (to the left) so translateX(-100%)
        if slideState === 1 then this is the next slide (to the right) so translateX(100%)
        ###
        X = -slideState * 100
        if transformCSSProperty
          slideElement.style[transformCSSProperty] = "translateX(#{X}%)"
        else
          slideElement.style.left = "#{X}%"
      progress: (slideState, progress, slideElement) ->
        ###
        slideState = either 0 or 1
        0 <= Math.abs(progress) <= 1, but progress can also be negative.
        progress < 0 indicates movement to the left
        progress > 0 indicates movement to the right
        if slideState === 0 then this is the current slide and we want it to move away as progress increases:
        X1 = 100 * p where p = progress
        if slideState === 1 then this is the target slide and we want it to move in from the left/right as progress increases:
        X2 = 100 * (-p / |p|) * (|p| - 1) where |p| = Math.abs(progress)
        X = (1 - S) * X1 + S * X2 where S = slideState
        X is the translateX value that should be set on this slide
        X = (1 - S) * 100 * p + S * 100 * (-p / |p|) * (1 - |p|)
        X = 100 * p * ( (1 - S) - S * (1 / |p|) * (1 - |p|) )
        X = 100 * p * ( 1 - S - S * ( (1 / |p|) - 1 ) )
        X = 100 * p * ( 1 - S + S * (1 - (1 / |p|) ) )
        X = 100 * p * ( 1 - S + S - (S / |p|) )
        X = 100 * p * ( 1 - (S / |p|) )
        ###
        X = 100 * progress * (1 - slideState / Math.abs progress)
        if transformCSSProperty
          slideElement.style[transformCSSProperty] = "translateX(#{X}%)"
        else
          slideElement.style.left = "#{X}%"
      after: (slideState, slideElement) ->
        ###
        slideState is either 0 or 1
        if slideState === 0 then this is the previously visible slide and it must be hidden
        if slideState === 1 then this is the currently visible slide and it must be visible
        ###
        slideElement.style.display = if slideState > 0 then 'block' else 'none'
  init = ->
    initSlides.call @
    initEvents.call @
  initSlides = ->
    # we don't want the slides to be visible outside their container
    effectBefore = @options.effect.before ? Function.prototype
    effectAfter = @options.effect.after ? Function.prototype
    # el.children may behave weird in IE8
    @slides = @el.children ? @el.childNodes
    @current = 0
    # call the before and after functions once on all slides, so all slides
    # are positioned properly. FIX: the former `when i isnt @current` guard
    # made the `if i is @current` branch unreachable, so the current slide
    # never got initialized — iterate over every slide instead.
    for slide, i in @slides
      if i is @current
        effectBefore.call @, 0, @slides[@current]
        effectAfter.call @, 1, @slides[@current]
      else
        effectBefore.call @, 1, slide
        effectAfter.call @, 0, slide
  initEvents = ->
    @eventStart = bind eventStart, @
    @eventProgress = bind eventProgress, @
    @eventEnd = bind eventEnd, @
    # check for TouchEvent support and if enabled in options
    if TouchEvent? and @options.touchEventsEnabled
      @el.addEventListener 'touchstart', @eventStart
      @el.addEventListener 'touchmove', @eventProgress
      @el.addEventListener 'touchend', @eventEnd
    # check for MouseEvent support and if enabled in options
    if MouseEvent? and @options.mouseEventsEnabled
      @el.addEventListener 'mousedown', @eventStart
      @el.addEventListener 'mousemove', @eventProgress
      @el.addEventListener 'mouseup', @eventEnd
      @el.addEventListener 'mouseleave', @eventEnd
      for slide in @slides
        slide.addEventListener 'mousedown', preventDefault
        slide.addEventListener 'mousemove', preventDefault
        slide.addEventListener 'mouseup', preventDefault
  setCurrentSlide = (slide) ->
    # set @current to slide's index in @slides
    @current = indexOf @slides, slide
  # start an animation from currentSlide to targetSlide.
  # direction is +1/-1; initialProgress and durationMod are only passed when
  # the animation continues a touch gesture.
  animateSlides = (currentSlide, targetSlide, {direction, initialProgress, durationMod}, callback) ->
    # return if an animation is in progress
    return if @currentAnimation?
    # call onWillChange
    unless @currentEvent? and @currentEvent.cancelOnWillChange
      @options.onWillChange?.call @, currentSlide, targetSlide, (@current - direction / Math.abs(direction)) %% @slides.length
    # progress and durationMod are only passed from a touch event
    progress = initialProgress ? 0
    durationMod ?= 1
    # alter the duration of the animation after a touch event
    duration = Math.max 1, @options.animationDuration * (1 - progress) * durationMod
    # slides shouldn't be prepared if this is called from a touch event
    # because this has already happened in touchStart
    unless @currentEvent?
      effectBefore = @options.effect.before ? Function.prototype
      effectBefore.call @, 0, currentSlide
      effectBefore.call @, (if direction < 0 then 1 else -1), targetSlide
    # cache the animation state
    @currentAnimation = {start: now(), currentSlide, targetSlide, direction, duration, progress, callback}
    # and finally start animating
    requestAnimationFrame bind nextFrame, @
  # single animation frame; reschedules itself until progress reaches 1
  nextFrame = (timestamp) ->
    # immediately call the next requestAnimationFrame
    id = requestAnimationFrame bind nextFrame, @
    {start, progress, duration, direction, currentSlide, targetSlide, callback} = @currentAnimation
    # calculate the actual progress (fraction of the animationDuration)
    progress = progress + (now() - start) / duration * (1 - progress)
    if progress >= 1
      progress = 1
      # the animation has ended
      @currentAnimation = null
      cancelAnimationFrame id
      # call the after and callback functions
      effectAfter = @options.effect.after ? Function.prototype
      effectAfter.call @, 0, currentSlide
      effectAfter.call @, 1, targetSlide
      # set the new currentSlide once, before the callbacks, so both observe
      # the updated index (the old code redundantly set it a second time)
      setCurrentSlide.call @, targetSlide
      if typeof callback is 'function'
        callback.call @, currentSlide, targetSlide, @current
      @options.onDidChange?.call @, currentSlide, targetSlide, @current
    # call the progress functions (also for the final frame at progress 1)
    effectProgress = @options.effect.progress ? Function.prototype
    effectProgress.call @, 0, progress * direction, currentSlide
    effectProgress.call @, 1, progress * direction, targetSlide
  eventStart = (event) ->
    # do nothing if an animation or touch event is currently in progress
    return if @currentAnimation? or @currentEvent?
    # get the relevant slides
    currentSlide = @getCurrentSlide()
    prevSlide = @getPrevSlide()
    nextSlide = @getNextSlide()
    # prepare the slides to be animated
    effectBefore = @options.effect.before ? Function.prototype
    effectBefore.call @, 0, currentSlide
    effectBefore.call @, -1, prevSlide
    effectBefore.call @, 1, nextSlide
    # cache the touch event state
    {timeStamp} = event
    {pageX: startX, pageY: startY} = event.touches?[0] ? event
    @currentEvent = {currentSlide, prevSlide, nextSlide, timeStamp, startX, startY}
  eventProgress = (event) ->
    # do nothing if an animation is in progress, or there's no touch event in progress yet (which shouldn't happen)
    return if @currentAnimation or not @currentEvent?
    {pageX, pageY} = event.touches?[0] ? event
    # calculate the progress based on the distance touched, separately per
    # axis; only a drag dominated by the configured axis counts
    progress =
      x: (pageX - @currentEvent.startX) / @el.clientWidth
      y: (pageY - @currentEvent.startY) / @el.clientHeight
    progress = switch @options.animationDirection
      when 'x'
        if Math.abs(progress.x) > Math.abs(progress.y) then progress.x
      when 'y'
        if Math.abs(progress.y) > Math.abs(progress.x) then progress.y
    # a drag dominated by the other axis cancels the gesture on release
    @currentEvent.shouldCancel = !progress
    return unless progress?
    # get the target slide
    targetSlide = if progress < 0 then @currentEvent.nextSlide else @currentEvent.prevSlide
    if targetSlide isnt @currentEvent.targetSlide
      @currentEvent.cancelOnWillChange = false
      @currentEvent.targetSlide = targetSlide
    # trigger onWillChange event
    unless @currentEvent.cancelOnWillChange and progress isnt 0
      @currentEvent.cancelOnWillChange = true
      nextIndex = (@current - progress / Math.abs progress) %% @slides.length
      @options.onWillChange?.call @, @currentEvent.currentSlide, targetSlide, nextIndex
      @currentEvent.targetSlide = targetSlide
    # animate the slide
    requestAnimationFrame =>
      effectProgress = @options.effect.progress ? Function.prototype
      effectProgress.call @, 0, progress, @currentEvent.currentSlide
      effectProgress.call @, 1, progress, targetSlide
  eventEnd = (event) ->
    # do nothing if an animation is in progress, or there's no touch event in progress yet (which shouldn't happen)
    return if @currentAnimation or not @currentEvent?
    {timeStamp} = event
    {pageX, pageY} = event.changedTouches?[0] ? event
    # calculate the final progress that has been made
    progress = switch @options.animationDirection
      when 'x' then (pageX - @currentEvent.startX) / @el.clientWidth
      when 'y' then (pageY - @currentEvent.startY) / @el.clientHeight
    if @currentEvent.shouldCancel
      currentSlide = if progress > 0 then @currentEvent.nextSlide else @currentEvent.prevSlide
      direction = progress / Math.abs progress
      initialProgress = 1 - Math.abs progress
      animateSlides.call @, currentSlide, @currentEvent.currentSlide, {direction, initialProgress}
      @currentEvent = null
      return
    if progress is 0
      @currentEvent = null
      return
    # calculate the time passed
    timePassed = timeStamp - @currentEvent.timeStamp
    progressAbs = Math.abs progress
    # check progress and timePassed against the conditions
    for condition in @options.effect.conditions
      if progressAbs > condition.progress and timePassed < (condition.time ? Infinity)
        # one condition passed so set durationMod from that condition
        durationMod = condition.durationModifier ? 1
        break
    # at this point, durationMod is only set if we matched a condition
    # so slide to the next slide
    if durationMod?
      # we matched a condition, so slide away the currentSlide and slide in
      # the targetSlide. if we slided to the left, the nextSlide will be the
      # targetSlide, else the prevSlide will be.
      currentSlide = @currentEvent.currentSlide
      direction = progress / progressAbs
      if direction is 1
        targetSlide = @currentEvent.prevSlide
      else
        targetSlide = @currentEvent.nextSlide
      initialProgress = progressAbs
    else
      # we didn't match a condition, so slide the currentSlide back into
      # position and slide targetSlide (nextSlide or prevSlide, depending on
      # slide direction) away
      targetSlide = @currentEvent.currentSlide
      direction = -progress / progressAbs
      if direction is 1
        currentSlide = @currentEvent.nextSlide
      else
        currentSlide = @currentEvent.prevSlide
      initialProgress = 1 - progressAbs
    # call the animateSlides function with the parameters
    animateSlides.call @, currentSlide, targetSlide, {direction, initialProgress, durationMod}, =>
      @currentEvent = null
  # shared listener that blocks native drag/selection on slide elements
  preventDefault = (event) ->
    event.preventDefault()
  # end private API
  # public API
  # get*Slide all return an HTMLElement
  # get the slide at index i
  # getSlide(-1) === getSlide(slides.length - 1)
  # and getSlide(slides.length) === getSlide(0)
  getSlide: (i) ->
    @slides[i %% @slides.length]
  # get the currently visible slide
  getCurrentSlide: -> @slides[@current]
  # get the index of the currently visible slide
  getCurrentIndex: -> @current
  # get the slide after the currently visible one
  getNextSlide: -> @getSlide @current + 1
  # get the slide before the currently visible one
  getPrevSlide: -> @getSlide @current - 1
  # get the first slide
  getFirstSlide: -> @slides[0]
  # get the last slide
  getLastSlide: -> @slides[@slides.length - 1]
  # goTo* initiates an animation
  # go to the slide at index i; cb fires when the animation completes
  goTo: (i, cb) ->
    return if i is @current
    currentSlide = @getCurrentSlide()
    targetSlide = @getSlide i
    # slide to left if i < @current, else slide to right
    direction = if i < @current then 1 else -1
    animateSlides.call @, currentSlide, targetSlide, {direction}, cb
  # go to the next slide
  goToNext: (cb) -> @goTo @current + 1, cb
  # go to the previous slide
  goToPrev: (cb) -> @goTo @current - 1, cb
  # go to first slide
  goToFirst: (cb) -> @goTo 0, cb
  # go to last slide
  goToLast: (cb) -> @goTo @slides.length - 1, cb
  # destroy this instance: detach all listeners and drop references
  destroy: ->
    @el.removeEventListener 'touchstart', @eventStart
    @el.removeEventListener 'touchmove', @eventProgress
    @el.removeEventListener 'touchend', @eventEnd
    @el.removeEventListener 'mousedown', @eventStart
    @el.removeEventListener 'mousemove', @eventProgress
    @el.removeEventListener 'mouseup', @eventEnd
    @el.removeEventListener 'mouseleave', @eventEnd
    for slide in @slides
      slide.removeEventListener 'mousedown', preventDefault
      slide.removeEventListener 'mousemove', preventDefault
      slide.removeEventListener 'mouseup', preventDefault
    {@el, @slides, @eventStart, @eventProgress, @eventEnd, @options} = {}
  # class methods
  # install as a jQuery plugin: $(sel)[methodName](opts) returns one
  # Slideshow instance per matched container
  @registerAsJQueryPlugin: (jQuery, methodName = 'Slideshow') ->
    jQuery.fn[methodName] = (options) -> (new Slideshow container, options for container in @)
  # register a named effect; missing conditions fall back to a copy of the
  # default effect's. an already-registered name is not overwritten.
  @registerEffect: (name, effect) ->
    effect.conditions ?= effects.default.conditions.concat()
    effects[name] ?= effect
# amd, commonjs and browser environment support
# (checked in that order; in a plain browser script `this` is the global
# object, so Slideshow becomes a global there)
do (root = this) ->
  # amd
  if typeof define is 'function' and define.amd
    define [], -> Slideshow
  # commonjs
  else if typeof exports isnt 'undefined'
    module.exports = Slideshow
  # browser
  else
    root.Slideshow = Slideshow
'use strict'
# requestAnimationFrame polyfill
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel
# MIT license
do (root = window ? this) ->
  lastFrameTime = 0
  # prefer a vendor-prefixed native implementation when available
  for prefix in ['ms', 'moz', 'webkit', 'o']
    break if root.requestAnimationFrame
    root.requestAnimationFrame = root["#{prefix}RequestAnimationFrame"]
    root.cancelAnimationFrame =
      root["#{prefix}CancelAnimationFrame"] ? root["#{prefix}CancelRequestAnimationFrame"]
  unless root.requestAnimationFrame?
    # setTimeout fallback aiming at ~60fps (16ms between frames)
    root.requestAnimationFrame = (callback) ->
      currentTime = new Date().getTime()
      delay = Math.max 0, 16 - (currentTime - lastFrameTime)
      timerId = root.setTimeout (-> callback currentTime + delay), delay
      lastFrameTime = currentTime + delay
      timerId
  unless root.cancelAnimationFrame?
    root.cancelAnimationFrame = (timerId) -> clearTimeout timerId
# end requestAnimationFrame polyfill
# indexOf(array, match) mirrors Array::indexOf for array-likes;
# returns undefined when array is missing, -1 when match is absent
indexOf = (array, match) ->
  return unless array?
  if Array::indexOf?
    copy = Array::slice.call array
    return Array::indexOf.call copy, match
  i = 0
  for element in array
    return i if element is match
    i++
  -1
# copy own enumerable properties of each source onto target (left to right,
# later sources win); returns target, or undefined for a non-object target
extend = (target, sources...) ->
  return unless typeof target is 'object'
  for source in sources
    target[key] = value for own key, value of source
  target
# shallow copy of an object's own enumerable properties
clone = (source) -> extend {}, source
# bind(fn, context) returns a wrapper that invokes fn with `this` = context
bind = (fn, context) ->
  -> fn.apply context, Array::slice.call arguments
now = Date.now ? -> new Date().getTime()
class Slideshow
constructor: (element, options = {}) ->
# test if element is a valid html element or maybe
# a jQuery object or Backbone View
unless element.nodeType is 1
if element[0]? then element = element[0] # jQuery
if element.el? then element = element.el # Backbone
if element.nodeType isnt 1
throw new Error 'No valid element provided'
@configure options
@el = element
# and go!
init.call @
configure: (options) ->
@options = extend {}, defaults, options
if typeof @options.effect is 'string' and effects[@options.effect]?
@options.effect = clone effects[@options.effect]
@options.effect.conditions ?= effects.default.conditions.concat()
# private API
defaults =
touchEventsEnabled: true
mouseEventsEnabled: true
swipeThreshold: 0
animationDuration: 400
animationDirection: 'x'
effect: 'default'
effects =
default: do ->
transformCSSProperty = do ->
style = document.createElement('div').style
return 'transform' if style['transform']?
for vendor in ['moz', 'webkit', 'khtml', 'o', 'ms']
prefixed = "#{vendor}Transform"
return prefixed if style[prefixed]?
false
conditions: [
progress: .1
time: 250
durationModifier: .5
,
progress: .3
time: 500
,
progress: .5
]
before: (slideState, slideElement) ->
slideElement.style.display = 'block'
###
slideState is either -1, 0 or 1
if slideState === 0 then this is the current slide and we want to show it, so set translateX(0)
if slideState === -1 then this is the previous slide (to the left) so translateX(-100%)
if slideState === 1 then this is the next slide (to the right) so translateX(100%)
###
X = -slideState * 100
if transformCSSProperty
slideElement.style[transformCSSProperty] = "translateX(#{X}%)"
else
slideElement.style.left = "#{X}%"
progress: (slideState, progress, slideElement) ->
###
slideState = either 0 or 1
0 <= Math.abs(progress) <= 1, but progress can also be negative.
progress < 0 indicates movement to the left
progress > 0 indicates movement to the right
if slideState === 0 then this is the current slide and we want it to move away as progress increases:
X1 = 100 * p where p = progress
if slideState === 1 then this is the target slide and we want it to move in from the left/right as progress increases:
X2 = 100 * (-p / |p|) * (|p| - 1) where |p| = Math.abs(progress)
X = (1 - S) * X1 + S * X2 where S = slideState
X is the translateX value that should be set on this slide
X = (1 - S) * 100 * p + S * 100 * (-p / |p|) * (1 - |p|)
X = 100 * p * ( (1 - S) - S * (1 / |p|) * (1 - |p|) )
X = 100 * p * ( 1 - S - S * ( (1 / |p|) - 1 ) )
X = 100 * p * ( 1 - S + S * (1 - (1 / |p|) ) )
X = 100 * p * ( 1 - S + S - (S / |p|) )
X = 100 * p * ( 1 - (S / |p|) )
###
X = 100 * progress * (1 - slideState / Math.abs progress)
if transformCSSProperty
slideElement.style[transformCSSProperty] = "translateX(#{X}%)"
else
slideElement.style.left = "#{X}%"
after: (slideState, slideElement) ->
###
slideState is either 0 or 1
if slideState === 0 then this is the previously visible slide and it must be hidden
if slideState === 1 then this is the currently visible slide and it must be visible
###
slideElement.style.display = if slideState > 0 then 'block' else 'none'
init = ->
initSlides.call @
initEvents.call @
initSlides = ->
# we don't want the slides to be visible outside their container
effectBefore = @options.effect.before ? Function.prototype
effectAfter = @options.effect.after ? Function.prototype
# el.children may behave weird in IE8
@slides = @el.children ? @el.childNodes
@current = 0
for slide, i in @slides when i isnt @current
# call the before and after functions once on all slides, so all slides
# are positioned properly
if i is @current
effectBefore.call @, 0, @slides[@current]
effectAfter.call @, 1, @slides[@current]
else
effectBefore.call @, 1, slide
effectAfter.call @, 0, slide
initEvents = ->
@eventStart = bind eventStart, @
@eventProgress = bind eventProgress, @
@eventEnd = bind eventEnd, @
# check for TouchEvent support and if enabled in options
if TouchEvent? and @options.touchEventsEnabled
@el.addEventListener 'touchstart', @eventStart
@el.addEventListener 'touchmove', @eventProgress
@el.addEventListener 'touchend', @eventEnd
# check for MouseEvent support and if enabled in options
if MouseEvent? and @options.mouseEventsEnabled
@el.addEventListener 'mousedown', @eventStart
@el.addEventListener 'mousemove', @eventProgress
@el.addEventListener 'mouseup', @eventEnd
@el.addEventListener 'mouseleave', @eventEnd
for slide in @slides
slide.addEventListener 'mousedown', preventDefault
slide.addEventListener 'mousemove', preventDefault
slide.addEventListener 'mouseup', preventDefault
setCurrentSlide = (slide) ->
# set @current to slide's index in @slides
@current = indexOf @slides, slide
animateSlides = (currentSlide, targetSlide, {direction, initialProgress, durationMod}, callback) ->
# return if an animation is in progress
return if @currentAnimation?
# call onWillChange
unless @currentEvent? and @currentEvent.cancelOnWillChange
@options.onWillChange?.call @, currentSlide, targetSlide, (@current - direction / Math.abs(direction)) %% @slides.length
# progress and durationMod are only passed from a touch event
progress = initialProgress ? 0
durationMod ?= 1
# alter the duration of the animation after a touch event
duration = Math.max 1, @options.animationDuration * (1 - progress) * durationMod
# slides shouldn't be prepared if this is called from a touch event
# because this has already happened in touchStart
unless @currentEvent?
effectBefore = @options.effect.before ? Function.prototype
effectBefore.call @, 0, currentSlide
effectBefore.call @, (if direction < 0 then 1 else -1), targetSlide
# cache the animation state
@currentAnimation = {start: now(), currentSlide, targetSlide, direction, duration, progress, callback}
# and finally start animating
requestAnimationFrame bind nextFrame, @
nextFrame = (timestamp) ->
# immediately call the next requestAnimationFrame
id = requestAnimationFrame bind nextFrame, @
anim = @currentAnimation
{start, progress, duration, direction, currentSlide, targetSlide, callback} = @currentAnimation
# calculate the actual progress (fraction of the animationDuration)
progress = progress + (now() - start) / duration * (1 - progress)
if progress >= 1
progress = 1
# the animation has ended
@currentAnimation = null
cancelAnimationFrame id
# call the after and callback functions
effectAfter = @options.effect.after ? Function.prototype
effectAfter.call @, 0, currentSlide
effectAfter.call @, 1, targetSlide
# set the new currentSlide
setCurrentSlide.call @, targetSlide
if typeof callback == 'function'
callback.call @, currentSlide, targetSlide, @current
@options.onDidChange?.call @, currentSlide, targetSlide, @current
setCurrentSlide.call @, targetSlide
# call the progress functions
effectProgress = @options.effect.progress ? Function.prototype
effectProgress.call @, 0, progress * direction, currentSlide
effectProgress.call @, 1, progress * direction, targetSlide
eventStart = (event) ->
# do nothing if an animation or touch event is currently in progress
return if @currentAnimation? or @currentEvent?
# get the relevant slides
currentSlide = @getCurrentSlide()
prevSlide = @getPrevSlide()
nextSlide = @getNextSlide()
# prepare the slides to be animated
effectBefore = @options.effect.before ? Function.prototype
effectBefore.call @, 0, currentSlide
effectBefore.call @, -1, prevSlide
effectBefore.call @, 1, nextSlide
# cache the touch event state
{timeStamp} = event
{pageX: startX, pageY: startY} = event.touches?[0] ? event
@currentEvent = {currentSlide, prevSlide, nextSlide, timeStamp, startX, startY}
eventProgress = (event) ->
# do nothing if an animation is in progress, or there's no touch event in progress yet (which souldn't happen)
return if @currentAnimation or not @currentEvent?
{pageX, pageY} = event.touches?[0] ? event
# calculate the progress based on the distance touched
# progress = switch @options.animationDirection
# when 'x' then (pageX - @currentEvent.startX) / @el.clientWidth
# when 'y' then (pageY - @currentEvent.startY) / @el.clientHeight
progress =
x: (pageX - @currentEvent.startX) / @el.clientWidth
y: (pageY - @currentEvent.startY) / @el.clientHeight
progress = switch @options.animationDirection
when 'x'
if Math.abs(progress.x) > Math.abs(progress.y) then progress.x
when 'y'
if Math.abs(progress.y) > Math.abs(progress.x) then progress.y
@currentEvent.shouldCancel = !progress
return unless progress?
# get the target slide
targetSlide = if progress < 0 then @currentEvent.nextSlide else @currentEvent.prevSlide
if targetSlide isnt @currentEvent.targetSlide
@currentEvent.cancelOnWillChange = false
@currentEvent.targetSlide = targetSlide
# trigger onWillChange event
unless @currentEvent.cancelOnWillChange and progress isnt 0
@currentEvent.cancelOnWillChange = true
nextIndex = (@current - progress / Math.abs progress) %% @slides.length
@options.onWillChange?.call @, @currentEvent.currentSlide, targetSlide, nextIndex
@currentEvent.targetSlide = targetSlide
# animate the slide
requestAnimationFrame =>
effectProgress = @options.effect.progress ? Function.prototype
effectProgress.call @, 0, progress, @currentEvent.currentSlide
effectProgress.call @, 1, progress, targetSlide
eventEnd = (event) ->
# do nothing if an animation is in progress, or there's no touch event in progress yet (which souldn't happen)
return if @currentAnimation or not @currentEvent?
{timeStamp} = event
{pageX, pageY} = event.changedTouches?[0] ? event
# calculate the final progress that has been made
progress = switch @options.animationDirection
when 'x' then (pageX - @currentEvent.startX) / @el.clientWidth
when 'y' then (pageY - @currentEvent.startY) / @el.clientHeight
if @currentEvent.shouldCancel
currentSlide = if progress > 0 then @currentEvent.nextSlide else @currentEvent.prevSlide
direction = progress / Math.abs progress
initialProgress = 1 - Math.abs progress
animateSlides.call @, currentSlide, @currentEvent.currentSlide, {direction, initialProgress}
@currentEvent = null
return
if progress is 0
@currentEvent = null
return
# calculate the time passed
timePassed = timeStamp - @currentEvent.timeStamp
progressAbs = Math.abs progress
# check progress and timePassed against the conditions
for condition in @options.effect.conditions
if progressAbs > condition.progress and timePassed < (condition.time ? Infinity)
# one condition passed so set durationMod from that condition
durationMod = condition.durationModifier ? 1
break
# at this point, durationMod is only set if we matched a condition
# so slide to the next slide
if durationMod?
# we matched a condition, so slide away the currentSlide and slide in
# the targetSlide. if we slided to the left, the nextSlide will be the
# targetSlide, else the prevSlide will be.
currentSlide = @currentEvent.currentSlide
direction = progress / progressAbs
if direction is 1
targetSlide = @currentEvent.prevSlide
else
targetSlide = @currentEvent.nextSlide
initialProgress = progressAbs
else
# we didn't match a condition, so slide the currentSlide back into
# position and slide targetSlide (nextSlide or prevSlide, depending on
# slide direction) away
targetSlide = @currentEvent.currentSlide
direction = -progress / progressAbs
if direction is 1
currentSlide = @currentEvent.nextSlide
else
currentSlide = @currentEvent.prevSlide
initialProgress = 1 - progressAbs
# call the animateSlides function with the parameters
animateSlides.call @, currentSlide, targetSlide, {direction, initialProgress, durationMod}, =>
@currentEvent = null
preventDefault = (event) ->
event.preventDefault()
# end private API
# public API
# get*Slide all return an HTMLElement
# Return the slide HTMLElement at index i, wrapping out-of-range indexes:
# getSlide(-1) === getSlide(slides.length - 1) and
# getSlide(slides.length) === getSlide(0).
getSlide: (i) ->
  count = @slides.length
  # Positive modulo — identical to CoffeeScript's %% operator for integers.
  @slides[((i % count) + count) % count]
# Accessors for commonly-needed slides; each returns an HTMLElement
# (getCurrentIndex returns the integer index instead).
getCurrentSlide: ->
  @slides[@current]
getCurrentIndex: ->
  @current
# Slide after the currently visible one (wraps via getSlide).
getNextSlide: ->
  @getSlide @current + 1
# Slide before the currently visible one (wraps via getSlide).
getPrevSlide: ->
  @getSlide @current - 1
getFirstSlide: ->
  @slides[0]
getLastSlide: ->
  @slides[@slides.length - 1]
# goTo* methods initiate a slide-transition animation.
# Animate from the currently visible slide to the slide at index i.
# Calling with the current index is a no-op; cb (optional) fires when the
# animation completes.
goTo: (i, cb) ->
  return if i is @current
  fromSlide = @getCurrentSlide()
  toSlide = @getSlide i
  # Moving backwards (i < @current) animates to the left (direction 1),
  # moving forwards animates to the right (direction -1).
  direction = if i < @current then 1 else -1
  animateSlides.call @, fromSlide, toSlide, {direction}, cb
# Convenience wrappers around goTo for the usual navigation targets.
# Each forwards the optional completion callback unchanged.
goToNext: (cb) ->
  @goTo @current + 1, cb
goToPrev: (cb) ->
  @goTo @current - 1, cb
goToFirst: (cb) ->
  @goTo 0, cb
goToLast: (cb) ->
  @goTo @slides.length - 1, cb
# Tear down this instance: detach every listener that was attached to the
# container and the slides, then drop all instance references so the DOM
# nodes and handlers can be garbage-collected.
destroy: ->
  # Removal order deliberately mirrors the original registration order.
  containerListeners = [
    ['touchstart', @eventStart]
    ['touchmove', @eventProgress]
    ['touchend', @eventEnd]
    ['mousedown', @eventStart]
    ['mousemove', @eventProgress]
    ['mouseup', @eventEnd]
    ['mouseleave', @eventEnd]
  ]
  for [eventName, handler] in containerListeners
    @el.removeEventListener eventName, handler
  # Each slide had its default mouse behavior suppressed; undo that too.
  for slide in @slides
    for mouseEvent in ['mousedown', 'mousemove', 'mouseup']
      slide.removeEventListener mouseEvent, preventDefault
  # Destructure from an empty object to clear every reference at once.
  {@el, @slides, @eventStart, @eventProgress, @eventEnd, @options} = {}
# class methods
# Expose Slideshow as a jQuery plugin: $(selector)[methodName](options)
# creates one Slideshow per matched element and returns the array of
# created instances (not the jQuery set, so chaining stops here).
@registerAsJQueryPlugin: (jQuery, methodName = 'Slideshow') ->
  jQuery.fn[methodName] = (options) ->
    instances = []
    instances.push(new Slideshow(container, options)) for container in @
    instances
# Register a named transition effect. An effect without its own trigger
# conditions inherits a copy of the default effect's conditions; an
# already-registered name is never overwritten.
@registerEffect: (name, effect) ->
  unless effect.conditions?
    # concat() clones the array so the default effect's conditions
    # cannot be mutated through this effect later.
    effect.conditions = effects.default.conditions.concat()
  effects[name] ?= effect
# Export Slideshow for AMD loaders, CommonJS environments and plain
# browser globals — checked in that order of preference.
do (root = this) ->
  usingAmd = typeof define is 'function' and define.amd
  if usingAmd
    define [], -> Slideshow
  else if typeof exports isnt 'undefined'
    module.exports = Slideshow
  else
    root.Slideshow = Slideshow
|
[
{
"context": "- processing rules - ordering\n<https://github.com/fhirbase/fhirbase-plv8/issues/90>\n###\ndescribe 'Issues', -",
"end": 211,
"score": 0.9995360374450684,
"start": 203,
"tag": "USERNAME",
"value": "fhirbase"
},
{
"context": " with ordering operations\n <https://githu... | test/integration/issues/90_transaction_processing_rules_ordering_spec.coffee | micabe/fhirbase | 0 | plv8 = require('../../../plpl/src/plv8')
assert = require('assert')
helpers = require('../../helpers')
match = helpers.match
###
Issue #90
Transaction - processing rules - ordering
<https://github.com/fhirbase/fhirbase-plv8/issues/90>
###
describe 'Issues', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
describe '#90 Transaction processing ordering', ->
###
Got some issues with ordering operations
<https://github.com/fhirbase/fhirbase-plv8/issues/90#issuecomment-225587083>
Transaction returns OperationOutcome issue with code 'not-found'.
This is expected behavior because transaction should processed
in order (DELETE, POST, PUT, GET)
<http://hl7-fhir.github.io/http.html#2.1.0.16.2>
###
it 'should return OperationOutcome even read goes before delete', ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_create_resource('
{
"allowId": true,
"resource": {
"resourceType": "Patient",
"name": [{"given": ["Tim"]}],
"id": "patient-to-delete-id"
}
}
');
''')
plv8.execute('''
SELECT fhir_create_resource('
{
"allowId": true,
"resource": {
"resourceType": "Patient",
"name": [{"given": ["NameToBeUpdated"]}],
"id": "patient-to-update-id"
}
}
');
''')
transaction =
JSON.parse(
plv8.execute('''
SELECT fhir_transaction('
{
"resourceType":"Bundle",
"type":"transaction",
"entry":[
{
"request":{"method":"GET","url":"/Patient/patient-to-delete-id"}
},
{
"request":{"method":"GET","url":"/Patient?name=NameCreated"}
},
{
"request":{"method":"PUT","url":"/Patient/patient-to-update-id"},
"resource":{"resourceType":"Patient","active":true,"name":[{"family":["NameUpdated"]}]}
},
{
"request":{"method":"POST","url":"/Patient"},
"resource":{"resourceType":"Patient","name":[{"family":["NameCreated"]}]}
},
{
"request":{"method":"DELETE","url":"/Patient/patient-to-delete-id"}
}
]
}
');
''')[0].fhir_transaction
)
match(
transaction,
resourceType: 'OperationOutcome'
issue: [
{
severity: 'error',
code: 'not-found',
diagnostics: 'Resource Id "patient-to-delete-id" with versionId "undefined" has been deleted'
}
]
)
| 89919 | plv8 = require('../../../plpl/src/plv8')
assert = require('assert')
helpers = require('../../helpers')
match = helpers.match
###
Issue #90
Transaction - processing rules - ordering
<https://github.com/fhirbase/fhirbase-plv8/issues/90>
###
describe 'Issues', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
describe '#90 Transaction processing ordering', ->
###
Got some issues with ordering operations
<https://github.com/fhirbase/fhirbase-plv8/issues/90#issuecomment-225587083>
Transaction returns OperationOutcome issue with code 'not-found'.
This is expected behavior because transaction should processed
in order (DELETE, POST, PUT, GET)
<http://hl7-fhir.github.io/http.html#2.1.0.16.2>
###
it 'should return OperationOutcome even read goes before delete', ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_create_resource('
{
"allowId": true,
"resource": {
"resourceType": "Patient",
"name": [{"given": ["<NAME>"]}],
"id": "patient-to-delete-id"
}
}
');
''')
plv8.execute('''
SELECT fhir_create_resource('
{
"allowId": true,
"resource": {
"resourceType": "Patient",
"name": [{"given": ["NameToBeUpdated"]}],
"id": "patient-to-update-id"
}
}
');
''')
transaction =
JSON.parse(
plv8.execute('''
SELECT fhir_transaction('
{
"resourceType":"Bundle",
"type":"transaction",
"entry":[
{
"request":{"method":"GET","url":"/Patient/patient-to-delete-id"}
},
{
"request":{"method":"GET","url":"/Patient?name=NameCreated"}
},
{
"request":{"method":"PUT","url":"/Patient/patient-to-update-id"},
"resource":{"resourceType":"Patient","active":true,"name":[{"family":["NameUpdated"]}]}
},
{
"request":{"method":"POST","url":"/Patient"},
"resource":{"resourceType":"Patient","name":[{"family":["NameCreated"]}]}
},
{
"request":{"method":"DELETE","url":"/Patient/patient-to-delete-id"}
}
]
}
');
''')[0].fhir_transaction
)
match(
transaction,
resourceType: 'OperationOutcome'
issue: [
{
severity: 'error',
code: 'not-found',
diagnostics: 'Resource Id "patient-to-delete-id" with versionId "undefined" has been deleted'
}
]
)
| true | plv8 = require('../../../plpl/src/plv8')
assert = require('assert')
helpers = require('../../helpers')
match = helpers.match
###
Issue #90
Transaction - processing rules - ordering
<https://github.com/fhirbase/fhirbase-plv8/issues/90>
###
describe 'Issues', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
describe '#90 Transaction processing ordering', ->
###
Got some issues with ordering operations
<https://github.com/fhirbase/fhirbase-plv8/issues/90#issuecomment-225587083>
Transaction returns OperationOutcome issue with code 'not-found'.
This is expected behavior because transaction should processed
in order (DELETE, POST, PUT, GET)
<http://hl7-fhir.github.io/http.html#2.1.0.16.2>
###
it 'should return OperationOutcome even read goes before delete', ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_create_resource('
{
"allowId": true,
"resource": {
"resourceType": "Patient",
"name": [{"given": ["PI:NAME:<NAME>END_PI"]}],
"id": "patient-to-delete-id"
}
}
');
''')
plv8.execute('''
SELECT fhir_create_resource('
{
"allowId": true,
"resource": {
"resourceType": "Patient",
"name": [{"given": ["NameToBeUpdated"]}],
"id": "patient-to-update-id"
}
}
');
''')
transaction =
JSON.parse(
plv8.execute('''
SELECT fhir_transaction('
{
"resourceType":"Bundle",
"type":"transaction",
"entry":[
{
"request":{"method":"GET","url":"/Patient/patient-to-delete-id"}
},
{
"request":{"method":"GET","url":"/Patient?name=NameCreated"}
},
{
"request":{"method":"PUT","url":"/Patient/patient-to-update-id"},
"resource":{"resourceType":"Patient","active":true,"name":[{"family":["NameUpdated"]}]}
},
{
"request":{"method":"POST","url":"/Patient"},
"resource":{"resourceType":"Patient","name":[{"family":["NameCreated"]}]}
},
{
"request":{"method":"DELETE","url":"/Patient/patient-to-delete-id"}
}
]
}
');
''')[0].fhir_transaction
)
match(
transaction,
resourceType: 'OperationOutcome'
issue: [
{
severity: 'error',
code: 'not-found',
diagnostics: 'Resource Id "patient-to-delete-id" with versionId "undefined" has been deleted'
}
]
)
|
[
{
"context": "ForKey(node, newContextKey)\n if keypath[0] == '$root'\n context = rootContext\n keypat",
"end": 3201,
"score": 0.5965851545333862,
"start": 3199,
"tag": "KEY",
"value": "'$"
}
] | lib/assets/javascripts/twine.coffee | fakeNetflix/Shopify-repo-twine | 0 | ((root, factory) ->
if typeof root.define == 'function' && root.define.amd
root.define(['jquery'], factory)
else if typeof module == 'object' && module.exports
jQuery = if typeof window != 'undefined'
then require('jquery')
else require('jquery')(root)
module.exports = factory(jQuery)
else
root.Twine = factory(root.jQuery)
)(this, (jQuery) ->
Twine = {}
Twine.shouldDiscardEvent = {}
# Map of node binding ids to objects that describe a node's bindings.
elements = {}
# Registered components to look up
registry = {}
# The number of nodes bound since the last call to Twine.reset().
# Used to determine the next binding id.
nodeCount = 0
# Storage for all bindable data, provided by the caller of Twine.reset().
rootContext = null
keypathRegex = /^[a-z]\w*(\.[a-z]\w*|\[\d+\])*$/i # Tests if a string is a pure keypath.
refreshQueued = false
refreshCallbacks = []
rootNode = null
currentBindingCallbacks = null
Twine.getAttribute = (node, attr) ->
node.getAttribute("data-#{attr}") || node.getAttribute(attr)
# Cleans up all existing bindings and sets the root node and context.
Twine.reset = (newContext, node = document.documentElement) ->
for key of elements
if bindings = elements[key]?.bindings
obj.teardown() for obj in bindings when obj.teardown
elements = {}
rootContext = newContext
rootNode = node
rootNode.bindingId = nodeCount = 1
this
Twine.bind = (node = rootNode, context = Twine.context(node)) ->
bind(context, node, getIndexesForElement(node), true)
Twine.afterBound = (callback) ->
if currentBindingCallbacks
currentBindingCallbacks.push(callback)
else
callback()
bind = (context, node, indexes, forceSaveContext) ->
currentBindingCallbacks = []
element = null
if node.bindingId
Twine.unbind(node)
if defineArrayAttr = Twine.getAttribute(node, 'define-array')
newIndexes = defineArray(node, context, defineArrayAttr)
indexes ?= {}
for key, value of indexes when !newIndexes.hasOwnProperty(key)
newIndexes[key] = value
indexes = newIndexes
# register the element early because subsequent bindings on the same node might need to make use of the index
element = findOrCreateElementForNode(node)
element.indexes = indexes
bindingConstructors = null
for attribute in node.attributes
type = attribute.name
type = type.slice(5) if isDataAttribute(type)
constructor = Twine.bindingTypes[type]
continue unless constructor
bindingConstructors ?= []
definition = attribute.value
bindingConstructors.push([type, constructor, definition])
if bindingConstructors
element ?= findOrCreateElementForNode(node)
element.bindings ?= []
element.indexes ?= indexes
for [_, constructor, definition] in bindingConstructors.sort(bindingOrder)
binding = constructor(node, context, definition, element)
element.bindings.push(binding) if binding
if newContextKey = Twine.getAttribute(node, 'context')
keypath = keypathForKey(node, newContextKey)
if keypath[0] == '$root'
context = rootContext
keypath = keypath.slice(1)
context = getValue(context, keypath) || setValue(context, keypath, {})
if element || newContextKey || forceSaveContext
element ?= findOrCreateElementForNode(node)
element.childContext = context
element.indexes ?= indexes if indexes?
callbacks = currentBindingCallbacks
# IE and Safari don't support node.children for DocumentFragment or SVGElement,
# See explanation in childrenForNode()
bind(context, childNode, if newContextKey? then null else indexes) for childNode in childrenForNode(node)
Twine.count = nodeCount
for callback in callbacks || []
callback()
currentBindingCallbacks = null
Twine
# IE and Safari don't support node.children for DocumentFragment and SVGElement nodes.
# If the element supports children we continue to traverse the children, otherwise
# we stop traversing that subtree.
# https://developer.mozilla.org/en-US/docs/Web/API/ParentNode.children
# As a result, Twine are unsupported within DocumentFragment and SVGElement nodes.
#
# We also prevent nodes from being iterated over more than once by cacheing the
# lookup for children nodes, which prevents nodes that are dynamically inserted
# or removed as siblings from causing double/ missed binds and unbinds.
childrenForNode = (node) ->
if node.children then Array::slice.call(node.children, 0) else []
findOrCreateElementForNode = (node) ->
node.bindingId ?= ++nodeCount
elements[node.bindingId] ?= {}
# Queues a refresh of the DOM, batching up calls for the current synchronous block.
# The callback will be called once when the refresh has completed.
Twine.refresh = (callback) ->
refreshCallbacks.push(callback) if callback
return if refreshQueued
refreshQueued = true
setTimeout(Twine.refreshImmediately, 0)
refreshElement = (element) ->
(obj.refresh() if obj.refresh?) for obj in element.bindings if element.bindings
return
Twine.refreshImmediately = ->
refreshQueued = false
refreshElement(element) for key, element of elements
callbacks = refreshCallbacks
refreshCallbacks = []
cb() for cb in callbacks
return
Twine.register = (name, component) ->
if registry[name]
throw new Error("Twine error: '#{name}' is already registered with Twine")
else
registry[name] = component
# Force the binding system to recognize programmatic changes to a node's value.
Twine.change = (node, bubble = false) ->
event = document.createEvent("HTMLEvents")
event.initEvent('change', bubble, true) # for IE 9/10 compatibility.
node.dispatchEvent(event)
# Cleans up everything related to a node and its subtree.
Twine.unbind = (node) ->
if id = node.bindingId
if bindings = elements[id]?.bindings
obj.teardown() for obj in bindings when obj.teardown
delete elements[id]
delete node.bindingId
# IE and Safari don't support node.children for DocumentFragment or SVGElement,
# See explanation in childrenForNode()
Twine.unbind(childNode) for childNode in childrenForNode(node)
this
# Returns the binding context for a node by looking up the tree.
Twine.context = (node) -> getContext(node, false)
Twine.childContext = (node) -> getContext(node, true)
getContext = (node, child) ->
while node
return rootContext if node == rootNode
node = node.parentNode if !child
if !node
console.warn "Unable to find context; please check that the node is attached to the DOM that Twine has bound, or that bindings have been initiated on this node's DOM"
return null
if (id = node.bindingId) && (context = elements[id]?.childContext)
return context
node = node.parentNode if child
getIndexesForElement = (node) ->
firstContext = null
while node
return elements[id]?.indexes if id = node.bindingId
node = node.parentNode
# Returns the fully qualified key for a node's context
Twine.contextKey = (node, lastContext) ->
keys = []
addKey = (context) ->
for key, val of context when lastContext == val
keys.unshift(key)
break
lastContext = context
while node && node != rootNode && node = node.parentNode
addKey(context) if (id = node.bindingId) && (context = elements[id]?.childContext)
addKey(rootContext) if node == rootNode
keys.join('.')
valuePropertyForNode = (node) ->
name = node.nodeName.toLowerCase()
if name in ['input', 'textarea', 'select']
if node.getAttribute('type') in ['checkbox', 'radio'] then 'checked' else 'value'
else
'textContent'
keypathForKey = (node, key) ->
keypath = []
for key, i in key.split('.')
if (start = key.indexOf('[')) != -1
if i == 0
keypath.push(keyWithArrayIndex(key.substr(0, start), node)...)
else
keypath.push(key.substr(0, start))
key = key.substr(start)
while (end = key.indexOf(']')) != -1
keypath.push(parseInt(key.substr(1, end), 10))
key = key.substr(end + 1)
else
if i == 0
keypath.push(keyWithArrayIndex(key, node)...)
else
keypath.push(key)
keypath
keyWithArrayIndex = (key, node) ->
index = elements[node.bindingId]?.indexes?[key]
if index?
[key, index]
else
[key]
getValue = (object, keypath) ->
object = object[key] for key in keypath when object?
object
setValue = (object, keypath, value) ->
[keypath..., lastKey] = keypath
for key in keypath
object = object[key] ?= {}
object[lastKey] = value
stringifyNodeAttributes = (node) ->
[].map.call(node.attributes, (attr) -> "#{attr.name}=#{JSON.stringify(attr.value)}").join(' ')
wrapFunctionString = (code, args, node) ->
if isKeypath(code) && keypath = keypathForKey(node, code)
if keypath[0] == '$root'
($context, $root) -> getValue($root, keypath)
else
($context, $root) -> getValue($context, keypath)
else
code = "return #{code}"
code = "with($arrayPointers) { #{code} }" if nodeArrayIndexes(node)
code = "with($registry) { #{code} }" if requiresRegistry(args)
try
new Function(args, "with($context) { #{code} }")
catch e
throw "Twine error: Unable to create function on #{node.nodeName} node with attributes #{stringifyNodeAttributes(node)}"
requiresRegistry = (args) -> /\$registry/.test(args)
nodeArrayIndexes = (node) ->
node.bindingId? && elements[node.bindingId]?.indexes
arrayPointersForNode = (node, context) ->
indexes = nodeArrayIndexes(node)
return {} unless indexes
result = {}
for key, index of indexes
result[key] = context[key][index]
result
isKeypath = (value) ->
value not in ['true', 'false', 'null', 'undefined'] && keypathRegex.test(value)
isDataAttribute = (value) ->
value[0] == 'd' &&
value[1] == 'a' &&
value[2] == 't' &&
value[3] == 'a' &&
value[4] == '-'
fireCustomChangeEvent = (node) ->
event = document.createEvent('CustomEvent')
event.initCustomEvent('bindings:change', true, false, {})
node.dispatchEvent(event)
bindingOrder = ([firstType], [secondType]) ->
ORDERED_BINDINGS = {
define: 1,
bind: 2,
eval: 3
}
return 1 unless ORDERED_BINDINGS[firstType]
return -1 unless ORDERED_BINDINGS[secondType]
ORDERED_BINDINGS[firstType] - ORDERED_BINDINGS[secondType]
Twine.bindingTypes =
bind: (node, context, definition) ->
valueProp = valuePropertyForNode(node)
value = node[valueProp]
lastValue = undefined
teardown = undefined
# Radio buttons only set the value to the node value if checked.
checkedValueType = node.getAttribute('type') == 'radio'
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
refresh = ->
newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
return if newValue == lastValue # return if we can and avoid a DOM operation
lastValue = newValue
return if newValue == node[valueProp]
node[valueProp] = if checkedValueType then newValue == node.value else newValue
fireCustomChangeEvent(node)
return {refresh} unless isKeypath(definition)
refreshContext = ->
if checkedValueType
return unless node.checked
setValue(context, keypath, node.value)
else
setValue(context, keypath, node[valueProp])
keypath = keypathForKey(node, definition)
twoWayBinding = valueProp != 'textContent' && node.type != 'hidden'
if keypath[0] == '$root'
context = rootContext
keypath = keypath.slice(1)
if value? && (twoWayBinding || value != '') && !(oldValue = getValue(context, keypath))?
refreshContext()
if twoWayBinding
changeHandler = ->
return if getValue(context, keypath) == this[valueProp]
refreshContext()
Twine.refreshImmediately()
jQuery(node).on 'input keyup change', changeHandler
teardown = ->
jQuery(node).off 'input keyup change', changeHandler
{refresh, teardown}
'bind-show': (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValue = undefined
return refresh: ->
newValue = !fn.call(node, context, rootContext, arrayPointersForNode(node, context))
return if newValue == lastValue
jQuery(node).toggleClass('hide', lastValue = newValue)
'bind-class': (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValues = {}
$node = jQuery(node)
return refresh: ->
newValues = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
additions = []
removals = []
for key, value of newValues
newValue = newValues[key] = !!newValues[key]
currValue = lastValues[key] ? $node.hasClass(key)
if currValue != newValue
if newValue
additions.push(key)
else
removals.push(key)
$node.removeClass(removals.join(' ')) if removals.length
$node.addClass(additions.join(' ')) if additions.length
lastValues = newValues
'bind-attribute': (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValue = {}
return refresh: ->
newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
for key, value of newValue when lastValue[key] != value
jQuery(node).attr(key, value || null)
lastValue = newValue
define: (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$registry,$arrayPointers', node)
object = fn.call(node, context, rootContext, registry, arrayPointersForNode(node, context))
context[key] = value for key, value of object
return
eval: (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$registry,$arrayPointers', node)
fn.call(node, context, rootContext, registry, arrayPointersForNode(node, context))
return
defineArray = (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root', node)
object = fn.call(node, context, rootContext)
indexes = {}
for key, value of object
context[key] ?= []
throw "Twine error: expected '#{key}' to be an array" unless context[key] instanceof Array
indexes[key] = context[key].length
context[key].push(value)
indexes
setupPropertyBinding = (attributeName, bindingName) ->
booleanProp = attributeName in ['checked', 'indeterminate', 'disabled', 'readOnly', 'draggable']
Twine.bindingTypes["bind-#{bindingName.toLowerCase()}"] = (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValue = undefined
return refresh: ->
newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
newValue = !!newValue if booleanProp
return if newValue == lastValue
node[attributeName] = lastValue = newValue
fireCustomChangeEvent(node) if attributeName == 'checked'
for attribute in ['placeholder', 'checked', 'indeterminate', 'disabled', 'href', 'title', 'readOnly', 'src', 'draggable']
setupPropertyBinding(attribute, attribute)
setupPropertyBinding('innerHTML', 'unsafe-html')
preventDefaultForEvent = (event) ->
(event.type == 'submit' || event.currentTarget.nodeName.toLowerCase() == 'a') &&
Twine.getAttribute(event.currentTarget, 'allow-default') in ['false', false, 0, undefined, null]
setupEventBinding = (eventName) ->
Twine.bindingTypes["bind-event-#{eventName}"] = (node, context, definition) ->
onEventHandler = (event, data) ->
discardEvent = Twine.shouldDiscardEvent[eventName]?(event)
if discardEvent || preventDefaultForEvent(event)
event.preventDefault()
return if discardEvent
wrapFunctionString(definition, '$context,$root,$arrayPointers,event,data', node).call(node, context, rootContext, arrayPointersForNode(node, context), event, data)
Twine.refreshImmediately()
jQuery(node).on eventName, onEventHandler
return teardown: ->
jQuery(node).off eventName, onEventHandler
for eventName in ['click', 'dblclick', 'mouseenter', 'mouseleave', 'mouseover', 'mouseout', 'mousedown', 'mouseup',
'submit', 'dragenter', 'dragleave', 'dragover', 'drop', 'drag', 'change', 'keypress', 'keydown', 'keyup', 'input',
'error', 'done', 'success', 'fail', 'blur', 'focus', 'load', 'paste']
setupEventBinding(eventName)
Twine
)
| 47536 | ((root, factory) ->
if typeof root.define == 'function' && root.define.amd
root.define(['jquery'], factory)
else if typeof module == 'object' && module.exports
jQuery = if typeof window != 'undefined'
then require('jquery')
else require('jquery')(root)
module.exports = factory(jQuery)
else
root.Twine = factory(root.jQuery)
)(this, (jQuery) ->
Twine = {}
Twine.shouldDiscardEvent = {}
# Map of node binding ids to objects that describe a node's bindings.
elements = {}
# Registered components to look up
registry = {}
# The number of nodes bound since the last call to Twine.reset().
# Used to determine the next binding id.
nodeCount = 0
# Storage for all bindable data, provided by the caller of Twine.reset().
rootContext = null
keypathRegex = /^[a-z]\w*(\.[a-z]\w*|\[\d+\])*$/i # Tests if a string is a pure keypath.
refreshQueued = false
refreshCallbacks = []
rootNode = null
currentBindingCallbacks = null
Twine.getAttribute = (node, attr) ->
node.getAttribute("data-#{attr}") || node.getAttribute(attr)
# Cleans up all existing bindings and sets the root node and context.
Twine.reset = (newContext, node = document.documentElement) ->
for key of elements
if bindings = elements[key]?.bindings
obj.teardown() for obj in bindings when obj.teardown
elements = {}
rootContext = newContext
rootNode = node
rootNode.bindingId = nodeCount = 1
this
Twine.bind = (node = rootNode, context = Twine.context(node)) ->
bind(context, node, getIndexesForElement(node), true)
Twine.afterBound = (callback) ->
if currentBindingCallbacks
currentBindingCallbacks.push(callback)
else
callback()
bind = (context, node, indexes, forceSaveContext) ->
currentBindingCallbacks = []
element = null
if node.bindingId
Twine.unbind(node)
if defineArrayAttr = Twine.getAttribute(node, 'define-array')
newIndexes = defineArray(node, context, defineArrayAttr)
indexes ?= {}
for key, value of indexes when !newIndexes.hasOwnProperty(key)
newIndexes[key] = value
indexes = newIndexes
# register the element early because subsequent bindings on the same node might need to make use of the index
element = findOrCreateElementForNode(node)
element.indexes = indexes
bindingConstructors = null
for attribute in node.attributes
type = attribute.name
type = type.slice(5) if isDataAttribute(type)
constructor = Twine.bindingTypes[type]
continue unless constructor
bindingConstructors ?= []
definition = attribute.value
bindingConstructors.push([type, constructor, definition])
if bindingConstructors
element ?= findOrCreateElementForNode(node)
element.bindings ?= []
element.indexes ?= indexes
for [_, constructor, definition] in bindingConstructors.sort(bindingOrder)
binding = constructor(node, context, definition, element)
element.bindings.push(binding) if binding
if newContextKey = Twine.getAttribute(node, 'context')
keypath = keypathForKey(node, newContextKey)
if keypath[0] == <KEY>root'
context = rootContext
keypath = keypath.slice(1)
context = getValue(context, keypath) || setValue(context, keypath, {})
if element || newContextKey || forceSaveContext
element ?= findOrCreateElementForNode(node)
element.childContext = context
element.indexes ?= indexes if indexes?
callbacks = currentBindingCallbacks
# IE and Safari don't support node.children for DocumentFragment or SVGElement,
# See explanation in childrenForNode()
bind(context, childNode, if newContextKey? then null else indexes) for childNode in childrenForNode(node)
Twine.count = nodeCount
for callback in callbacks || []
callback()
currentBindingCallbacks = null
Twine
# IE and Safari don't support node.children for DocumentFragment and SVGElement nodes.
# If the element supports children we continue to traverse the children, otherwise
# we stop traversing that subtree.
# https://developer.mozilla.org/en-US/docs/Web/API/ParentNode.children
# As a result, Twine are unsupported within DocumentFragment and SVGElement nodes.
#
# We also prevent nodes from being iterated over more than once by cacheing the
# lookup for children nodes, which prevents nodes that are dynamically inserted
# or removed as siblings from causing double/ missed binds and unbinds.
childrenForNode = (node) ->
if node.children then Array::slice.call(node.children, 0) else []
findOrCreateElementForNode = (node) ->
node.bindingId ?= ++nodeCount
elements[node.bindingId] ?= {}
# Queues a refresh of the DOM, batching up calls for the current synchronous block.
# The callback will be called once when the refresh has completed.
Twine.refresh = (callback) ->
refreshCallbacks.push(callback) if callback
return if refreshQueued
refreshQueued = true
setTimeout(Twine.refreshImmediately, 0)
refreshElement = (element) ->
(obj.refresh() if obj.refresh?) for obj in element.bindings if element.bindings
return
Twine.refreshImmediately = ->
refreshQueued = false
refreshElement(element) for key, element of elements
callbacks = refreshCallbacks
refreshCallbacks = []
cb() for cb in callbacks
return
Twine.register = (name, component) ->
if registry[name]
throw new Error("Twine error: '#{name}' is already registered with Twine")
else
registry[name] = component
# Force the binding system to recognize programmatic changes to a node's value.
Twine.change = (node, bubble = false) ->
event = document.createEvent("HTMLEvents")
event.initEvent('change', bubble, true) # for IE 9/10 compatibility.
node.dispatchEvent(event)
# Cleans up everything related to a node and its subtree.
Twine.unbind = (node) ->
if id = node.bindingId
if bindings = elements[id]?.bindings
obj.teardown() for obj in bindings when obj.teardown
delete elements[id]
delete node.bindingId
# IE and Safari don't support node.children for DocumentFragment or SVGElement,
# See explanation in childrenForNode()
Twine.unbind(childNode) for childNode in childrenForNode(node)
this
# Returns the binding context for a node by looking up the tree.
Twine.context = (node) -> getContext(node, false)
Twine.childContext = (node) -> getContext(node, true)
getContext = (node, child) ->
while node
return rootContext if node == rootNode
node = node.parentNode if !child
if !node
console.warn "Unable to find context; please check that the node is attached to the DOM that Twine has bound, or that bindings have been initiated on this node's DOM"
return null
if (id = node.bindingId) && (context = elements[id]?.childContext)
return context
node = node.parentNode if child
getIndexesForElement = (node) ->
firstContext = null
while node
return elements[id]?.indexes if id = node.bindingId
node = node.parentNode
# Returns the fully qualified key for a node's context
Twine.contextKey = (node, lastContext) ->
keys = []
addKey = (context) ->
for key, val of context when lastContext == val
keys.unshift(key)
break
lastContext = context
while node && node != rootNode && node = node.parentNode
addKey(context) if (id = node.bindingId) && (context = elements[id]?.childContext)
addKey(rootContext) if node == rootNode
keys.join('.')
valuePropertyForNode = (node) ->
name = node.nodeName.toLowerCase()
if name in ['input', 'textarea', 'select']
if node.getAttribute('type') in ['checkbox', 'radio'] then 'checked' else 'value'
else
'textContent'
keypathForKey = (node, key) ->
keypath = []
for key, i in key.split('.')
if (start = key.indexOf('[')) != -1
if i == 0
keypath.push(keyWithArrayIndex(key.substr(0, start), node)...)
else
keypath.push(key.substr(0, start))
key = key.substr(start)
while (end = key.indexOf(']')) != -1
keypath.push(parseInt(key.substr(1, end), 10))
key = key.substr(end + 1)
else
if i == 0
keypath.push(keyWithArrayIndex(key, node)...)
else
keypath.push(key)
keypath
keyWithArrayIndex = (key, node) ->
index = elements[node.bindingId]?.indexes?[key]
if index?
[key, index]
else
[key]
getValue = (object, keypath) ->
object = object[key] for key in keypath when object?
object
setValue = (object, keypath, value) ->
[keypath..., lastKey] = keypath
for key in keypath
object = object[key] ?= {}
object[lastKey] = value
stringifyNodeAttributes = (node) ->
[].map.call(node.attributes, (attr) -> "#{attr.name}=#{JSON.stringify(attr.value)}").join(' ')
wrapFunctionString = (code, args, node) ->
if isKeypath(code) && keypath = keypathForKey(node, code)
if keypath[0] == '$root'
($context, $root) -> getValue($root, keypath)
else
($context, $root) -> getValue($context, keypath)
else
code = "return #{code}"
code = "with($arrayPointers) { #{code} }" if nodeArrayIndexes(node)
code = "with($registry) { #{code} }" if requiresRegistry(args)
try
new Function(args, "with($context) { #{code} }")
catch e
throw "Twine error: Unable to create function on #{node.nodeName} node with attributes #{stringifyNodeAttributes(node)}"
requiresRegistry = (args) -> /\$registry/.test(args)
nodeArrayIndexes = (node) ->
node.bindingId? && elements[node.bindingId]?.indexes
arrayPointersForNode = (node, context) ->
indexes = nodeArrayIndexes(node)
return {} unless indexes
result = {}
for key, index of indexes
result[key] = context[key][index]
result
isKeypath = (value) ->
value not in ['true', 'false', 'null', 'undefined'] && keypathRegex.test(value)
isDataAttribute = (value) ->
value[0] == 'd' &&
value[1] == 'a' &&
value[2] == 't' &&
value[3] == 'a' &&
value[4] == '-'
fireCustomChangeEvent = (node) ->
event = document.createEvent('CustomEvent')
event.initCustomEvent('bindings:change', true, false, {})
node.dispatchEvent(event)
bindingOrder = ([firstType], [secondType]) ->
ORDERED_BINDINGS = {
define: 1,
bind: 2,
eval: 3
}
return 1 unless ORDERED_BINDINGS[firstType]
return -1 unless ORDERED_BINDINGS[secondType]
ORDERED_BINDINGS[firstType] - ORDERED_BINDINGS[secondType]
# Built-in binding handlers, keyed by attribute name (matched with or
# without the data- prefix). Each handler receives (node, context,
# definition) and may return an object with optional refresh/teardown
# callbacks that Twine stores per element.
Twine.bindingTypes =
  # Two-way binding between a node's value property and a keypath, or a
  # one-way (DOM-write-only) binding when the definition is an expression.
  bind: (node, context, definition) ->
    valueProp = valuePropertyForNode(node)
    value = node[valueProp]
    lastValue = undefined
    teardown = undefined
    # Radio buttons only set the value to the node value if checked.
    checkedValueType = node.getAttribute('type') == 'radio'
    fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
    refresh = ->
      newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
      return if newValue == lastValue # return if we can and avoid a DOM operation
      lastValue = newValue
      return if newValue == node[valueProp]
      node[valueProp] = if checkedValueType then newValue == node.value else newValue
      fireCustomChangeEvent(node)
    # Expressions are read-only: no DOM -> context sync is possible.
    return {refresh} unless isKeypath(definition)
    # NOTE: `keypath` is assigned a few lines below; refreshContext only
    # runs after that assignment, so the forward reference is safe.
    refreshContext = ->
      if checkedValueType
        # Radios write their own node value, and only while checked.
        return unless node.checked
        setValue(context, keypath, node.value)
      else
        setValue(context, keypath, node[valueProp])
    keypath = keypathForKey(node, definition)
    twoWayBinding = valueProp != 'textContent' && node.type != 'hidden'
    if keypath[0] == '$root'
      context = rootContext
      keypath = keypath.slice(1)
    # Seed the context from the DOM when the context has no value yet.
    if value? && (twoWayBinding || value != '') && !(oldValue = getValue(context, keypath))?
      refreshContext()
    if twoWayBinding
      changeHandler = ->
        return if getValue(context, keypath) == this[valueProp]
        refreshContext()
        Twine.refreshImmediately()
      jQuery(node).on 'input keyup change', changeHandler
      teardown = ->
        jQuery(node).off 'input keyup change', changeHandler
    {refresh, teardown}

  # Toggles the 'hide' class: truthy expression shows, falsy hides.
  'bind-show': (node, context, definition) ->
    fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
    lastValue = undefined
    return refresh: ->
      # Inverted: newValue is true when the node should be hidden.
      newValue = !fn.call(node, context, rootContext, arrayPointersForNode(node, context))
      return if newValue == lastValue
      jQuery(node).toggleClass('hide', lastValue = newValue)

  # Adds/removes CSS classes from a {className: boolean} expression,
  # batching all changes into at most one removeClass and one addClass.
  'bind-class': (node, context, definition) ->
    fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
    lastValues = {}
    $node = jQuery(node)
    return refresh: ->
      newValues = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
      additions = []
      removals = []
      for key, value of newValues
        newValue = newValues[key] = !!newValues[key]
        # Fall back to the current DOM state the first time a class is seen.
        currValue = lastValues[key] ? $node.hasClass(key)
        if currValue != newValue
          if newValue
            additions.push(key)
          else
            removals.push(key)
      $node.removeClass(removals.join(' ')) if removals.length
      $node.addClass(additions.join(' ')) if additions.length
      lastValues = newValues

  # Mirrors a {attrName: value} expression onto DOM attributes; falsy
  # values remove the attribute (jQuery treats attr(key, null) as removal).
  'bind-attribute': (node, context, definition) ->
    fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
    lastValue = {}
    return refresh: ->
      newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
      for key, value of newValue when lastValue[key] != value
        jQuery(node).attr(key, value || null)
      lastValue = newValue

  # Evaluates the definition once at bind time and merges the resulting
  # object's keys into the context. No refresh/teardown needed.
  define: (node, context, definition) ->
    fn = wrapFunctionString(definition, '$context,$root,$registry,$arrayPointers', node)
    object = fn.call(node, context, rootContext, registry, arrayPointersForNode(node, context))
    context[key] = value for key, value of object
    return

  # Evaluates the definition once at bind time, for side effects only.
  eval: (node, context, definition) ->
    fn = wrapFunctionString(definition, '$context,$root,$registry,$arrayPointers', node)
    fn.call(node, context, rootContext, registry, arrayPointersForNode(node, context))
    return
# Handle the define-array attribute: evaluate the definition, push each
# resulting value onto the corresponding context array (created on demand),
# and return a map of key -> index at which the value was inserted.
# Throws when a pre-existing context value under the same key is not an array.
defineArray = (node, context, definition) ->
  fn = wrapFunctionString(definition, '$context,$root', node)
  object = fn.call(node, context, rootContext)
  indexes = {}
  for key, value of object
    context[key] ?= []
    throw "Twine error: expected '#{key}' to be an array" unless context[key] instanceof Array
    indexes[key] = context[key].length
    context[key].push(value)
  indexes
# Register a bind-<name> handler that mirrors the bound expression's value
# onto a DOM property. Properties listed as boolean are coerced with !!
# so any truthy expression yields true.
setupPropertyBinding = (attributeName, bindingName) ->
  booleanProp = attributeName in ['checked', 'indeterminate', 'disabled', 'readOnly', 'draggable']
  Twine.bindingTypes["bind-#{bindingName.toLowerCase()}"] = (node, context, definition) ->
    fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
    lastValue = undefined
    return refresh: ->
      newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
      newValue = !!newValue if booleanProp
      return if newValue == lastValue
      node[attributeName] = lastValue = newValue
      # Checkbox state changes are announced just like value changes.
      fireCustomChangeEvent(node) if attributeName == 'checked'

# Register the standard property bindings, plus the deliberately-named
# bind-unsafe-html for raw innerHTML assignment.
for attribute in ['placeholder', 'checked', 'indeterminate', 'disabled', 'href', 'title', 'readOnly', 'src', 'draggable']
  setupPropertyBinding(attribute, attribute)
setupPropertyBinding('innerHTML', 'unsafe-html')
# Default-prevention policy for event bindings: form submits and anchor
# clicks have their default prevented unless the element opts out via the
# (data-)allow-default attribute. Note the membership list includes
# undefined/null, so a *missing* attribute also means "prevent".
preventDefaultForEvent = (event) ->
  (event.type == 'submit' || event.currentTarget.nodeName.toLowerCase() == 'a') &&
    Twine.getAttribute(event.currentTarget, 'allow-default') in ['false', false, 0, undefined, null]
# Register a bind-event-<name> handler that evaluates the definition when
# the DOM event fires, then synchronously refreshes all bindings.
setupEventBinding = (eventName) ->
  Twine.bindingTypes["bind-event-#{eventName}"] = (node, context, definition) ->
    onEventHandler = (event, data) ->
      # App-level veto hook; a discarded event is also default-prevented.
      discardEvent = Twine.shouldDiscardEvent[eventName]?(event)
      if discardEvent || preventDefaultForEvent(event)
        event.preventDefault()
      return if discardEvent
      # Compiled per event so `event` and `data` are in scope for the expression.
      wrapFunctionString(definition, '$context,$root,$arrayPointers,event,data', node).call(node, context, rootContext, arrayPointersForNode(node, context), event, data)
      Twine.refreshImmediately()
    jQuery(node).on eventName, onEventHandler
    return teardown: ->
      jQuery(node).off eventName, onEventHandler

# Register the full set of supported DOM (and jQuery-triggered) events.
for eventName in ['click', 'dblclick', 'mouseenter', 'mouseleave', 'mouseover', 'mouseout', 'mousedown', 'mouseup',
  'submit', 'dragenter', 'dragleave', 'dragover', 'drop', 'drag', 'change', 'keypress', 'keydown', 'keyup', 'input',
  'error', 'done', 'success', 'fail', 'blur', 'focus', 'load', 'paste']
  setupEventBinding(eventName)
Twine
)
| true | ((root, factory) ->
if typeof root.define == 'function' && root.define.amd
root.define(['jquery'], factory)
else if typeof module == 'object' && module.exports
jQuery = if typeof window != 'undefined'
then require('jquery')
else require('jquery')(root)
module.exports = factory(jQuery)
else
root.Twine = factory(root.jQuery)
)(this, (jQuery) ->
Twine = {}
Twine.shouldDiscardEvent = {}
# Map of node binding ids to objects that describe a node's bindings.
elements = {}
# Registered components to look up
registry = {}
# The number of nodes bound since the last call to Twine.reset().
# Used to determine the next binding id.
nodeCount = 0
# Storage for all bindable data, provided by the caller of Twine.reset().
rootContext = null
keypathRegex = /^[a-z]\w*(\.[a-z]\w*|\[\d+\])*$/i # Tests if a string is a pure keypath.
refreshQueued = false
refreshCallbacks = []
rootNode = null
currentBindingCallbacks = null
# Read an attribute, preferring the data- prefixed form over the bare name.
Twine.getAttribute = (node, attr) ->
  node.getAttribute("data-#{attr}") || node.getAttribute(attr)
# Cleans up all existing bindings and sets the root node and context.
Twine.reset = (newContext, node = document.documentElement) ->
  # Tear down every binding registered since the last reset.
  for key of elements
    if bindings = elements[key]?.bindings
      obj.teardown() for obj in bindings when obj.teardown
  elements = {}
  rootContext = newContext
  rootNode = node
  # The root always occupies binding id 1; ids for other nodes count up from it.
  rootNode.bindingId = nodeCount = 1
  this
# Bind (or rebind) a subtree; defaults to the root node and its context.
Twine.bind = (node = rootNode, context = Twine.context(node)) ->
  bind(context, node, getIndexesForElement(node), true)

# Defer `callback` until the current bind pass completes, or invoke it
# immediately when no bind is in progress.
Twine.afterBound = (callback) ->
  if currentBindingCallbacks
    currentBindingCallbacks.push(callback)
  else
    callback()
# Core recursive binder: set up all bindings for `node`, then descend into
# its children. `indexes` carries define-array positions down the tree;
# `forceSaveContext` makes the node store its context even without bindings.
# Fix: the '$root' comparison below had been corrupted by a mangled token
# (restored to match the identical checks in bindingTypes.bind and
# wrapFunctionString).
bind = (context, node, indexes, forceSaveContext) ->
  currentBindingCallbacks = []
  element = null
  # Rebinding a node tears down its previous bindings first.
  if node.bindingId
    Twine.unbind(node)

  if defineArrayAttr = Twine.getAttribute(node, 'define-array')
    newIndexes = defineArray(node, context, defineArrayAttr)
    # Merge inherited indexes, letting this node's own definitions win.
    indexes ?= {}
    for key, value of indexes when !newIndexes.hasOwnProperty(key)
      newIndexes[key] = value
    indexes = newIndexes
    # register the element early because subsequent bindings on the same node might need to make use of the index
    element = findOrCreateElementForNode(node)
    element.indexes = indexes

  # Collect constructors for every attribute that names a binding type
  # (with or without the data- prefix).
  bindingConstructors = null
  for attribute in node.attributes
    type = attribute.name
    type = type.slice(5) if isDataAttribute(type)
    constructor = Twine.bindingTypes[type]
    continue unless constructor
    bindingConstructors ?= []
    definition = attribute.value
    bindingConstructors.push([type, constructor, definition])

  if bindingConstructors
    element ?= findOrCreateElementForNode(node)
    element.bindings ?= []
    element.indexes ?= indexes
    # Run define before bind before eval (see bindingOrder).
    for [_, constructor, definition] in bindingConstructors.sort(bindingOrder)
      binding = constructor(node, context, definition, element)
      element.bindings.push(binding) if binding

  # A (data-)context attribute switches the context for this subtree,
  # creating an empty object at the keypath when nothing exists yet.
  if newContextKey = Twine.getAttribute(node, 'context')
    keypath = keypathForKey(node, newContextKey)
    if keypath[0] == '$root'
      context = rootContext
      keypath = keypath.slice(1)
    context = getValue(context, keypath) || setValue(context, keypath, {})

  if element || newContextKey || forceSaveContext
    element ?= findOrCreateElementForNode(node)
    element.childContext = context
    element.indexes ?= indexes if indexes?

  # Snapshot the callback queue before recursing: child binds replace
  # currentBindingCallbacks with their own queues.
  callbacks = currentBindingCallbacks
  # IE and Safari don't support node.children for DocumentFragment or SVGElement,
  # See explanation in childrenForNode()
  bind(context, childNode, if newContextKey? then null else indexes) for childNode in childrenForNode(node)
  Twine.count = nodeCount
  for callback in callbacks || []
    callback()
  currentBindingCallbacks = null
  Twine
# IE and Safari don't support node.children for DocumentFragment and SVGElement nodes.
# If the element supports children we continue to traverse the children, otherwise
# we stop traversing that subtree.
# https://developer.mozilla.org/en-US/docs/Web/API/ParentNode.children
# As a result, Twine are unsupported within DocumentFragment and SVGElement nodes.
#
# We also prevent nodes from being iterated over more than once by cacheing the
# lookup for children nodes, which prevents nodes that are dynamically inserted
# or removed as siblings from causing double/ missed binds and unbinds.
childrenForNode = (node) ->
if node.children then Array::slice.call(node.children, 0) else []
findOrCreateElementForNode = (node) ->
node.bindingId ?= ++nodeCount
elements[node.bindingId] ?= {}
# Queues a refresh of the DOM, batching up calls for the current synchronous block.
# The callback will be called once when the refresh has completed.
Twine.refresh = (callback) ->
  refreshCallbacks.push(callback) if callback
  return if refreshQueued
  refreshQueued = true
  setTimeout(Twine.refreshImmediately, 0)

# Invoke refresh() on each of an element's bindings, if it has any.
refreshElement = (element) ->
  (obj.refresh() if obj.refresh?) for obj in element.bindings if element.bindings
  return

# Synchronously refresh every bound element, then drain the callback queue.
Twine.refreshImmediately = ->
  refreshQueued = false
  refreshElement(element) for key, element of elements
  # Swap the queue out first so callbacks that re-queue don't run twice.
  callbacks = refreshCallbacks
  refreshCallbacks = []
  cb() for cb in callbacks
  return
Twine.register = (name, component) ->
if registry[name]
throw new Error("Twine error: '#{name}' is already registered with Twine")
else
registry[name] = component
# Force the binding system to recognize programmatic changes to a node's value.
Twine.change = (node, bubble = false) ->
event = document.createEvent("HTMLEvents")
event.initEvent('change', bubble, true) # for IE 9/10 compatibility.
node.dispatchEvent(event)
# Cleans up everything related to a node and its subtree.
Twine.unbind = (node) ->
  if id = node.bindingId
    # Give each binding a chance to remove its event handlers.
    if bindings = elements[id]?.bindings
      obj.teardown() for obj in bindings when obj.teardown
    delete elements[id]
    delete node.bindingId

  # IE and Safari don't support node.children for DocumentFragment or SVGElement,
  # See explanation in childrenForNode()
  Twine.unbind(childNode) for childNode in childrenForNode(node)
  this
# Returns the binding context for a node by looking up the tree.
Twine.context = (node) -> getContext(node, false)
Twine.childContext = (node) -> getContext(node, true)

# Walk up the ancestor chain for the nearest stored childContext.
# `child` controls whether the starting node's own stored context counts
# (childContext) or only its ancestors' (context): it shifts whether the
# parentNode step happens before or after the lookup.
getContext = (node, child) ->
  while node
    return rootContext if node == rootNode
    node = node.parentNode if !child
    if !node
      console.warn "Unable to find context; please check that the node is attached to the DOM that Twine has bound, or that bindings have been initiated on this node's DOM"
      return null
    if (id = node.bindingId) && (context = elements[id]?.childContext)
      return context
    node = node.parentNode if child
getIndexesForElement = (node) ->
firstContext = null
while node
return elements[id]?.indexes if id = node.bindingId
node = node.parentNode
# Returns the fully qualified key for a node's context
Twine.contextKey = (node, lastContext) ->
keys = []
addKey = (context) ->
for key, val of context when lastContext == val
keys.unshift(key)
break
lastContext = context
while node && node != rootNode && node = node.parentNode
addKey(context) if (id = node.bindingId) && (context = elements[id]?.childContext)
addKey(rootContext) if node == rootNode
keys.join('.')
valuePropertyForNode = (node) ->
name = node.nodeName.toLowerCase()
if name in ['input', 'textarea', 'select']
if node.getAttribute('type') in ['checkbox', 'radio'] then 'checked' else 'value'
else
'textContent'
# Split a dotted/bracketed key string into a keypath array, e.g.
# "a.b[2]" -> ['a', 'b', 2]. The first segment may additionally be
# remapped through the node's define-array indexes (see keyWithArrayIndex).
keypathForKey = (node, key) ->
  keypath = []
  for key, i in key.split('.')
    if (start = key.indexOf('[')) != -1
      if i == 0
        keypath.push(keyWithArrayIndex(key.substr(0, start), node)...)
      else
        keypath.push(key.substr(0, start))
      key = key.substr(start)
      while (end = key.indexOf(']')) != -1
        # NOTE(review): substr(1, end) can include the ']' itself, but
        # parseInt stops at the first non-digit, so the index parses fine.
        keypath.push(parseInt(key.substr(1, end), 10))
        key = key.substr(end + 1)
    else
      if i == 0
        keypath.push(keyWithArrayIndex(key, node)...)
      else
        keypath.push(key)
  keypath
keyWithArrayIndex = (key, node) ->
index = elements[node.bindingId]?.indexes?[key]
if index?
[key, index]
else
[key]
# Walk a keypath through nested objects; stops dereferencing (and returns
# undefined) as soon as any intermediate value is null/undefined.
getValue = (object, keypath) ->
  object = object[key] for key in keypath when object?
  object

# Assign `value` at `keypath`, creating intermediate objects as needed.
# Implicitly returns `value` — callers rely on this to chain
# `getValue(...) || setValue(..., {})`.
setValue = (object, keypath, value) ->
  # Destructure into all-but-last segments (shadowing the parameter) and
  # the final key to assign.
  [keypath..., lastKey] = keypath
  for key in keypath
    object = object[key] ?= {}
  object[lastKey] = value
stringifyNodeAttributes = (node) ->
[].map.call(node.attributes, (attr) -> "#{attr.name}=#{JSON.stringify(attr.value)}").join(' ')
wrapFunctionString = (code, args, node) ->
if isKeypath(code) && keypath = keypathForKey(node, code)
if keypath[0] == '$root'
($context, $root) -> getValue($root, keypath)
else
($context, $root) -> getValue($context, keypath)
else
code = "return #{code}"
code = "with($arrayPointers) { #{code} }" if nodeArrayIndexes(node)
code = "with($registry) { #{code} }" if requiresRegistry(args)
try
new Function(args, "with($context) { #{code} }")
catch e
throw "Twine error: Unable to create function on #{node.nodeName} node with attributes #{stringifyNodeAttributes(node)}"
requiresRegistry = (args) -> /\$registry/.test(args)
nodeArrayIndexes = (node) ->
node.bindingId? && elements[node.bindingId]?.indexes
arrayPointersForNode = (node, context) ->
indexes = nodeArrayIndexes(node)
return {} unless indexes
result = {}
for key, index of indexes
result[key] = context[key][index]
result
isKeypath = (value) ->
value not in ['true', 'false', 'null', 'undefined'] && keypathRegex.test(value)
isDataAttribute = (value) ->
value[0] == 'd' &&
value[1] == 'a' &&
value[2] == 't' &&
value[3] == 'a' &&
value[4] == '-'
fireCustomChangeEvent = (node) ->
event = document.createEvent('CustomEvent')
event.initCustomEvent('bindings:change', true, false, {})
node.dispatchEvent(event)
bindingOrder = ([firstType], [secondType]) ->
ORDERED_BINDINGS = {
define: 1,
bind: 2,
eval: 3
}
return 1 unless ORDERED_BINDINGS[firstType]
return -1 unless ORDERED_BINDINGS[secondType]
ORDERED_BINDINGS[firstType] - ORDERED_BINDINGS[secondType]
Twine.bindingTypes =
bind: (node, context, definition) ->
valueProp = valuePropertyForNode(node)
value = node[valueProp]
lastValue = undefined
teardown = undefined
# Radio buttons only set the value to the node value if checked.
checkedValueType = node.getAttribute('type') == 'radio'
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
refresh = ->
newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
return if newValue == lastValue # return if we can and avoid a DOM operation
lastValue = newValue
return if newValue == node[valueProp]
node[valueProp] = if checkedValueType then newValue == node.value else newValue
fireCustomChangeEvent(node)
return {refresh} unless isKeypath(definition)
refreshContext = ->
if checkedValueType
return unless node.checked
setValue(context, keypath, node.value)
else
setValue(context, keypath, node[valueProp])
keypath = keypathForKey(node, definition)
twoWayBinding = valueProp != 'textContent' && node.type != 'hidden'
if keypath[0] == '$root'
context = rootContext
keypath = keypath.slice(1)
if value? && (twoWayBinding || value != '') && !(oldValue = getValue(context, keypath))?
refreshContext()
if twoWayBinding
changeHandler = ->
return if getValue(context, keypath) == this[valueProp]
refreshContext()
Twine.refreshImmediately()
jQuery(node).on 'input keyup change', changeHandler
teardown = ->
jQuery(node).off 'input keyup change', changeHandler
{refresh, teardown}
'bind-show': (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValue = undefined
return refresh: ->
newValue = !fn.call(node, context, rootContext, arrayPointersForNode(node, context))
return if newValue == lastValue
jQuery(node).toggleClass('hide', lastValue = newValue)
'bind-class': (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValues = {}
$node = jQuery(node)
return refresh: ->
newValues = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
additions = []
removals = []
for key, value of newValues
newValue = newValues[key] = !!newValues[key]
currValue = lastValues[key] ? $node.hasClass(key)
if currValue != newValue
if newValue
additions.push(key)
else
removals.push(key)
$node.removeClass(removals.join(' ')) if removals.length
$node.addClass(additions.join(' ')) if additions.length
lastValues = newValues
'bind-attribute': (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValue = {}
return refresh: ->
newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
for key, value of newValue when lastValue[key] != value
jQuery(node).attr(key, value || null)
lastValue = newValue
define: (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$registry,$arrayPointers', node)
object = fn.call(node, context, rootContext, registry, arrayPointersForNode(node, context))
context[key] = value for key, value of object
return
eval: (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$registry,$arrayPointers', node)
fn.call(node, context, rootContext, registry, arrayPointersForNode(node, context))
return
defineArray = (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root', node)
object = fn.call(node, context, rootContext)
indexes = {}
for key, value of object
context[key] ?= []
throw "Twine error: expected '#{key}' to be an array" unless context[key] instanceof Array
indexes[key] = context[key].length
context[key].push(value)
indexes
setupPropertyBinding = (attributeName, bindingName) ->
booleanProp = attributeName in ['checked', 'indeterminate', 'disabled', 'readOnly', 'draggable']
Twine.bindingTypes["bind-#{bindingName.toLowerCase()}"] = (node, context, definition) ->
fn = wrapFunctionString(definition, '$context,$root,$arrayPointers', node)
lastValue = undefined
return refresh: ->
newValue = fn.call(node, context, rootContext, arrayPointersForNode(node, context))
newValue = !!newValue if booleanProp
return if newValue == lastValue
node[attributeName] = lastValue = newValue
fireCustomChangeEvent(node) if attributeName == 'checked'
for attribute in ['placeholder', 'checked', 'indeterminate', 'disabled', 'href', 'title', 'readOnly', 'src', 'draggable']
setupPropertyBinding(attribute, attribute)
setupPropertyBinding('innerHTML', 'unsafe-html')
preventDefaultForEvent = (event) ->
(event.type == 'submit' || event.currentTarget.nodeName.toLowerCase() == 'a') &&
Twine.getAttribute(event.currentTarget, 'allow-default') in ['false', false, 0, undefined, null]
setupEventBinding = (eventName) ->
Twine.bindingTypes["bind-event-#{eventName}"] = (node, context, definition) ->
onEventHandler = (event, data) ->
discardEvent = Twine.shouldDiscardEvent[eventName]?(event)
if discardEvent || preventDefaultForEvent(event)
event.preventDefault()
return if discardEvent
wrapFunctionString(definition, '$context,$root,$arrayPointers,event,data', node).call(node, context, rootContext, arrayPointersForNode(node, context), event, data)
Twine.refreshImmediately()
jQuery(node).on eventName, onEventHandler
return teardown: ->
jQuery(node).off eventName, onEventHandler
for eventName in ['click', 'dblclick', 'mouseenter', 'mouseleave', 'mouseover', 'mouseout', 'mousedown', 'mouseup',
'submit', 'dragenter', 'dragleave', 'dragover', 'drop', 'drag', 'change', 'keypress', 'keydown', 'keyup', 'input',
'error', 'done', 'success', 'fail', 'blur', 'focus', 'load', 'paste']
setupEventBinding(eventName)
Twine
)
|
[
{
"context": " date: new Date()\n from: [new Contact(name: 'Ben', email: 'ben@example.com')]\n subject: \"Hell",
"end": 1121,
"score": 0.9997450709342957,
"start": 1118,
"tag": "NAME",
"value": "Ben"
},
{
"context": "e()\n from: [new Contact(name: 'Ben', email: 'ben@ex... | app/internal_packages/unread-notifications/spec/main-spec.coffee | immershy/nodemail | 0 | _ = require 'underscore'
Contact = require '../../../src/flux/models/contact'
Message = require '../../../src/flux/models/message'
Thread = require '../../../src/flux/models/thread'
Category = require '../../../src/flux/models/category'
CategoryStore = require '../../../src/flux/stores/category-store'
DatabaseStore = require '../../../src/flux/stores/database-store'
AccountStore = require '../../../src/flux/stores/account-store'
SoundRegistry = require '../../../src/sound-registry'
NativeNotifications = require '../../../src/native-notifications'
Main = require '../lib/main'
describe "UnreadNotifications", ->
beforeEach ->
Main.activate()
inbox = new Category(id: "l1", name: "inbox", displayName: "Inbox")
archive = new Category(id: "l2", name: "archive", displayName: "Archive")
spyOn(CategoryStore, "getStandardCategory").andReturn inbox
account = AccountStore.accounts()[0]
@threadA = new Thread
categories: [inbox]
@threadB = new Thread
categories: [archive]
@msg1 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Ben', email: 'ben@example.com')]
subject: "Hello World"
threadId: "A"
@msgNoSender = new Message
unread: true
date: new Date()
from: []
subject: "Hello World"
threadId: "A"
@msg2 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Mark', email: 'mark@example.com')]
subject: "Hello World 2"
threadId: "A"
@msg3 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Ben', email: 'ben@example.com')]
subject: "Hello World"
threadId: "A"
@msg4 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Ben', email: 'ben@example.com')]
subject: "Hello World"
threadId: "A"
@msg5 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Ben', email: 'ben@example.com')]
subject: "Hello World"
threadId: "A"
@msgUnreadButArchived = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Mark', email: 'mark@example.com')]
subject: "Hello World 2"
threadId: "B"
@msgRead = new Message
unread: false
date: new Date()
from: [new Contact(name: 'Mark', email: 'mark@example.com')]
subject: "Hello World Read Already"
threadId: "A"
@msgOld = new Message
unread: true
date: new Date(2000,1,1)
from: [new Contact(name: 'Mark', email: 'mark@example.com')]
subject: "Hello World Old"
threadId: "A"
@msgFromMe = new Message
unread: true
date: new Date()
from: [account.me()]
subject: "A Sent Mail!"
threadId: "A"
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
return Promise.resolve(@threadA) if id is 'A'
return Promise.resolve(@threadB) if id is 'B'
return Promise.resolve(null)
spyOn(NativeNotifications, 'displayNotification').andCallFake ->
spyOn(Promise, 'props').andCallFake (dict) ->
dictOut = {}
for key, val of dict
if val.value?
dictOut[key] = val.value()
else
dictOut[key] = val
Promise.resolve(dictOut)
afterEach ->
Main.deactivate()
it "should create a Notification if there is one unread message", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgRead, @msg1]})
.then ->
advanceClock(2000)
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
options = NativeNotifications.displayNotification.mostRecentCall.args[0]
delete options['onActivate']
expect(options).toEqual({
title: 'Ben',
subtitle: 'Hello World',
body: undefined,
canReply: true,
tag: 'unread-update'
})
it "should create multiple Notifications if there is more than one but less than five unread messages", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msg1, @msg2, @msg3]})
.then ->
#Need to call advance clock twice because we call setTimeout twice
advanceClock(2000)
advanceClock(2000)
expect(NativeNotifications.displayNotification.callCount).toEqual(3)
it "should create a Notification if there are five or more unread messages", ->
waitsForPromise =>
Main._onNewMailReceived({
message: [@msg1, @msg2, @msg3, @msg4, @msg5]})
.then ->
advanceClock(2000)
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
expect(NativeNotifications.displayNotification.mostRecentCall.args).toEqual([{
title: '5 Unread Messages',
tag: 'unread-update'
}])
it "should create a Notification correctly, even if new mail has no sender", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgNoSender]})
.then ->
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
options = NativeNotifications.displayNotification.mostRecentCall.args[0]
delete options['onActivate']
expect(options).toEqual({
title: 'Unknown',
subtitle: 'Hello World',
body: undefined,
canReply : true,
tag: 'unread-update'
})
it "should not create a Notification if there are no new messages", ->
waitsForPromise ->
Main._onNewMailReceived({message: []})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
waitsForPromise ->
Main._onNewMailReceived({})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should not notify about unread messages that are outside the inbox", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgUnreadButArchived, @msg1]})
.then ->
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
options = NativeNotifications.displayNotification.mostRecentCall.args[0]
delete options['onActivate']
expect(options).toEqual({
title: 'Ben',
subtitle: 'Hello World',
body: undefined,
canReply : true,
tag: 'unread-update'
})
it "should not create a Notification if the new messages are not unread", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgRead]})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should not create a Notification if the new messages are actually old ones", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgOld]})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should not create a Notification if the new message is one I sent", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgFromMe]})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should play a sound when it gets new mail", ->
spyOn(NylasEnv.config, "get").andCallFake (config) ->
if config is "core.notifications.enabled" then return true
if config is "core.notifications.sounds" then return true
spyOn(SoundRegistry, "playSound")
waitsForPromise =>
Main._onNewMailReceived({message: [@msg1]})
.then ->
expect(NylasEnv.config.get.calls[1].args[0]).toBe "core.notifications.sounds"
expect(SoundRegistry.playSound).toHaveBeenCalledWith("new-mail")
it "should not play a sound if the config is off", ->
spyOn(NylasEnv.config, "get").andCallFake (config) ->
if config is "core.notifications.enabled" then return true
if config is "core.notifications.sounds" then return false
spyOn(SoundRegistry, "playSound")
waitsForPromise =>
Main._onNewMailReceived({message: [@msg1]})
.then ->
expect(NylasEnv.config.get.calls[1].args[0]).toBe "core.notifications.sounds"
expect(SoundRegistry.playSound).not.toHaveBeenCalled()
describe "when the message has no matching thread", ->
beforeEach ->
@msgNoThread = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Ben', email: 'ben@example.com')]
subject: "Hello World"
threadId: "missing"
it "should not create a Notification, since it cannot be determined whether the message is in the Inbox", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgNoThread]})
.then ->
advanceClock(2000)
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should call _onNewMessagesMissingThreads to try displaying a notification again in 10 seconds", ->
waitsForPromise =>
spyOn(Main, '_onNewMessagesMissingThreads')
Main._onNewMailReceived({message: [@msgNoThread]})
.then =>
advanceClock(2000)
expect(Main._onNewMessagesMissingThreads).toHaveBeenCalledWith([@msgNoThread])
describe "_onNewMessagesMissingThreads", ->
beforeEach ->
@msgNoThread = new Message
unread: true
date: new Date()
from: [new Contact(name: 'Ben', email: 'ben@example.com')]
subject: "Hello World"
threadId: "missing"
spyOn(Main, '_onNewMailReceived')
Main._onNewMessagesMissingThreads([@msgNoThread])
advanceClock(2000)
it "should wait 10 seconds and then re-query for threads", ->
expect(DatabaseStore.find).not.toHaveBeenCalled()
@msgNoThread.threadId = "A"
advanceClock(10000)
expect(DatabaseStore.find).toHaveBeenCalled()
advanceClock()
expect(Main._onNewMailReceived).toHaveBeenCalledWith({message: [@msgNoThread], thread: [@threadA]})
it "should do nothing if the threads still can't be found", ->
expect(DatabaseStore.find).not.toHaveBeenCalled()
advanceClock(10000)
expect(DatabaseStore.find).toHaveBeenCalled()
advanceClock()
expect(Main._onNewMailReceived).not.toHaveBeenCalled()
| 178348 | _ = require 'underscore'
Contact = require '../../../src/flux/models/contact'
Message = require '../../../src/flux/models/message'
Thread = require '../../../src/flux/models/thread'
Category = require '../../../src/flux/models/category'
CategoryStore = require '../../../src/flux/stores/category-store'
DatabaseStore = require '../../../src/flux/stores/database-store'
AccountStore = require '../../../src/flux/stores/account-store'
SoundRegistry = require '../../../src/sound-registry'
NativeNotifications = require '../../../src/native-notifications'
Main = require '../lib/main'
# Specs for the unread-notifications package: new unread mail that lives in
# the inbox should produce native notifications (and optionally a sound);
# read, old, archived, self-sent, or thread-less messages should not.
# NOTE(review): fixture contact names/emails were redacted to '<NAME>' /
# '<EMAIL>' in this copy of the file; the literals must stay as-is because
# the assertions below compare against them.
describe "UnreadNotifications", ->
  beforeEach ->
    Main.activate()

    # Two categories: messages filed under `inbox` should notify; messages
    # only in `archive` should be ignored.
    inbox = new Category(id: "l1", name: "inbox", displayName: "Inbox")
    archive = new Category(id: "l2", name: "archive", displayName: "Archive")
    spyOn(CategoryStore, "getStandardCategory").andReturn inbox

    account = AccountStore.accounts()[0]

    @threadA = new Thread
      categories: [inbox]
    @threadB = new Thread
      categories: [archive]

    # Fixture messages, one per decision path in the notifier.
    @msg1 = new Message
      unread: true
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World"
      threadId: "A"
    @msgNoSender = new Message
      unread: true
      date: new Date()
      from: []
      subject: "Hello World"
      threadId: "A"
    @msg2 = new Message
      unread: true
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World 2"
      threadId: "A"
    @msg3 = new Message
      unread: true
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World"
      threadId: "A"
    @msg4 = new Message
      unread: true
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World"
      threadId: "A"
    @msg5 = new Message
      unread: true
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World"
      threadId: "A"
    # Unread, but its thread ("B") is only in the archive category.
    @msgUnreadButArchived = new Message
      unread: true
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World 2"
      threadId: "B"
    @msgRead = new Message
      unread: false
      date: new Date()
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World Read Already"
      threadId: "A"
    # Unread but dated far in the past — should be treated as stale.
    @msgOld = new Message
      unread: true
      date: new Date(2000,1,1)
      from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
      subject: "Hello World Old"
      threadId: "A"
    # Sent by the account owner — should never notify.
    @msgFromMe = new Message
      unread: true
      date: new Date()
      from: [account.me()]
      subject: "A Sent Mail!"
      threadId: "A"

    # Thread lookup: only ids 'A' and 'B' resolve; anything else is missing.
    spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
      return Promise.resolve(@threadA) if id is 'A'
      return Promise.resolve(@threadB) if id is 'B'
      return Promise.resolve(null)
    spyOn(NativeNotifications, 'displayNotification').andCallFake ->
    # Synchronous stand-in for Promise.props so specs don't need real async
    # resolution before advanceClock runs.
    spyOn(Promise, 'props').andCallFake (dict) ->
      dictOut = {}
      for key, val of dict
        if val.value?
          dictOut[key] = val.value()
        else
          dictOut[key] = val
      Promise.resolve(dictOut)

  afterEach ->
    Main.deactivate()

  it "should create a Notification if there is one unread message", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msgRead, @msg1]})
      .then ->
        advanceClock(2000)
        expect(NativeNotifications.displayNotification).toHaveBeenCalled()
        options = NativeNotifications.displayNotification.mostRecentCall.args[0]
        # onActivate is a function and can't be compared with toEqual.
        delete options['onActivate']
        expect(options).toEqual({
          title: '<NAME>',
          subtitle: 'Hello World',
          body: undefined,
          canReply: true,
          tag: 'unread-update'
        })

  it "should create multiple Notifications if there is more than one but less than five unread messages", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msg1, @msg2, @msg3]})
      .then ->
        #Need to call advance clock twice because we call setTimeout twice
        advanceClock(2000)
        advanceClock(2000)
        expect(NativeNotifications.displayNotification.callCount).toEqual(3)

  it "should create a Notification if there are five or more unread messages", ->
    waitsForPromise =>
      Main._onNewMailReceived({
        message: [@msg1, @msg2, @msg3, @msg4, @msg5]})
      .then ->
        advanceClock(2000)
        expect(NativeNotifications.displayNotification).toHaveBeenCalled()
        # Five or more messages collapse into a single summary notification.
        expect(NativeNotifications.displayNotification.mostRecentCall.args).toEqual([{
          title: '5 Unread Messages',
          tag: 'unread-update'
        }])

  it "should create a Notification correctly, even if new mail has no sender", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msgNoSender]})
      .then ->
        expect(NativeNotifications.displayNotification).toHaveBeenCalled()
        options = NativeNotifications.displayNotification.mostRecentCall.args[0]
        delete options['onActivate']
        expect(options).toEqual({
          title: 'Unknown',
          subtitle: 'Hello World',
          body: undefined,
          canReply : true,
          tag: 'unread-update'
        })

  it "should not create a Notification if there are no new messages", ->
    waitsForPromise ->
      Main._onNewMailReceived({message: []})
      .then ->
        expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
    waitsForPromise ->
      Main._onNewMailReceived({})
      .then ->
        expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()

  it "should not notify about unread messages that are outside the inbox", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msgUnreadButArchived, @msg1]})
      .then ->
        # Only the inbox message (@msg1) should produce a notification.
        expect(NativeNotifications.displayNotification).toHaveBeenCalled()
        options = NativeNotifications.displayNotification.mostRecentCall.args[0]
        delete options['onActivate']
        expect(options).toEqual({
          title: '<NAME>',
          subtitle: 'Hello World',
          body: undefined,
          canReply : true,
          tag: 'unread-update'
        })

  it "should not create a Notification if the new messages are not unread", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msgRead]})
      .then ->
        expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()

  it "should not create a Notification if the new messages are actually old ones", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msgOld]})
      .then ->
        expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()

  it "should not create a Notification if the new message is one I sent", ->
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msgFromMe]})
      .then ->
        expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()

  it "should play a sound when it gets new mail", ->
    spyOn(NylasEnv.config, "get").andCallFake (config) ->
      if config is "core.notifications.enabled" then return true
      if config is "core.notifications.sounds" then return true
    spyOn(SoundRegistry, "playSound")
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msg1]})
      .then ->
        # calls[1] assumes the sounds key is checked second — TODO confirm.
        expect(NylasEnv.config.get.calls[1].args[0]).toBe "core.notifications.sounds"
        expect(SoundRegistry.playSound).toHaveBeenCalledWith("new-mail")

  it "should not play a sound if the config is off", ->
    spyOn(NylasEnv.config, "get").andCallFake (config) ->
      if config is "core.notifications.enabled" then return true
      if config is "core.notifications.sounds" then return false
    spyOn(SoundRegistry, "playSound")
    waitsForPromise =>
      Main._onNewMailReceived({message: [@msg1]})
      .then ->
        expect(NylasEnv.config.get.calls[1].args[0]).toBe "core.notifications.sounds"
        expect(SoundRegistry.playSound).not.toHaveBeenCalled()

  describe "when the message has no matching thread", ->
    beforeEach ->
      # threadId "missing" resolves to null in the DatabaseStore.find fake.
      @msgNoThread = new Message
        unread: true
        date: new Date()
        from: [new Contact(name: '<NAME>', email: 'ben<EMAIL>')]
        subject: "Hello World"
        threadId: "missing"

    it "should not create a Notification, since it cannot be determined whether the message is in the Inbox", ->
      waitsForPromise =>
        Main._onNewMailReceived({message: [@msgNoThread]})
        .then ->
          advanceClock(2000)
          expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()

    it "should call _onNewMessagesMissingThreads to try displaying a notification again in 10 seconds", ->
      waitsForPromise =>
        spyOn(Main, '_onNewMessagesMissingThreads')
        Main._onNewMailReceived({message: [@msgNoThread]})
        .then =>
          advanceClock(2000)
          expect(Main._onNewMessagesMissingThreads).toHaveBeenCalledWith([@msgNoThread])

  describe "_onNewMessagesMissingThreads", ->
    beforeEach ->
      @msgNoThread = new Message
        unread: true
        date: new Date()
        from: [new Contact(name: '<NAME>', email: '<EMAIL>')]
        subject: "Hello World"
        threadId: "missing"
      spyOn(Main, '_onNewMailReceived')
      Main._onNewMessagesMissingThreads([@msgNoThread])
      advanceClock(2000)

    it "should wait 10 seconds and then re-query for threads", ->
      expect(DatabaseStore.find).not.toHaveBeenCalled()
      # Make the thread resolvable before the retry fires.
      @msgNoThread.threadId = "A"
      advanceClock(10000)
      expect(DatabaseStore.find).toHaveBeenCalled()
      advanceClock()
      expect(Main._onNewMailReceived).toHaveBeenCalledWith({message: [@msgNoThread], thread: [@threadA]})

    it "should do nothing if the threads still can't be found", ->
      expect(DatabaseStore.find).not.toHaveBeenCalled()
      advanceClock(10000)
      expect(DatabaseStore.find).toHaveBeenCalled()
      advanceClock()
      expect(Main._onNewMailReceived).not.toHaveBeenCalled()
| true | _ = require 'underscore'
Contact = require '../../../src/flux/models/contact'
Message = require '../../../src/flux/models/message'
Thread = require '../../../src/flux/models/thread'
Category = require '../../../src/flux/models/category'
CategoryStore = require '../../../src/flux/stores/category-store'
DatabaseStore = require '../../../src/flux/stores/database-store'
AccountStore = require '../../../src/flux/stores/account-store'
SoundRegistry = require '../../../src/sound-registry'
NativeNotifications = require '../../../src/native-notifications'
Main = require '../lib/main'
describe "UnreadNotifications", ->
beforeEach ->
Main.activate()
inbox = new Category(id: "l1", name: "inbox", displayName: "Inbox")
archive = new Category(id: "l2", name: "archive", displayName: "Archive")
spyOn(CategoryStore, "getStandardCategory").andReturn inbox
account = AccountStore.accounts()[0]
@threadA = new Thread
categories: [inbox]
@threadB = new Thread
categories: [archive]
@msg1 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World"
threadId: "A"
@msgNoSender = new Message
unread: true
date: new Date()
from: []
subject: "Hello World"
threadId: "A"
@msg2 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World 2"
threadId: "A"
@msg3 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World"
threadId: "A"
@msg4 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World"
threadId: "A"
@msg5 = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World"
threadId: "A"
@msgUnreadButArchived = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World 2"
threadId: "B"
@msgRead = new Message
unread: false
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World Read Already"
threadId: "A"
@msgOld = new Message
unread: true
date: new Date(2000,1,1)
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World Old"
threadId: "A"
@msgFromMe = new Message
unread: true
date: new Date()
from: [account.me()]
subject: "A Sent Mail!"
threadId: "A"
spyOn(DatabaseStore, 'find').andCallFake (klass, id) =>
return Promise.resolve(@threadA) if id is 'A'
return Promise.resolve(@threadB) if id is 'B'
return Promise.resolve(null)
spyOn(NativeNotifications, 'displayNotification').andCallFake ->
spyOn(Promise, 'props').andCallFake (dict) ->
dictOut = {}
for key, val of dict
if val.value?
dictOut[key] = val.value()
else
dictOut[key] = val
Promise.resolve(dictOut)
afterEach ->
Main.deactivate()
it "should create a Notification if there is one unread message", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgRead, @msg1]})
.then ->
advanceClock(2000)
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
options = NativeNotifications.displayNotification.mostRecentCall.args[0]
delete options['onActivate']
expect(options).toEqual({
title: 'PI:NAME:<NAME>END_PI',
subtitle: 'Hello World',
body: undefined,
canReply: true,
tag: 'unread-update'
})
it "should create multiple Notifications if there is more than one but less than five unread messages", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msg1, @msg2, @msg3]})
.then ->
#Need to call advance clock twice because we call setTimeout twice
advanceClock(2000)
advanceClock(2000)
expect(NativeNotifications.displayNotification.callCount).toEqual(3)
it "should create a Notification if there are five or more unread messages", ->
waitsForPromise =>
Main._onNewMailReceived({
message: [@msg1, @msg2, @msg3, @msg4, @msg5]})
.then ->
advanceClock(2000)
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
expect(NativeNotifications.displayNotification.mostRecentCall.args).toEqual([{
title: '5 Unread Messages',
tag: 'unread-update'
}])
it "should create a Notification correctly, even if new mail has no sender", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgNoSender]})
.then ->
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
options = NativeNotifications.displayNotification.mostRecentCall.args[0]
delete options['onActivate']
expect(options).toEqual({
title: 'Unknown',
subtitle: 'Hello World',
body: undefined,
canReply : true,
tag: 'unread-update'
})
it "should not create a Notification if there are no new messages", ->
waitsForPromise ->
Main._onNewMailReceived({message: []})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
waitsForPromise ->
Main._onNewMailReceived({})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should not notify about unread messages that are outside the inbox", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgUnreadButArchived, @msg1]})
.then ->
expect(NativeNotifications.displayNotification).toHaveBeenCalled()
options = NativeNotifications.displayNotification.mostRecentCall.args[0]
delete options['onActivate']
expect(options).toEqual({
title: 'PI:NAME:<NAME>END_PI',
subtitle: 'Hello World',
body: undefined,
canReply : true,
tag: 'unread-update'
})
it "should not create a Notification if the new messages are not unread", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgRead]})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should not create a Notification if the new messages are actually old ones", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgOld]})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should not create a Notification if the new message is one I sent", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgFromMe]})
.then ->
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should play a sound when it gets new mail", ->
spyOn(NylasEnv.config, "get").andCallFake (config) ->
if config is "core.notifications.enabled" then return true
if config is "core.notifications.sounds" then return true
spyOn(SoundRegistry, "playSound")
waitsForPromise =>
Main._onNewMailReceived({message: [@msg1]})
.then ->
expect(NylasEnv.config.get.calls[1].args[0]).toBe "core.notifications.sounds"
expect(SoundRegistry.playSound).toHaveBeenCalledWith("new-mail")
it "should not play a sound if the config is off", ->
spyOn(NylasEnv.config, "get").andCallFake (config) ->
if config is "core.notifications.enabled" then return true
if config is "core.notifications.sounds" then return false
spyOn(SoundRegistry, "playSound")
waitsForPromise =>
Main._onNewMailReceived({message: [@msg1]})
.then ->
expect(NylasEnv.config.get.calls[1].args[0]).toBe "core.notifications.sounds"
expect(SoundRegistry.playSound).not.toHaveBeenCalled()
describe "when the message has no matching thread", ->
beforeEach ->
@msgNoThread = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'benPI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World"
threadId: "missing"
it "should not create a Notification, since it cannot be determined whether the message is in the Inbox", ->
waitsForPromise =>
Main._onNewMailReceived({message: [@msgNoThread]})
.then ->
advanceClock(2000)
expect(NativeNotifications.displayNotification).not.toHaveBeenCalled()
it "should call _onNewMessagesMissingThreads to try displaying a notification again in 10 seconds", ->
waitsForPromise =>
spyOn(Main, '_onNewMessagesMissingThreads')
Main._onNewMailReceived({message: [@msgNoThread]})
.then =>
advanceClock(2000)
expect(Main._onNewMessagesMissingThreads).toHaveBeenCalledWith([@msgNoThread])
describe "_onNewMessagesMissingThreads", ->
beforeEach ->
@msgNoThread = new Message
unread: true
date: new Date()
from: [new Contact(name: 'PI:NAME:<NAME>END_PI', email: 'PI:EMAIL:<EMAIL>END_PI')]
subject: "Hello World"
threadId: "missing"
spyOn(Main, '_onNewMailReceived')
Main._onNewMessagesMissingThreads([@msgNoThread])
advanceClock(2000)
it "should wait 10 seconds and then re-query for threads", ->
expect(DatabaseStore.find).not.toHaveBeenCalled()
@msgNoThread.threadId = "A"
advanceClock(10000)
expect(DatabaseStore.find).toHaveBeenCalled()
advanceClock()
expect(Main._onNewMailReceived).toHaveBeenCalledWith({message: [@msgNoThread], thread: [@threadA]})
it "should do nothing if the threads still can't be found", ->
expect(DatabaseStore.find).not.toHaveBeenCalled()
advanceClock(10000)
expect(DatabaseStore.find).toHaveBeenCalled()
advanceClock()
expect(Main._onNewMailReceived).not.toHaveBeenCalled()
|
[
{
"context": " }, {\n# id: \"author1\",\n# name: \"Charles Dickens\"\n# }]\n#\n# Then .name would return \"Charles D",
"end": 731,
"score": 0.9998783469200134,
"start": 716,
"tag": "NAME",
"value": "Charles Dickens"
},
{
"context": "s Dickens\"\n# }]\n#\n#... | src/expression.coffee | umd-mith/mithgrid | 1 | # # Expression Parser
#
# Everything here is private except for a few exported objects and functions.
#
#
# ## Expressions
#
# Expressions describe a path through the data graph held in a data store.
#
# Expressions hop from node to node in one of two directions: forward or backward. Forward goes from an item ID through a property
# to arrive at a new value. Backward goes from a value through a property to arrive at a new item ID.
#
# For example, if we have a data store with items holding information about books, such as the following:
#
# [{
# id: "book1",
# author: "author1",
# title: "A Tale of Two Cities",
# pages: 254
# }, {
# id: "author1",
# name: "Charles Dickens"
# }]
#
# Then .name would return "Charles Dickens" if we started with the item ID "author1". But .author.name would return the same
# value if we started with the item ID "book1".
#
# If we start with "Charles Dickens" (the value), we can find the number of pages in the books with the following expression:
# !name!author.pages (or <-name<-author->pages using the longer notation).
#
# . and -> use a forward index and must have an item ID on the left side
#
# ! and <- use a reverse index and will result in an item ID on the right side
#
# .foo* means to follow the foo property until you can't any more, returning
# the ids along the way
# !foo* means to follow the foo property backward until you can't any more,
# returning the ids along the way
# (...)* means to apply the subgraph-traversal as many times as possible
#
MITHgrid.namespace "Expression.Basic", (exports) ->
Expression = {}
# Binary operator table consumed by Expression.initOperator.
#  - argumentType: "number" makes initOperator coerce both operands with
#    parseFloat before applying `f`.
#  - valueType: type tag of the resulting collection.
#  - f: the operator implementation applied to each pair of values.
# Note both "<>" and "><" are accepted spellings of "not equal".
_operators =
  "+":
    argumentType: "number"
    valueType: "number"
    f: (a, b) -> a + b
  "-":
    argumentType: "number"
    valueType: "number"
    f: (a, b) -> a - b
  "*":
    argumentType: "number"
    valueType: "number"
    f: (a, b) -> a * b
  "/":
    argumentType: "number"
    valueType: "number"
    f: (a, b) -> a / b
  "=":
    valueType: "boolean"
    f: (a, b) -> a == b
  "<>":
    valueType: "boolean"
    f: (a, b) -> a != b
  "><":
    valueType: "boolean"
    f: (a, b) -> a != b
  "<":
    valueType: "boolean"
    f: (a, b) -> a < b
  ">":
    valueType: "boolean"
    f: (a, b) -> a > b
  "<=":
    valueType: "boolean"
    f: (a, b) -> a <= b
  ">=":
    valueType: "boolean"
    f: (a, b) -> a >= b
# ## MITHgrid.Expression.Basic.controls
#
# Control functions may be defined for use in expressions. See the existing control functions for examples of
# how to write them.
#
# All control functions take the following parameters:
#
# * args
# * roots
# * rootValueTypes
# * defaultRootName
# * database
#
# All control functions should return a collection of items (using MITHgrid.Expression.initCollection collections)
#
Expression.controls = exports.controls =
  # ### if
  #
  # if(cond, thenExpr, elseExpr): evaluates args[0]; if *any* of its values
  # is truthy, evaluates and returns args[1], otherwise args[2]. Controls
  # receive their arguments unevaluated, so only one branch is evaluated.
  "if":
    f: (args, roots, rootValueTypes, defaultRootName, database) ->
      conditionCollection = args[0].evaluate roots, rootValueTypes, defaultRootName, database
      condition = false
      # forEachValue stops iterating when the callback returns true.
      conditionCollection.forEachValue (v) ->
        if v
          condition = true
          return true
        else
          return undefined
      if condition
        args[1].evaluate roots, rootValueTypes, defaultRootName, database
      else
        args[2].evaluate roots, rootValueTypes, defaultRootName, database
  # ### foreach
  #
  # foreach(collExpr, bodyExpr): evaluates args[1] once per value of
  # args[0], with that value bound to the "value" root, concatenating the
  # results. The previous "value" root is saved and restored around the loop.
  "foreach":
    f: (args, roots, rootValueTypes, defaultRootName, database) ->
      collection = args[0].evaluate roots, rootValueTypes, defaultRootName, database
      oldValue = roots.value
      oldValueType = rootValueTypes.value
      results = []
      valueType = "text"

      rootValueTypes.value = collection.valueType
      collection.forEachValue (element) ->
        roots.value = element
        collection2 = args[1].evaluate roots, rootValueTypes, defaultRootName, database
        # valueType ends up as the type of the *last* body evaluation.
        valueType = collection2.valueType
        collection2.forEachValue (result) ->
          results.push result
      roots.value = oldValue
      rootValueTypes.value = oldValueType
      Expression.initCollection results, valueType
  # ### default
  #
  # default(e1, e2, ...): returns the first argument whose evaluation yields
  # a non-empty collection; an empty "text" collection if none do.
  "default":
    f: (args, roots, rootValueTypes, defaultRootName, database) ->
      for arg in args
        collection = arg.evaluate roots, rootValueTypes, defaultRootName, database
        if collection.size() > 0
          return collection
      Expression.initCollection [], "text"
# Wraps a parsed AST root node in the public expression interface used by
# callers (evaluate / evaluateOnItem / evaluateSingle / path helpers).
Expression.initExpression = (rootNode) ->
  that = {}

  # Evaluate against the given roots; returns {values, valueType, size}
  # where `values` is a MITHgrid Set and `size` is the collection's size
  # function (not a number — callers invoke it).
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    collection = rootNode.evaluate roots, rootValueTypes, defaultRootName, database
    return {
      values: collection.getSet()
      valueType: collection.valueType
      size: collection.size
    }

  # Convenience: evaluate with a single item ID bound to the "value" root.
  that.evaluateOnItem = (itemID, database) ->
    that.evaluate({
      "value": itemID
    }, {
      "value": "item"
    },
    "value",
    database
    )

  # Like evaluate, but returns only the last value visited (or null when
  # the collection is empty) along with its valueType.
  that.evaluateSingle = (roots, rootValueTypes, defaultRootName, database) ->
    collection = rootNode.evaluate roots, rootValueTypes, defaultRootName, database
    result =
      value: null
      valueType: collection.valueType
    collection.forEachValue (v) ->
      result.value = v
      true
    result;

  # Path-only operations delegate to the root node when it is a path;
  # otherwise getPath yields null and testExists falls back to evaluation.
  that.isPath = rootNode.isPath
  if that.isPath
    that.getPath = -> rootNode
    that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
      rootNode.testExists roots, rootValueTypes, defaultRootName, database
  else
    that.getPath = -> null
    that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
      that.evaluate(roots, rootValueTypes, defaultRootName, database).values.size() > 0

  that.evaluateBackward = (value, valueType, filter, database) ->
    rootNode.walkBackward [value], valueType, filter, database

  that.walkForward = (values, valueType, database) ->
    rootNode.walkForward values, valueType, database

  that.walkBackward = (values, valueType, filter, database) ->
    rootNode.walkBackward values, valueType, filter, database

  that
# Wraps either a plain Array or a MITHgrid Set in a uniform read-only
# collection interface: forEachValue / getSet / contains / size, plus the
# valueType tag carried alongside the values.
Expression.initCollection = exports.initCollection = (values, valueType) ->
  collection =
    valueType: valueType
    isPath: false
  if values instanceof Array
    # Array backing: visit stops early when the callback returns true.
    collection.forEachValue = (visitor) ->
      for item in values
        break if visitor(item) == true
    collection.getSet = -> MITHgrid.Data.Set.initInstance values
    collection.contains = (item) -> item in values
    collection.size = -> values.length
  else
    # Set backing: delegate directly to the set's own accessors.
    collection.forEachValue = values.visit
    collection.getSet = -> values
    collection.contains = values.contains
    collection.size = values.size
  collection
# A literal AST node: evaluation ignores the roots/database context and
# always yields a one-element collection holding `value` tagged `valueType`.
Expression.initConstant = (value, valueType) ->
  node =
    isPath: false
    evaluate: (roots, rootValueTypes, defaultRootName, database) ->
      Expression.initCollection [value], valueType
  node
# A binary operator AST node. Both operand expressions are evaluated and
# the operator function from _operators is applied to every pairing of
# their values (a cross product), producing a flat result collection.
Expression.initOperator = (operator, args) ->
  node = {}
  node.isPath = false
  node.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    evaluated = (a.evaluate(roots, rootValueTypes, defaultRootName, database) for a in args)
    op = _operators[operator]
    results = []
    if op.argumentType == "number"
      # Numeric operators coerce non-number operands with parseFloat.
      evaluated[0].forEachValue (left) ->
        if typeof(left) != "number"
          left = parseFloat left
        evaluated[1].forEachValue (right) ->
          if typeof(right) != "number"
            right = parseFloat right
          results.push op.f(left, right)
    else
      evaluated[0].forEachValue (left) ->
        evaluated[1].forEachValue (right) ->
          results.push op.f(left, right)
    Expression.initCollection results, op.valueType
  node
# A function-call AST node, e.g. foo(a, b). Arguments are evaluated eagerly
# (as collections) and handed to the registered Expression.functions entry.
# Throws if no function with that name is registered.
Expression.initFunctionCall = (name, args) ->
  that = {}
  _args = args
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    args = []
    args.push(a.evaluate roots, rootValueTypes, defaultRootName, database ) for a in _args
    if Expression.functions[name]?.f?
      return Expression.functions[name].f args
    else
      # Fix: the message interpolated `_name`, which is undefined in this
      # scope — the parameter is `name`, so the error said "undefined".
      throw new Error "No such function named #{name}"
  that.isPath = false
  that
# A control-structure AST node (if / foreach / default). Unlike a function
# call, the control receives its argument expressions *unevaluated*, so it
# can choose which ones to evaluate and with which roots.
Expression.initControlCall = (name, args) ->
  node =
    isPath: false
    evaluate: (roots, rootValueTypes, defaultRootName, database) ->
      Expression.controls[name].f args, roots, rootValueTypes, defaultRootName, database
  node
# A path AST node: a sequence of segments, each hopping forward (subject ->
# property -> object) or backward (object -> property -> subject) through
# the triple store. Segments may also be parenthesized sub-expressions
# (expression sets / closures). An optional initial (property, forward)
# pair seeds the first segment.
Expression.initPath = (property, forward) ->
  that = {}
  _rootName = null
  _segments = []

  # Walk the path left-to-right starting from `collection`.
  walkForward = (collection, database) ->
    # isMultiple variants collect every hit (duplicates preserved) instead
    # of taking the de-duplicated union.
    forwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getObjects(v, segment.property).visit (v2) -> a.push v2
      a
    backwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getSubjects(v, segment.property).visit (v2) -> a.push v2
      a

    for i in [ 0 ... _segments.length ]
      segment = _segments[i]
      if segment.expression?
        # Sub-path segment. NOTE(review): only forward traversal is
        # implemented; a backward sub-path segment is silently skipped,
        # matching the original code's empty "walk backward" branch.
        if segment.forward
          collection = segment.expression.evaluateOnItem(collection.getSet().items(), database)
      else if segment.isMultiple
        a = []
        if segment.forward
          a = forwardArraySegmentFn segment
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
        else
          a = backwardArraySegmentFn segment
          valueType = "item"
        collection = Expression.initCollection a, valueType
      else
        if segment.forward
          values = database.getObjectsUnion collection.getSet(), segment.property
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
          collection = Expression.initCollection values, valueType
        else
          values = database.getSubjectsUnion collection.getSet(), segment.property
          collection = Expression.initCollection values, "item"
    collection

  # Walk the path right-to-left from `collection`, optionally restricting
  # the final (leftmost) hop to ids in `filter`.
  walkBackward = (collection, filter, database) ->
    # These helpers read `i` from the enclosing loop: the filter only
    # applies on the last iteration (i == 0).
    forwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getSubjects(v, segment.property).visit (v2) ->
          a.push v2 if i > 0 or !filter? or filter.contains v2
      a
    backwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getObjects(v, segment.property).visit (v2) ->
          a.push v2 if i > 0 or !filter? or filter.contains v2
      a

    if filter instanceof Array
      filter = MITHgrid.Data.Set.initInstance filter

    # Fix: `by -1` makes the loop empty for zero segments; without it a
    # CoffeeScript range like [-1..0] counts *up* and hits _segments[-1].
    for i in [ _segments.length-1 .. 0 ] by -1
      segment = _segments[i];
      if segment.isMultiple
        a = []
        if segment.forward
          a = forwardArraySegmentFn segment
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
        else
          a = backwardArraySegmentFn segment
          valueType = "item"
        collection = Expression.initCollection a, valueType
      else if segment.forward
        values = database.getSubjectsUnion(collection.getSet(), segment.property, null, if i == 0 then filter else null)
        collection = Expression.initCollection values, "item"
      else
        values = database.getObjectsUnion(collection.getSet(), segment.property, null, if i == 0 then filter else null)
        property = database.getProperty segment.property
        valueType = if property? then property.getValueType() else "text"
        collection = Expression.initCollection values, valueType
    collection

  if property?
    _segments.push
      property: property
      forward: forward
      isMultiple: false

  that.isPath = true

  that.setRootName = (rootName) -> _rootName = rootName

  # hopOperator is the raw token: "." / "!" with an optional multiplicity
  # suffix (length > 1 means collect duplicates).
  that.appendSegment = (property, hopOperator) ->
    _segments.push
      property: property
      forward: hopOperator[0] == "."
      isMultiple: hopOperator.length > 1

  that.getSegment = (index) ->
    if index < _segments.length
      segment = _segments[index]
      return {
        property: segment.property
        forward: segment.forward
        isMultiple: segment.isMultiple
      }
    else
      return null

  that.appendSubPath = (expression) ->
    _segments.push
      expression: expression
      forward: true
      isMultiple: true

  that.getLastSegment = -> that.getSegment _segments.length - 1
  that.getSegmentCount = -> _segments.length

  # Find the item ids whose path values land in [from, to); requires the
  # last segment to be a forward hop.
  that.rangeBackward = (from, to, filter, database) ->
    set = MITHgrid.Data.Set.initInstance()
    valueType = "item"
    if _segments.length > 0
      segment = _segments[_segments.length - 1]
      if segment.forward
        database.getSubjectsInRange(segment.property, from, to, false, set, if _segments.length == 1 then filter else null)
      else
        throw new Error "Last path of segment must be forward"
      # Fix: `by -1` — with a single segment the range is [-1..0], which
      # previously counted up and dereferenced _segments[-1].
      for i in [ _segments.length - 2 .. 0 ] by -1
        segment = _segments[i]
        if segment.forward
          set = database.getSubjectsUnion(set, segment.property, null, if i == 0 then filter else null)
          valueType = "item"
        else
          set = database.getObjectsUnion(set, segment.property, null, if i == 0 then filter else null)
          # Fix: was `database.getPropertysegment.property` — a broken call
          # (missing space) that dereferenced an undefined identifier.
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
    return {
      valueType: valueType
      values: set
      count: set.size()
    }

  # Evaluate the path from the named root (or the default root). The root
  # may be a single value, an array, or a set-like object.
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    rootName = if _rootName? then _rootName else defaultRootName
    valueType = if rootValueTypes[rootName]? then rootValueTypes[rootName] else "text"
    collection = null
    if roots[rootName]?
      root = roots[rootName]
      if $.isPlainObject(root) or root instanceof Array
        collection = Expression.initCollection root, valueType
      else
        collection = Expression.initCollection [root], valueType
      return walkForward collection, database
    else
      throw new Error "No such variable called " + rootName

  that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
    that.evaluate(roots, rootValueTypes, defaultRootName, database).size() > 0

  that.evaluateBackward = (value, valueType, filter, database) ->
    collection = Expression.initCollection [value], valueType
    walkBackward collection, filter, database

  that.walkForward = (values, valueType, database) ->
    walkForward Expression.initCollection(values, valueType), database

  that.walkBackward = (values, valueType, filter, database) ->
    walkBackward Expression.initCollection(values, valueType), filter, database

  that
# This allows us to do the following:
#   .foo(.bar.baz)*.bat  — follow any number of .bar.baz segments
#   .foo(.bar,.baz)*.bat — follow any number of .bar or .baz segments
# Implements the transitive closure: the member expressions are applied
# repeatedly to each frontier of newly discovered items until no new
# values appear.
Expression.initClosure = (expressions) ->
  that = {}
  that.isPath = false
  expressions = [ expressions ] unless $.isArray expressions
  that.evaluateOnItem = (roots, database) ->
    # Fix: was `MITHGrid.Data.Set` (wrong capitalization — a ReferenceError
    # at runtime); the namespace is spelled `MITHgrid` throughout this file.
    finalSet = MITHgrid.Data.Set.initInstance()
    valueType = null
    # Seed pass: apply every expression to the initial roots.
    for ex in expressions
      set = ex.evaluate({ "value": roots }, { "value": "item" }, "value", database)
      set.getSet().visit finalSet.add
      valueType ?= set.valueType
    # Fixed point: keep expanding from values not seen before.
    nextRoots = finalSet.items()
    while nextRoots.length > 0
      nextSet = MITHgrid.Data.Set.initInstance()
      for ex in expressions
        set = ex.evaluate({ "value": nextRoots }, { "value": "item" }, "value", database)
        set.getSet().visit (v) ->
          if !finalSet.contains(v)
            nextSet.add(v)
            finalSet.add(v)
      nextRoots = nextSet.items()
    return {
      values: finalSet
      getSet: -> finalSet
      valueType: valueType || "text"
      size: finalSet.size()
    }
  that
# A parenthesized set of expressions applied exactly once (no closure):
# returns the union of each member expression's values, all evaluated from
# the same roots.
Expression.initExpressionSet = (expressions) ->
  that = {}
  that.isPath = false
  expressions = [ expressions ] unless $.isArray expressions
  # Fixes in this method: the parameter was named `root` while the body
  # used `roots`; `finaleSet` was a typo for the `finalSet` referenced
  # below; and `MITHGrid` mis-capitalized the `MITHgrid` namespace.
  that.evaluateOnItem = (roots, database) ->
    finalSet = MITHgrid.Data.Set.initInstance()
    valueType = null
    for ex in expressions
      set = ex.evaluate({ "value": roots }, { "value": "item" }, "value", database)
      set.getSet().visit finalSet.add
      valueType ?= set.valueType
    return {
      values: finalSet
      getSet: -> finalSet
      valueType: valueType || "text"
      size: finalSet.size()
    }
  # Fix: the constructor previously fell off the end without returning
  # `that` (cf. initClosure), so callers received the evaluateOnItem
  # function instead of the node object.
  that
# Recursive-descent parser for the expression language. Produces
# Expression.initExpression wrappers around the parsed AST.
Expression.initParser = exports.initInstance = ->
  that = {}

  # Parse one expression (or, when `several`, a comma-separated list) from
  # the scanner. Returns an array of initExpression objects either way.
  internalParse = (scanner, several) ->
    token = scanner.token()
    # Token-type constants (OPERATOR, NUMBER, ...) are attached to the
    # initScanner function itself.
    Scanner = Expression.initScanner

    # Advance the scanner and refresh the current-token cache.
    next = ->
      scanner.next()
      token = scanner.token()

    parseExpressionList = ->
      expressions = [parseExpression()]
      while token? and token.type == Scanner.DELIMITER and token.value == ","
        next()
        expressions.push parseExpression()
      expressions

    # Position used in error messages: current token start, or scanner
    # index at end of input.
    makePosition = -> if token? then token.start else scanner.index()

    # Parse a path: a run of hop segments (. / ! with optional suffix) and
    # parenthesized sub-expression groups, optionally starred for closure.
    parsePath = ->
      path = Expression.initPath()
      while token? && !(token.type == Scanner.OPERATOR || token.type == Scanner.DELIMITER && token.value == ')')
        if token.type == Scanner.PATH_OPERATOR
          hopOperator = token.value
          next()
          if token? and token.type == Scanner.IDENTIFIER
            path.appendSegment token.value, hopOperator
            next()
          else
            throw new Error "Missing property ID at position " + makePosition()
        else if token.type == Scanner.DELIMITER and token.value == '('
          next()
          expressions = parseExpressionList()
          if token && token.type == Scanner.DELIMITER
            if token.value == ')'
              next()
              # A trailing * turns the group into a transitive closure.
              if token && token.type == Scanner.OPERATOR and token.value == '*'
                next()
                path.appendSubPath Expression.initClosure expressions
              else
                path.appendSubPath Expression.initExpressionSet expressions
            else
              throw new Error "Mismatched ')' at position " + makePosition()
          else
            throw new Error "Mismatched ')' at position " + makePosition()
      path

    # Parse a factor: constant, path, control/function call, identifier-
    # rooted path, or parenthesized expression.
    parseSubExpression = ->
      result = null
      args = []
      if !token?
        throw new Error "Missing factor at end of expression"
      switch token.type
        when Scanner.OPERATOR
          # An operator here means an empty factor; caller sees null.
          return result
        when Scanner.NUMBER
          result = Expression.initConstant token.value, "number"
          next()
        when Scanner.STRING
          result = Expression.initConstant(token.value, "text");
          next();
        when Scanner.PATH_OPERATOR then result = parsePath()
        when Scanner.IDENTIFIER
          identifier = token.value
          next()
          # Registered control names (if/foreach/default) parse as control
          # calls; any other identifier followed by ( is a function call;
          # otherwise the identifier names the root of a path.
          if Expression.controls[identifier]?
            if token? and token.type == Scanner.DELIMITER and token.value == "("
              next()
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                args = []
              else
                args = parseExpressionList()
              result = Expression.initControlCall identifier, args
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                next()
              else
                throw new Error "Missing ) to end " + identifier + " at position " + makePosition()
            else
              throw new Error "Missing ( to start " + identifier + " at position " + makePosition()
          else
            if token? and token.type == Scanner.DELIMITER and token.value == "("
              next()
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                args = []
              else
                args = parseExpressionList()
              result = Expression.initFunctionCall identifier, args
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                next()
              else
                throw new Error "Missing ) after function call " + identifier + " at position " + makePosition()
            else
              result = parsePath()
              result.setRootName identifier
        when Scanner.DELIMITER
          if token.value == "("
            next()
            result = parseExpression()
            if token? and token.type == Scanner.DELIMITER and token.value == ")"
              next()
            else
              throw new Error "Missing ) at position " + makePosition()
          else
            throw new Error "Unexpected text " + token.value + " at position " + makePosition()
        else
          throw new Error "Unexpected text " + token.value + " at position " + makePosition()
      result

    # An expression is a factor optionally followed by left-associative
    # comparison operators.
    parseExpression = ->
      expression = parseSubExpression()
      while token?.type == Scanner.OPERATOR && token.value in [ "=", "<", ">", "<>", "<=", ">=" ]
        operator = token.value
        next()
        expression = Expression.initOperator operator, [ expression, parseSubExpression() ]
      expression

    if several
      roots = parseExpressionList()
      expressions = []
      expressions.push Expression.initExpression(r) for r in roots
      return expressions
    else
      return [Expression.initExpression(parseExpression())]

  # Parse a single expression from `s` starting at startIndex (default 0).
  # `results.index` receives the index where parsing stopped, even when an
  # error is thrown.
  that.parse = (s, startIndex, results) ->
    startIndex ?= 0
    results ?= {}
    scanner = Expression.initScanner s, startIndex
    try
      return internalParse(scanner, false)[0]
    finally
      results.index = if scanner.token()? then scanner.token().start else scanner.index()
  that
  # ## Expression.initScanner
  #
  # Hand-written tokenizer for the expression language. #next() advances;
  # #token() returns { type, value, start, end } or null at end of input.
  # Token types are the integer constants attached to this factory below.
  Expression.initScanner = (text, startIndex) ->
    that = {}
    # trailing space guards the two-character lookahead (c2/c3) below
    _text = text + " "
    _maxIndex = text.length
    _index = startIndex
    _token = null
    isDigit = (c) -> "0123456789".indexOf(c) >= 0
    that.token = -> _token
    that.index = -> _index
    # Scan one token into _token (left null at end of input).
    that.next = ->
      _token = null
      # skip whitespace
      _index += 1 while _index < _maxIndex and " \t\r\n".indexOf(_text[_index]) >= 0
      if _index < _maxIndex
        c1 = _text.charAt _index
        c2 = _text.charAt _index + 1
        c3 = _text.charAt _index + 2
        if ".!".indexOf(c1) >= 0
          # short hop operators: "." "!" and multiplicity-preserving ".@" "!@"
          if c2 == "@"
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: c1 + c2
              start: _index
              end: _index + 2
            _index += 2
          else
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: c1
              start: _index
              end: _index + 1
            _index += 1
        else if c1 == "<" and c2 == "-"
          # long backward hops: "<-" => "!", "<-@" => "!@"
          if c3 == "@"
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: "!@"
              start: _index
              end: _index + 3
            _index += 3
          else
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: "!"
              start: _index
              end: _index + 2
            _index += 2
        else if c1 == "-" and c2 == ">"
          # long forward hops: "->" => ".", "->@" => ".@"
          if c3 == "@"
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: ".@"
              start: _index
              end: _index + 3
            _index += 3
          else
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: "."
              start: _index
              end: _index + 2
            _index += 2
        else if "<>".indexOf(c1) >= 0
          # comparisons, including two-character <=, >=, <>, ><
          if (c2 == "=") or ("<>".indexOf(c2) >= 0 and c1 != c2)
            _token =
              type: Expression.initScanner.OPERATOR
              value: c1 + c2
              start: _index
              end: _index + 2
            _index += 2
          else
            _token =
              type: Expression.initScanner.OPERATOR
              value: c1
              start: _index
              end: _index + 1
            _index += 1
        else if "+-*/=".indexOf(c1) >= 0
          _token =
            type: Expression.initScanner.OPERATOR
            value: c1
            start: _index
            end: _index + 1
          _index += 1
        else if "()".indexOf(c1) >= 0
          _token =
            type: Expression.initScanner.DELIMITER
            value: c1
            start: _index
            end: _index + 1
          _index += 1
        else if "\"'".indexOf(c1) >= 0
          # quoted strings
          i = _index + 1
          while i < _maxIndex
            break if _text.charAt(i) == c1 and _text.charAt(i - 1) != "\\"
            i += 1
          if i < _maxIndex
            # unescape \' and \" inside the literal
            _token =
              type: Expression.initScanner.STRING
              value: _text.substring(_index + 1, i).replace(/\\'/g, "'").replace(/\\"/g, '"')
              start: _index
              end: i + 1
            _index = i + 1
          else
            throw new Error "Unterminated string starting at " + String(_index)
        else if isDigit c1
          # number
          i = _index
          i += 1 while i < _maxIndex and isDigit(_text.charAt i)
          if i < _maxIndex and _text.charAt(i) == "."
            i += 1
            i += 1 while i < _maxIndex and isDigit(_text.charAt i)
          _token =
            type: Expression.initScanner.NUMBER
            value: parseFloat(_text.substring(_index, i))
            start: _index
            end: i
          _index = i;
        else
          # identifier: any run of characters not in the delimiter set
          i = _index
          while i < _maxIndex
            c = _text.charAt i
            break unless "(),.!@ \t".indexOf(c) < 0
            i += 1
          _token =
            type: Expression.initScanner.IDENTIFIER
            value: _text.substring(_index, i)
            start: _index
            end: i
          _index = i
    # prime the first token
    that.next()
    that
  # Token-type constants attached to the scanner factory; the parser
  # compares token.type against these.
  Expression.initScanner.DELIMITER = 0
  Expression.initScanner.NUMBER = 1
  Expression.initScanner.STRING = 2
  Expression.initScanner.IDENTIFIER = 3
  Expression.initScanner.OPERATOR = 4
  Expression.initScanner.PATH_OPERATOR = 5
  # Registries for user-defined expression functions and their helpers.
  Expression.functions = { }
  Expression.FunctionUtilities = { }
exports.registerSimpleMappingFunction = (name, f, valueType) ->
Expression.functions[name] =
f: (args) ->
set = MITHgrid.Data.Set.initInstance()
evalArg = (arg) ->
arg.forEachValue (v) ->
v2 = f(v)
set.add v2 if v2?
evalArg arg for arg in args
Expression.initCollection set, valueType
| 192210 | # # Expression Parser
#
# Everything here is private except for a few exported objects and functions.
#
#
# ## Expressions
#
# Expressions describe a path through the data graph held in a data store.
#
# Expressions hop from node to node in one of two directions: forward or backward. Forward goes from an item ID through a property
# to arrive at a new value. Backward goes from a value through a property to arrive at a new item ID.
#
# For example, if we have a data store with items holding information about books, such as the following:
#
# [{
# id: "book1",
# author: "author1",
# title: "A Tale of Two Cities",
# pages: 254
# }, {
# id: "author1",
#        name: "Charles Dickens"
# }]
#
# Then .name would return "Charles Dickens" if we started with the item ID "author1". But .author.name would return the same
# value if we started with the item ID "book1".
#
# If we start with "Charles Dickens" (the value), we can find the number of pages in the books with the following expression:
# !name!author.pages (or <-name<-author->pages using the longer notation).
#
# . and -> use a forward index and must have an item ID on the left side
#
# ! and <- use a reverse index and will result in an item ID on the right side
#
# .foo* means to follow the foo property until you can't any more, returning
# the ids along the way
# !foo* means to follow the foo property backward until you can't any more,
# returning the ids along the way
# (...)* means to apply the subgraph-traversal as many times as possible
#
MITHgrid.namespace "Expression.Basic", (exports) ->
Expression = {}
  # Binary operator table used by Expression.initOperator. Entries with
  # argumentType "number" have their operands coerced via parseFloat;
  # valueType is the type of the result collection.
  _operators =
    "+":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a + b
    "-":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a - b
    "*":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a * b
    "/":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a / b
    "=":
      valueType: "boolean"
      f: (a, b) -> a == b
    "<>":
      valueType: "boolean"
      f: (a, b) -> a != b
    # "><" is accepted as a synonym for "<>" (not-equal)
    "><":
      valueType: "boolean"
      f: (a, b) -> a != b
    "<":
      valueType: "boolean"
      f: (a, b) -> a < b
    ">":
      valueType: "boolean"
      f: (a, b) -> a > b
    "<=":
      valueType: "boolean"
      f: (a, b) -> a <= b
    ">=":
      valueType: "boolean"
      f: (a, b) -> a >= b
# ## MITHgrid.Expression.Basic.controls
#
# Control functions may be defined for use in expressions. See the existing control functions for examples of
# how to write them.
#
# All control functions take the following parameters:
#
# * args
# * roots
# * rootValueTypes
# * defaultRootName
# * database
#
# All control functions should return a collection of items (using MITHgrid.Expression.initCollection collections)
#
  Expression.controls = exports.controls =
    # ### if
    #
    # if(condition, then-expr, else-expr): only the selected branch is
    # evaluated. The condition is true when any value in the condition
    # collection is truthy. Assumes all three arguments are present —
    # TODO confirm callers guarantee this.
    "if":
      f: (args, roots, rootValueTypes, defaultRootName, database) ->
        conditionCollection = args[0].evaluate roots, rootValueTypes, defaultRootName, database
        condition = false
        conditionCollection.forEachValue (v) ->
          if v
            condition = true
            # returning true stops forEachValue early
            return true
          else
            return undefined
        if condition
          args[1].evaluate roots, rootValueTypes, defaultRootName, database
        else
          args[2].evaluate roots, rootValueTypes, defaultRootName, database
    # ### foreach
    #
    # foreach(collection, body): evaluates `body` once per value with the
    # value bound to the "value" root, concatenating all results. The
    # previous "value" binding and type are restored afterwards.
    "foreach":
      f: (args, roots, rootValueTypes, defaultRootName, database) ->
        collection = args[0].evaluate roots, rootValueTypes, defaultRootName, database
        oldValue = roots.value
        oldValueType = rootValueTypes.value
        results = []
        valueType = "text"
        rootValueTypes.value = collection.valueType
        collection.forEachValue (element) ->
          roots.value = element
          collection2 = args[1].evaluate roots, rootValueTypes, defaultRootName, database
          valueType = collection2.valueType
          collection2.forEachValue (result) ->
            results.push result
        roots.value = oldValue
        rootValueTypes.value = oldValueType
        Expression.initCollection results, valueType
    # ### default
    #
    # default(e1, e2, ...): evaluates arguments in order and returns the
    # first non-empty collection; otherwise an empty text collection.
    "default":
      f: (args, roots, rootValueTypes, defaultRootName, database) ->
        for arg in args
          collection = arg.evaluate roots, rootValueTypes, defaultRootName, database
          if collection.size() > 0
            return collection
        Expression.initCollection [], "text"
  # Public wrapper around a parsed root node, exposing the evaluate
  # variants used by the rest of MITHgrid plus path-walking helpers.
  Expression.initExpression = (rootNode) ->
    that = {}
    # Evaluate and return { values: Set, valueType, size }.
    that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
      collection = rootNode.evaluate roots, rootValueTypes, defaultRootName, database
      return {
        values: collection.getSet()
        valueType: collection.valueType
        size: collection.size
      }
    # Convenience: evaluate with itemID bound to the "value" root as "item".
    that.evaluateOnItem = (itemID, database) ->
      that.evaluate({
        "value": itemID
      }, {
        "value": "item"
      },
      "value",
      database
      )
    # Evaluate keeping only one value (the last one visited), or null.
    that.evaluateSingle = (roots, rootValueTypes, defaultRootName, database) ->
      collection = rootNode.evaluate roots, rootValueTypes, defaultRootName, database
      result =
        value: null
        valueType: collection.valueType
      collection.forEachValue (v) ->
        result.value = v
        true
      result;
    that.isPath = rootNode.isPath
    # Path roots expose the path directly and delegate existence tests;
    # non-path roots test by evaluating and checking the result size.
    if that.isPath
      that.getPath = -> rootNode
      that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
        rootNode.testExists roots, rootValueTypes, defaultRootName, database
    else
      that.getPath = -> null
      that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
        that.evaluate(roots, rootValueTypes, defaultRootName, database).values.size() > 0
    # Walking helpers delegate straight to the root node.
    that.evaluateBackward = (value, valueType, filter, database) ->
      rootNode.walkBackward [value], valueType, filter, database
    that.walkForward = (values, valueType, database) ->
      rootNode.walkForward values, valueType, database
    that.walkBackward = (values, valueType, filter, database) ->
      rootNode.walkBackward values, valueType, filter, database
    that
  # Uniform wrapper around either a plain array of values or a MITHgrid
  # Set, exposing forEachValue/getSet/contains/size plus valueType.
  Expression.initCollection = exports.initCollection = (values, valueType) ->
    that =
      valueType: valueType
    if values instanceof Array
      # A forEachValue callback may return true to stop iteration early.
      that.forEachValue = (f) ->
        for v in values
          if f(v) == true
            break;
      that.getSet = -> MITHgrid.Data.Set.initInstance values
      that.contains = (v) -> v in values
      that.size = -> values.length
    else
      # presumably a MITHgrid Set: visit/size/contains delegated directly
      that.forEachValue = values.visit
      that.size = values.size
      that.getSet = -> values
      that.contains = values.contains
    that.isPath = false;
    that
Expression.initConstant = (value, valueType) ->
that = {}
that.evaluate = (roots, rootValueTypes, defaultRootName, database) -> Expression.initCollection [value], valueType
that.isPath = false;
that
  # A binary operator node over the _operators table. Evaluates both
  # argument expressions and applies f to the cross-product of their
  # value collections.
  Expression.initOperator = (operator, args) ->
    that = {}
    _operator = operator
    _args = args
    that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
      values = []
      args = []
      args.push(a.evaluate roots, rootValueTypes, defaultRootName, database) for a in _args
      operator = _operators[_operator]
      f = operator.f
      if operator.argumentType == "number"
        # numeric operators coerce non-number operands with parseFloat
        args[0].forEachValue (v1) ->
          if typeof(v1) != "number"
            v1 = parseFloat v1
          args[1].forEachValue (v2) ->
            if typeof(v2) != "number"
              v2 = parseFloat v2
            values.push f(v1, v2)
      else
        args[0].forEachValue (v1) ->
          args[1].forEachValue (v2) -> values.push f(v1, v2)
      Expression.initCollection values, operator.valueType
    that.isPath = false
    that
Expression.initFunctionCall = (name, args) ->
that = {}
_args = args
that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
args = []
args.push(a.evaluate roots, rootValueTypes, defaultRootName, database ) for a in _args
if Expression.functions[name]?.f?
return Expression.functions[name].f args
else
throw new Error "No such function named #{_name}"
that.isPath = false
that
Expression.initControlCall = (name, args) ->
that = {}
that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
Expression.controls[name].f args, roots, rootValueTypes, defaultRootName, database
that.isPath = false
that
Expression.initPath = (property, forward) ->
that = {}
_rootName = null
_segments = []
#
# If isMultiple == true (.@ or !@ instead of . or !), then we
# collect all matching values regardless of multiplicity. Otherwise,
# we only return one instance of each matching value.
#
walkForward = (collection, database) ->
forwardArraySegmentFn = (segment) ->
a = []
collection.forEachValue (v) ->
database.getObjects(v, segment.property).visit (v2) -> a.push v2
a
backwardArraySegmentFn = (segment) ->
a = []
collection.forEachValue (v) ->
database.getSubjects(v, segment.property).visit (v2) -> a.push v2
a
for i in [ 0 ... _segments.length ]
segment = _segments[i]
if segment.expression?
if segment.forward
# simply evaluate the expressions and report the results
collection = segment.expression.evaluateOnItem(collection.getSet().items(), database)
else
# walk backward
else if segment.isMultiple
a = []
if segment.forward
a = forwardArraySegmentFn segment
property = database.getProperty segment.property
valueType = if property? then property.getValueType() else "text"
else
a = backwardArraySegmentFn segment
valueType = "item"
collection = Expression.initCollection a, valueType
else
if segment.forward
values = database.getObjectsUnion collection.getSet(), segment.property
property = database.getProperty segment.property
valueType = if property? then property.getValueType() else "text"
collection = Expression.initCollection values, valueType
else
values = database.getSubjectsUnion collection.getSet(), segment.property
collection = Expression.initCollection values, "item"
collection
walkBackward = (collection, filter, database) ->
forwardArraySegmentFn = (segment) ->
a = []
collection.forEachValue (v) ->
database.getSubjects(v, segment.property).visit (v2) ->
a.push v2 if i > 0 or !filter? or filter.contains v2
a
backwardArraySegmentFn = (segment) ->
a = []
collection.forEachValue (v) ->
database.getObjects(v, segment.property).visit (v2) ->
a.push v2 if i > 0 or !filter? or filter.contains v2
a
if filter instanceof Array
filter = MITHgrid.Data.Set.initInstance filter
for i in [ _segments.length-1 .. 0 ]
segment = _segments[i];
if segment.isMultiple
a = []
if segment.forward
a = forwardArraySegmentFn segment
property = database.getProperty segment.property
valueType = if property? then property.getValueType() else "text"
else
a = backwardArraySegmentFn segment
valueType = "item"
collection = Expression.initCollection a, valueType
else if segment.forward
values = database.getSubjectsUnion(collection.getSet(), segment.property, null, if i == 0 then filter else null)
collection = Expression.initCollection values, "item"
else
values = database.getObjectsUnion(collection.getSet(), segment.property, null, if i == 0 then filter else null)
property = database.getProperty segment.property
valueType = if property? then property.getValueType() else "text"
collection = Expression.initCollection values, valueType
collection
if property?
_segments.push
property: property
forward: forward
isMultiple: false
that.isPath = true
that.setRootName = (rootName) -> _rootName = rootName
that.appendSegment = (property, hopOperator) ->
_segments.push
property: property
forward: hopOperator[0] == "."
isMultiple: hopOperator.length > 1
that.getSegment = (index) ->
if index < _segments.length
segment = _segments[index]
return {
property: segment.property
forward: segment.forward
isMultiple: segment.isMultiple
}
else
return null
that.appendSubPath = (expression) ->
_segments.push
expression: expression
forward: true
isMultiple: true
that.getLastSegment = -> that.getSegment _segments.length - 1
that.getSegmentCount = -> _segments.length
that.rangeBackward = (from, to, filter, database) ->
set = MITHgrid.Data.Set.initInstance()
valueType = "item"
if _segments.length > 0
segment = _segments[_segments.length - 1]
if segment.forward
database.getSubjectsInRange(segment.property, from, to, false, set, if _segments.length == 1 then filter else null)
else
throw new Error "Last path of segment must be forward"
for i in [ _segments.length - 2 .. 0 ]
segment = _segments[i]
if segment.forward
set = database.getSubjectsUnion(set, segment.property, null, if i == 0 then filter else null)
valueType = "item"
else
set = database.getObjectsUnion(set, segment.property, null, if i == 0 then filter else null)
property = database.getPropertysegment.property
valueType = if property? then property.getValueType() else "text"
return {
valueType: valueType
values: set
count: set.size()
}
that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
rootName = if _rootName? then _rootName else defaultRootName
valueType = if rootValueTypes[rootName]? then rootValueTypes[rootName] else "text"
collection = null
if roots[rootName]?
root = roots[rootName]
if $.isPlainObject(root) or root instanceof Array
collection = Expression.initCollection root, valueType
else
collection = Expression.initCollection [root], valueType
return walkForward collection, database
else
throw new Error "No such variable called " + rootName
that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
that.evaluate(roots, rootValueTypes, defaultRootName, database).size() > 0
that.evaluateBackward = (value, valueType, filter, database) ->
collection = Expression.initCollection [value], valueType
walkBackward collection, filter, database
that.walkForward = (values, valueType, database) ->
walkForward Expression.initCollection(values, valueType), database
that.walkBackward = (values, valueType, filter, database) ->
walkBackward Expression.initCollection(values, valueType), filter, database
that
# This allows us to do the following:
# .foo(.bar.baz)*.bat and follow any number of .bar.baz segments
# .foo(.bar,.baz)*.bat follows any number of .bar or .baz segments
Expression.initClosure = (expressions) ->
that = {}
that.isPath = false
expressions = [ expressions ] unless $.isArray expressions
that.evaluateOnItem = (roots, database) ->
finalSet = MITHGrid.Data.Set.initInstance()
valueType = null
for ex in expressions
set = ex.evaluate({ "value": roots }, { "value": "item" }, "value", database)
set.getSet().visit finalSet.add
valueType ?= set.valueType
nextRoots = finalSet.items()
while nextRoots.length > 0
nextSet = MITHGrid.Data.Set.initInstance()
for ex in expressions
set = ex.evaluate({ "value": nextRoots }, { "value": "item" }, "value", database)
set.getSet().visit (v) ->
if !finalSet.contains(v)
nextSet.add(v)
finalSet.add(v)
nextRoots = nextSet.items()
return {
values: finalSet
getSet: -> finalSet
valueType: valueType || "text"
size: finalSet.size()
}
that
Expression.initExpressionSet = (expressions) ->
that = {}
that.isPath = false
expressions = [ expressions ] unless $.isArray expressions
that.evaluateOnItem = (root, database) ->
finaleSet = MITHGrid.Data.Set.initInstance()
valueType = null
for ex in expressions
set = ex.evaluate({ "value": roots }, { "value": "item" }, "value", database)
set.getSet().visit finalSet.add
valueType ?= set.valueType
return {
values: finalSet
getSet: -> finalSet
valueType: valueType || "text"
size: finalSet.size()
}
  # ## Expression.initParser (exported as initInstance)
  #
  # A recursive-descent parser for the path-expression language described
  # in this file's header comment. Returns an object exposing a single
  # #parse(s, startIndex, results) method.
  Expression.initParser = exports.initInstance = ->
    that = {}
    # Parse from `scanner`; `several` selects a comma-separated list of
    # expressions rather than a single one. Always returns an array of
    # Expression.initExpression wrappers.
    internalParse = (scanner, several) ->
      token = scanner.token()
      Scanner = Expression.initScanner
      # Consume the current token and cache the next one.
      next = ->
        scanner.next()
        token = scanner.token()
      # expression-list ::= expression ("," expression)*
      parseExpressionList = ->
        expressions = [parseExpression()]
        while token? and token.type == Scanner.DELIMITER and token.value == ","
          next()
          expressions.push parseExpression()
        expressions
      # Position for error messages: current token start, or scanner index at EOF.
      makePosition = -> if token? then token.start else scanner.index()
      # path ::= ( hop-operator identifier | "(" expression-list ")" "*"? )*
      # A trailing "*" after a parenthesized group makes it a transitive
      # closure (Expression.initClosure); otherwise a one-shot set.
      parsePath = ->
        path = Expression.initPath()
        while token? && !(token.type == Scanner.OPERATOR || token.type == Scanner.DELIMITER && token.value == ')')
          if token.type == Scanner.PATH_OPERATOR
            hopOperator = token.value
            next()
            if token? and token.type == Scanner.IDENTIFIER
              path.appendSegment token.value, hopOperator
              next()
            else
              throw new Error "Missing property ID at position " + makePosition()
          else if token.type == Scanner.DELIMITER and token.value == '('
            next()
            expressions = parseExpressionList()
            if token && token.type == Scanner.DELIMITER
              if token.value == ')'
                next()
                if token && token.type == Scanner.OPERATOR and token.value == '*'
                  next()
                  path.appendSubPath Expression.initClosure expressions
                else
                  path.appendSubPath Expression.initExpressionSet expressions
              else
                throw new Error "Mismatched ')' at position " + makePosition()
            else
              throw new Error "Mismatched ')' at position " + makePosition()
        path
      # factor ::= number | string | path | identifier [ "(" args ")" ]
      #          | "(" expression ")"
      parseSubExpression = ->
        result = null
        args = []
        if !token?
          throw new Error "Missing factor at end of expression"
        switch token.type
          when Scanner.OPERATOR
            # NOTE(review): an operator here yields null rather than an
            # error — confirm callers tolerate a null factor.
            return result
          when Scanner.NUMBER
            result = Expression.initConstant token.value, "number"
            next()
          when Scanner.STRING
            result = Expression.initConstant(token.value, "text");
            next();
          when Scanner.PATH_OPERATOR then result = parsePath()
          when Scanner.IDENTIFIER
            identifier = token.value
            next()
            # Control names (if/foreach/default) take precedence over
            # plain registered functions and bare path roots.
            if Expression.controls[identifier]?
              if token? and token.type == Scanner.DELIMITER and token.value == "("
                next()
                if token? and token.type == Scanner.DELIMITER and token.value == ")"
                  args = []
                else
                  args = parseExpressionList()
                result = Expression.initControlCall identifier, args
                if token? and token.type == Scanner.DELIMITER and token.value == ")"
                  next()
                else
                  throw new Error "Missing ) to end " + identifier + " at position " + makePosition()
              else
                throw new Error "Missing ( to start " + identifier + " at position " + makePosition()
            else
              if token? and token.type == Scanner.DELIMITER and token.value == "("
                next()
                if token? and token.type == Scanner.DELIMITER and token.value == ")"
                  args = []
                else
                  args = parseExpressionList()
                result = Expression.initFunctionCall identifier, args
                if token? and token.type == Scanner.DELIMITER and token.value == ")"
                  next()
                else
                  throw new Error "Missing ) after function call " + identifier + " at position " + makePosition()
              else
                # Bare identifier: a named root followed by a path.
                result = parsePath()
                result.setRootName identifier
          when Scanner.DELIMITER
            if token.value == "("
              next()
              result = parseExpression()
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                next()
              else
                throw new Error "Missing ) at position " + makePosition()
            else
              throw new Error "Unexpected text " + token.value + " at position " + makePosition()
          else
            throw new Error "Unexpected text " + token.value + " at position " + makePosition()
        result
      # expression ::= factor ( comparison-operator factor )*
      parseExpression = ->
        expression = parseSubExpression()
        while token?.type == Scanner.OPERATOR && token.value in [ "=", "<", ">", "<>", "<=", ">=" ]
          operator = token.value
          next()
          expression = Expression.initOperator operator, [ expression, parseSubExpression() ]
        expression
      if several
        roots = parseExpressionList()
        expressions = []
        expressions.push Expression.initExpression(r) for r in roots
        return expressions
      else
        return [Expression.initExpression(parseExpression())]
    # Parse string `s` from startIndex (default 0). On exit — normal or
    # exceptional — results.index holds the position the scanner reached.
    that.parse = (s, startIndex, results) ->
      startIndex ?= 0
      results ?= {}
      scanner = Expression.initScanner s, startIndex
      try
        return internalParse(scanner, false)[0]
      finally
        results.index = if scanner.token()? then scanner.token().start else scanner.index()
    that
  # ## Expression.initScanner
  #
  # Hand-written tokenizer for the expression language. #next() advances;
  # #token() returns { type, value, start, end } or null at end of input.
  # Token types are the integer constants attached to this factory below.
  Expression.initScanner = (text, startIndex) ->
    that = {}
    # trailing space guards the two-character lookahead (c2/c3) below
    _text = text + " "
    _maxIndex = text.length
    _index = startIndex
    _token = null
    isDigit = (c) -> "0123456789".indexOf(c) >= 0
    that.token = -> _token
    that.index = -> _index
    # Scan one token into _token (left null at end of input).
    that.next = ->
      _token = null
      # skip whitespace
      _index += 1 while _index < _maxIndex and " \t\r\n".indexOf(_text[_index]) >= 0
      if _index < _maxIndex
        c1 = _text.charAt _index
        c2 = _text.charAt _index + 1
        c3 = _text.charAt _index + 2
        if ".!".indexOf(c1) >= 0
          # short hop operators: "." "!" and multiplicity-preserving ".@" "!@"
          if c2 == "@"
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: c1 + c2
              start: _index
              end: _index + 2
            _index += 2
          else
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: c1
              start: _index
              end: _index + 1
            _index += 1
        else if c1 == "<" and c2 == "-"
          # long backward hops: "<-" => "!", "<-@" => "!@"
          if c3 == "@"
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: "!@"
              start: _index
              end: _index + 3
            _index += 3
          else
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: "!"
              start: _index
              end: _index + 2
            _index += 2
        else if c1 == "-" and c2 == ">"
          # long forward hops: "->" => ".", "->@" => ".@"
          if c3 == "@"
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: ".@"
              start: _index
              end: _index + 3
            _index += 3
          else
            _token =
              type: Expression.initScanner.PATH_OPERATOR
              value: "."
              start: _index
              end: _index + 2
            _index += 2
        else if "<>".indexOf(c1) >= 0
          # comparisons, including two-character <=, >=, <>, ><
          if (c2 == "=") or ("<>".indexOf(c2) >= 0 and c1 != c2)
            _token =
              type: Expression.initScanner.OPERATOR
              value: c1 + c2
              start: _index
              end: _index + 2
            _index += 2
          else
            _token =
              type: Expression.initScanner.OPERATOR
              value: c1
              start: _index
              end: _index + 1
            _index += 1
        else if "+-*/=".indexOf(c1) >= 0
          _token =
            type: Expression.initScanner.OPERATOR
            value: c1
            start: _index
            end: _index + 1
          _index += 1
        else if "()".indexOf(c1) >= 0
          _token =
            type: Expression.initScanner.DELIMITER
            value: c1
            start: _index
            end: _index + 1
          _index += 1
        else if "\"'".indexOf(c1) >= 0
          # quoted strings
          i = _index + 1
          while i < _maxIndex
            break if _text.charAt(i) == c1 and _text.charAt(i - 1) != "\\"
            i += 1
          if i < _maxIndex
            # unescape \' and \" inside the literal
            _token =
              type: Expression.initScanner.STRING
              value: _text.substring(_index + 1, i).replace(/\\'/g, "'").replace(/\\"/g, '"')
              start: _index
              end: i + 1
            _index = i + 1
          else
            throw new Error "Unterminated string starting at " + String(_index)
        else if isDigit c1
          # number
          i = _index
          i += 1 while i < _maxIndex and isDigit(_text.charAt i)
          if i < _maxIndex and _text.charAt(i) == "."
            i += 1
            i += 1 while i < _maxIndex and isDigit(_text.charAt i)
          _token =
            type: Expression.initScanner.NUMBER
            value: parseFloat(_text.substring(_index, i))
            start: _index
            end: i
          _index = i;
        else
          # identifier: any run of characters not in the delimiter set
          i = _index
          while i < _maxIndex
            c = _text.charAt i
            break unless "(),.!@ \t".indexOf(c) < 0
            i += 1
          _token =
            type: Expression.initScanner.IDENTIFIER
            value: _text.substring(_index, i)
            start: _index
            end: i
          _index = i
    # prime the first token
    that.next()
    that
  # Token-type constants attached to the scanner factory; the parser
  # compares token.type against these.
  Expression.initScanner.DELIMITER = 0
  Expression.initScanner.NUMBER = 1
  Expression.initScanner.STRING = 2
  Expression.initScanner.IDENTIFIER = 3
  Expression.initScanner.OPERATOR = 4
  Expression.initScanner.PATH_OPERATOR = 5
  # Registries for user-defined expression functions and their helpers.
  Expression.functions = { }
  Expression.FunctionUtilities = { }
exports.registerSimpleMappingFunction = (name, f, valueType) ->
Expression.functions[name] =
f: (args) ->
set = MITHgrid.Data.Set.initInstance()
evalArg = (arg) ->
arg.forEachValue (v) ->
v2 = f(v)
set.add v2 if v2?
evalArg arg for arg in args
Expression.initCollection set, valueType
| true | # # Expression Parser
#
# Everything here is private except for a few exported objects and functions.
#
#
# ## Expressions
#
# Expressions describe a path through the data graph held in a data store.
#
# Expressions hop from node to node in one of two directions: forward or backward. Forward goes from an item ID through a property
# to arrive at a new value. Backward goes from a value through a property to arrive at a new item ID.
#
# For example, if we have a data store with items holding information about books, such as the following:
#
# [{
# id: "book1",
# author: "author1",
# title: "A Tale of Two Cities",
# pages: 254
# }, {
# id: "author1",
#        name: "Charles Dickens"
# }]
#
# Then .name would return "Charles Dickens" if we started with the item ID "author1". But .author.name would return the same
# value if we started with the item ID "book1".
#
# If we start with "Charles Dickens" (the value), we can find the number of pages in the books with the following expression:
# !name!author.pages (or <-name<-author->pages using the longer notation).
#
# . and -> use a forward index and must have an item ID on the left side
#
# ! and <- use a reverse index and will result in an item ID on the right side
#
# .foo* means to follow the foo property until you can't any more, returning
# the ids along the way
# !foo* means to follow the foo property backward until you can't any more,
# returning the ids along the way
# (...)* means to apply the subgraph-traversal as many times as possible
#
MITHgrid.namespace "Expression.Basic", (exports) ->
Expression = {}
  # Binary operator table used by Expression.initOperator ("><" is a
  # synonym for "<>"); "number" argumentType coerces via parseFloat.
  _operators =
    "+":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a + b
    "-":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a - b
    "*":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a * b
    "/":
      argumentType: "number"
      valueType: "number"
      f: (a, b) -> a / b
    "=":
      valueType: "boolean"
      f: (a, b) -> a == b
    "<>":
      valueType: "boolean"
      f: (a, b) -> a != b
    "><":
      valueType: "boolean"
      f: (a, b) -> a != b
    "<":
      valueType: "boolean"
      f: (a, b) -> a < b
    ">":
      valueType: "boolean"
      f: (a, b) -> a > b
    "<=":
      valueType: "boolean"
      f: (a, b) -> a <= b
    ">=":
      valueType: "boolean"
      f: (a, b) -> a >= b
# ## MITHgrid.Expression.Basic.controls
#
# Control functions may be defined for use in expressions. See the existing control functions for examples of
# how to write them.
#
# All control functions take the following parameters:
#
# * args
# * roots
# * rootValueTypes
# * defaultRootName
# * database
#
# All control functions should return a collection of items (using MITHgrid.Expression.initCollection collections)
#
  # Built-in control functions: if(cond, then, else), foreach(coll, body)
  # and default(e1, e2, ...) — see the comment block above for the contract.
  Expression.controls = exports.controls =
    # ### if
    #
    "if":
      f: (args, roots, rootValueTypes, defaultRootName, database) ->
        conditionCollection = args[0].evaluate roots, rootValueTypes, defaultRootName, database
        condition = false
        conditionCollection.forEachValue (v) ->
          if v
            condition = true
            return true
          else
            return undefined
        if condition
          args[1].evaluate roots, rootValueTypes, defaultRootName, database
        else
          args[2].evaluate roots, rootValueTypes, defaultRootName, database
    # ### foreach
    #
    "foreach":
      f: (args, roots, rootValueTypes, defaultRootName, database) ->
        collection = args[0].evaluate roots, rootValueTypes, defaultRootName, database
        oldValue = roots.value
        oldValueType = rootValueTypes.value
        results = []
        valueType = "text"
        rootValueTypes.value = collection.valueType
        collection.forEachValue (element) ->
          roots.value = element
          collection2 = args[1].evaluate roots, rootValueTypes, defaultRootName, database
          valueType = collection2.valueType
          collection2.forEachValue (result) ->
            results.push result
        roots.value = oldValue
        rootValueTypes.value = oldValueType
        Expression.initCollection results, valueType
    # first non-empty argument collection wins
    "default":
      f: (args, roots, rootValueTypes, defaultRootName, database) ->
        for arg in args
          collection = arg.evaluate roots, rootValueTypes, defaultRootName, database
          if collection.size() > 0
            return collection
        Expression.initCollection [], "text"
  # Public wrapper around a parsed root node, exposing the evaluate
  # variants used by the rest of MITHgrid plus path-walking helpers.
  Expression.initExpression = (rootNode) ->
    that = {}
    that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
      collection = rootNode.evaluate roots, rootValueTypes, defaultRootName, database
      return {
        values: collection.getSet()
        valueType: collection.valueType
        size: collection.size
      }
    # Convenience: evaluate with itemID bound to the "value" root as "item".
    that.evaluateOnItem = (itemID, database) ->
      that.evaluate({
        "value": itemID
      }, {
        "value": "item"
      },
      "value",
      database
      )
    # Evaluate keeping only one value (the last one visited), or null.
    that.evaluateSingle = (roots, rootValueTypes, defaultRootName, database) ->
      collection = rootNode.evaluate roots, rootValueTypes, defaultRootName, database
      result =
        value: null
        valueType: collection.valueType
      collection.forEachValue (v) ->
        result.value = v
        true
      result;
    that.isPath = rootNode.isPath
    if that.isPath
      that.getPath = -> rootNode
      that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
        rootNode.testExists roots, rootValueTypes, defaultRootName, database
    else
      that.getPath = -> null
      that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
        that.evaluate(roots, rootValueTypes, defaultRootName, database).values.size() > 0
    # Walking helpers delegate straight to the root node.
    that.evaluateBackward = (value, valueType, filter, database) ->
      rootNode.walkBackward [value], valueType, filter, database
    that.walkForward = (values, valueType, database) ->
      rootNode.walkForward values, valueType, database
    that.walkBackward = (values, valueType, filter, database) ->
      rootNode.walkBackward values, valueType, filter, database
    that
# Produce a lightweight read-only collection over either a plain array
# of values or a MITHgrid Set, tagged with its value type. Iteration via
# forEachValue stops early when the visitor returns true.
Expression.initCollection = exports.initCollection = (values, valueType) ->
  that =
    valueType: valueType
  if values instanceof Array
    # Array backing: hand-rolled iteration, membership, and size.
    that.forEachValue = (f) ->
      for v in values
        break if f(v) == true
    that.contains = (v) -> v in values
    that.size = -> values.length
    that.getSet = -> MITHgrid.Data.Set.initInstance values
  else
    # Set backing: delegate straight to the set's own API.
    that.forEachValue = values.visit
    that.contains = values.contains
    that.size = values.size
    that.getSet = -> values
  that.isPath = false
  that
# An expression node that always yields a fixed single-value collection
# of the given value type, regardless of the evaluation context.
Expression.initConstant = (value, valueType) ->
  that =
    isPath: false
    evaluate: (roots, rootValueTypes, defaultRootName, database) ->
      Expression.initCollection [value], valueType
  that
# Binary operator node: evaluates both operand expressions and applies
# the operator function (looked up in the module-level _operators table)
# to every pairing of their values, producing the cross-product result.
Expression.initOperator = (operator, args) ->
  that = {}
  _operator = operator
  _args = args
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    operands = (a.evaluate(roots, rootValueTypes, defaultRootName, database) for a in _args)
    op = _operators[_operator]
    fn = op.f
    pairings = []
    if op.argumentType == "number"
      # Numeric operators: coerce non-number values with parseFloat first.
      operands[0].forEachValue (left) ->
        left = parseFloat left unless typeof(left) == "number"
        operands[1].forEachValue (right) ->
          right = parseFloat right unless typeof(right) == "number"
          pairings.push fn(left, right)
    else
      operands[0].forEachValue (left) ->
        operands[1].forEachValue (right) ->
          pairings.push fn(left, right)
    Expression.initCollection pairings, op.valueType
  that.isPath = false
  that
# Wraps a named function call as an evaluable node. The argument
# expressions are evaluated first; the registered entry in
# Expression.functions then receives the resulting collections.
# Throws if no function with this name is registered.
Expression.initFunctionCall = (name, args) ->
  that = {}
  _args = args
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    args = []
    args.push(a.evaluate roots, rootValueTypes, defaultRootName, database ) for a in _args
    if Expression.functions[name]?.f?
      return Expression.functions[name].f args
    else
      # BUG FIX: previously interpolated the undefined variable `_name`,
      # so the error message always read "named undefined".
      throw new Error "No such function named #{name}"
  that.isPath = false
  that
# Wraps a control invocation as an evaluable node. Unlike function
# calls, the argument expressions are passed UNevaluated so the control
# can decide when and how often to evaluate them (e.g. foreach, if).
Expression.initControlCall = (name, args) ->
  that = {}
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    # Guard against unregistered control names with a descriptive error
    # (consistent with initFunctionCall) instead of a raw TypeError.
    unless Expression.controls[name]?.f?
      throw new Error "No such control named #{name}"
    Expression.controls[name].f args, roots, rootValueTypes, defaultRootName, database
  that.isPath = false
  that
# Path expression node: a chain of property "hop" segments starting from
# a named root. Supports forward (".") and backward ("!") hops, the
# multi-valued variants (".@" / "!@"), and embedded sub-path expressions.
Expression.initPath = (property, forward) ->
  that = {}
  _rootName = null
  _segments = []
  #
  # If isMultiple == true (.@ or !@ instead of . or !), then we
  # collect all matching values regardless of multiplicity. Otherwise,
  # we only return one instance of each matching value.
  #

  # Walk the segments left-to-right starting from `collection`.
  walkForward = (collection, database) ->
    forwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getObjects(v, segment.property).visit (v2) -> a.push v2
      a
    backwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getSubjects(v, segment.property).visit (v2) -> a.push v2
      a
    for i in [ 0 ... _segments.length ]
      segment = _segments[i]
      if segment.expression?
        if segment.forward
          # simply evaluate the expressions and report the results
          collection = segment.expression.evaluateOnItem(collection.getSet().items(), database)
        else
          # walk backward (not implemented for sub-path expressions)
      else if segment.isMultiple
        a = []
        if segment.forward
          a = forwardArraySegmentFn segment
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
        else
          a = backwardArraySegmentFn segment
          valueType = "item"
        collection = Expression.initCollection a, valueType
      else
        if segment.forward
          values = database.getObjectsUnion collection.getSet(), segment.property
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
          collection = Expression.initCollection values, valueType
        else
          values = database.getSubjectsUnion collection.getSet(), segment.property
          collection = Expression.initCollection values, "item"
    collection

  # Walk the segments right-to-left, inverting each hop; `filter` (a set
  # or array) restricts the values accepted at the first segment.
  walkBackward = (collection, filter, database) ->
    # NOTE: these helpers read the loop variable `i` from the for-loop
    # below; they are only invoked while that loop is running.
    forwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getSubjects(v, segment.property).visit (v2) ->
          a.push v2 if i > 0 or !filter? or filter.contains v2
      a
    backwardArraySegmentFn = (segment) ->
      a = []
      collection.forEachValue (v) ->
        database.getObjects(v, segment.property).visit (v2) ->
          a.push v2 if i > 0 or !filter? or filter.contains v2
      a
    if filter instanceof Array
      filter = MITHgrid.Data.Set.initInstance filter
    # BUG FIX: force a descending iteration with "by -1" — without it,
    # an empty segment list makes [-1..0] iterate UPWARD (-1, then 0)
    # and index undefined segments.
    for i in [ _segments.length - 1 .. 0 ] by -1
      segment = _segments[i]
      if segment.isMultiple
        a = []
        if segment.forward
          a = forwardArraySegmentFn segment
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
        else
          a = backwardArraySegmentFn segment
          valueType = "item"
        collection = Expression.initCollection a, valueType
      else if segment.forward
        values = database.getSubjectsUnion(collection.getSet(), segment.property, null, if i == 0 then filter else null)
        collection = Expression.initCollection values, "item"
      else
        values = database.getObjectsUnion(collection.getSet(), segment.property, null, if i == 0 then filter else null)
        property = database.getProperty segment.property
        valueType = if property? then property.getValueType() else "text"
        collection = Expression.initCollection values, valueType
    collection

  # A path constructed with an initial property gets one seed segment.
  if property?
    _segments.push
      property: property
      forward: forward
      isMultiple: false

  that.isPath = true
  that.setRootName = (rootName) -> _rootName = rootName

  # Append a hop: "." forward / "!" backward; a second character ("@")
  # marks the hop as multi-valued.
  that.appendSegment = (property, hopOperator) ->
    _segments.push
      property: property
      forward: hopOperator[0] == "."
      isMultiple: hopOperator.length > 1

  that.getSegment = (index) ->
    if index < _segments.length
      segment = _segments[index]
      return {
        property: segment.property
        forward: segment.forward
        isMultiple: segment.isMultiple
      }
    else
      return null

  that.appendSubPath = (expression) ->
    _segments.push
      expression: expression
      forward: true
      isMultiple: true

  that.getLastSegment = -> that.getSegment _segments.length - 1
  that.getSegmentCount = -> _segments.length

  # Find all subjects whose value for the LAST segment's property falls
  # in [from, to), then walk the remaining segments backward.
  that.rangeBackward = (from, to, filter, database) ->
    set = MITHgrid.Data.Set.initInstance()
    valueType = "item"
    if _segments.length > 0
      segment = _segments[_segments.length - 1]
      if segment.forward
        database.getSubjectsInRange(segment.property, from, to, false, set, if _segments.length == 1 then filter else null)
      else
        # BUG FIX: error message read "Last path of segment".
        throw new Error "Last segment of path must be forward"
      # BUG FIX: "by -1" — with fewer than two segments, [-1..0] would
      # otherwise iterate upward and index undefined segments.
      for i in [ _segments.length - 2 .. 0 ] by -1
        segment = _segments[i]
        if segment.forward
          set = database.getSubjectsUnion(set, segment.property, null, if i == 0 then filter else null)
          valueType = "item"
        else
          set = database.getObjectsUnion(set, segment.property, null, if i == 0 then filter else null)
          # BUG FIX: was `database.getPropertysegment.property` — a
          # property access on an undefined name instead of a call.
          property = database.getProperty segment.property
          valueType = if property? then property.getValueType() else "text"
    return {
      valueType: valueType
      values: set
      count: set.size()
    }

  # Resolve the root binding (named root or the default) and walk the
  # path forward from it.
  that.evaluate = (roots, rootValueTypes, defaultRootName, database) ->
    rootName = if _rootName? then _rootName else defaultRootName
    valueType = if rootValueTypes[rootName]? then rootValueTypes[rootName] else "text"
    collection = null
    if roots[rootName]?
      root = roots[rootName]
      if $.isPlainObject(root) or root instanceof Array
        collection = Expression.initCollection root, valueType
      else
        collection = Expression.initCollection [root], valueType
      return walkForward collection, database
    else
      throw new Error "No such variable called " + rootName

  that.testExists = (roots, rootValueTypes, defaultRootName, database) ->
    that.evaluate(roots, rootValueTypes, defaultRootName, database).size() > 0
  that.evaluateBackward = (value, valueType, filter, database) ->
    collection = Expression.initCollection [value], valueType
    walkBackward collection, filter, database
  that.walkForward = (values, valueType, database) ->
    walkForward Expression.initCollection(values, valueType), database
  that.walkBackward = (values, valueType, filter, database) ->
    walkBackward Expression.initCollection(values, valueType), filter, database
  that
# This allows us to do the following:
# .foo(.bar.baz)*.bat and follow any number of .bar.baz segments
# .foo(.bar,.baz)*.bat follows any number of .bar or .baz segments
#
# Computes the transitive closure of the given expressions: repeatedly
# applies them to newly discovered items until a pass yields nothing new.
Expression.initClosure = (expressions) ->
  that = {}
  that.isPath = false
  expressions = [ expressions ] unless $.isArray expressions
  that.evaluateOnItem = (roots, database) ->
    # BUG FIX: the global is spelled "MITHgrid", not "MITHGrid" — the
    # misspelling threw a ReferenceError at runtime.
    finalSet = MITHgrid.Data.Set.initInstance()
    valueType = null
    # Seed pass: apply each expression to the starting roots.
    for ex in expressions
      set = ex.evaluate({ "value": roots }, { "value": "item" }, "value", database)
      set.getSet().visit finalSet.add
      valueType ?= set.valueType
    nextRoots = finalSet.items()
    # Iterate until no new items are discovered.
    while nextRoots.length > 0
      nextSet = MITHgrid.Data.Set.initInstance()
      for ex in expressions
        set = ex.evaluate({ "value": nextRoots }, { "value": "item" }, "value", database)
        set.getSet().visit (v) ->
          if !finalSet.contains(v)
            nextSet.add(v)
            finalSet.add(v)
      nextRoots = nextSet.items()
    return {
      values: finalSet
      getSet: -> finalSet
      valueType: valueType || "text"
      size: finalSet.size()
    }
  that
# Applies a set of expressions once (no closure) to the given roots and
# returns the union of their results — the non-starred counterpart of
# Expression.initClosure, sharing its evaluateOnItem contract.
Expression.initExpressionSet = (expressions) ->
  that = {}
  that.isPath = false
  expressions = [ expressions ] unless $.isArray expressions
  # BUG FIX: the parameter was named `root` while the body referenced the
  # undefined `roots`; renamed to `roots` to match initClosure.
  that.evaluateOnItem = (roots, database) ->
    # BUG FIX: was the misspelled `finaleSet` assigned from the equally
    # misspelled global `MITHGrid` — both threw at runtime.
    finalSet = MITHgrid.Data.Set.initInstance()
    valueType = null
    for ex in expressions
      set = ex.evaluate({ "value": roots }, { "value": "item" }, "value", database)
      set.getSet().visit finalSet.add
      valueType ?= set.valueType
    return {
      values: finalSet
      getSet: -> finalSet
      valueType: valueType || "text"
      size: finalSet.size()
    }
  # BUG FIX: the factory previously fell off the end returning the last
  # assignment (the evaluateOnItem function) instead of the object.
  that
# Recursive-descent parser for the expression language. Returns an
# object whose #parse method turns a source string into an
# Expression.initExpression node.
Expression.initParser = exports.initInstance = ->
  that = {}

  # Parse from `scanner`; when `several` is true, parse a comma-separated
  # list of expressions instead of a single one. Always returns an array.
  internalParse = (scanner, several) ->
    token = scanner.token()
    Scanner = Expression.initScanner

    # Advance the scanner and refresh the lookahead token.
    next = ->
      scanner.next()
      token = scanner.token()

    # expression ("," expression)*
    parseExpressionList = ->
      expressions = [parseExpression()]
      while token? and token.type == Scanner.DELIMITER and token.value == ","
        next()
        expressions.push parseExpression()
      expressions

    # Best-effort source position for error messages.
    makePosition = -> if token? then token.start else scanner.index()

    # Parse a property path: hop segments ("." / "!" + identifier) and
    # parenthesized sub-path expressions, optionally starred ("*") to
    # form a transitive closure.
    parsePath = ->
      path = Expression.initPath()
      while token? && !(token.type == Scanner.OPERATOR || token.type == Scanner.DELIMITER && token.value == ')')
        if token.type == Scanner.PATH_OPERATOR
          hopOperator = token.value
          next()
          if token? and token.type == Scanner.IDENTIFIER
            path.appendSegment token.value, hopOperator
            next()
          else
            throw new Error "Missing property ID at position " + makePosition()
        else if token.type == Scanner.DELIMITER and token.value == '('
          next()
          expressions = parseExpressionList()
          if token && token.type == Scanner.DELIMITER
            if token.value == ')'
              next()
              # A trailing "*" makes the sub-path repeatable (closure).
              if token && token.type == Scanner.OPERATOR and token.value == '*'
                next()
                path.appendSubPath Expression.initClosure expressions
              else
                path.appendSubPath Expression.initExpressionSet expressions
            else
              throw new Error "Mismatched ')' at position " + makePosition()
          else
            throw new Error "Mismatched ')' at position " + makePosition()
      path

    # Parse one factor: constant, path, identifier (control call,
    # function call, or named path root), or a parenthesized expression.
    parseSubExpression = ->
      result = null
      args = []
      if !token?
        throw new Error "Missing factor at end of expression"
      switch token.type
        when Scanner.OPERATOR
          # NOTE(review): a leading operator yields null — confirm that
          # callers tolerate a null sub-expression here.
          return result
        when Scanner.NUMBER
          result = Expression.initConstant token.value, "number"
          next()
        when Scanner.STRING
          result = Expression.initConstant(token.value, "text");
          next();
        when Scanner.PATH_OPERATOR then result = parsePath()
        when Scanner.IDENTIFIER
          identifier = token.value
          next()
          if Expression.controls[identifier]?
            # Control call: arguments are handed over unevaluated.
            if token? and token.type == Scanner.DELIMITER and token.value == "("
              next()
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                args = []
              else
                args = parseExpressionList()
              result = Expression.initControlCall identifier, args
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                next()
              else
                throw new Error "Missing ) to end " + identifier + " at position " + makePosition()
            else
              throw new Error "Missing ( to start " + identifier + " at position " + makePosition()
          else
            if token? and token.type == Scanner.DELIMITER and token.value == "("
              # Function call.
              next()
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                args = []
              else
                args = parseExpressionList()
              result = Expression.initFunctionCall identifier, args
              if token? and token.type == Scanner.DELIMITER and token.value == ")"
                next()
              else
                throw new Error "Missing ) after function call " + identifier + " at position " + makePosition()
            else
              # Bare identifier: a named root followed by a path.
              result = parsePath()
              result.setRootName identifier
        when Scanner.DELIMITER
          if token.value == "("
            next()
            result = parseExpression()
            if token? and token.type == Scanner.DELIMITER and token.value == ")"
              next()
            else
              throw new Error "Missing ) at position " + makePosition()
          else
            throw new Error "Unexpected text " + token.value + " at position " + makePosition()
        else
          throw new Error "Unexpected text " + token.value + " at position " + makePosition()
      result

    # expression := subExpression (comparisonOperator subExpression)*
    parseExpression = ->
      expression = parseSubExpression()
      while token?.type == Scanner.OPERATOR && token.value in [ "=", "<", ">", "<>", "<=", ">=" ]
        operator = token.value
        next()
        expression = Expression.initOperator operator, [ expression, parseSubExpression() ]
      expression

    if several
      roots = parseExpressionList()
      expressions = []
      expressions.push Expression.initExpression(r) for r in roots
      return expressions
    else
      return [Expression.initExpression(parseExpression())]

  # Parse the string `s` starting at `startIndex`; on return (including
  # on error), `results.index` holds the position where parsing stopped.
  that.parse = (s, startIndex, results) ->
    startIndex ?= 0
    results ?= {}
    scanner = Expression.initScanner s, startIndex
    try
      return internalParse(scanner, false)[0]
    finally
      results.index = if scanner.token()? then scanner.token().start else scanner.index()
  that
# Hand-written tokenizer for the expression language. Each token is an
# object { type, value, start, end } where `type` is one of the
# Expression.initScanner.* constants declared below.
Expression.initScanner = (text, startIndex) ->
  that = {}
  _text = text + " " # trailing space guarantees safe lookahead
  _maxIndex = text.length
  _index = startIndex
  _token = null
  isDigit = (c) -> "0123456789".indexOf(c) >= 0

  # Most recently scanned token (null at end of input).
  that.token = -> _token
  # Current scan position in the input string.
  that.index = -> _index

  # Scan the next token into place, skipping leading whitespace.
  that.next = ->
    _token = null
    _index += 1 while _index < _maxIndex and " \t\r\n".indexOf(_text[_index]) >= 0
    if _index < _maxIndex
      c1 = _text.charAt _index
      c2 = _text.charAt _index + 1
      c3 = _text.charAt _index + 2
      if ".!".indexOf(c1) >= 0
        # Path hop: "." / "!", with an "@" suffix for multi-valued hops.
        if c2 == "@"
          _token =
            type: Expression.initScanner.PATH_OPERATOR
            value: c1 + c2
            start: _index
            end: _index + 2
          _index += 2
        else
          _token =
            type: Expression.initScanner.PATH_OPERATOR
            value: c1
            start: _index
            end: _index + 1
          _index += 1
      else if c1 == "<" and c2 == "-"
        # "<-" / "<-@" are aliases for the backward hops "!" / "!@".
        if c3 == "@"
          _token =
            type: Expression.initScanner.PATH_OPERATOR
            value: "!@"
            start: _index
            end: _index + 3
          _index += 3
        else
          _token =
            type: Expression.initScanner.PATH_OPERATOR
            value: "!"
            start: _index
            end: _index + 2
          _index += 2
      else if c1 == "-" and c2 == ">"
        # "->" / "->@" are aliases for the forward hops "." / ".@".
        if c3 == "@"
          _token =
            type: Expression.initScanner.PATH_OPERATOR
            value: ".@"
            start: _index
            end: _index + 3
          _index += 3
        else
          _token =
            type: Expression.initScanner.PATH_OPERATOR
            value: "."
            start: _index
            end: _index + 2
          _index += 2
      else if "<>".indexOf(c1) >= 0
        # Comparison operators: "<", ">", "<=", ">=", "<>".
        if (c2 == "=") or ("<>".indexOf(c2) >= 0 and c1 != c2)
          _token =
            type: Expression.initScanner.OPERATOR
            value: c1 + c2
            start: _index
            end: _index + 2
          _index += 2
        else
          _token =
            type: Expression.initScanner.OPERATOR
            value: c1
            start: _index
            end: _index + 1
          _index += 1
      else if "+-*/=".indexOf(c1) >= 0
        _token =
          type: Expression.initScanner.OPERATOR
          value: c1
          start: _index
          end: _index + 1
        _index += 1
      else if "(),".indexOf(c1) >= 0
        # BUG FIX: "," was missing from the delimiter set, so a comma
        # fell through to the identifier branch, produced an empty
        # IDENTIFIER token without advancing _index, and hung the
        # scanner — even though the parser explicitly matches
        # DELIMITER "," tokens in expression lists.
        _token =
          type: Expression.initScanner.DELIMITER
          value: c1
          start: _index
          end: _index + 1
        _index += 1
      else if "\"'".indexOf(c1) >= 0
        # quoted strings (backslash-escaped quotes allowed)
        i = _index + 1
        while i < _maxIndex
          break if _text.charAt(i) == c1 and _text.charAt(i - 1) != "\\"
          i += 1
        if i < _maxIndex
          _token =
            type: Expression.initScanner.STRING
            value: _text.substring(_index + 1, i).replace(/\\'/g, "'").replace(/\\"/g, '"')
            start: _index
            end: i + 1
          _index = i + 1
        else
          throw new Error "Unterminated string starting at " + String(_index)
      else if isDigit c1
        # number: integer or decimal
        i = _index
        i += 1 while i < _maxIndex and isDigit(_text.charAt i)
        if i < _maxIndex and _text.charAt(i) == "."
          i += 1
          i += 1 while i < _maxIndex and isDigit(_text.charAt i)
        _token =
          type: Expression.initScanner.NUMBER
          value: parseFloat(_text.substring(_index, i))
          start: _index
          end: i
        _index = i;
      else
        # identifier: everything up to the next delimiter/hop/whitespace
        i = _index
        while i < _maxIndex
          c = _text.charAt i
          break unless "(),.!@ \t".indexOf(c) < 0
          i += 1
        _token =
          type: Expression.initScanner.IDENTIFIER
          value: _text.substring(_index, i)
          start: _index
          end: i
        _index = i
  # Prime the first token so #token is valid immediately.
  that.next()
  that
# Token type codes attached to tokens produced by Expression.initScanner.
Expression.initScanner.DELIMITER = 0      # "(" / ")" (the parser also matches "," as a delimiter)
Expression.initScanner.NUMBER = 1         # numeric literal
Expression.initScanner.STRING = 2         # quoted string literal
Expression.initScanner.IDENTIFIER = 3     # bare name: root, property, function, control
Expression.initScanner.OPERATOR = 4       # + - * / = < > <= >= <>
Expression.initScanner.PATH_OPERATOR = 5  # . ! .@ !@ (and the -> / <- aliases)
# Registries for named expression functions and shared helper utilities.
Expression.functions = { }
Expression.FunctionUtilities = { }

# Registers a simple one-to-one mapping `f` under `name`: the resulting
# expression function maps every value of every argument collection
# through `f`, collecting non-null results into a set of `valueType`.
exports.registerSimpleMappingFunction = (name, f, valueType) ->
  Expression.functions[name] =
    f: (args) ->
      mapped = MITHgrid.Data.Set.initInstance()
      for arg in args
        arg.forEachValue (v) ->
          result = f(v)
          mapped.add result if result?
      Expression.initCollection mapped, valueType
|
[
{
"context": "s the name in the state', ->\n c.char.name = 'Doe'\n reloadState()\n expect(c.char.name).to",
"end": 9887,
"score": 0.9996848702430725,
"start": 9884,
"tag": "NAME",
"value": "Doe"
},
{
"context": " reloadState()\n expect(c.char.name).toBe 'Doe'\n... | spec/common/creation.spec.coffee | saua/srgen | 2 | cr = require '../../src/common/creation'
bt = require '../../src/common/basetypes'
describe 'Priority Creation', ->
c = null
beforeEach ->
c = new cr.Creation
it 'starts out with an empty char', ->
expect(c.char).toBeDefined()
expect(c.char.name).toBe null
expect(c.char.attributes.bod).toBeDefined()
expect(c.char.attributes.bod.value.value).toBe null
describe 'Attributes', ->
beforeEach ->
c.setMetatype 'human'
it 'does not allow default attribute values to be decrease', ->
expect(c.canDecreaseAttribute 'int').toBe false
it 'allows default attribute values to be increase', ->
expect(c.canIncreaseAttribute 'int').toBe true
it 'increasing a attribute raises it', ->
c.increaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 2
it 'increasing a attribute twice raises it even further', ->
c.increaseAttribute 'int'
c.increaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 3
it 'can not raise an attribute past its maximum', ->
c.increaseAttribute 'int', 5
expect(c.canIncreaseAttribute 'int').toBe false
it 'can not raise an attribute past its maximum, all at once', ->
expect(c.canIncreaseAttribute 'int', 6).toBe false
it 'decreasing an increased attribute lowers it', ->
c.increaseAttribute 'int'
c.decreaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 1
it 'increases used attribute points when increasing an attribute', ->
attributePoints = c.points.attributes.used
c.increaseAttribute 'int'
expect(c.points.attributes.used).toBe attributePoints+1
it 'resets used attribute points when decreasing an attribute', ->
attributePoints = c.points.attributes.used
c.increaseAttribute 'int'
c.decreaseAttribute 'int'
expect(c.points.attributes.used).toBe attributePoints
it 'can not raise magic attribute without magic', ->
expect(c.canIncreaseAttribute 'mag').toBe false
it 'does not flag a failing maigc attribute as invalid', ->
expect(c.attributeValueValid 'mag').toBe true
it 'uses special attribute points to increase edge', ->
c.increaseAttribute 'edg'
expect(c.points.specialAttributes.used).toBe 1
expect(c.points.attributes.used).toBe 0
it 'does not allow magic to be increased without a magic type', ->
expect(c.canIncreaseAttribute 'mag').toBe false
it 'does not allow resonance to be increased without a resonance type', ->
expect(c.canIncreaseAttribute 'res').toBe false
describe 'Metatype', ->
it 'does not give special attributes to unknown metatypes', ->
c.setPriority 'metatype', 'B'
expect(c.points.specialAttributes.available).toBeUndefined()
it 'gives 7 special attributes to human on B', ->
c.setMetatype 'human'
c.setPriority 'metatype', 'B'
expect(c.points.specialAttributes.available).toBe 7
it 'gives new minimum values to non-humans', ->
c.setMetatype 'dwarf'
expect(c.char.attributes.bod.min.value).toBe 3
expect(c.char.attributes.bod.value.value).toBe 3
it 'adapts attribute values when switching metataype', ->
c.setMetatype 'human'
c.increaseAttribute 'bod'
c.setMetatype 'dwarf'
expect(c.char.attributes.bod.value.value).toBe 4
describe 'Attributes', ->
it 'does not give attribute points without priority', ->
expect(c.points.attributes.available).toBe 0
it 'gives 20 attribute points on priority B', ->
c.setPriority 'attributes', 'B'
expect(c.points.attributes.available).toBe 20
describe 'Magic', ->
beforeEach ->
c = new cr.Creation
c.setMetatype 'human'
c.setPriority 'magic', 'B'
it 'does not give magic or resonance without magic selection', ->
expect(c.char.attributes.mag).toBeUndefined()
expect(c.char.attributes.res).toBeUndefined()
it 'gives 4 magic when selecting magician on priority B', ->
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe 4
it 'gives 6 magic when selecting adept on priority B', ->
c.setMagicType 'adept'
expect(c.char.attributes.mag.value.value).toBe 6
it 'gives 5 magic when selecting aspected magician on priority B', ->
c.setMagicType 'aspectedMagician'
expect(c.char.attributes.mag.value.value).toBe 5
it 'gives 4 magic when selecting aspected magician on priority B and lowering essence by 0.1', ->
c.setMagicType 'aspectedMagician'
c.char.attributes.ess.value.addEffect new bt.ModValue -0.1
expect(c.char.attributes.mag.value.value).toBe 4
it 'does not allow magic to be lowered below the initial value', ->
c.setMagicType 'aspectedMagician'
expect(c.canDecreaseAttribute 'mag').toBe false
expect(-> c.decreaseAttribute 'mag').toThrow()
it 'remembers the attribute increase when switching magic type', ->
c.setMagicType 'magician'
c.increaseAttribute 'mag'
c.setMagicType 'aspectedMagician'
expect(c.char.attributes.mag.value.value).toBe 6
it 'can take away magic', ->
c.setMagicType 'magician'
c.setMagicType null
expect(c.char.attributes.mag).toBeUndefined()
it 'resets the magic attribute when removing the magic type', ->
c.setMagicType 'magician'
origMagicValue = c.char.attributes.mag.value.value
c.increaseAttribute 'mag'
c.setMagicType null
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe origMagicValue
expect(c.points.specialAttributes.used).toBe 0
it 'does not reset magic attribute when switching between magic types', ->
c.setMagicType 'magician'
c.increaseAttribute 'mag'
magicValue = c.char.attributes.mag.value.value
c.setMagicType 'adept'
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe magicValue
it 'reduces magic to 0 when reducing the magic priority below a required level', ->
c.setMagicType 'magician'
c.setPriority 'magic', 'D'
expect(c.char.attributes.mag.value.value).toBe 0
it 'reports the magic type as valid if the magic priority is high enough', ->
c.setMagicType 'magician'
expect(c.validateMagicType()).toEqual []
it 'reports the magic type as invalid if the magic priority is not high enough', ->
c.setMagicType 'magician'
c.setPriority 'magic', 'D'
expect(c.validateMagicType()).not.toEqual []
it 'can take away power points', ->
c.setMagicType 'adept'
c.setMagicType null
expect(c.char.attributes.pp).toBeUndefined()
it 'gives 0 powerpoints when selecting mystic adept on priority B', ->
c.setMagicType 'mysticAdept'
expect(c.char.attributes.pp.value.value).toBe 0
it 'allows powerpoints to be purchased for mystic adepts', ->
c.setMagicType 'mysticAdept'
c.increaseAttribute 'pp'
expect(c.char.attributes.pp.value.value).toBe 1
expect(c.points.karma.used).toBe 2
it 'resets powerpoints mods when switching away from mystic adept', ->
c.setMagicType 'mysticAdept'
c.increaseAttribute 'pp'
c.setMagicType 'adept'
expect(c.char.attributes.pp.value.value).toBe 6
expect(c.points.karma.used).toBe 0
c.setMagicType 'mysticAdept'
expect(c.char.attributes.pp.value.value).toBe 0
it 'gives 6 powerpoints when selecting adept on priority B', ->
c.setMagicType 'adept'
expect(c.char.attributes.pp.value.value).toBe 6
describe 'Resonance', ->
beforeEach ->
c = new cr.Creation
c.setMetatype 'human'
c.setPriority 'magic', 'B'
it 'gives 4 resonance when selecting technomancer on priority B', ->
c.setResonanceType 'technomancer'
expect(c.char.attributes.res.value.value).toBe 4
it 'resets the resonance attribute when removing the resonance type', ->
c.setResonanceType 'technomancer'
origResonanceValue = c.char.attributes.res.value.value
c.increaseAttribute 'res'
c.setResonanceType null
c.setResonanceType 'technomancer'
expect(c.char.attributes.res.value.value).toBe origResonanceValue
expect(c.points.specialAttributes.used).toBe 0
it 'reduces resonance to 0 when reducing the resonance/magic priority below a required level', ->
c.setResonanceType 'technomancer'
c.setPriority 'magic', 'D'
expect(c.char.attributes.res.value.value).toBe 0
it 'reports the resonance type as valid if the magic priority is high enough', ->
c.setResonanceType 'technomancer'
expect(c.validateResonanceType()).toEqual []
it 'reports the resonance type as invalid if the magic priority is not high enough', ->
c.setResonanceType 'technomancer'
c.setPriority 'magic', 'D'
expect(c.validateResonanceType()).not.toEqual []
describe 'Skills', ->
it 'does not give skill points without priority', ->
expect(c.points.skills.available).toBe 0
expect(c.points.skillGroups.available).toBe 0
it 'gives 36/5 points for skills/skill groups on priority B', ->
c.setPriority 'skills', 'B'
expect(c.points.skills.available).toBe 36
expect(c.points.skillGroups.available).toBe 5
describe 'Resources', ->
it 'does not give resources without priority', ->
expect(c.points.resources.available).toBe 0
it 'gives 275k Nuyen on priority B', ->
c.setPriority 'resources', 'B'
expect(c.points.resources.available).toBe 275000
describe 'State Handling', ->
reloadState = ->
state = c.exportState()
c = new cr.Creation state
it 'can handle the initial state', ->
reloadState()
expect(c.metatype).toBe null
it 'remembers the name in the state', ->
c.char.name = 'Doe'
reloadState()
expect(c.char.name).toBe 'Doe'
it 'remembers partial priorities', ->
c.setPriority('skills', 'A')
reloadState()
expect(c.priority.skills).toBe 'A'
expect(c.priority.magic).toBe null
it 'remembers the metatype', ->
c.setMetatype 'human'
reloadState()
expect(c.metatype).toBe 'human'
it 'remembers the magicType', ->
c.setMagicType 'adept'
reloadState()
expect(c.char.magicType.name).toBe 'adept'
it 'remembers the resonanceType', ->
c.setResonanceType 'technomancer'
reloadState()
expect(c.char.resonanceType.name).toBe 'technomancer'
it 'remembers attribute points', ->
c.setMetatype 'human'
c.increaseAttribute 'int'
reloadState()
expect(c.char.attributes.int.value.value).toBe 2
| 108154 | cr = require '../../src/common/creation'
bt = require '../../src/common/basetypes'
describe 'Priority Creation', ->
c = null
beforeEach ->
c = new cr.Creation
it 'starts out with an empty char', ->
expect(c.char).toBeDefined()
expect(c.char.name).toBe null
expect(c.char.attributes.bod).toBeDefined()
expect(c.char.attributes.bod.value.value).toBe null
describe 'Attributes', ->
beforeEach ->
c.setMetatype 'human'
it 'does not allow default attribute values to be decrease', ->
expect(c.canDecreaseAttribute 'int').toBe false
it 'allows default attribute values to be increase', ->
expect(c.canIncreaseAttribute 'int').toBe true
it 'increasing a attribute raises it', ->
c.increaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 2
it 'increasing a attribute twice raises it even further', ->
c.increaseAttribute 'int'
c.increaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 3
it 'can not raise an attribute past its maximum', ->
c.increaseAttribute 'int', 5
expect(c.canIncreaseAttribute 'int').toBe false
it 'can not raise an attribute past its maximum, all at once', ->
expect(c.canIncreaseAttribute 'int', 6).toBe false
it 'decreasing an increased attribute lowers it', ->
c.increaseAttribute 'int'
c.decreaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 1
it 'increases used attribute points when increasing an attribute', ->
attributePoints = c.points.attributes.used
c.increaseAttribute 'int'
expect(c.points.attributes.used).toBe attributePoints+1
it 'resets used attribute points when decreasing an attribute', ->
attributePoints = c.points.attributes.used
c.increaseAttribute 'int'
c.decreaseAttribute 'int'
expect(c.points.attributes.used).toBe attributePoints
it 'can not raise magic attribute without magic', ->
expect(c.canIncreaseAttribute 'mag').toBe false
it 'does not flag a failing maigc attribute as invalid', ->
expect(c.attributeValueValid 'mag').toBe true
it 'uses special attribute points to increase edge', ->
c.increaseAttribute 'edg'
expect(c.points.specialAttributes.used).toBe 1
expect(c.points.attributes.used).toBe 0
it 'does not allow magic to be increased without a magic type', ->
expect(c.canIncreaseAttribute 'mag').toBe false
it 'does not allow resonance to be increased without a resonance type', ->
expect(c.canIncreaseAttribute 'res').toBe false
describe 'Metatype', ->
it 'does not give special attributes to unknown metatypes', ->
c.setPriority 'metatype', 'B'
expect(c.points.specialAttributes.available).toBeUndefined()
it 'gives 7 special attributes to human on B', ->
c.setMetatype 'human'
c.setPriority 'metatype', 'B'
expect(c.points.specialAttributes.available).toBe 7
it 'gives new minimum values to non-humans', ->
c.setMetatype 'dwarf'
expect(c.char.attributes.bod.min.value).toBe 3
expect(c.char.attributes.bod.value.value).toBe 3
it 'adapts attribute values when switching metataype', ->
c.setMetatype 'human'
c.increaseAttribute 'bod'
c.setMetatype 'dwarf'
expect(c.char.attributes.bod.value.value).toBe 4
describe 'Attributes', ->
it 'does not give attribute points without priority', ->
expect(c.points.attributes.available).toBe 0
it 'gives 20 attribute points on priority B', ->
c.setPriority 'attributes', 'B'
expect(c.points.attributes.available).toBe 20
describe 'Magic', ->
beforeEach ->
c = new cr.Creation
c.setMetatype 'human'
c.setPriority 'magic', 'B'
it 'does not give magic or resonance without magic selection', ->
expect(c.char.attributes.mag).toBeUndefined()
expect(c.char.attributes.res).toBeUndefined()
it 'gives 4 magic when selecting magician on priority B', ->
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe 4
it 'gives 6 magic when selecting adept on priority B', ->
c.setMagicType 'adept'
expect(c.char.attributes.mag.value.value).toBe 6
it 'gives 5 magic when selecting aspected magician on priority B', ->
c.setMagicType 'aspectedMagician'
expect(c.char.attributes.mag.value.value).toBe 5
it 'gives 4 magic when selecting aspected magician on priority B and lowering essence by 0.1', ->
c.setMagicType 'aspectedMagician'
c.char.attributes.ess.value.addEffect new bt.ModValue -0.1
expect(c.char.attributes.mag.value.value).toBe 4
it 'does not allow magic to be lowered below the initial value', ->
c.setMagicType 'aspectedMagician'
expect(c.canDecreaseAttribute 'mag').toBe false
expect(-> c.decreaseAttribute 'mag').toThrow()
it 'remembers the attribute increase when switching magic type', ->
c.setMagicType 'magician'
c.increaseAttribute 'mag'
c.setMagicType 'aspectedMagician'
expect(c.char.attributes.mag.value.value).toBe 6
it 'can take away magic', ->
c.setMagicType 'magician'
c.setMagicType null
expect(c.char.attributes.mag).toBeUndefined()
it 'resets the magic attribute when removing the magic type', ->
c.setMagicType 'magician'
origMagicValue = c.char.attributes.mag.value.value
c.increaseAttribute 'mag'
c.setMagicType null
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe origMagicValue
expect(c.points.specialAttributes.used).toBe 0
it 'does not reset magic attribute when switching between magic types', ->
c.setMagicType 'magician'
c.increaseAttribute 'mag'
magicValue = c.char.attributes.mag.value.value
c.setMagicType 'adept'
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe magicValue
it 'reduces magic to 0 when reducing the magic priority below a required level', ->
c.setMagicType 'magician'
c.setPriority 'magic', 'D'
expect(c.char.attributes.mag.value.value).toBe 0
it 'reports the magic type as valid if the magic priority is high enough', ->
c.setMagicType 'magician'
expect(c.validateMagicType()).toEqual []
it 'reports the magic type as invalid if the magic priority is not high enough', ->
c.setMagicType 'magician'
c.setPriority 'magic', 'D'
expect(c.validateMagicType()).not.toEqual []
it 'can take away power points', ->
c.setMagicType 'adept'
c.setMagicType null
expect(c.char.attributes.pp).toBeUndefined()
it 'gives 0 powerpoints when selecting mystic adept on priority B', ->
c.setMagicType 'mysticAdept'
expect(c.char.attributes.pp.value.value).toBe 0
it 'allows powerpoints to be purchased for mystic adepts', ->
c.setMagicType 'mysticAdept'
c.increaseAttribute 'pp'
expect(c.char.attributes.pp.value.value).toBe 1
expect(c.points.karma.used).toBe 2
it 'resets powerpoints mods when switching away from mystic adept', ->
c.setMagicType 'mysticAdept'
c.increaseAttribute 'pp'
c.setMagicType 'adept'
expect(c.char.attributes.pp.value.value).toBe 6
expect(c.points.karma.used).toBe 0
c.setMagicType 'mysticAdept'
expect(c.char.attributes.pp.value.value).toBe 0
it 'gives 6 powerpoints when selecting adept on priority B', ->
c.setMagicType 'adept'
expect(c.char.attributes.pp.value.value).toBe 6
describe 'Resonance', ->
beforeEach ->
c = new cr.Creation
c.setMetatype 'human'
c.setPriority 'magic', 'B'
it 'gives 4 resonance when selecting technomancer on priority B', ->
c.setResonanceType 'technomancer'
expect(c.char.attributes.res.value.value).toBe 4
it 'resets the resonance attribute when removing the resonance type', ->
c.setResonanceType 'technomancer'
origResonanceValue = c.char.attributes.res.value.value
c.increaseAttribute 'res'
c.setResonanceType null
c.setResonanceType 'technomancer'
expect(c.char.attributes.res.value.value).toBe origResonanceValue
expect(c.points.specialAttributes.used).toBe 0
it 'reduces resonance to 0 when reducing the resonance/magic priority below a required level', ->
c.setResonanceType 'technomancer'
c.setPriority 'magic', 'D'
expect(c.char.attributes.res.value.value).toBe 0
it 'reports the resonance type as valid if the magic priority is high enough', ->
c.setResonanceType 'technomancer'
expect(c.validateResonanceType()).toEqual []
it 'reports the resonance type as invalid if the magic priority is not high enough', ->
c.setResonanceType 'technomancer'
c.setPriority 'magic', 'D'
expect(c.validateResonanceType()).not.toEqual []
describe 'Skills', ->
it 'does not give skill points without priority', ->
expect(c.points.skills.available).toBe 0
expect(c.points.skillGroups.available).toBe 0
it 'gives 36/5 points for skills/skill groups on priority B', ->
c.setPriority 'skills', 'B'
expect(c.points.skills.available).toBe 36
expect(c.points.skillGroups.available).toBe 5
describe 'Resources', ->
it 'does not give resources without priority', ->
expect(c.points.resources.available).toBe 0
it 'gives 275k Nuyen on priority B', ->
c.setPriority 'resources', 'B'
expect(c.points.resources.available).toBe 275000
describe 'State Handling', ->
reloadState = ->
state = c.exportState()
c = new cr.Creation state
it 'can handle the initial state', ->
reloadState()
expect(c.metatype).toBe null
it 'remembers the name in the state', ->
c.char.name = '<NAME>'
reloadState()
expect(c.char.name).toBe '<NAME>'
it 'remembers partial priorities', ->
c.setPriority('skills', 'A')
reloadState()
expect(c.priority.skills).toBe 'A'
expect(c.priority.magic).toBe null
it 'remembers the metatype', ->
c.setMetatype 'human'
reloadState()
expect(c.metatype).toBe 'human'
it 'remembers the magicType', ->
c.setMagicType 'adept'
reloadState()
expect(c.char.magicType.name).toBe 'adept'
it 'remembers the resonanceType', ->
c.setResonanceType 'technomancer'
reloadState()
expect(c.char.resonanceType.name).toBe 'technomancer'
it 'remembers attribute points', ->
c.setMetatype 'human'
c.increaseAttribute 'int'
reloadState()
expect(c.char.attributes.int.value.value).toBe 2
| true | cr = require '../../src/common/creation'
bt = require '../../src/common/basetypes'
describe 'Priority Creation', ->
c = null
beforeEach ->
c = new cr.Creation
it 'starts out with an empty char', ->
expect(c.char).toBeDefined()
expect(c.char.name).toBe null
expect(c.char.attributes.bod).toBeDefined()
expect(c.char.attributes.bod.value.value).toBe null
describe 'Attributes', ->
beforeEach ->
c.setMetatype 'human'
it 'does not allow default attribute values to be decrease', ->
expect(c.canDecreaseAttribute 'int').toBe false
it 'allows default attribute values to be increase', ->
expect(c.canIncreaseAttribute 'int').toBe true
it 'increasing a attribute raises it', ->
c.increaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 2
it 'increasing a attribute twice raises it even further', ->
c.increaseAttribute 'int'
c.increaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 3
it 'can not raise an attribute past its maximum', ->
c.increaseAttribute 'int', 5
expect(c.canIncreaseAttribute 'int').toBe false
it 'can not raise an attribute past its maximum, all at once', ->
expect(c.canIncreaseAttribute 'int', 6).toBe false
it 'decreasing an increased attribute lowers it', ->
c.increaseAttribute 'int'
c.decreaseAttribute 'int'
expect(c.char.attributes.int.value.value).toBe 1
it 'increases used attribute points when increasing an attribute', ->
attributePoints = c.points.attributes.used
c.increaseAttribute 'int'
expect(c.points.attributes.used).toBe attributePoints+1
it 'resets used attribute points when decreasing an attribute', ->
attributePoints = c.points.attributes.used
c.increaseAttribute 'int'
c.decreaseAttribute 'int'
expect(c.points.attributes.used).toBe attributePoints
it 'can not raise magic attribute without magic', ->
expect(c.canIncreaseAttribute 'mag').toBe false
it 'does not flag a failing maigc attribute as invalid', ->
expect(c.attributeValueValid 'mag').toBe true
it 'uses special attribute points to increase edge', ->
c.increaseAttribute 'edg'
expect(c.points.specialAttributes.used).toBe 1
expect(c.points.attributes.used).toBe 0
it 'does not allow magic to be increased without a magic type', ->
expect(c.canIncreaseAttribute 'mag').toBe false
it 'does not allow resonance to be increased without a resonance type', ->
expect(c.canIncreaseAttribute 'res').toBe false
describe 'Metatype', ->
it 'does not give special attributes to unknown metatypes', ->
c.setPriority 'metatype', 'B'
expect(c.points.specialAttributes.available).toBeUndefined()
it 'gives 7 special attributes to human on B', ->
c.setMetatype 'human'
c.setPriority 'metatype', 'B'
expect(c.points.specialAttributes.available).toBe 7
it 'gives new minimum values to non-humans', ->
c.setMetatype 'dwarf'
expect(c.char.attributes.bod.min.value).toBe 3
expect(c.char.attributes.bod.value.value).toBe 3
it 'adapts attribute values when switching metataype', ->
c.setMetatype 'human'
c.increaseAttribute 'bod'
c.setMetatype 'dwarf'
expect(c.char.attributes.bod.value.value).toBe 4
describe 'Attributes', ->
it 'does not give attribute points without priority', ->
expect(c.points.attributes.available).toBe 0
it 'gives 20 attribute points on priority B', ->
c.setPriority 'attributes', 'B'
expect(c.points.attributes.available).toBe 20
describe 'Magic', ->
beforeEach ->
c = new cr.Creation
c.setMetatype 'human'
c.setPriority 'magic', 'B'
it 'does not give magic or resonance without magic selection', ->
expect(c.char.attributes.mag).toBeUndefined()
expect(c.char.attributes.res).toBeUndefined()
it 'gives 4 magic when selecting magician on priority B', ->
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe 4
it 'gives 6 magic when selecting adept on priority B', ->
c.setMagicType 'adept'
expect(c.char.attributes.mag.value.value).toBe 6
it 'gives 5 magic when selecting aspected magician on priority B', ->
c.setMagicType 'aspectedMagician'
expect(c.char.attributes.mag.value.value).toBe 5
it 'gives 4 magic when selecting aspected magician on priority B and lowering essence by 0.1', ->
c.setMagicType 'aspectedMagician'
c.char.attributes.ess.value.addEffect new bt.ModValue -0.1
expect(c.char.attributes.mag.value.value).toBe 4
it 'does not allow magic to be lowered below the initial value', ->
c.setMagicType 'aspectedMagician'
expect(c.canDecreaseAttribute 'mag').toBe false
expect(-> c.decreaseAttribute 'mag').toThrow()
it 'remembers the attribute increase when switching magic type', ->
c.setMagicType 'magician'
c.increaseAttribute 'mag'
c.setMagicType 'aspectedMagician'
expect(c.char.attributes.mag.value.value).toBe 6
it 'can take away magic', ->
c.setMagicType 'magician'
c.setMagicType null
expect(c.char.attributes.mag).toBeUndefined()
it 'resets the magic attribute when removing the magic type', ->
c.setMagicType 'magician'
origMagicValue = c.char.attributes.mag.value.value
c.increaseAttribute 'mag'
c.setMagicType null
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe origMagicValue
expect(c.points.specialAttributes.used).toBe 0
it 'does not reset magic attribute when switching between magic types', ->
c.setMagicType 'magician'
c.increaseAttribute 'mag'
magicValue = c.char.attributes.mag.value.value
c.setMagicType 'adept'
c.setMagicType 'magician'
expect(c.char.attributes.mag.value.value).toBe magicValue
it 'reduces magic to 0 when reducing the magic priority below a required level', ->
c.setMagicType 'magician'
c.setPriority 'magic', 'D'
expect(c.char.attributes.mag.value.value).toBe 0
it 'reports the magic type as valid if the magic priority is high enough', ->
c.setMagicType 'magician'
expect(c.validateMagicType()).toEqual []
it 'reports the magic type as invalid if the magic priority is not high enough', ->
c.setMagicType 'magician'
c.setPriority 'magic', 'D'
expect(c.validateMagicType()).not.toEqual []
it 'can take away power points', ->
c.setMagicType 'adept'
c.setMagicType null
expect(c.char.attributes.pp).toBeUndefined()
it 'gives 0 powerpoints when selecting mystic adept on priority B', ->
c.setMagicType 'mysticAdept'
expect(c.char.attributes.pp.value.value).toBe 0
it 'allows powerpoints to be purchased for mystic adepts', ->
c.setMagicType 'mysticAdept'
c.increaseAttribute 'pp'
expect(c.char.attributes.pp.value.value).toBe 1
expect(c.points.karma.used).toBe 2
it 'resets powerpoints mods when switching away from mystic adept', ->
c.setMagicType 'mysticAdept'
c.increaseAttribute 'pp'
c.setMagicType 'adept'
expect(c.char.attributes.pp.value.value).toBe 6
expect(c.points.karma.used).toBe 0
c.setMagicType 'mysticAdept'
expect(c.char.attributes.pp.value.value).toBe 0
it 'gives 6 powerpoints when selecting adept on priority B', ->
c.setMagicType 'adept'
expect(c.char.attributes.pp.value.value).toBe 6
describe 'Resonance', ->
beforeEach ->
c = new cr.Creation
c.setMetatype 'human'
c.setPriority 'magic', 'B'
it 'gives 4 resonance when selecting technomancer on priority B', ->
c.setResonanceType 'technomancer'
expect(c.char.attributes.res.value.value).toBe 4
it 'resets the resonance attribute when removing the resonance type', ->
c.setResonanceType 'technomancer'
origResonanceValue = c.char.attributes.res.value.value
c.increaseAttribute 'res'
c.setResonanceType null
c.setResonanceType 'technomancer'
expect(c.char.attributes.res.value.value).toBe origResonanceValue
expect(c.points.specialAttributes.used).toBe 0
it 'reduces resonance to 0 when reducing the resonance/magic priority below a required level', ->
c.setResonanceType 'technomancer'
c.setPriority 'magic', 'D'
expect(c.char.attributes.res.value.value).toBe 0
it 'reports the resonance type as valid if the magic priority is high enough', ->
c.setResonanceType 'technomancer'
expect(c.validateResonanceType()).toEqual []
it 'reports the resonance type as invalid if the magic priority is not high enough', ->
c.setResonanceType 'technomancer'
c.setPriority 'magic', 'D'
expect(c.validateResonanceType()).not.toEqual []
describe 'Skills', ->
it 'does not give skill points without priority', ->
expect(c.points.skills.available).toBe 0
expect(c.points.skillGroups.available).toBe 0
it 'gives 36/5 points for skills/skill groups on priority B', ->
c.setPriority 'skills', 'B'
expect(c.points.skills.available).toBe 36
expect(c.points.skillGroups.available).toBe 5
describe 'Resources', ->
it 'does not give resources without priority', ->
expect(c.points.resources.available).toBe 0
it 'gives 275k Nuyen on priority B', ->
c.setPriority 'resources', 'B'
expect(c.points.resources.available).toBe 275000
describe 'State Handling', ->
reloadState = ->
state = c.exportState()
c = new cr.Creation state
it 'can handle the initial state', ->
reloadState()
expect(c.metatype).toBe null
it 'remembers the name in the state', ->
c.char.name = 'PI:NAME:<NAME>END_PI'
reloadState()
expect(c.char.name).toBe 'PI:NAME:<NAME>END_PI'
it 'remembers partial priorities', ->
c.setPriority('skills', 'A')
reloadState()
expect(c.priority.skills).toBe 'A'
expect(c.priority.magic).toBe null
it 'remembers the metatype', ->
c.setMetatype 'human'
reloadState()
expect(c.metatype).toBe 'human'
it 'remembers the magicType', ->
c.setMagicType 'adept'
reloadState()
expect(c.char.magicType.name).toBe 'adept'
it 'remembers the resonanceType', ->
c.setResonanceType 'technomancer'
reloadState()
expect(c.char.resonanceType.name).toBe 'technomancer'
it 'remembers attribute points', ->
c.setMetatype 'human'
c.increaseAttribute 'int'
reloadState()
expect(c.char.attributes.int.value.value).toBe 2
|
[
{
"context": "sages = for message in @state.messages\n key = \"#{message.text}:#{message.timestamp}\"\n (\n <li key={key}>\n <Message",
"end": 1793,
"score": 0.9940725564956665,
"start": 1755,
"tag": "KEY",
"value": "\"#{message.text}:#{message.timestamp}\""
}
] | app/components/TextChat.react.coffee | ccns1/ccns71 | 52 | # @cjsx React.DOM
React = require('react')
Message = require('./Message.react')
assign = require('object-assign')
MessageStore = require('../stores/MessageStore')
SessionStore = require('../stores/SessionStore')
MessageActionCreators = require('../actions/MessageActionCreators')
PeerActionCreators = require('../actions/PeerActionCreators')
ENTER_KEY_CODE = 13
getStateFromStores = ->
result =
messages: MessageStore.getMessages()
identity: SessionStore.getIdentity()
return result
module.exports = React.createClass
getInitialState: ->
return assign {text: ''}, getStateFromStores()
componentDidMount: ->
MessageStore.addChangeListener(@_onChange)
SessionStore.addChangeListener(@_onChange)
componentDidUpdate: ->
@_scrollMessages()
@refs.messageBox.getDOMNode().focus()
componentWillUnmount: ->
MessageStore.removeChangeListener(@_onChange)
SessionStore.removeChangeListener(@_onChange)
_scrollMessages: ->
messagesEl = @refs.messages.getDOMNode();
messagesEl.scrollTop = messagesEl.scrollHeight;
_onChange: (event)->
@setState(getStateFromStores())
_onTextChange: (event)->
@setState(text: event.target.value)
_onKeyDown: (event)->
@_onSubmit(event) if event.keyCode == ENTER_KEY_CODE
_onSubmit: (event)->
event.preventDefault()
text = @state.text.trim()
if text != ''
message =
identity: @state.identity
text: text
type: 'message'
timestamp: (new Date).getTime()
MessageActionCreators.createMessage(message)
@setState(text: '')
render: ->
console.log "@state.identity:", @state.identity
console.log "readOnly:", !!@state.identity
messages = for message in @state.messages
key = "#{message.text}:#{message.timestamp}"
(
<li key={key}>
<Message message={message} />
</li>
)
return (
<div className="chat-wrapper">
<h4>Group Chat</h4>
<ul ref="messages" className="messages">
{messages}
</ul>
<div className="new-message">
<form onSubmit={@_onSubmit}>
<textarea ref="messageBox" placeholder="Type a message ..." onKeyDown={@_onKeyDown} onChange={@_onTextChange} value={@state.text} disabled={!@state.identity} />
</form>
</div>
</div>
)
_onChange: ->
@setState(getStateFromStores())
| 96695 | # @cjsx React.DOM
React = require('react')
Message = require('./Message.react')
assign = require('object-assign')
MessageStore = require('../stores/MessageStore')
SessionStore = require('../stores/SessionStore')
MessageActionCreators = require('../actions/MessageActionCreators')
PeerActionCreators = require('../actions/PeerActionCreators')
ENTER_KEY_CODE = 13
getStateFromStores = ->
result =
messages: MessageStore.getMessages()
identity: SessionStore.getIdentity()
return result
module.exports = React.createClass
getInitialState: ->
return assign {text: ''}, getStateFromStores()
componentDidMount: ->
MessageStore.addChangeListener(@_onChange)
SessionStore.addChangeListener(@_onChange)
componentDidUpdate: ->
@_scrollMessages()
@refs.messageBox.getDOMNode().focus()
componentWillUnmount: ->
MessageStore.removeChangeListener(@_onChange)
SessionStore.removeChangeListener(@_onChange)
_scrollMessages: ->
messagesEl = @refs.messages.getDOMNode();
messagesEl.scrollTop = messagesEl.scrollHeight;
_onChange: (event)->
@setState(getStateFromStores())
_onTextChange: (event)->
@setState(text: event.target.value)
_onKeyDown: (event)->
@_onSubmit(event) if event.keyCode == ENTER_KEY_CODE
_onSubmit: (event)->
event.preventDefault()
text = @state.text.trim()
if text != ''
message =
identity: @state.identity
text: text
type: 'message'
timestamp: (new Date).getTime()
MessageActionCreators.createMessage(message)
@setState(text: '')
render: ->
console.log "@state.identity:", @state.identity
console.log "readOnly:", !!@state.identity
messages = for message in @state.messages
key = <KEY>
(
<li key={key}>
<Message message={message} />
</li>
)
return (
<div className="chat-wrapper">
<h4>Group Chat</h4>
<ul ref="messages" className="messages">
{messages}
</ul>
<div className="new-message">
<form onSubmit={@_onSubmit}>
<textarea ref="messageBox" placeholder="Type a message ..." onKeyDown={@_onKeyDown} onChange={@_onTextChange} value={@state.text} disabled={!@state.identity} />
</form>
</div>
</div>
)
_onChange: ->
@setState(getStateFromStores())
| true | # @cjsx React.DOM
React = require('react')
Message = require('./Message.react')
assign = require('object-assign')
MessageStore = require('../stores/MessageStore')
SessionStore = require('../stores/SessionStore')
MessageActionCreators = require('../actions/MessageActionCreators')
PeerActionCreators = require('../actions/PeerActionCreators')
ENTER_KEY_CODE = 13
getStateFromStores = ->
result =
messages: MessageStore.getMessages()
identity: SessionStore.getIdentity()
return result
module.exports = React.createClass
getInitialState: ->
return assign {text: ''}, getStateFromStores()
componentDidMount: ->
MessageStore.addChangeListener(@_onChange)
SessionStore.addChangeListener(@_onChange)
componentDidUpdate: ->
@_scrollMessages()
@refs.messageBox.getDOMNode().focus()
componentWillUnmount: ->
MessageStore.removeChangeListener(@_onChange)
SessionStore.removeChangeListener(@_onChange)
_scrollMessages: ->
messagesEl = @refs.messages.getDOMNode();
messagesEl.scrollTop = messagesEl.scrollHeight;
_onChange: (event)->
@setState(getStateFromStores())
_onTextChange: (event)->
@setState(text: event.target.value)
_onKeyDown: (event)->
@_onSubmit(event) if event.keyCode == ENTER_KEY_CODE
_onSubmit: (event)->
event.preventDefault()
text = @state.text.trim()
if text != ''
message =
identity: @state.identity
text: text
type: 'message'
timestamp: (new Date).getTime()
MessageActionCreators.createMessage(message)
@setState(text: '')
render: ->
console.log "@state.identity:", @state.identity
console.log "readOnly:", !!@state.identity
messages = for message in @state.messages
key = PI:KEY:<KEY>END_PI
(
<li key={key}>
<Message message={message} />
</li>
)
return (
<div className="chat-wrapper">
<h4>Group Chat</h4>
<ul ref="messages" className="messages">
{messages}
</ul>
<div className="new-message">
<form onSubmit={@_onSubmit}>
<textarea ref="messageBox" placeholder="Type a message ..." onKeyDown={@_onKeyDown} onChange={@_onTextChange} value={@state.text} disabled={!@state.identity} />
</form>
</div>
</div>
)
_onChange: ->
@setState(getStateFromStores())
|
[
{
"context": "oarding Gold', 'Decoy Drill', 'Yakstraction', 'Sarven Brawl', 'Desert Combat', 'Dust', 'Sarven Rescue',",
"end": 3210,
"score": 0.8255314230918884,
"start": 3207,
"tag": "NAME",
"value": "ven"
},
{
"context": "ven Rescue', 'Sacred Statue', 'Mirage Maker', 'Sarven Savior... | app/views/courses/mock1/CoursesMockData.coffee | kumabotz/codecombat | 1 | data = {}
data.concepts = [
'Advanced Strings'
'Algorithms'
'Arithmetic'
'Arrays'
'Basic Syntax'
'Boolean Logic'
'Break Statements'
'Classes'
'For Loops'
'Functions'
'If Statements'
'Input Handling'
'Math Operations'
'Object Literals'
'Strings'
'Variables'
'Vectors'
'While Loops'
]
data.courses = [
{
title: 'Introduction to Computer Science'
description: 'Learn basic syntax, while loops, and the CodeCombat learning environment.'
topics: ['Basic Syntax', 'Strings', 'Loops']
duration: 1
levels: ['Dungeons of Kithgard', 'Gems in the Deep', 'Shadow Guard', 'Kounter Kithwise', 'Crawlways of Kithgard', 'Enemy Mine', 'Illusory Interruption', 'Forgetful Gemsmith', 'Signs and Portents', 'Favorable Odds', 'True Names', 'The Prisoner', 'Banefire', 'The Raised Sword', 'Haunted Kithmaze', 'Riddling Kithmaze', 'Descending Further', 'The Second Kithmaze', 'Dread Door', 'Cupboards of Kithgard', 'Hack and Dash']
campaign: 'intro'
image: '/images/pages/courses/101_info.png'
},
{
title: 'Computer Science 2'
description: 'Introduce Arguments, Variables, If Statements, and Arithmetic.'
topics: ['Arguments', 'Variables', 'If Statements', 'Arithmetic']
duration: 5
levels: ['Known Enemy', 'Master of Names', 'Lowly Kithmen', 'Closing the Distance', 'Tactical Strike', 'The Final Kithmaze', 'The Gauntlet', 'Radiant Aura', 'Kithgard Gates', 'Destroying Angel', 'Deadly Dungeon Rescue', 'Kithgard Brawl', 'Cavern Survival', 'Breakout', 'Attack Wisely!', 'Kithgard Mastery', 'Kithgard Apprentice', 'Long Kithmaze', 'Boom! and Bust', 'Defense of Plainswood', 'Winding Trail', 'Thumb Biter', 'Gems or Death', 'Backwoods Ambush', 'Patrol Buster', 'Endangered Burl', 'Village Guard', 'Thornbush Farm', 'Back to Back', 'Ogre Encampment', 'Woodland Cleaver', 'Shield Rush', 'Peasant Protection', 'Munchkin Swarm']
image: '/images/pages/courses/102_info.png'
},
{
title: 'Computer Science 3'
description: 'Learn how to handle input.'
topics: ['If Statements', 'Arithmetic', 'Input Handling']
duration: 5
levels: ['Munchkin Harvest', 'Swift Dagger', 'Shrapnel', 'Arcane Ally', 'Touch of Death', 'Bonemender', 'Coinucopia', 'Copper Meadows', 'Drop the Flag', 'Deadly Pursuit', 'Rich Forager', 'Siege of Stonehold', 'Multiplayer Treasure Grove', 'Dueling Grounds', 'Backwoods Brawl', 'Backwoods Treasure', 'Range Finder', 'Stillness in Motion', 'The Agrippa Defense', 'Storming the Towers of Areth', 'Hold the Forest Pass', 'Hold for Reinforcements', 'Storming the Farmhouse', 'Wild Horses', 'Boulder Woods', 'Unfair Support', 'Tactical Timing', 'Apocalypse', 'Doom Glade', 'Defend the Garrison', 'Lost Viking', 'Forest Flower Grove', 'The Dunes', 'The Mighty Sand Yak', 'Oasis', 'Sarven Road', 'Sarven Gaps', 'Thunderhooves', 'Medical Attention', 'The Great Yak Stampede', 'Minesweeper', 'Sarven Sentry', 'Keeping Time']
image: '/images/pages/courses/103_info.png'
},
{
title: 'Computer Science 4'
description: 'Time to tackle arrays and some pvp stuff.'
topics: ['Loops', 'Break Statements', 'Arrays']
duration: 5
levels: ['Hoarding Gold', 'Decoy Drill', 'Yakstraction', 'Sarven Brawl', 'Desert Combat', 'Dust', 'Sarven Rescue', 'Sacred Statue', 'Mirage Maker', 'Sarven Savior', 'Odd Sandstorm', 'Lurkers', 'Preferential Treatment', 'Sarven Shepherd', 'Shine Getter', 'The Trials', 'Mad Maxer', 'Mad Maxer Strikes Back', 'Mad Maxer Sells Out', 'Mad Maxer Gets Greedy', 'Mad Maxer: Redemption', 'Sarven Treasure', 'Harrowland', 'Sarven Siege', 'Clash of Clones', 'Sand Snakes', 'Crag Tag']
image: '/images/pages/courses/104_info.png'
},
{
title: 'Computer Science 5'
description: 'Time to tackle arrays and some PVP.'
topics: ['Break Statements', 'Arrays', 'Object Literals']
duration: 5
levels: ['Slalom', 'Black Diamond', 'Treasure Cave', 'Ogre Gorge Gouger', 'Dance-Off', 'Alpine Rally', 'Cloudrip Commander', 'Mountain Mercenaries']
image: '/images/pages/courses/105_info.png'
},
{
title: 'Computer Science 6'
description: 'For loops!'
topics: ['Break Statements', 'Object Literals', 'For loops']
duration: 5
levels: ['Timber Guard', 'Hunting Party', 'Zoo Keeper', 'Cloudrip Brawl', 'Cloudrip Treasure', 'Cloudrip Siege', 'Noble Sacrifice', 'Zero Sum', 'Borrowed Sword', 'Protect and Serve']
image: '/images/pages/courses/106_info.png'
},
{
title: 'Computer Science 7'
description: 'Functions!'
topics: ['Object Literals', 'For loops', 'Functions']
duration: 5
levels: ['Vital Powers', 'Timber Turncoat', 'Restless Dead', 'Ring Bearer', 'The Two Flowers', 'The Geometry of Flowers', 'Mountain Flower Grove', 'Hunters and Prey', 'Library Tactician']
image: '/images/pages/courses/107_info.png'
},
{
title: 'Computer Science 8'
description: 'Maths.'
topics: ['For loops', 'Functions', 'Math Operations']
duration: 5
levels: ['Steelclaw Gap', 'Pesky Yaks', 'Mixed Unit Tactics', 'Sowing Fire', 'Reaping Fire', 'Toil and Trouble', 'What in Carnation', 'Misty Island Mine', 'Raiders of the Long Dark', 'Grim Determination', 'Deadly Discs', "Summit's Gate"]
image: '/images/pages/courses/107_info.png'
},
{
title: 'Computer Science 9'
description: 'Vectors and strings.'
topics: ['Vectors', 'Advanced Strings']
duration: 5
levels: ['Circle Walking', 'Skating Away', 'Kelvintaph Crusader', 'Kelvintaph Burgler', 'Ice Soccer', 'Razorfray']
image: '/images/pages/courses/107_info.png'
}
]
getStudents = ->
students = ['Jill', 'Billy', 'Sarah', 'Tom', 'June', 'Bob', 'Kristin', 'Samantha', 'Eric']
_.shuffle(students).slice(_.random(0, 5))
data.instances = [
{
name: "Mr. Smith's First Period"
description: "Homework due on Friday."
code: 'b2KF7'
students: getStudents()
},
{
name: "Mr. Smith's Second Period"
description: "Test class description"
code: 'b2KF7'
students: getStudents()
},
{
name: "Summer Camp 2015"
description: "You should have received an email with extra credit homework."
code: 'b2KF7'
students: getStudents()
},
{
name: "Maple High 4th"
code: 'b2KF7'
students: getStudents()
},
{
name: "Test class name one"
description: "Test class description"
code: 'b2KF7'
students: getStudents()
}
]
data.praise = [
{
quote: "The kids love it."
source: "Leo Joseph Tran, Athlos Leadership Academy"
},
{
quote: "My students have been using the site for a couple of weeks and they love it."
source: "Scott Hatfield, Computer Applications Teacher, School Technology Coordinator, Eastside Middle School"
},
{
quote: "Thanks for the captivating site. My eighth graders love it."
source: "Janet Cook, Ansbach Middle/High School"
},
{
quote: "My students have started working on CodeCombat and love it! I love that they are learning coding and problem solving skills without them even knowing it!!"
source: "Kristin Huff, Special Education Teacher, Webb City School District"
},
{
quote: "I recently introduced Code Combat to a few of my fifth graders and they are loving it!"
source: "Shauna Hamman, Fifth Grade Teacher, Four Peaks Elementary School"
},
{
quote: "Overall I think it's a fantastic service. Variables, arrays, loops, all covered in very fun and imaginative ways. Every kid who has tried it is a fan."
source: "Aibinder Andrew, Technology Teacher"
},
{
quote: "I love what you have created. The kids are so engaged."
source: "Desmond Smith, 4KS Academy"
},
{
quote: "My students love the website and I hope on having content structured around it in the near future."
source: "Michael Leonard, Science Teacher, Clearwater Central Catholic High School"
}
]
module.exports = data
| 26315 | data = {}
data.concepts = [
'Advanced Strings'
'Algorithms'
'Arithmetic'
'Arrays'
'Basic Syntax'
'Boolean Logic'
'Break Statements'
'Classes'
'For Loops'
'Functions'
'If Statements'
'Input Handling'
'Math Operations'
'Object Literals'
'Strings'
'Variables'
'Vectors'
'While Loops'
]
data.courses = [
{
title: 'Introduction to Computer Science'
description: 'Learn basic syntax, while loops, and the CodeCombat learning environment.'
topics: ['Basic Syntax', 'Strings', 'Loops']
duration: 1
levels: ['Dungeons of Kithgard', 'Gems in the Deep', 'Shadow Guard', 'Kounter Kithwise', 'Crawlways of Kithgard', 'Enemy Mine', 'Illusory Interruption', 'Forgetful Gemsmith', 'Signs and Portents', 'Favorable Odds', 'True Names', 'The Prisoner', 'Banefire', 'The Raised Sword', 'Haunted Kithmaze', 'Riddling Kithmaze', 'Descending Further', 'The Second Kithmaze', 'Dread Door', 'Cupboards of Kithgard', 'Hack and Dash']
campaign: 'intro'
image: '/images/pages/courses/101_info.png'
},
{
title: 'Computer Science 2'
description: 'Introduce Arguments, Variables, If Statements, and Arithmetic.'
topics: ['Arguments', 'Variables', 'If Statements', 'Arithmetic']
duration: 5
levels: ['Known Enemy', 'Master of Names', 'Lowly Kithmen', 'Closing the Distance', 'Tactical Strike', 'The Final Kithmaze', 'The Gauntlet', 'Radiant Aura', 'Kithgard Gates', 'Destroying Angel', 'Deadly Dungeon Rescue', 'Kithgard Brawl', 'Cavern Survival', 'Breakout', 'Attack Wisely!', 'Kithgard Mastery', 'Kithgard Apprentice', 'Long Kithmaze', 'Boom! and Bust', 'Defense of Plainswood', 'Winding Trail', 'Thumb Biter', 'Gems or Death', 'Backwoods Ambush', 'Patrol Buster', 'Endangered Burl', 'Village Guard', 'Thornbush Farm', 'Back to Back', 'Ogre Encampment', 'Woodland Cleaver', 'Shield Rush', 'Peasant Protection', 'Munchkin Swarm']
image: '/images/pages/courses/102_info.png'
},
{
title: 'Computer Science 3'
description: 'Learn how to handle input.'
topics: ['If Statements', 'Arithmetic', 'Input Handling']
duration: 5
levels: ['Munchkin Harvest', 'Swift Dagger', 'Shrapnel', 'Arcane Ally', 'Touch of Death', 'Bonemender', 'Coinucopia', 'Copper Meadows', 'Drop the Flag', 'Deadly Pursuit', 'Rich Forager', 'Siege of Stonehold', 'Multiplayer Treasure Grove', 'Dueling Grounds', 'Backwoods Brawl', 'Backwoods Treasure', 'Range Finder', 'Stillness in Motion', 'The Agrippa Defense', 'Storming the Towers of Areth', 'Hold the Forest Pass', 'Hold for Reinforcements', 'Storming the Farmhouse', 'Wild Horses', 'Boulder Woods', 'Unfair Support', 'Tactical Timing', 'Apocalypse', 'Doom Glade', 'Defend the Garrison', 'Lost Viking', 'Forest Flower Grove', 'The Dunes', 'The Mighty Sand Yak', 'Oasis', 'Sarven Road', 'Sarven Gaps', 'Thunderhooves', 'Medical Attention', 'The Great Yak Stampede', 'Minesweeper', 'Sarven Sentry', 'Keeping Time']
image: '/images/pages/courses/103_info.png'
},
{
title: 'Computer Science 4'
description: 'Time to tackle arrays and some pvp stuff.'
topics: ['Loops', 'Break Statements', 'Arrays']
duration: 5
levels: ['Hoarding Gold', 'Decoy Drill', 'Yakstraction', 'Sar<NAME> Brawl', 'Desert Combat', 'Dust', 'Sarven Rescue', 'Sacred Statue', 'Mirage Maker', 'Sar<NAME>avior', 'Odd Sandstorm', 'Lurkers', 'Preferential Treatment', 'Sar<NAME> Shepherd', 'Shine Getter', 'The Trials', 'Mad Maxer', 'Mad Maxer Strikes Back', 'Mad Maxer Sells Out', 'Mad Maxer Gets Greedy', 'Mad Maxer: Redemption', 'Sarven Treasure', 'Harrowland', 'Sarven Siege', 'Clash of Clones', 'Sand Snakes', 'Crag Tag']
image: '/images/pages/courses/104_info.png'
},
{
title: 'Computer Science 5'
description: 'Time to tackle arrays and some PVP.'
topics: ['Break Statements', 'Arrays', 'Object Literals']
duration: 5
levels: ['Slalom', 'Black Diamond', 'Treasure Cave', 'Ogre Gorge Gouger', 'Dance-Off', 'Alpine Rally', 'Cloudrip Commander', 'Mountain Mercenaries']
image: '/images/pages/courses/105_info.png'
},
{
title: 'Computer Science 6'
description: 'For loops!'
topics: ['Break Statements', 'Object Literals', 'For loops']
duration: 5
levels: ['Timber Guard', 'Hunting Party', 'Zoo Keeper', 'Cloudrip Brawl', 'Cloudrip Treasure', 'Cloudrip Siege', 'Noble Sacrifice', 'Zero Sum', 'Borrowed Sword', 'Protect and Serve']
image: '/images/pages/courses/106_info.png'
},
{
title: 'Computer Science 7'
description: 'Functions!'
topics: ['Object Literals', 'For loops', 'Functions']
duration: 5
levels: ['Vital Powers', 'Timber Turncoat', 'Restless Dead', 'Ring Bearer', 'The Two Flowers', 'The Geometry of Flowers', 'Mountain Flower Grove', 'Hunters and Prey', 'Library Tactician']
image: '/images/pages/courses/107_info.png'
},
{
title: 'Computer Science 8'
description: 'Maths.'
topics: ['For loops', 'Functions', 'Math Operations']
duration: 5
levels: ['Steelclaw Gap', 'Pesky Yaks', 'Mixed Unit Tactics', 'Sowing Fire', 'Reaping Fire', 'Toil and Trouble', 'What in Carnation', 'Misty Island Mine', 'Raiders of the Long Dark', 'Grim Determination', 'Deadly Discs', "Summit's Gate"]
image: '/images/pages/courses/107_info.png'
},
{
title: 'Computer Science 9'
description: 'Vectors and strings.'
topics: ['Vectors', 'Advanced Strings']
duration: 5
levels: ['Circle Walking', 'Skating Away', 'Kelvintaph <NAME>', '<NAME>', 'Ice Soccer', 'Razorfray']
image: '/images/pages/courses/107_info.png'
}
]
getStudents = ->
students = ['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>']
_.shuffle(students).slice(_.random(0, 5))
data.instances = [
{
name: "Mr. Smith's First Period"
description: "Homework due on Friday."
code: 'b2KF7'
students: getStudents()
},
{
name: "Mr. Smith's Second Period"
description: "Test class description"
code: 'b2KF7'
students: getStudents()
},
{
name: "Summer Camp 2015"
description: "You should have received an email with extra credit homework."
code: 'b2KF7'
students: getStudents()
},
{
name: "Maple High 4th"
code: 'b2KF7'
students: getStudents()
},
{
name: "Test class name one"
description: "Test class description"
code: 'b2KF7'
students: getStudents()
}
]
data.praise = [
{
quote: "The kids love it."
source: "<NAME>, Athlos Leadership Academy"
},
{
quote: "My students have been using the site for a couple of weeks and they love it."
source: "<NAME>, Computer Applications Teacher, School Technology Coordinator, Eastside Middle School"
},
{
quote: "Thanks for the captivating site. My eighth graders love it."
source: "<NAME>, Ansbach Middle/High School"
},
{
quote: "My students have started working on CodeCombat and love it! I love that they are learning coding and problem solving skills without them even knowing it!!"
source: "<NAME>, Special Education Teacher, Webb City School District"
},
{
quote: "I recently introduced Code Combat to a few of my fifth graders and they are loving it!"
source: "<NAME>, Fifth Grade Teacher, Four Peaks Elementary School"
},
{
quote: "Overall I think it's a fantastic service. Variables, arrays, loops, all covered in very fun and imaginative ways. Every kid who has tried it is a fan."
source: "<NAME>, Technology Teacher"
},
{
quote: "I love what you have created. The kids are so engaged."
source: "Desmond Smith, 4KS Academy"
},
{
quote: "My students love the website and I hope on having content structured around it in the near future."
source: "Michael Leonard, Science Teacher, Clearwater Central Catholic High School"
}
]
module.exports = data
| true | data = {}
data.concepts = [
'Advanced Strings'
'Algorithms'
'Arithmetic'
'Arrays'
'Basic Syntax'
'Boolean Logic'
'Break Statements'
'Classes'
'For Loops'
'Functions'
'If Statements'
'Input Handling'
'Math Operations'
'Object Literals'
'Strings'
'Variables'
'Vectors'
'While Loops'
]
data.courses = [
{
title: 'Introduction to Computer Science'
description: 'Learn basic syntax, while loops, and the CodeCombat learning environment.'
topics: ['Basic Syntax', 'Strings', 'Loops']
duration: 1
levels: ['Dungeons of Kithgard', 'Gems in the Deep', 'Shadow Guard', 'Kounter Kithwise', 'Crawlways of Kithgard', 'Enemy Mine', 'Illusory Interruption', 'Forgetful Gemsmith', 'Signs and Portents', 'Favorable Odds', 'True Names', 'The Prisoner', 'Banefire', 'The Raised Sword', 'Haunted Kithmaze', 'Riddling Kithmaze', 'Descending Further', 'The Second Kithmaze', 'Dread Door', 'Cupboards of Kithgard', 'Hack and Dash']
campaign: 'intro'
image: '/images/pages/courses/101_info.png'
},
{
title: 'Computer Science 2'
description: 'Introduce Arguments, Variables, If Statements, and Arithmetic.'
topics: ['Arguments', 'Variables', 'If Statements', 'Arithmetic']
duration: 5
levels: ['Known Enemy', 'Master of Names', 'Lowly Kithmen', 'Closing the Distance', 'Tactical Strike', 'The Final Kithmaze', 'The Gauntlet', 'Radiant Aura', 'Kithgard Gates', 'Destroying Angel', 'Deadly Dungeon Rescue', 'Kithgard Brawl', 'Cavern Survival', 'Breakout', 'Attack Wisely!', 'Kithgard Mastery', 'Kithgard Apprentice', 'Long Kithmaze', 'Boom! and Bust', 'Defense of Plainswood', 'Winding Trail', 'Thumb Biter', 'Gems or Death', 'Backwoods Ambush', 'Patrol Buster', 'Endangered Burl', 'Village Guard', 'Thornbush Farm', 'Back to Back', 'Ogre Encampment', 'Woodland Cleaver', 'Shield Rush', 'Peasant Protection', 'Munchkin Swarm']
image: '/images/pages/courses/102_info.png'
},
{
title: 'Computer Science 3'
description: 'Learn how to handle input.'
topics: ['If Statements', 'Arithmetic', 'Input Handling']
duration: 5
levels: ['Munchkin Harvest', 'Swift Dagger', 'Shrapnel', 'Arcane Ally', 'Touch of Death', 'Bonemender', 'Coinucopia', 'Copper Meadows', 'Drop the Flag', 'Deadly Pursuit', 'Rich Forager', 'Siege of Stonehold', 'Multiplayer Treasure Grove', 'Dueling Grounds', 'Backwoods Brawl', 'Backwoods Treasure', 'Range Finder', 'Stillness in Motion', 'The Agrippa Defense', 'Storming the Towers of Areth', 'Hold the Forest Pass', 'Hold for Reinforcements', 'Storming the Farmhouse', 'Wild Horses', 'Boulder Woods', 'Unfair Support', 'Tactical Timing', 'Apocalypse', 'Doom Glade', 'Defend the Garrison', 'Lost Viking', 'Forest Flower Grove', 'The Dunes', 'The Mighty Sand Yak', 'Oasis', 'Sarven Road', 'Sarven Gaps', 'Thunderhooves', 'Medical Attention', 'The Great Yak Stampede', 'Minesweeper', 'Sarven Sentry', 'Keeping Time']
image: '/images/pages/courses/103_info.png'
},
{
title: 'Computer Science 4'
description: 'Time to tackle arrays and some pvp stuff.'
topics: ['Loops', 'Break Statements', 'Arrays']
duration: 5
levels: ['Hoarding Gold', 'Decoy Drill', 'Yakstraction', 'SarPI:NAME:<NAME>END_PI Brawl', 'Desert Combat', 'Dust', 'Sarven Rescue', 'Sacred Statue', 'Mirage Maker', 'SarPI:NAME:<NAME>END_PIavior', 'Odd Sandstorm', 'Lurkers', 'Preferential Treatment', 'SarPI:NAME:<NAME>END_PI Shepherd', 'Shine Getter', 'The Trials', 'Mad Maxer', 'Mad Maxer Strikes Back', 'Mad Maxer Sells Out', 'Mad Maxer Gets Greedy', 'Mad Maxer: Redemption', 'Sarven Treasure', 'Harrowland', 'Sarven Siege', 'Clash of Clones', 'Sand Snakes', 'Crag Tag']
image: '/images/pages/courses/104_info.png'
},
{
title: 'Computer Science 5'
description: 'Time to tackle arrays and some PVP.'
topics: ['Break Statements', 'Arrays', 'Object Literals']
duration: 5
levels: ['Slalom', 'Black Diamond', 'Treasure Cave', 'Ogre Gorge Gouger', 'Dance-Off', 'Alpine Rally', 'Cloudrip Commander', 'Mountain Mercenaries']
image: '/images/pages/courses/105_info.png'
},
{
title: 'Computer Science 6'
description: 'For loops!'
topics: ['Break Statements', 'Object Literals', 'For loops']
duration: 5
levels: ['Timber Guard', 'Hunting Party', 'Zoo Keeper', 'Cloudrip Brawl', 'Cloudrip Treasure', 'Cloudrip Siege', 'Noble Sacrifice', 'Zero Sum', 'Borrowed Sword', 'Protect and Serve']
image: '/images/pages/courses/106_info.png'
},
{
title: 'Computer Science 7'
description: 'Functions!'
topics: ['Object Literals', 'For loops', 'Functions']
duration: 5
levels: ['Vital Powers', 'Timber Turncoat', 'Restless Dead', 'Ring Bearer', 'The Two Flowers', 'The Geometry of Flowers', 'Mountain Flower Grove', 'Hunters and Prey', 'Library Tactician']
image: '/images/pages/courses/107_info.png'
},
{
title: 'Computer Science 8'
description: 'Maths.'
topics: ['For loops', 'Functions', 'Math Operations']
duration: 5
levels: ['Steelclaw Gap', 'Pesky Yaks', 'Mixed Unit Tactics', 'Sowing Fire', 'Reaping Fire', 'Toil and Trouble', 'What in Carnation', 'Misty Island Mine', 'Raiders of the Long Dark', 'Grim Determination', 'Deadly Discs', "Summit's Gate"]
image: '/images/pages/courses/107_info.png'
},
{
title: 'Computer Science 9'
description: 'Vectors and strings.'
topics: ['Vectors', 'Advanced Strings']
duration: 5
levels: ['Circle Walking', 'Skating Away', 'Kelvintaph PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'Ice Soccer', 'Razorfray']
image: '/images/pages/courses/107_info.png'
}
]
getStudents = ->
students = ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
_.shuffle(students).slice(_.random(0, 5))
data.instances = [
{
name: "Mr. Smith's First Period"
description: "Homework due on Friday."
code: 'b2KF7'
students: getStudents()
},
{
name: "Mr. Smith's Second Period"
description: "Test class description"
code: 'b2KF7'
students: getStudents()
},
{
name: "Summer Camp 2015"
description: "You should have received an email with extra credit homework."
code: 'b2KF7'
students: getStudents()
},
{
name: "Maple High 4th"
code: 'b2KF7'
students: getStudents()
},
{
name: "Test class name one"
description: "Test class description"
code: 'b2KF7'
students: getStudents()
}
]
data.praise = [
{
quote: "The kids love it."
source: "PI:NAME:<NAME>END_PI, Athlos Leadership Academy"
},
{
quote: "My students have been using the site for a couple of weeks and they love it."
source: "PI:NAME:<NAME>END_PI, Computer Applications Teacher, School Technology Coordinator, Eastside Middle School"
},
{
quote: "Thanks for the captivating site. My eighth graders love it."
source: "PI:NAME:<NAME>END_PI, Ansbach Middle/High School"
},
{
quote: "My students have started working on CodeCombat and love it! I love that they are learning coding and problem solving skills without them even knowing it!!"
source: "PI:NAME:<NAME>END_PI, Special Education Teacher, Webb City School District"
},
{
quote: "I recently introduced Code Combat to a few of my fifth graders and they are loving it!"
source: "PI:NAME:<NAME>END_PI, Fifth Grade Teacher, Four Peaks Elementary School"
},
{
quote: "Overall I think it's a fantastic service. Variables, arrays, loops, all covered in very fun and imaginative ways. Every kid who has tried it is a fan."
source: "PI:NAME:<NAME>END_PI, Technology Teacher"
},
{
quote: "I love what you have created. The kids are so engaged."
source: "Desmond Smith, 4KS Academy"
},
{
quote: "My students love the website and I hope on having content structured around it in the near future."
source: "Michael Leonard, Science Teacher, Clearwater Central Catholic High School"
}
]
module.exports = data
|
[
{
"context": "ysql.createClient ({\n\thost: 'host_dbase',\n\tuser: 'scott',\n\tpassword: 'tiger'\n\t})\n\nconnection.query ('Use ",
"end": 1433,
"score": 0.999476432800293,
"start": 1428,
"tag": "USERNAME",
"value": "scott"
},
{
"context": "\n\thost: 'host_dbase',\n\tuser: 'scott',\n... | mysql/coffee/create/mysql_create.coffee | ekzemplaro/data_base_language | 3 | #! /usr/bin/coffee
# ---------------------------------------------------------------
# mysql_create.coffee
#
# Jul/03/2014
#
# ---------------------------------------------------------------
mysql = require('mysql')
text_manipulate= require ('/var/www/data_base/common/coffee_common/text_manipulate')
# ---------------------------------------------------------------
data_prepare_proc = () ->
dict_aa = new Object
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3321','岡山',28163,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3322','倉敷',57296,'1954-3-15')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3323','津山',24187,'1954-10-2')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3324','玉野',64392,'1954-6-22')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3325','笠岡',58731,'1954-8-14')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3326','井原',54796,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3327','総社',38257,'1954-3-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3328','高梁',12486,'1954-7-26')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3329','新見',26759,'1954-10-2')
return dict_aa
# ---------------------------------------------------------------
#
console.log "*** 開始 ***"
dict_aa = data_prepare_proc()
text_manipulate.dict_display_proc(dict_aa)
connection = mysql.createClient ({
host: 'host_dbase',
user: 'scott',
password: 'tiger'
})
connection.query ('Use city')
connection.query ('drop table if exists cities')
command = 'create table cities (id varchar(10), name varchar(20),'
command += ' population int, date_mod date)'
connection.query (command)
for key,value of dict_aa
sql_str = "insert into cities (id,name,population,date_mod) values ("
str_data = "'" + key + "','" + dict_aa[key].name + "'," \
+ dict_aa[key].population + ",'" + dict_aa[key].date_mod + "')"
sql_str += str_data
connection.query (sql_str)
connection.end()
console.log "*** 終了 ***"
# ---------------------------------------------------------------
| 148894 | #! /usr/bin/coffee
# ---------------------------------------------------------------
# mysql_create.coffee
#
# Jul/03/2014
#
# ---------------------------------------------------------------
mysql = require('mysql')
text_manipulate= require ('/var/www/data_base/common/coffee_common/text_manipulate')
# ---------------------------------------------------------------
data_prepare_proc = () ->
dict_aa = new Object
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3321','岡山',28163,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3322','倉敷',57296,'1954-3-15')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3323','津山',24187,'1954-10-2')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3324','玉野',64392,'1954-6-22')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3325','笠岡',58731,'1954-8-14')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3326','井原',54796,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3327','総社',38257,'1954-3-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3328','高梁',12486,'1954-7-26')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3329','新見',26759,'1954-10-2')
return dict_aa
# ---------------------------------------------------------------
#
console.log "*** 開始 ***"
dict_aa = data_prepare_proc()
text_manipulate.dict_display_proc(dict_aa)
connection = mysql.createClient ({
host: 'host_dbase',
user: 'scott',
password: '<PASSWORD>'
})
connection.query ('Use city')
connection.query ('drop table if exists cities')
command = 'create table cities (id varchar(10), name varchar(20),'
command += ' population int, date_mod date)'
connection.query (command)
for key,value of dict_aa
sql_str = "insert into cities (id,name,population,date_mod) values ("
str_data = "'" + key + "','" + dict_aa[key].name + "'," \
+ dict_aa[key].population + ",'" + dict_aa[key].date_mod + "')"
sql_str += str_data
connection.query (sql_str)
connection.end()
console.log "*** 終了 ***"
# ---------------------------------------------------------------
| true | #! /usr/bin/coffee
# ---------------------------------------------------------------
# mysql_create.coffee
#
# Jul/03/2014
#
# ---------------------------------------------------------------
mysql = require('mysql')
text_manipulate= require ('/var/www/data_base/common/coffee_common/text_manipulate')
# ---------------------------------------------------------------
data_prepare_proc = () ->
dict_aa = new Object
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3321','岡山',28163,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3322','倉敷',57296,'1954-3-15')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3323','津山',24187,'1954-10-2')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3324','玉野',64392,'1954-6-22')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3325','笠岡',58731,'1954-8-14')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3326','井原',54796,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3327','総社',38257,'1954-3-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3328','高梁',12486,'1954-7-26')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t3329','新見',26759,'1954-10-2')
return dict_aa
# ---------------------------------------------------------------
#
console.log "*** 開始 ***"
dict_aa = data_prepare_proc()
text_manipulate.dict_display_proc(dict_aa)
connection = mysql.createClient ({
host: 'host_dbase',
user: 'scott',
password: 'PI:PASSWORD:<PASSWORD>END_PI'
})
connection.query ('Use city')
connection.query ('drop table if exists cities')
command = 'create table cities (id varchar(10), name varchar(20),'
command += ' population int, date_mod date)'
connection.query (command)
for key,value of dict_aa
sql_str = "insert into cities (id,name,population,date_mod) values ("
str_data = "'" + key + "','" + dict_aa[key].name + "'," \
+ dict_aa[key].population + ",'" + dict_aa[key].date_mod + "')"
sql_str += str_data
connection.query (sql_str)
connection.end()
console.log "*** 終了 ***"
# ---------------------------------------------------------------
|
[
{
"context": "ND.reverse CND.bold \"\"\" '---''(_/--' `-'\\\\_) Felix Lee \"\"\"\n echo()\n process.exit 0\n\n#---------------",
"end": 5228,
"score": 0.9998478293418884,
"start": 5219,
"tag": "NAME",
"value": "Felix Lee"
},
{
"context": "ent: 'Project home: {underline http... | dev/mixa/src/demo.coffee | loveencounterflow/hengist | 0 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'HENGIST/DEV/CL-PARSER'
debug = CND.get_logger 'debug', badge
alert = CND.get_logger 'alert', badge
whisper = CND.get_logger 'whisper', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
info = CND.get_logger 'info', badge
echo = CND.echo.bind CND
#...........................................................................................................
types = new ( require 'intertype' ).Intertype()
{ isa
validate
cast
type_of } = types.export()
# CP = require 'child_process'
defer = setImmediate
parse_argv = require 'command-line-args'
# cnd_parse = require 'cnd/parse-command-line'
misfit = Symbol 'misfit'
PATH = require 'path'
relpath = PATH.relative process.cwd(), __filename
{ freeze
lets } = require 'letsfreezethat'
#-----------------------------------------------------------------------------------------------------------
pluck = ( d, name, fallback = misfit ) ->
R = d[ name ]
delete d[ name ]
unless R?
return fallback unless fallback is misfit
throw new Error "^cli@5477^ no such attribute: #{rpr name}"
return R
# #-----------------------------------------------------------------------------------------------------------
# check_extraneous = ( d ) ->
# return if ( Object.keys d ).length is 0
# show_help_and_exit 111, "unknown arguments: #{rpr d}"
#-----------------------------------------------------------------------------------------------------------
get_cmd_literal = ( cmd, argv ) ->
return CND.lime "#{cmd}" if ( parameters = CND.shellescape argv ).length is 0
return CND.lime "#{cmd} #{parameters}"
#-----------------------------------------------------------------------------------------------------------
generate_documentation = ->
commandLineUsage = require 'command-line-usage'
doc_settings = []
doc_settings.push {
header: "Usage", content: """
node #{relpath} [meta] command [parameters]
[meta]: optional general flags
command: internal or external command to run (obligatory)
[parameters]: parameters to be passed to internal or external command;
* for internal parameters and flags, see below
* for external parameters and flags, refer to the documentation of the respective command
""", }
doc_settings.push { header: "meta", optionList: X.meta, }
for cmd in X.internals
doc_settings.push { header: "Internal command: #{cmd.name}", optionList: cmd, }
if ( Object.keys X.externals ).length > 0
descriptions = []
for cmd in X.externals
descriptions.push { content: "#{cmd.name}: #{cmd.description ? '???'}", }
doc_settings.push { header: "External commands: ", content: descriptions, }
return '\n' + commandLineUsage doc_settings
#-----------------------------------------------------------------------------------------------------------
show_help_for_topic_and_exit = ( q, argv ) ->
if argv.length > 0
return show_help_and_exit 113, "^cli@5478^ extraneous arguments #{rpr argv}"
switch q.parameters.topic
when null, undefined
return show_help_and_exit 0
when 'topics'
echo CND.blue "(this should be a list of topics)"
process.exit 0
when 'help'
### TAINT use custom function to output help ###
echo CND.blue """\n`node #{relpath} help [topic]`:\nget help about `topic`\n"""
process.exit 0
show_help_and_exit 120, "^cli@5887^ unknown help topic #{rpr q.parameters.topic}"
#-----------------------------------------------------------------------------------------------------------
show_help_and_exit = ( code = 0, message = null ) ->
usage = generate_documentation()
usage = '\n' + ( CND.blue usage ) + '\n'
usage += '\n' + ( CND.red message ) + '\n' if message?
echo usage
process.exit code
#-----------------------------------------------------------------------------------------------------------
show_help_and_exit = ( code = 0, message = null ) ->
usage = generate_documentation()
usage = '\n' + ( CND.blue usage ) + '\n'
usage += '\n' + ( CND.red message ) + '\n' if message?
echo usage
process.exit code
#-----------------------------------------------------------------------------------------------------------
show_cat_and_exit = ->
echo()
echo CND.white CND.reverse CND.bold """ |\\ _,,,---,,_ """
echo CND.white CND.reverse CND.bold """ ZZZzz /,`.-'`' -. ;-;;,_ """
echo CND.white CND.reverse CND.bold """ |,4- ) )-,_. ,\\ ( `'-' """
echo CND.white CND.reverse CND.bold """ '---''(_/--' `-'\\_) Felix Lee """
echo()
process.exit 0
#-----------------------------------------------------------------------------------------------------------
@cli = ( argv = null ) ->
#---------------------------------------------------------------------------------------------------------
q =
trace: false # place under `meta`
help: false # place under `meta`
testing: argv? # place under `meta`
stage: null
cmd: null
parameters: {}
#---------------------------------------------------------------------------------------------------------
# Stage: Metaflags
#.........................................................................................................
q.stage = 'meta'
argv = argv ? process.argv
d = X.meta
s = { argv, stopAtFirstUnknown: true, }
p = parse_argv d, s
argv = pluck p, '_unknown', []
q.help = pluck p, 'help', false
q.trace = pluck p, 'trace', false
q.cd = pluck p, 'cd', null
urge "Stage: Metaflags", { q, argv, } if q.trace
return show_help_and_exit 0 if q.help
return show_help_and_exit 112, "^cli@5598^ extraneous flag #{rpr flag}" if ( flag = argv[ 0 ] )?.startsWith '-'
#---------------------------------------------------------------------------------------------------------
if q.cd?
process.chdir q.cd
urge CND.yellow "working directory is now #{process.cwd()}" if q.trace
#---------------------------------------------------------------------------------------------------------
# Stage: Internal Commands
# Internal commands must parse their specific flags and other arguments.
#.........................................................................................................
q.stage = 'internal'
d = { name: 'cmd', defaultOption: true, }
p = parse_argv d, { argv, stopAtFirstUnknown: true, }
q.cmd = pluck p, 'cmd', null
argv = pluck p, '_unknown', []
urge "Stage: Commands", { q, argv, } if q.trace
return show_help_and_exit 114, "^cli@5479^ missing command" unless q.cmd?
#.........................................................................................................
switch q.cmd
when 'help'
d = X.internals.help
p = parse_argv d, { argv, stopAtFirstUnknown: true, }
q.parameters.topic = pluck p, 'topic', null
argv = pluck p, '_unknown', []
urge "running internal command `help`", { q, argv, } if q.trace
return show_help_for_topic_and_exit q, argv
when 'cat'
return show_cat_and_exit()
#---------------------------------------------------------------------------------------------------------
# Stage: External Commands
#.........................................................................................................
# External commands call a child process that is passed the remaing command line arguments, so those
# can be dealt with summarily.
#.........................................................................................................
q.stage = 'external'
p = parse_argv [], { argv, stopAtFirstUnknown: true, }
argv = pluck p, '_unknown', []
q.parameters.argv = argv[ .. ]
urge "Stage: External Commands", { q, argv, } if q.trace
### TAINT derive list from settings ###
if q.cmd in [ 'psql', 'node', 'nodexh', ]
return q
q.error = { code: 115, message: "^cli@5480^ Unknown command #{CND.reverse rpr q.cmd}", }
#-----------------------------------------------------------------------------------------------------------
run_external_command = ->
# #.........................................................................................................
# switch q.cmd
# #-------------------------------------------------------------------------------------------------------
# when 'psql'
# urge "running external command #{get_cmd_literal q.cmd, argv}" if q.trace
# return resolve()
# #-------------------------------------------------------------------------------------------------------
# when 'nodexh', 'node'
# urge "running external command #{get_cmd_literal q.cmd, argv}" if q.trace
# return resolve()
#.........................................................................................................
#-----------------------------------------------------------------------------------------------------------
compile_settings = ( dft, usr ) ->
meta = []
internals = []
externals = []
R = { meta, internals, externals, }
#.........................................................................................................
validate.object usr.meta if usr.meta?
for name, description of Object.assign {}, dft.meta, usr.meta
throw Error "^cli@5587^ must not have attribute name, got #{rpr description}" if description.name?
meta.push lets description, ( d ) -> d.name = name
#.........................................................................................................
validate.object usr.commands if usr.commands?
for name, description of Object.assign {}, dft.commands, usr.commands
throw Error "^cli@5588^ must not have attribute name, got #{rpr description}" if description.name?
is_external = false
e = lets description, ( d ) ->
d.name = name
is_external = pluck d, 'external', false
if is_external then externals.push e
else internals.push e
#.........................................................................................................
return freeze R
#-----------------------------------------------------------------------------------------------------------
default_settings = freeze {
meta:
help: { alias: 'h', type: Boolean, description: "show help and exit", }
cd: { alias: 'd', type: String, description: "change to directory before running command", }
trace: { alias: 't', type: Boolean, description: "trace options parsing (for debugging)", }
commands:
cat: { description: "draw a cat", }
version: { description: "show project version and exit", }
}
#-----------------------------------------------------------------------------------------------------------
user_settings = freeze {
# meta:
# internal:
commands:
psql: { external: true, description: "use `psql` to run SQL", }
node: { external: true, description: "use `node` to run JS", }
nodexh: { external: true, description: "use `nodexh` to run JS", }
}
X = compile_settings default_settings, user_settings
debug '^6767^', JSON.stringify X, null, ' '
############################################################################################################
if module is require.main then do =>
debug '^3387^', @cli()
# debug await @cli [ '-t', null, '-t', ]
# {
# header: 'Typical Example',
# content: 'A simple example demonstrating typical usage.'
# },
# {
# content: 'Project home: {underline https://github.com/me/example}'
# }
| 177608 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'HENGIST/DEV/CL-PARSER'
debug = CND.get_logger 'debug', badge
alert = CND.get_logger 'alert', badge
whisper = CND.get_logger 'whisper', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
info = CND.get_logger 'info', badge
echo = CND.echo.bind CND
#...........................................................................................................
types = new ( require 'intertype' ).Intertype()
{ isa
validate
cast
type_of } = types.export()
# CP = require 'child_process'
defer = setImmediate
parse_argv = require 'command-line-args'
# cnd_parse = require 'cnd/parse-command-line'
misfit = Symbol 'misfit'
PATH = require 'path'
relpath = PATH.relative process.cwd(), __filename
{ freeze
lets } = require 'letsfreezethat'
#-----------------------------------------------------------------------------------------------------------
pluck = ( d, name, fallback = misfit ) ->
R = d[ name ]
delete d[ name ]
unless R?
return fallback unless fallback is misfit
throw new Error "^cli@5477^ no such attribute: #{rpr name}"
return R
# #-----------------------------------------------------------------------------------------------------------
# check_extraneous = ( d ) ->
# return if ( Object.keys d ).length is 0
# show_help_and_exit 111, "unknown arguments: #{rpr d}"
#-----------------------------------------------------------------------------------------------------------
get_cmd_literal = ( cmd, argv ) ->
return CND.lime "#{cmd}" if ( parameters = CND.shellescape argv ).length is 0
return CND.lime "#{cmd} #{parameters}"
#-----------------------------------------------------------------------------------------------------------
generate_documentation = ->
commandLineUsage = require 'command-line-usage'
doc_settings = []
doc_settings.push {
header: "Usage", content: """
node #{relpath} [meta] command [parameters]
[meta]: optional general flags
command: internal or external command to run (obligatory)
[parameters]: parameters to be passed to internal or external command;
* for internal parameters and flags, see below
* for external parameters and flags, refer to the documentation of the respective command
""", }
doc_settings.push { header: "meta", optionList: X.meta, }
for cmd in X.internals
doc_settings.push { header: "Internal command: #{cmd.name}", optionList: cmd, }
if ( Object.keys X.externals ).length > 0
descriptions = []
for cmd in X.externals
descriptions.push { content: "#{cmd.name}: #{cmd.description ? '???'}", }
doc_settings.push { header: "External commands: ", content: descriptions, }
return '\n' + commandLineUsage doc_settings
#-----------------------------------------------------------------------------------------------------------
show_help_for_topic_and_exit = ( q, argv ) ->
if argv.length > 0
return show_help_and_exit 113, "^cli@5478^ extraneous arguments #{rpr argv}"
switch q.parameters.topic
when null, undefined
return show_help_and_exit 0
when 'topics'
echo CND.blue "(this should be a list of topics)"
process.exit 0
when 'help'
### TAINT use custom function to output help ###
echo CND.blue """\n`node #{relpath} help [topic]`:\nget help about `topic`\n"""
process.exit 0
show_help_and_exit 120, "^cli@5887^ unknown help topic #{rpr q.parameters.topic}"
#-----------------------------------------------------------------------------------------------------------
show_help_and_exit = ( code = 0, message = null ) ->
usage = generate_documentation()
usage = '\n' + ( CND.blue usage ) + '\n'
usage += '\n' + ( CND.red message ) + '\n' if message?
echo usage
process.exit code
#-----------------------------------------------------------------------------------------------------------
show_help_and_exit = ( code = 0, message = null ) ->
usage = generate_documentation()
usage = '\n' + ( CND.blue usage ) + '\n'
usage += '\n' + ( CND.red message ) + '\n' if message?
echo usage
process.exit code
#-----------------------------------------------------------------------------------------------------------
show_cat_and_exit = ->
echo()
echo CND.white CND.reverse CND.bold """ |\\ _,,,---,,_ """
echo CND.white CND.reverse CND.bold """ ZZZzz /,`.-'`' -. ;-;;,_ """
echo CND.white CND.reverse CND.bold """ |,4- ) )-,_. ,\\ ( `'-' """
echo CND.white CND.reverse CND.bold """ '---''(_/--' `-'\\_) <NAME> """
echo()
process.exit 0
#-----------------------------------------------------------------------------------------------------------
@cli = ( argv = null ) ->
#---------------------------------------------------------------------------------------------------------
q =
trace: false # place under `meta`
help: false # place under `meta`
testing: argv? # place under `meta`
stage: null
cmd: null
parameters: {}
#---------------------------------------------------------------------------------------------------------
# Stage: Metaflags
#.........................................................................................................
q.stage = 'meta'
argv = argv ? process.argv
d = X.meta
s = { argv, stopAtFirstUnknown: true, }
p = parse_argv d, s
argv = pluck p, '_unknown', []
q.help = pluck p, 'help', false
q.trace = pluck p, 'trace', false
q.cd = pluck p, 'cd', null
urge "Stage: Metaflags", { q, argv, } if q.trace
return show_help_and_exit 0 if q.help
return show_help_and_exit 112, "^cli@5598^ extraneous flag #{rpr flag}" if ( flag = argv[ 0 ] )?.startsWith '-'
#---------------------------------------------------------------------------------------------------------
if q.cd?
process.chdir q.cd
urge CND.yellow "working directory is now #{process.cwd()}" if q.trace
#---------------------------------------------------------------------------------------------------------
# Stage: Internal Commands
# Internal commands must parse their specific flags and other arguments.
#.........................................................................................................
q.stage = 'internal'
d = { name: 'cmd', defaultOption: true, }
p = parse_argv d, { argv, stopAtFirstUnknown: true, }
q.cmd = pluck p, 'cmd', null
argv = pluck p, '_unknown', []
urge "Stage: Commands", { q, argv, } if q.trace
return show_help_and_exit 114, "^cli@5479^ missing command" unless q.cmd?
#.........................................................................................................
switch q.cmd
when 'help'
d = X.internals.help
p = parse_argv d, { argv, stopAtFirstUnknown: true, }
q.parameters.topic = pluck p, 'topic', null
argv = pluck p, '_unknown', []
urge "running internal command `help`", { q, argv, } if q.trace
return show_help_for_topic_and_exit q, argv
when 'cat'
return show_cat_and_exit()
#---------------------------------------------------------------------------------------------------------
# Stage: External Commands
#.........................................................................................................
# External commands call a child process that is passed the remaing command line arguments, so those
# can be dealt with summarily.
#.........................................................................................................
q.stage = 'external'
p = parse_argv [], { argv, stopAtFirstUnknown: true, }
argv = pluck p, '_unknown', []
q.parameters.argv = argv[ .. ]
urge "Stage: External Commands", { q, argv, } if q.trace
### TAINT derive list from settings ###
if q.cmd in [ 'psql', 'node', 'nodexh', ]
return q
q.error = { code: 115, message: "^cli@5480^ Unknown command #{CND.reverse rpr q.cmd}", }
#-----------------------------------------------------------------------------------------------------------
run_external_command = ->
# #.........................................................................................................
# switch q.cmd
# #-------------------------------------------------------------------------------------------------------
# when 'psql'
# urge "running external command #{get_cmd_literal q.cmd, argv}" if q.trace
# return resolve()
# #-------------------------------------------------------------------------------------------------------
# when 'nodexh', 'node'
# urge "running external command #{get_cmd_literal q.cmd, argv}" if q.trace
# return resolve()
#.........................................................................................................
#-----------------------------------------------------------------------------------------------------------
compile_settings = ( dft, usr ) ->
meta = []
internals = []
externals = []
R = { meta, internals, externals, }
#.........................................................................................................
validate.object usr.meta if usr.meta?
for name, description of Object.assign {}, dft.meta, usr.meta
throw Error "^cli@5587^ must not have attribute name, got #{rpr description}" if description.name?
meta.push lets description, ( d ) -> d.name = name
#.........................................................................................................
validate.object usr.commands if usr.commands?
for name, description of Object.assign {}, dft.commands, usr.commands
throw Error "^cli@5588^ must not have attribute name, got #{rpr description}" if description.name?
is_external = false
e = lets description, ( d ) ->
d.name = name
is_external = pluck d, 'external', false
if is_external then externals.push e
else internals.push e
#.........................................................................................................
return freeze R
#-----------------------------------------------------------------------------------------------------------
default_settings = freeze {
meta:
help: { alias: 'h', type: Boolean, description: "show help and exit", }
cd: { alias: 'd', type: String, description: "change to directory before running command", }
trace: { alias: 't', type: Boolean, description: "trace options parsing (for debugging)", }
commands:
cat: { description: "draw a cat", }
version: { description: "show project version and exit", }
}
#-----------------------------------------------------------------------------------------------------------
user_settings = freeze {
# meta:
# internal:
commands:
psql: { external: true, description: "use `psql` to run SQL", }
node: { external: true, description: "use `node` to run JS", }
nodexh: { external: true, description: "use `nodexh` to run JS", }
}
X = compile_settings default_settings, user_settings
debug '^6767^', JSON.stringify X, null, ' '
############################################################################################################
if module is require.main then do =>
debug '^3387^', @cli()
# debug await @cli [ '-t', null, '-t', ]
# {
# header: 'Typical Example',
# content: 'A simple example demonstrating typical usage.'
# },
# {
# content: 'Project home: {underline https://github.com/me/example}'
# }
| true |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'HENGIST/DEV/CL-PARSER'
debug = CND.get_logger 'debug', badge
alert = CND.get_logger 'alert', badge
whisper = CND.get_logger 'whisper', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
info = CND.get_logger 'info', badge
echo = CND.echo.bind CND
#...........................................................................................................
types = new ( require 'intertype' ).Intertype()
{ isa
validate
cast
type_of } = types.export()
# CP = require 'child_process'
defer = setImmediate
parse_argv = require 'command-line-args'
# cnd_parse = require 'cnd/parse-command-line'
misfit = Symbol 'misfit'
PATH = require 'path'
relpath = PATH.relative process.cwd(), __filename
{ freeze
lets } = require 'letsfreezethat'
#-----------------------------------------------------------------------------------------------------------
pluck = ( d, name, fallback = misfit ) ->
R = d[ name ]
delete d[ name ]
unless R?
return fallback unless fallback is misfit
throw new Error "^cli@5477^ no such attribute: #{rpr name}"
return R
# #-----------------------------------------------------------------------------------------------------------
# check_extraneous = ( d ) ->
# return if ( Object.keys d ).length is 0
# show_help_and_exit 111, "unknown arguments: #{rpr d}"
#-----------------------------------------------------------------------------------------------------------
get_cmd_literal = ( cmd, argv ) ->
return CND.lime "#{cmd}" if ( parameters = CND.shellescape argv ).length is 0
return CND.lime "#{cmd} #{parameters}"
#-----------------------------------------------------------------------------------------------------------
generate_documentation = ->
commandLineUsage = require 'command-line-usage'
doc_settings = []
doc_settings.push {
header: "Usage", content: """
node #{relpath} [meta] command [parameters]
[meta]: optional general flags
command: internal or external command to run (obligatory)
[parameters]: parameters to be passed to internal or external command;
* for internal parameters and flags, see below
* for external parameters and flags, refer to the documentation of the respective command
""", }
doc_settings.push { header: "meta", optionList: X.meta, }
for cmd in X.internals
doc_settings.push { header: "Internal command: #{cmd.name}", optionList: cmd, }
if ( Object.keys X.externals ).length > 0
descriptions = []
for cmd in X.externals
descriptions.push { content: "#{cmd.name}: #{cmd.description ? '???'}", }
doc_settings.push { header: "External commands: ", content: descriptions, }
return '\n' + commandLineUsage doc_settings
#-----------------------------------------------------------------------------------------------------------
show_help_for_topic_and_exit = ( q, argv ) ->
if argv.length > 0
return show_help_and_exit 113, "^cli@5478^ extraneous arguments #{rpr argv}"
switch q.parameters.topic
when null, undefined
return show_help_and_exit 0
when 'topics'
echo CND.blue "(this should be a list of topics)"
process.exit 0
when 'help'
### TAINT use custom function to output help ###
echo CND.blue """\n`node #{relpath} help [topic]`:\nget help about `topic`\n"""
process.exit 0
show_help_and_exit 120, "^cli@5887^ unknown help topic #{rpr q.parameters.topic}"
#-----------------------------------------------------------------------------------------------------------
show_help_and_exit = ( code = 0, message = null ) ->
usage = generate_documentation()
usage = '\n' + ( CND.blue usage ) + '\n'
usage += '\n' + ( CND.red message ) + '\n' if message?
echo usage
process.exit code
#-----------------------------------------------------------------------------------------------------------
show_help_and_exit = ( code = 0, message = null ) ->
usage = generate_documentation()
usage = '\n' + ( CND.blue usage ) + '\n'
usage += '\n' + ( CND.red message ) + '\n' if message?
echo usage
process.exit code
#-----------------------------------------------------------------------------------------------------------
show_cat_and_exit = ->
echo()
echo CND.white CND.reverse CND.bold """ |\\ _,,,---,,_ """
echo CND.white CND.reverse CND.bold """ ZZZzz /,`.-'`' -. ;-;;,_ """
echo CND.white CND.reverse CND.bold """ |,4- ) )-,_. ,\\ ( `'-' """
echo CND.white CND.reverse CND.bold """ '---''(_/--' `-'\\_) PI:NAME:<NAME>END_PI """
echo()
process.exit 0
#-----------------------------------------------------------------------------------------------------------
@cli = ( argv = null ) ->
#---------------------------------------------------------------------------------------------------------
q =
trace: false # place under `meta`
help: false # place under `meta`
testing: argv? # place under `meta`
stage: null
cmd: null
parameters: {}
#---------------------------------------------------------------------------------------------------------
# Stage: Metaflags
#.........................................................................................................
q.stage = 'meta'
argv = argv ? process.argv
d = X.meta
s = { argv, stopAtFirstUnknown: true, }
p = parse_argv d, s
argv = pluck p, '_unknown', []
q.help = pluck p, 'help', false
q.trace = pluck p, 'trace', false
q.cd = pluck p, 'cd', null
urge "Stage: Metaflags", { q, argv, } if q.trace
return show_help_and_exit 0 if q.help
return show_help_and_exit 112, "^cli@5598^ extraneous flag #{rpr flag}" if ( flag = argv[ 0 ] )?.startsWith '-'
#---------------------------------------------------------------------------------------------------------
if q.cd?
process.chdir q.cd
urge CND.yellow "working directory is now #{process.cwd()}" if q.trace
#---------------------------------------------------------------------------------------------------------
# Stage: Internal Commands
# Internal commands must parse their specific flags and other arguments.
#.........................................................................................................
q.stage = 'internal'
d = { name: 'cmd', defaultOption: true, }
p = parse_argv d, { argv, stopAtFirstUnknown: true, }
q.cmd = pluck p, 'cmd', null
argv = pluck p, '_unknown', []
urge "Stage: Commands", { q, argv, } if q.trace
return show_help_and_exit 114, "^cli@5479^ missing command" unless q.cmd?
#.........................................................................................................
switch q.cmd
when 'help'
d = X.internals.help
p = parse_argv d, { argv, stopAtFirstUnknown: true, }
q.parameters.topic = pluck p, 'topic', null
argv = pluck p, '_unknown', []
urge "running internal command `help`", { q, argv, } if q.trace
return show_help_for_topic_and_exit q, argv
when 'cat'
return show_cat_and_exit()
#---------------------------------------------------------------------------------------------------------
# Stage: External Commands
#.........................................................................................................
# External commands call a child process that is passed the remaing command line arguments, so those
# can be dealt with summarily.
#.........................................................................................................
q.stage = 'external'
p = parse_argv [], { argv, stopAtFirstUnknown: true, }
argv = pluck p, '_unknown', []
q.parameters.argv = argv[ .. ]
urge "Stage: External Commands", { q, argv, } if q.trace
### TAINT derive list from settings ###
if q.cmd in [ 'psql', 'node', 'nodexh', ]
return q
q.error = { code: 115, message: "^cli@5480^ Unknown command #{CND.reverse rpr q.cmd}", }
#-----------------------------------------------------------------------------------------------------------
run_external_command = ->
# #.........................................................................................................
# switch q.cmd
# #-------------------------------------------------------------------------------------------------------
# when 'psql'
# urge "running external command #{get_cmd_literal q.cmd, argv}" if q.trace
# return resolve()
# #-------------------------------------------------------------------------------------------------------
# when 'nodexh', 'node'
# urge "running external command #{get_cmd_literal q.cmd, argv}" if q.trace
# return resolve()
#.........................................................................................................
#-----------------------------------------------------------------------------------------------------------
compile_settings = ( dft, usr ) ->
meta = []
internals = []
externals = []
R = { meta, internals, externals, }
#.........................................................................................................
validate.object usr.meta if usr.meta?
for name, description of Object.assign {}, dft.meta, usr.meta
throw Error "^cli@5587^ must not have attribute name, got #{rpr description}" if description.name?
meta.push lets description, ( d ) -> d.name = name
#.........................................................................................................
validate.object usr.commands if usr.commands?
for name, description of Object.assign {}, dft.commands, usr.commands
throw Error "^cli@5588^ must not have attribute name, got #{rpr description}" if description.name?
is_external = false
e = lets description, ( d ) ->
d.name = name
is_external = pluck d, 'external', false
if is_external then externals.push e
else internals.push e
#.........................................................................................................
return freeze R
#-----------------------------------------------------------------------------------------------------------
default_settings = freeze {
meta:
help: { alias: 'h', type: Boolean, description: "show help and exit", }
cd: { alias: 'd', type: String, description: "change to directory before running command", }
trace: { alias: 't', type: Boolean, description: "trace options parsing (for debugging)", }
commands:
cat: { description: "draw a cat", }
version: { description: "show project version and exit", }
}
#-----------------------------------------------------------------------------------------------------------
user_settings = freeze {
# meta:
# internal:
commands:
psql: { external: true, description: "use `psql` to run SQL", }
node: { external: true, description: "use `node` to run JS", }
nodexh: { external: true, description: "use `nodexh` to run JS", }
}
X = compile_settings default_settings, user_settings
debug '^6767^', JSON.stringify X, null, ' '
############################################################################################################
if module is require.main then do =>
debug '^3387^', @cli()
# debug await @cli [ '-t', null, '-t', ]
# {
# header: 'Typical Example',
# content: 'A simple example demonstrating typical usage.'
# },
# {
# content: 'Project home: {underline https://github.com/me/example}'
# }
|
[
{
"context": "=================================\n# Copyright 2014 Hatio, Lab.\n# Licensed under The MIT License\n# http://o",
"end": 67,
"score": 0.6368700265884399,
"start": 62,
"tag": "NAME",
"value": "Hatio"
}
] | src/spec/SpecPresenter.coffee | heartyoh/infopik | 0 | # ==========================================
# Copyright 2014 Hatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'KineticJS',
'./SpecInfographic',
'./SpecContentViewLayer',
'./SpecGroup',
'./SpecRect'
'./SpecRing'
'./SpecRuler'
], (
kin
SpecInfographic
SpecContentViewLayer
SpecGroup
SpecRect
SpecRing
SpecRuler
) ->
"use strict"
createView = (attributes) ->
return new kin.Stage(attributes)
{
type: 'presenter-app'
name: 'presenter-app'
containable: true
container_type: 'application'
description: 'Presenter Application Specification'
defaults: {
}
view_factory_fn: createView
dependencies: {
'infographic' : SpecInfographic
'content-view-layer' : SpecContentViewLayer
'group' : SpecGroup
'rect' : SpecRect
'ring' : SpecRing
'ruler' : SpecRuler
}
layers : [{
type: 'content-view-layer'
attrs: {}
}]
toolbox_image: 'images/toolbox_presenter_app.png'
}
| 91633 | # ==========================================
# Copyright 2014 <NAME>, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'KineticJS',
'./SpecInfographic',
'./SpecContentViewLayer',
'./SpecGroup',
'./SpecRect'
'./SpecRing'
'./SpecRuler'
], (
kin
SpecInfographic
SpecContentViewLayer
SpecGroup
SpecRect
SpecRing
SpecRuler
) ->
"use strict"
createView = (attributes) ->
return new kin.Stage(attributes)
{
type: 'presenter-app'
name: 'presenter-app'
containable: true
container_type: 'application'
description: 'Presenter Application Specification'
defaults: {
}
view_factory_fn: createView
dependencies: {
'infographic' : SpecInfographic
'content-view-layer' : SpecContentViewLayer
'group' : SpecGroup
'rect' : SpecRect
'ring' : SpecRing
'ruler' : SpecRuler
}
layers : [{
type: 'content-view-layer'
attrs: {}
}]
toolbox_image: 'images/toolbox_presenter_app.png'
}
| true | # ==========================================
# Copyright 2014 PI:NAME:<NAME>END_PI, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'KineticJS',
'./SpecInfographic',
'./SpecContentViewLayer',
'./SpecGroup',
'./SpecRect'
'./SpecRing'
'./SpecRuler'
], (
kin
SpecInfographic
SpecContentViewLayer
SpecGroup
SpecRect
SpecRing
SpecRuler
) ->
"use strict"
createView = (attributes) ->
return new kin.Stage(attributes)
{
type: 'presenter-app'
name: 'presenter-app'
containable: true
container_type: 'application'
description: 'Presenter Application Specification'
defaults: {
}
view_factory_fn: createView
dependencies: {
'infographic' : SpecInfographic
'content-view-layer' : SpecContentViewLayer
'group' : SpecGroup
'rect' : SpecRect
'ring' : SpecRing
'ruler' : SpecRuler
}
layers : [{
type: 'content-view-layer'
attrs: {}
}]
toolbox_image: 'images/toolbox_presenter_app.png'
}
|
[
{
"context": "ing-a-grammar/\n\nscopeName: 'source.regent'\nname: 'Regent'\ntype: 'tree-sitter'\nparser: 'tree-sitter-regent'",
"end": 187,
"score": 0.836118757724762,
"start": 181,
"tag": "NAME",
"value": "Regent"
}
] | grammars/language-regent.cson | ellishg/language-regent | 0 | # If this is your first time writing a language grammar, check out:
# - https://flight-manual.atom.io/hacking-atom/sections/creating-a-grammar/
scopeName: 'source.regent'
name: 'Regent'
type: 'tree-sitter'
parser: 'tree-sitter-regent'
fileTypes: ['rg']
comments:
start: '-- '
scopes:
# Regent
'task > name': 'entity.name.function'
'task_parameter > name': 'constant.variable'
'terra > name': 'entity.name.function'
'terra_parameter > name': 'constant.variable'
'''
fspace > name,
struct > name
''': 'entity.name.class'
'''
fspace_argument > name,
fspace_field > name,
struct_field > name
''': 'entity.name.variable'
'var_statement > name': 'constant.variable'
'region_field_identifier > identifier': 'constant.variable'
'type_identifier': 'support.storage.type'
'regent_primitive_type': 'support.storage.type'
'''
"terra",
"task",
"fspace",
"struct",
"region",
"ispace",
"ptr",
"var",
"where",
"reads",
"writes",
"reduces",
"exclusive",
"atomic",
"simultaneous",
"relaxed",
"equal",
"disjoint",
"aliased",
"__leaf",
"__inner",
"__idempotent",
"__replicable",
"__inline",
"__parallel",
"__index_launch",
"__vectorize",
"__spmd",
"__trace",
"__cuda",
"__openmp"
''': 'keyword.control'
'''
"min",
"max",
"copy",
"fill",
"partition",
"image",
"preimage",
"__demand",
"__forbid"
''': 'keyword.operators'
'"wild"': 'constant.language'
'":"': 'meta.delimiter'
# Lua
"comment": "comment"
"string": "string"
"number": "constant.numeric"
'''
function > function_name > identifier,
function > function_name > property_identifier,
function > function_name > method,
local_function > identifier,
function_call > identifier,
function_call > property_identifier,
function_call > method,
next
''': "entity.name.function"
'''
parameters > identifier,
spread
''': "variable.parameter"
'''
self,
global_variable
''': "variable.language"
"nil": "constant.language"
"true": "constant.language.boolean"
"false": "constant.language.boolean"
"'local'": "storage.modifier"
"'function'": "storage.type.function"
"label_statement": "storage.type.label"
"'{'": "punctuation.definition.table.begin.bracket.curly"
"'}'": "punctuation.definition.table.end.bracket.curly"
'''
"do",
"return",
"if",
"then",
"elseif",
"else",
"while",
"repeat",
"until",
"for",
"in",
"goto",
"end"
''': "keyword.control"
'''
"or",
"and",
"not"
''': "keyword.operator.logical"
'''
"=",
"<",
"<=",
"==",
"~=",
">=",
">",
"|",
"~",
"&",
"<<",
">>",
"+",
"-",
"*",
"/",
"//",
"%",
"..",
"^",
"#"
''': "keyword.operator"
'''
".",
",",
";"
''': "meta.delimiter"
folds: [
# Regent
{
type: 'task',
start: {type: '")"'},
end: {type: '"end"'}
}
{
type: 'fspace',
start: {type: '"{"'},
end: {type: '"}"'}
}
# Lua
{
type: [
"do_statement",
"while_statement",
"repeat_statement",
"for_statement",
"for_in_statement",
"function",
"local_function",
"function_definition",
"table"
],
end: {index: -1}
},
{
type: [
"if_statement",
"elseif"
],
start: {index: 1},
end: {type: ["else", "elseif"]}
},
{
type: "if_statement",
start: {index: 1},
end: {index: -1}
},
{
type: "elseif",
start: {index: 1}
},
{
type: "else",
start: {index: 0}
}
]
| 56752 | # If this is your first time writing a language grammar, check out:
# - https://flight-manual.atom.io/hacking-atom/sections/creating-a-grammar/
scopeName: 'source.regent'
name: '<NAME>'
type: 'tree-sitter'
parser: 'tree-sitter-regent'
fileTypes: ['rg']
comments:
start: '-- '
scopes:
# Regent
'task > name': 'entity.name.function'
'task_parameter > name': 'constant.variable'
'terra > name': 'entity.name.function'
'terra_parameter > name': 'constant.variable'
'''
fspace > name,
struct > name
''': 'entity.name.class'
'''
fspace_argument > name,
fspace_field > name,
struct_field > name
''': 'entity.name.variable'
'var_statement > name': 'constant.variable'
'region_field_identifier > identifier': 'constant.variable'
'type_identifier': 'support.storage.type'
'regent_primitive_type': 'support.storage.type'
'''
"terra",
"task",
"fspace",
"struct",
"region",
"ispace",
"ptr",
"var",
"where",
"reads",
"writes",
"reduces",
"exclusive",
"atomic",
"simultaneous",
"relaxed",
"equal",
"disjoint",
"aliased",
"__leaf",
"__inner",
"__idempotent",
"__replicable",
"__inline",
"__parallel",
"__index_launch",
"__vectorize",
"__spmd",
"__trace",
"__cuda",
"__openmp"
''': 'keyword.control'
'''
"min",
"max",
"copy",
"fill",
"partition",
"image",
"preimage",
"__demand",
"__forbid"
''': 'keyword.operators'
'"wild"': 'constant.language'
'":"': 'meta.delimiter'
# Lua
"comment": "comment"
"string": "string"
"number": "constant.numeric"
'''
function > function_name > identifier,
function > function_name > property_identifier,
function > function_name > method,
local_function > identifier,
function_call > identifier,
function_call > property_identifier,
function_call > method,
next
''': "entity.name.function"
'''
parameters > identifier,
spread
''': "variable.parameter"
'''
self,
global_variable
''': "variable.language"
"nil": "constant.language"
"true": "constant.language.boolean"
"false": "constant.language.boolean"
"'local'": "storage.modifier"
"'function'": "storage.type.function"
"label_statement": "storage.type.label"
"'{'": "punctuation.definition.table.begin.bracket.curly"
"'}'": "punctuation.definition.table.end.bracket.curly"
'''
"do",
"return",
"if",
"then",
"elseif",
"else",
"while",
"repeat",
"until",
"for",
"in",
"goto",
"end"
''': "keyword.control"
'''
"or",
"and",
"not"
''': "keyword.operator.logical"
'''
"=",
"<",
"<=",
"==",
"~=",
">=",
">",
"|",
"~",
"&",
"<<",
">>",
"+",
"-",
"*",
"/",
"//",
"%",
"..",
"^",
"#"
''': "keyword.operator"
'''
".",
",",
";"
''': "meta.delimiter"
folds: [
# Regent
{
type: 'task',
start: {type: '")"'},
end: {type: '"end"'}
}
{
type: 'fspace',
start: {type: '"{"'},
end: {type: '"}"'}
}
# Lua
{
type: [
"do_statement",
"while_statement",
"repeat_statement",
"for_statement",
"for_in_statement",
"function",
"local_function",
"function_definition",
"table"
],
end: {index: -1}
},
{
type: [
"if_statement",
"elseif"
],
start: {index: 1},
end: {type: ["else", "elseif"]}
},
{
type: "if_statement",
start: {index: 1},
end: {index: -1}
},
{
type: "elseif",
start: {index: 1}
},
{
type: "else",
start: {index: 0}
}
]
| true | # If this is your first time writing a language grammar, check out:
# - https://flight-manual.atom.io/hacking-atom/sections/creating-a-grammar/
scopeName: 'source.regent'
name: 'PI:NAME:<NAME>END_PI'
type: 'tree-sitter'
parser: 'tree-sitter-regent'
fileTypes: ['rg']
comments:
start: '-- '
scopes:
# Regent
'task > name': 'entity.name.function'
'task_parameter > name': 'constant.variable'
'terra > name': 'entity.name.function'
'terra_parameter > name': 'constant.variable'
'''
fspace > name,
struct > name
''': 'entity.name.class'
'''
fspace_argument > name,
fspace_field > name,
struct_field > name
''': 'entity.name.variable'
'var_statement > name': 'constant.variable'
'region_field_identifier > identifier': 'constant.variable'
'type_identifier': 'support.storage.type'
'regent_primitive_type': 'support.storage.type'
'''
"terra",
"task",
"fspace",
"struct",
"region",
"ispace",
"ptr",
"var",
"where",
"reads",
"writes",
"reduces",
"exclusive",
"atomic",
"simultaneous",
"relaxed",
"equal",
"disjoint",
"aliased",
"__leaf",
"__inner",
"__idempotent",
"__replicable",
"__inline",
"__parallel",
"__index_launch",
"__vectorize",
"__spmd",
"__trace",
"__cuda",
"__openmp"
''': 'keyword.control'
'''
"min",
"max",
"copy",
"fill",
"partition",
"image",
"preimage",
"__demand",
"__forbid"
''': 'keyword.operators'
'"wild"': 'constant.language'
'":"': 'meta.delimiter'
# Lua
"comment": "comment"
"string": "string"
"number": "constant.numeric"
'''
function > function_name > identifier,
function > function_name > property_identifier,
function > function_name > method,
local_function > identifier,
function_call > identifier,
function_call > property_identifier,
function_call > method,
next
''': "entity.name.function"
'''
parameters > identifier,
spread
''': "variable.parameter"
'''
self,
global_variable
''': "variable.language"
"nil": "constant.language"
"true": "constant.language.boolean"
"false": "constant.language.boolean"
"'local'": "storage.modifier"
"'function'": "storage.type.function"
"label_statement": "storage.type.label"
"'{'": "punctuation.definition.table.begin.bracket.curly"
"'}'": "punctuation.definition.table.end.bracket.curly"
'''
"do",
"return",
"if",
"then",
"elseif",
"else",
"while",
"repeat",
"until",
"for",
"in",
"goto",
"end"
''': "keyword.control"
'''
"or",
"and",
"not"
''': "keyword.operator.logical"
'''
"=",
"<",
"<=",
"==",
"~=",
">=",
">",
"|",
"~",
"&",
"<<",
">>",
"+",
"-",
"*",
"/",
"//",
"%",
"..",
"^",
"#"
''': "keyword.operator"
'''
".",
",",
";"
''': "meta.delimiter"
folds: [
# Regent
{
type: 'task',
start: {type: '")"'},
end: {type: '"end"'}
}
{
type: 'fspace',
start: {type: '"{"'},
end: {type: '"}"'}
}
# Lua
{
type: [
"do_statement",
"while_statement",
"repeat_statement",
"for_statement",
"for_in_statement",
"function",
"local_function",
"function_definition",
"table"
],
end: {index: -1}
},
{
type: [
"if_statement",
"elseif"
],
start: {index: 1},
end: {type: ["else", "elseif"]}
},
{
type: "if_statement",
start: {index: 1},
end: {index: -1}
},
{
type: "elseif",
start: {index: 1}
},
{
type: "else",
start: {index: 0}
}
]
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9955840110778809,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/newProgNotePage/eventTabView.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Read/Write event information view contained within eventTab
Imm = require 'immutable'
Moment = require 'moment'
nlp = require 'nlp_compromise'
Term = require '../term'
{TimestampFormat} = require '../persist/utils'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
Bootbox = win.bootbox
Dialog = require('../dialog').load(win)
WithTooltip = require('../withTooltip').load(win)
OpenDialogLink = require('../openDialogLink').load(win)
ColorKeyBubble = require('../colorKeyBubble').load(win)
ProgramsDropdown = require('../programsDropdown').load(win)
TimeSpanSelection = require('../timeSpanSelection').load(win)
ExpandingTextArea = require('../expandingTextArea').load(win)
EventTypesDropdown = require('../eventTypesDropdown').load(win)
{FaIcon, renderName, showWhen, formatTimestamp, renderTimeSpan, makeMoment} = require('../utils').load(win)
EventTabView = React.createFactory React.createClass
displayName: 'EventTabView'
mixins: [React.addons.PureRenderMixin]
componentDidMount: ->
@refs.description.focus()
componentDidUpdate: (oldProps, oldState) ->
if (@props.isBeingEdited isnt oldProps.isBeingEdited) and @props.isBeingEdited
@refs.description.focus()
getInitialState: ->
# Use progNote's back/date to start (full-day event)
startingDate = if @props.backdate then makeMoment(@props.backdate) else Moment()
startTimestamp = startingDate.startOf('day').format(TimestampFormat)
endTimestamp = startingDate.endOf('day').format(TimestampFormat)
state = {
progEvent: Imm.Map {
title: ''
description: ''
typeId: ''
startTimestamp
endTimestamp
}
isGlobalEvent: null
}
@initialState = state # Cache for later comparisons
return state
render: ->
progEvent = @state.progEvent
typeId = progEvent.get 'typeId'
selectedEventType = @props.eventTypes.find (type) => type.get('id') is typeId
selectedEventTypeName = if selectedEventType then selectedEventType.get('name') else null
formIsValid = @_formIsValid()
hasChanges = @_hasChanges()
return R.div({
className: [
'eventView'
showWhen @props.isBeingEdited or not @props.editMode
].join ' '
},
R.form({className: showWhen @props.isBeingEdited},
R.button({
className: 'btn btn-danger closeButton'
onClick: @_closeForm.bind null, hasChanges
},
FaIcon('times')
)
(if not @props.eventTypes.isEmpty()
R.div({className: 'form-group titleContainer'},
R.div({},
R.label({}, Term 'Event Type')
R.div({},
EventTypesDropdown({
selectedEventType
eventTypes: @props.eventTypes
onSelect: @_updateTypeId
typeId
})
)
)
)
)
R.div({className: 'form-group'},
R.label({}, "Description")
ExpandingTextArea({
ref: 'description'
value: progEvent.get('description')
height: 100
onChange: @_updateDescription
placeholder: if not @props.eventTypes.isEmpty() then "Describe details (optional)" else "Describe details"
})
)
R.div({className: 'globalEventContainer'},
WithTooltip({
title: "#{Term 'Client'} must be assigned to 1 or more #{Term 'programs'}"
showTooltip: @props.clientPrograms.isEmpty()
placement: 'left'
},
R.div({
className: [
'checkbox'
'disabled' if @props.clientPrograms.isEmpty()
].join ' '
},
R.label({},
R.input({
disabled: @props.clientPrograms.isEmpty()
type: 'checkbox'
onClick: @_toggleIsGlobalEvent
checked: @state.isGlobalEvent
})
"Make this a #{Term 'global event'}"
(unless @props.clientPrograms.isEmpty()
WithTooltip({
title: "A copy of this #{Term 'event'}
will visible to all #{Term 'client files'}"
},
FaIcon('question-circle')
)
)
)
)
)
)
TimeSpanSelection({
startTimestamp: progEvent.get('startTimestamp')
endTimestamp: progEvent.get('endTimestamp')
updateTimestamps: @_updateTimestamps
widgetPositioning: {
horizontal: 'right'
vertical: 'top'
}
})
R.div({className: 'btn-toolbar'},
# TODO: Refactor to something more generic
(if @state.isGlobalEvent
OpenDialogLink({
dialog: AmendGlobalEventDialog
progEvent
clientFileId: @props.clientFileId
clientPrograms: @props.clientPrograms
onSuccess: @_saveProgEvent
},
R.button({
className: 'btn btn-success btn-block'
type: 'submit'
disabled: not formIsValid or not hasChanges
},
"Save "
FaIcon('check')
)
)
else
R.button({
className: 'btn btn-success btn-block'
type: 'submit'
onClick: @_submit
disabled: not formIsValid or not hasChanges
},
"Save "
FaIcon('check')
)
)
)
)
R.div({className: "details #{showWhen not @props.isBeingEdited}"},
R.div({className: 'title'}, @props.progEvent.get('title') or selectedEventTypeName)
R.div({className: 'description'}, @props.progEvent.get('description'))
R.div({className: 'timeSpan'},
renderTimeSpan(
@props.progEvent.get('startTimestamp'), @props.progEvent.get('endTimestamp')
)
)
)
)
_updateTitle: (event) ->
progEvent = @state.progEvent.set 'title', event.target.value
@setState {progEvent}
_updateDescription: (event) ->
progEvent = @state.progEvent.set 'description', event.target.value
@setState {progEvent}
_updateTypeId: (typeId) ->
progEvent = @state.progEvent
.set 'typeId', typeId
.set 'title', '' # EventType takes place of 'title'
@setState {progEvent}
_updateTimestamps: ({startTimestamp, endTimestamp}) ->
progEvent = @state.progEvent
if startTimestamp?
progEvent = progEvent.set 'startTimestamp', startTimestamp
if endTimestamp?
progEvent = progEvent.set 'endTimestamp', endTimestamp
@setState {progEvent}
_formIsValid: ->
description = @state.progEvent.get('description')
hasDescription = if description then description.trim() else description
hasEventTypeId = @state.progEvent.get('typeId')
# Needs to have a description or eventType
return !!(hasDescription or hasEventTypeId)
_hasChanges: ->
if not @initialState then return false # Make sure initialState is mounted
return !!@state.isGlobalEvent or not Imm.is @state.progEvent, @initialState.progEvent
_toggleIsGlobalEvent: ->
@setState {isGlobalEvent: not @state.isGlobalEvent}
_closeForm: (hasChanges, event) ->
event.preventDefault()
if hasChanges
Bootbox.confirm "Cancel #{Term 'event'} editing?", (ok) =>
if ok
@_resetProgEvent()
else
@_resetProgEvent()
_resetProgEvent: ->
@setState {progEvent: @props.progEvent}, =>
@props.cancel()
_submit: (event) ->
event.preventDefault()
@_saveProgEvent @state.progEvent
_saveProgEvent: (progEvent) ->
# Axe the title if an eventType is being used instead (#871)
# Otherwise, make sure typeId isn't null
if !!progEvent.get('typeId')
progEvent = progEvent.set 'title', ''
else
progEvent = progEvent.set 'typeId', ''
@props.saveProgEvent progEvent
AmendGlobalEventDialog = React.createFactory React.createClass
displayName: 'AmendGlobalEventDialog'
mixins: [React.addons.PureRenderMixin]
getInitialState: ->
# Use client's program if has only 1
# Otherwise use the program that matches userProgramId
# Else, user must select program from list
userProgramId = global.ActiveSession.programId
programId = ''
@programSelectionRequired = false
clientHasPrograms = not @props.clientPrograms.isEmpty()
if clientHasPrograms
if @props.clientPrograms.size is 1
programId = @props.clientPrograms.first().get('id')
else
clientIsInUserProgram = @props.clientPrograms.some (p) -> p.get('id') is userProgramId
if clientIsInUserProgram
programId = userProgramId
else
@programSelectionRequired = true
return {
title: @props.progEvent.get('title') or ''
description: @props.progEvent.get('description') or ''
typeId: @props.progEvent.get('typeId') or ''
programId
}
propTypes: {
progEvent: React.PropTypes.instanceOf(Imm.Map).isRequired
clientFileId: React.PropTypes.string.isRequired
clientPrograms: React.PropTypes.instanceOf(Imm.List).isRequired
}
render: ->
flaggedNames = @_generateFlaggedNames()
selectedProgram = @props.clientPrograms.find (p) => p.get('id') is @state.programId
return Dialog({
ref: 'dialog'
title: "Amend #{Term 'Global Event'}"
onClose: @props.onClose
},
R.div({className: 'amendGlobalEventDialog'},
R.p({},
"Please remove any sensitive and/or #{Term 'client'}-specific information
to be saved in the #{Term 'global event'}."
)
R.p({},
"This information will appear for all #{Term 'client files'}"
(if not @programSelectionRequired and selectedProgram?
R.span({},
" in: "
ColorKeyBubble({
colorKeyHex: selectedProgram.get('colorKeyHex')
})
' '
R.strong({}, selectedProgram.get('name'))
)
else
"in the program you specify."
)
)
(if flaggedNames.length > 0
R.div({className: 'flaggedNames'},
FaIcon('flag')
"Flagged: "
flaggedNames.join ', '
)
)
R.div({className: 'form-group'},
R.label({}, "Description")
ExpandingTextArea({
value: @state.description
onChange: @_updateDescription
placeholder: if @state.typeId then "(optional)" else ''
})
)
(if @programSelectionRequired and @props.clientPrograms.size > 1
R.div({className: 'form-group'},
R.hr({})
R.label({}, "Select a program for this #{Term 'global event'}")
ProgramsDropdown({
selectedProgramId: @state.programId
programs: @props.clientPrograms
onSelect: @_updateProgram
excludeNone: true
})
R.hr({})
)
)
R.div({className: 'btn-toolbar pull-right'},
R.button({
className: 'btn btn-default'
onClick: @props.onCancel
},
"Cancel"
)
R.button({
className: 'btn btn-success'
onClick: @_submit
disabled: @_formIsInvalid()
},
"Save #{Term 'Global Event'} "
FaIcon('check')
)
)
)
)
_updateDescription: (event) ->
description = event.target.value
@setState {description}
_updateProgram: (programId) ->
@setState {programId}
_formIsInvalid: ->
return not (@state.description or @state.typeId) or (@programSelectionRequired and not @state.programId)
_generateFlaggedNames: ->
# TODO: Process the title as well?
people = nlp.text(@props.progEvent.get('description')).people()
names = []
for i of people
names.push(people[i].normal) unless people[i].pos.Pronoun
return names
_submit: (event) ->
event.preventDefault()
# Set up globalEvent object
globalEvent = @props.progEvent
.set 'title', @state.title
.set 'description', @state.description
.set 'clientFileId', @props.clientFileId
.set 'programId', @state.programId
# Attach globalEvent as a property of the progEvent,
# which will get extracted during final save process
progEvent = @props.progEvent.set 'globalEvent', globalEvent
@props.onSuccess(progEvent)
return EventTabView
module.exports = {load}
| 14402 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Read/Write event information view contained within eventTab
Imm = require 'immutable'
Moment = require 'moment'
nlp = require 'nlp_compromise'
Term = require '../term'
{TimestampFormat} = require '../persist/utils'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
Bootbox = win.bootbox
Dialog = require('../dialog').load(win)
WithTooltip = require('../withTooltip').load(win)
OpenDialogLink = require('../openDialogLink').load(win)
ColorKeyBubble = require('../colorKeyBubble').load(win)
ProgramsDropdown = require('../programsDropdown').load(win)
TimeSpanSelection = require('../timeSpanSelection').load(win)
ExpandingTextArea = require('../expandingTextArea').load(win)
EventTypesDropdown = require('../eventTypesDropdown').load(win)
{FaIcon, renderName, showWhen, formatTimestamp, renderTimeSpan, makeMoment} = require('../utils').load(win)
EventTabView = React.createFactory React.createClass
displayName: 'EventTabView'
mixins: [React.addons.PureRenderMixin]
componentDidMount: ->
@refs.description.focus()
componentDidUpdate: (oldProps, oldState) ->
if (@props.isBeingEdited isnt oldProps.isBeingEdited) and @props.isBeingEdited
@refs.description.focus()
getInitialState: ->
# Use progNote's back/date to start (full-day event)
startingDate = if @props.backdate then makeMoment(@props.backdate) else Moment()
startTimestamp = startingDate.startOf('day').format(TimestampFormat)
endTimestamp = startingDate.endOf('day').format(TimestampFormat)
state = {
progEvent: Imm.Map {
title: ''
description: ''
typeId: ''
startTimestamp
endTimestamp
}
isGlobalEvent: null
}
@initialState = state # Cache for later comparisons
return state
render: ->
progEvent = @state.progEvent
typeId = progEvent.get 'typeId'
selectedEventType = @props.eventTypes.find (type) => type.get('id') is typeId
selectedEventTypeName = if selectedEventType then selectedEventType.get('name') else null
formIsValid = @_formIsValid()
hasChanges = @_hasChanges()
return R.div({
className: [
'eventView'
showWhen @props.isBeingEdited or not @props.editMode
].join ' '
},
R.form({className: showWhen @props.isBeingEdited},
R.button({
className: 'btn btn-danger closeButton'
onClick: @_closeForm.bind null, hasChanges
},
FaIcon('times')
)
(if not @props.eventTypes.isEmpty()
R.div({className: 'form-group titleContainer'},
R.div({},
R.label({}, Term 'Event Type')
R.div({},
EventTypesDropdown({
selectedEventType
eventTypes: @props.eventTypes
onSelect: @_updateTypeId
typeId
})
)
)
)
)
R.div({className: 'form-group'},
R.label({}, "Description")
ExpandingTextArea({
ref: 'description'
value: progEvent.get('description')
height: 100
onChange: @_updateDescription
placeholder: if not @props.eventTypes.isEmpty() then "Describe details (optional)" else "Describe details"
})
)
R.div({className: 'globalEventContainer'},
WithTooltip({
title: "#{Term 'Client'} must be assigned to 1 or more #{Term 'programs'}"
showTooltip: @props.clientPrograms.isEmpty()
placement: 'left'
},
R.div({
className: [
'checkbox'
'disabled' if @props.clientPrograms.isEmpty()
].join ' '
},
R.label({},
R.input({
disabled: @props.clientPrograms.isEmpty()
type: 'checkbox'
onClick: @_toggleIsGlobalEvent
checked: @state.isGlobalEvent
})
"Make this a #{Term 'global event'}"
(unless @props.clientPrograms.isEmpty()
WithTooltip({
title: "A copy of this #{Term 'event'}
will visible to all #{Term 'client files'}"
},
FaIcon('question-circle')
)
)
)
)
)
)
TimeSpanSelection({
startTimestamp: progEvent.get('startTimestamp')
endTimestamp: progEvent.get('endTimestamp')
updateTimestamps: @_updateTimestamps
widgetPositioning: {
horizontal: 'right'
vertical: 'top'
}
})
R.div({className: 'btn-toolbar'},
# TODO: Refactor to something more generic
(if @state.isGlobalEvent
OpenDialogLink({
dialog: AmendGlobalEventDialog
progEvent
clientFileId: @props.clientFileId
clientPrograms: @props.clientPrograms
onSuccess: @_saveProgEvent
},
R.button({
className: 'btn btn-success btn-block'
type: 'submit'
disabled: not formIsValid or not hasChanges
},
"Save "
FaIcon('check')
)
)
else
R.button({
className: 'btn btn-success btn-block'
type: 'submit'
onClick: @_submit
disabled: not formIsValid or not hasChanges
},
"Save "
FaIcon('check')
)
)
)
)
R.div({className: "details #{showWhen not @props.isBeingEdited}"},
R.div({className: 'title'}, @props.progEvent.get('title') or selectedEventTypeName)
R.div({className: 'description'}, @props.progEvent.get('description'))
R.div({className: 'timeSpan'},
renderTimeSpan(
@props.progEvent.get('startTimestamp'), @props.progEvent.get('endTimestamp')
)
)
)
)
_updateTitle: (event) ->
progEvent = @state.progEvent.set 'title', event.target.value
@setState {progEvent}
_updateDescription: (event) ->
progEvent = @state.progEvent.set 'description', event.target.value
@setState {progEvent}
_updateTypeId: (typeId) ->
progEvent = @state.progEvent
.set 'typeId', typeId
.set 'title', '' # EventType takes place of 'title'
@setState {progEvent}
_updateTimestamps: ({startTimestamp, endTimestamp}) ->
progEvent = @state.progEvent
if startTimestamp?
progEvent = progEvent.set 'startTimestamp', startTimestamp
if endTimestamp?
progEvent = progEvent.set 'endTimestamp', endTimestamp
@setState {progEvent}
_formIsValid: ->
description = @state.progEvent.get('description')
hasDescription = if description then description.trim() else description
hasEventTypeId = @state.progEvent.get('typeId')
# Needs to have a description or eventType
return !!(hasDescription or hasEventTypeId)
_hasChanges: ->
if not @initialState then return false # Make sure initialState is mounted
return !!@state.isGlobalEvent or not Imm.is @state.progEvent, @initialState.progEvent
_toggleIsGlobalEvent: ->
@setState {isGlobalEvent: not @state.isGlobalEvent}
_closeForm: (hasChanges, event) ->
event.preventDefault()
if hasChanges
Bootbox.confirm "Cancel #{Term 'event'} editing?", (ok) =>
if ok
@_resetProgEvent()
else
@_resetProgEvent()
_resetProgEvent: ->
@setState {progEvent: @props.progEvent}, =>
@props.cancel()
_submit: (event) ->
event.preventDefault()
@_saveProgEvent @state.progEvent
_saveProgEvent: (progEvent) ->
# Axe the title if an eventType is being used instead (#871)
# Otherwise, make sure typeId isn't null
if !!progEvent.get('typeId')
progEvent = progEvent.set 'title', ''
else
progEvent = progEvent.set 'typeId', ''
@props.saveProgEvent progEvent
AmendGlobalEventDialog = React.createFactory React.createClass
displayName: 'AmendGlobalEventDialog'
mixins: [React.addons.PureRenderMixin]
getInitialState: ->
# Use client's program if has only 1
# Otherwise use the program that matches userProgramId
# Else, user must select program from list
userProgramId = global.ActiveSession.programId
programId = ''
@programSelectionRequired = false
clientHasPrograms = not @props.clientPrograms.isEmpty()
if clientHasPrograms
if @props.clientPrograms.size is 1
programId = @props.clientPrograms.first().get('id')
else
clientIsInUserProgram = @props.clientPrograms.some (p) -> p.get('id') is userProgramId
if clientIsInUserProgram
programId = userProgramId
else
@programSelectionRequired = true
return {
title: @props.progEvent.get('title') or ''
description: @props.progEvent.get('description') or ''
typeId: @props.progEvent.get('typeId') or ''
programId
}
propTypes: {
progEvent: React.PropTypes.instanceOf(Imm.Map).isRequired
clientFileId: React.PropTypes.string.isRequired
clientPrograms: React.PropTypes.instanceOf(Imm.List).isRequired
}
render: ->
flaggedNames = @_generateFlaggedNames()
selectedProgram = @props.clientPrograms.find (p) => p.get('id') is @state.programId
return Dialog({
ref: 'dialog'
title: "Amend #{Term 'Global Event'}"
onClose: @props.onClose
},
R.div({className: 'amendGlobalEventDialog'},
R.p({},
"Please remove any sensitive and/or #{Term 'client'}-specific information
to be saved in the #{Term 'global event'}."
)
R.p({},
"This information will appear for all #{Term 'client files'}"
(if not @programSelectionRequired and selectedProgram?
R.span({},
" in: "
ColorKeyBubble({
colorKeyHex: selectedProgram.get('colorKeyHex')
})
' '
R.strong({}, selectedProgram.get('name'))
)
else
"in the program you specify."
)
)
(if flaggedNames.length > 0
R.div({className: 'flaggedNames'},
FaIcon('flag')
"Flagged: "
flaggedNames.join ', '
)
)
R.div({className: 'form-group'},
R.label({}, "Description")
ExpandingTextArea({
value: @state.description
onChange: @_updateDescription
placeholder: if @state.typeId then "(optional)" else ''
})
)
(if @programSelectionRequired and @props.clientPrograms.size > 1
R.div({className: 'form-group'},
R.hr({})
R.label({}, "Select a program for this #{Term 'global event'}")
ProgramsDropdown({
selectedProgramId: @state.programId
programs: @props.clientPrograms
onSelect: @_updateProgram
excludeNone: true
})
R.hr({})
)
)
R.div({className: 'btn-toolbar pull-right'},
R.button({
className: 'btn btn-default'
onClick: @props.onCancel
},
"Cancel"
)
R.button({
className: 'btn btn-success'
onClick: @_submit
disabled: @_formIsInvalid()
},
"Save #{Term 'Global Event'} "
FaIcon('check')
)
)
)
)
_updateDescription: (event) ->
description = event.target.value
@setState {description}
_updateProgram: (programId) ->
@setState {programId}
_formIsInvalid: ->
return not (@state.description or @state.typeId) or (@programSelectionRequired and not @state.programId)
_generateFlaggedNames: ->
# TODO: Process the title as well?
people = nlp.text(@props.progEvent.get('description')).people()
names = []
for i of people
names.push(people[i].normal) unless people[i].pos.Pronoun
return names
_submit: (event) ->
event.preventDefault()
# Set up globalEvent object
globalEvent = @props.progEvent
.set 'title', @state.title
.set 'description', @state.description
.set 'clientFileId', @props.clientFileId
.set 'programId', @state.programId
# Attach globalEvent as a property of the progEvent,
# which will get extracted during final save process
progEvent = @props.progEvent.set 'globalEvent', globalEvent
@props.onSuccess(progEvent)
return EventTabView
module.exports = {load}
| true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Read/Write event information view contained within eventTab
Imm = require 'immutable'
Moment = require 'moment'
nlp = require 'nlp_compromise'
Term = require '../term'
{TimestampFormat} = require '../persist/utils'
load = (win) ->
$ = win.jQuery
React = win.React
R = React.DOM
Bootbox = win.bootbox
Dialog = require('../dialog').load(win)
WithTooltip = require('../withTooltip').load(win)
OpenDialogLink = require('../openDialogLink').load(win)
ColorKeyBubble = require('../colorKeyBubble').load(win)
ProgramsDropdown = require('../programsDropdown').load(win)
TimeSpanSelection = require('../timeSpanSelection').load(win)
ExpandingTextArea = require('../expandingTextArea').load(win)
EventTypesDropdown = require('../eventTypesDropdown').load(win)
{FaIcon, renderName, showWhen, formatTimestamp, renderTimeSpan, makeMoment} = require('../utils').load(win)
EventTabView = React.createFactory React.createClass
displayName: 'EventTabView'
mixins: [React.addons.PureRenderMixin]
componentDidMount: ->
@refs.description.focus()
componentDidUpdate: (oldProps, oldState) ->
if (@props.isBeingEdited isnt oldProps.isBeingEdited) and @props.isBeingEdited
@refs.description.focus()
getInitialState: ->
# Use progNote's back/date to start (full-day event)
startingDate = if @props.backdate then makeMoment(@props.backdate) else Moment()
startTimestamp = startingDate.startOf('day').format(TimestampFormat)
endTimestamp = startingDate.endOf('day').format(TimestampFormat)
state = {
progEvent: Imm.Map {
title: ''
description: ''
typeId: ''
startTimestamp
endTimestamp
}
isGlobalEvent: null
}
@initialState = state # Cache for later comparisons
return state
render: ->
progEvent = @state.progEvent
typeId = progEvent.get 'typeId'
selectedEventType = @props.eventTypes.find (type) => type.get('id') is typeId
selectedEventTypeName = if selectedEventType then selectedEventType.get('name') else null
formIsValid = @_formIsValid()
hasChanges = @_hasChanges()
return R.div({
className: [
'eventView'
showWhen @props.isBeingEdited or not @props.editMode
].join ' '
},
R.form({className: showWhen @props.isBeingEdited},
R.button({
className: 'btn btn-danger closeButton'
onClick: @_closeForm.bind null, hasChanges
},
FaIcon('times')
)
(if not @props.eventTypes.isEmpty()
R.div({className: 'form-group titleContainer'},
R.div({},
R.label({}, Term 'Event Type')
R.div({},
EventTypesDropdown({
selectedEventType
eventTypes: @props.eventTypes
onSelect: @_updateTypeId
typeId
})
)
)
)
)
R.div({className: 'form-group'},
R.label({}, "Description")
ExpandingTextArea({
ref: 'description'
value: progEvent.get('description')
height: 100
onChange: @_updateDescription
placeholder: if not @props.eventTypes.isEmpty() then "Describe details (optional)" else "Describe details"
})
)
R.div({className: 'globalEventContainer'},
WithTooltip({
title: "#{Term 'Client'} must be assigned to 1 or more #{Term 'programs'}"
showTooltip: @props.clientPrograms.isEmpty()
placement: 'left'
},
R.div({
className: [
'checkbox'
'disabled' if @props.clientPrograms.isEmpty()
].join ' '
},
R.label({},
R.input({
disabled: @props.clientPrograms.isEmpty()
type: 'checkbox'
onClick: @_toggleIsGlobalEvent
checked: @state.isGlobalEvent
})
"Make this a #{Term 'global event'}"
(unless @props.clientPrograms.isEmpty()
WithTooltip({
title: "A copy of this #{Term 'event'}
will visible to all #{Term 'client files'}"
},
FaIcon('question-circle')
)
)
)
)
)
)
TimeSpanSelection({
startTimestamp: progEvent.get('startTimestamp')
endTimestamp: progEvent.get('endTimestamp')
updateTimestamps: @_updateTimestamps
widgetPositioning: {
horizontal: 'right'
vertical: 'top'
}
})
R.div({className: 'btn-toolbar'},
# TODO: Refactor to something more generic
(if @state.isGlobalEvent
OpenDialogLink({
dialog: AmendGlobalEventDialog
progEvent
clientFileId: @props.clientFileId
clientPrograms: @props.clientPrograms
onSuccess: @_saveProgEvent
},
R.button({
className: 'btn btn-success btn-block'
type: 'submit'
disabled: not formIsValid or not hasChanges
},
"Save "
FaIcon('check')
)
)
else
R.button({
className: 'btn btn-success btn-block'
type: 'submit'
onClick: @_submit
disabled: not formIsValid or not hasChanges
},
"Save "
FaIcon('check')
)
)
)
)
R.div({className: "details #{showWhen not @props.isBeingEdited}"},
R.div({className: 'title'}, @props.progEvent.get('title') or selectedEventTypeName)
R.div({className: 'description'}, @props.progEvent.get('description'))
R.div({className: 'timeSpan'},
renderTimeSpan(
@props.progEvent.get('startTimestamp'), @props.progEvent.get('endTimestamp')
)
)
)
)
_updateTitle: (event) ->
progEvent = @state.progEvent.set 'title', event.target.value
@setState {progEvent}
_updateDescription: (event) ->
progEvent = @state.progEvent.set 'description', event.target.value
@setState {progEvent}
_updateTypeId: (typeId) ->
progEvent = @state.progEvent
.set 'typeId', typeId
.set 'title', '' # EventType takes place of 'title'
@setState {progEvent}
_updateTimestamps: ({startTimestamp, endTimestamp}) ->
progEvent = @state.progEvent
if startTimestamp?
progEvent = progEvent.set 'startTimestamp', startTimestamp
if endTimestamp?
progEvent = progEvent.set 'endTimestamp', endTimestamp
@setState {progEvent}
_formIsValid: ->
description = @state.progEvent.get('description')
hasDescription = if description then description.trim() else description
hasEventTypeId = @state.progEvent.get('typeId')
# Needs to have a description or eventType
return !!(hasDescription or hasEventTypeId)
_hasChanges: ->
if not @initialState then return false # Make sure initialState is mounted
return !!@state.isGlobalEvent or not Imm.is @state.progEvent, @initialState.progEvent
_toggleIsGlobalEvent: ->
@setState {isGlobalEvent: not @state.isGlobalEvent}
_closeForm: (hasChanges, event) ->
event.preventDefault()
if hasChanges
Bootbox.confirm "Cancel #{Term 'event'} editing?", (ok) =>
if ok
@_resetProgEvent()
else
@_resetProgEvent()
_resetProgEvent: ->
@setState {progEvent: @props.progEvent}, =>
@props.cancel()
_submit: (event) ->
event.preventDefault()
@_saveProgEvent @state.progEvent
_saveProgEvent: (progEvent) ->
# Axe the title if an eventType is being used instead (#871)
# Otherwise, make sure typeId isn't null
if !!progEvent.get('typeId')
progEvent = progEvent.set 'title', ''
else
progEvent = progEvent.set 'typeId', ''
@props.saveProgEvent progEvent
AmendGlobalEventDialog = React.createFactory React.createClass
displayName: 'AmendGlobalEventDialog'
mixins: [React.addons.PureRenderMixin]
getInitialState: ->
# Use client's program if has only 1
# Otherwise use the program that matches userProgramId
# Else, user must select program from list
userProgramId = global.ActiveSession.programId
programId = ''
@programSelectionRequired = false
clientHasPrograms = not @props.clientPrograms.isEmpty()
if clientHasPrograms
if @props.clientPrograms.size is 1
programId = @props.clientPrograms.first().get('id')
else
clientIsInUserProgram = @props.clientPrograms.some (p) -> p.get('id') is userProgramId
if clientIsInUserProgram
programId = userProgramId
else
@programSelectionRequired = true
return {
title: @props.progEvent.get('title') or ''
description: @props.progEvent.get('description') or ''
typeId: @props.progEvent.get('typeId') or ''
programId
}
propTypes: {
progEvent: React.PropTypes.instanceOf(Imm.Map).isRequired
clientFileId: React.PropTypes.string.isRequired
clientPrograms: React.PropTypes.instanceOf(Imm.List).isRequired
}
render: ->
flaggedNames = @_generateFlaggedNames()
selectedProgram = @props.clientPrograms.find (p) => p.get('id') is @state.programId
return Dialog({
ref: 'dialog'
title: "Amend #{Term 'Global Event'}"
onClose: @props.onClose
},
R.div({className: 'amendGlobalEventDialog'},
R.p({},
"Please remove any sensitive and/or #{Term 'client'}-specific information
to be saved in the #{Term 'global event'}."
)
R.p({},
"This information will appear for all #{Term 'client files'}"
(if not @programSelectionRequired and selectedProgram?
R.span({},
" in: "
ColorKeyBubble({
colorKeyHex: selectedProgram.get('colorKeyHex')
})
' '
R.strong({}, selectedProgram.get('name'))
)
else
"in the program you specify."
)
)
(if flaggedNames.length > 0
R.div({className: 'flaggedNames'},
FaIcon('flag')
"Flagged: "
flaggedNames.join ', '
)
)
R.div({className: 'form-group'},
R.label({}, "Description")
ExpandingTextArea({
value: @state.description
onChange: @_updateDescription
placeholder: if @state.typeId then "(optional)" else ''
})
)
(if @programSelectionRequired and @props.clientPrograms.size > 1
R.div({className: 'form-group'},
R.hr({})
R.label({}, "Select a program for this #{Term 'global event'}")
ProgramsDropdown({
selectedProgramId: @state.programId
programs: @props.clientPrograms
onSelect: @_updateProgram
excludeNone: true
})
R.hr({})
)
)
R.div({className: 'btn-toolbar pull-right'},
R.button({
className: 'btn btn-default'
onClick: @props.onCancel
},
"Cancel"
)
R.button({
className: 'btn btn-success'
onClick: @_submit
disabled: @_formIsInvalid()
},
"Save #{Term 'Global Event'} "
FaIcon('check')
)
)
)
)
_updateDescription: (event) ->
description = event.target.value
@setState {description}
_updateProgram: (programId) ->
@setState {programId}
_formIsInvalid: ->
return not (@state.description or @state.typeId) or (@programSelectionRequired and not @state.programId)
_generateFlaggedNames: ->
# TODO: Process the title as well?
people = nlp.text(@props.progEvent.get('description')).people()
names = []
for i of people
names.push(people[i].normal) unless people[i].pos.Pronoun
return names
_submit: (event) ->
event.preventDefault()
# Set up globalEvent object
globalEvent = @props.progEvent
.set 'title', @state.title
.set 'description', @state.description
.set 'clientFileId', @props.clientFileId
.set 'programId', @state.programId
# Attach globalEvent as a property of the progEvent,
# which will get extracted during final save process
progEvent = @props.progEvent.set 'globalEvent', globalEvent
@props.onSuccess(progEvent)
return EventTabView
module.exports = {load}
|
[
{
"context": "\n\n# 'Particle REST API Class'\n# by Marc Krenn, July 12th, 2016 | marc.krenn@gmail.com | @marc_k",
"end": 45,
"score": 0.9998914003372192,
"start": 35,
"tag": "NAME",
"value": "Marc Krenn"
},
{
"context": "REST API Class'\n# by Marc Krenn, July 12th, 2016 | marc.krenn... | modules/Particle.coffee | marckrenn/framer-marcsOtherModules | 9 |
# 'Particle REST API Class'
# by Marc Krenn, July 12th, 2016 | marc.krenn@gmail.com | @marc_krenn
# Particle Class -----------------------------------
class exports.Particle extends Framer.BaseClass
baseUrl = "https://api.particle.io/v1/devices"
# Read-only properties ------------------------
@.define "info",
default: null
get: -> @._info
@.define "connectionStatus",
default: "disconnected"
get: -> @._connectionStatus
@.define "isConnected",
default: false
get: -> @._isConnected
@.define "postInterval",
get: -> @._postInterval
set: (val) ->
@["postThrottled"] = _.throttle(postUnthrottled, val*1000)
# Constructor / Init ---------------------------
constructor: (@options={}) ->
@.deviceId = @.options.deviceId ?= ""
@.accessToken = @.options.accessToken ?= ""
#@.getInterval = @.options.getInterval ?= if Utils.isChrome() then 1.5 else if Utils.isMobile() then 5 else 0
@._postInterval = @.options.postInterval ?= 0
super @.options
# `Post´ method ---------------------------
postUnthrottled = (func, value, callback, deviceId, accessToken) =>
url = "#{baseUrl}/#{deviceId}/#{func}?access_token=#{accessToken}"
xhttp = new XMLHttpRequest
xhttp.onreadystatechange = =>
if xhttp.readyState is 4 and xhttp.status is 200 and callback isnt undefined
#callback(JSON.parse(xhttp.response)) # callback is optional
callback(JSON.parse(xhttp.response).return_value)
xhttp.open 'POST', url, true
xhttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded")
xhttp.send("value=#{value}")
post: (func, value, callback) =>
@.postThrottled(func, value, callback, deviceId = @.deviceId, accessToken = @.accessToken)
# Synonyms:
turn: (func, value, callback) ->
@.post(func, value, callback)
set: (func, value, callback) ->
@.post(func, value, callback)
write: (func, value, callback) ->
@.post(func, value, callback)
# `Get´ method ----------------------------
get: (variable, callback) ->
Utils.domLoadJSON "#{baseUrl}/#{@.deviceId}/#{variable}?access_token=#{@.accessToken}", (error, data) ->
callback(data.result) if data?
# Synonyms:
fetch: (variable, callback) ->
@.get(variable, callback)
query: (variable, callback) ->
@.get(variable, callback)
read: (variable, callback) ->
@.get(variable, callback)
# `Monitor´ method -------------------------
onChange: (func, callback) ->
if func is "connection"
saveConnected = undefined
do connectionUpdate = =>
Utils.domLoadJSON "#{baseUrl}/#{@.deviceId}/?access_token=#{@.accessToken}", (error, data) =>
if data?
if data.connected
@._isConnected = true
@._connectionStatus = "connected"
else
@._isConnected = false
@._connectionStatus = "disconnected"
# Add returned parameters as propterties to the object
@[property[0]] = property[1] for property in _.zip(Object.keys(data),_.map(data))
@._info = data
callback(data) if data.connected isnt saveConnected
saveConnected = data.connected
else
@._isConnected = false
@._connectionStatus = "noInternetConnection"
callback(false) #if saveConnected isnt false
saveConnected = false
#connectionUpdate()
Utils.interval 2, => connectionUpdate()
else
url = "https://api.particle.io/v1/devices/events?access_token=#{@.accessToken}"
source = new EventSource(url)
#console.log "Firebase: Listening to changes made to '#{path}' \n URL: '#{url}'" if @debug
source.addEventListener func, (ev) ->
callback(JSON.parse(ev.data).data)
| 39373 |
# 'Particle REST API Class'
# by <NAME>, July 12th, 2016 | <EMAIL> | @marc_krenn
# Particle Class -----------------------------------
class exports.Particle extends Framer.BaseClass
baseUrl = "https://api.particle.io/v1/devices"
# Read-only properties ------------------------
@.define "info",
default: null
get: -> @._info
@.define "connectionStatus",
default: "disconnected"
get: -> @._connectionStatus
@.define "isConnected",
default: false
get: -> @._isConnected
@.define "postInterval",
get: -> @._postInterval
set: (val) ->
@["postThrottled"] = _.throttle(postUnthrottled, val*1000)
# Constructor / Init ---------------------------
constructor: (@options={}) ->
@.deviceId = @.options.deviceId ?= ""
@.accessToken = @.options.accessToken ?= ""
#@.getInterval = @.options.getInterval ?= if Utils.isChrome() then 1.5 else if Utils.isMobile() then 5 else 0
@._postInterval = @.options.postInterval ?= 0
super @.options
# `Post´ method ---------------------------
postUnthrottled = (func, value, callback, deviceId, accessToken) =>
url = "#{baseUrl}/#{deviceId}/#{func}?access_token=#{accessToken}"
xhttp = new XMLHttpRequest
xhttp.onreadystatechange = =>
if xhttp.readyState is 4 and xhttp.status is 200 and callback isnt undefined
#callback(JSON.parse(xhttp.response)) # callback is optional
callback(JSON.parse(xhttp.response).return_value)
xhttp.open 'POST', url, true
xhttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded")
xhttp.send("value=#{value}")
post: (func, value, callback) =>
@.postThrottled(func, value, callback, deviceId = @.deviceId, accessToken = @.accessToken)
# Synonyms:
turn: (func, value, callback) ->
@.post(func, value, callback)
set: (func, value, callback) ->
@.post(func, value, callback)
write: (func, value, callback) ->
@.post(func, value, callback)
# `Get´ method ----------------------------
get: (variable, callback) ->
Utils.domLoadJSON "#{baseUrl}/#{@.deviceId}/#{variable}?access_token=#{@.accessToken}", (error, data) ->
callback(data.result) if data?
# Synonyms:
fetch: (variable, callback) ->
@.get(variable, callback)
query: (variable, callback) ->
@.get(variable, callback)
read: (variable, callback) ->
@.get(variable, callback)
# `Monitor´ method -------------------------
onChange: (func, callback) ->
if func is "connection"
saveConnected = undefined
do connectionUpdate = =>
Utils.domLoadJSON "#{baseUrl}/#{@.deviceId}/?access_token=#{@.accessToken}", (error, data) =>
if data?
if data.connected
@._isConnected = true
@._connectionStatus = "connected"
else
@._isConnected = false
@._connectionStatus = "disconnected"
# Add returned parameters as propterties to the object
@[property[0]] = property[1] for property in _.zip(Object.keys(data),_.map(data))
@._info = data
callback(data) if data.connected isnt saveConnected
saveConnected = data.connected
else
@._isConnected = false
@._connectionStatus = "noInternetConnection"
callback(false) #if saveConnected isnt false
saveConnected = false
#connectionUpdate()
Utils.interval 2, => connectionUpdate()
else
url = "https://api.particle.io/v1/devices/events?access_token=#{@.accessToken}"
source = new EventSource(url)
#console.log "Firebase: Listening to changes made to '#{path}' \n URL: '#{url}'" if @debug
source.addEventListener func, (ev) ->
callback(JSON.parse(ev.data).data)
| true |
# 'Particle REST API Class'
# by PI:NAME:<NAME>END_PI, July 12th, 2016 | PI:EMAIL:<EMAIL>END_PI | @marc_krenn
# Particle Class -----------------------------------
class exports.Particle extends Framer.BaseClass
baseUrl = "https://api.particle.io/v1/devices"
# Read-only properties ------------------------
@.define "info",
default: null
get: -> @._info
@.define "connectionStatus",
default: "disconnected"
get: -> @._connectionStatus
@.define "isConnected",
default: false
get: -> @._isConnected
@.define "postInterval",
get: -> @._postInterval
set: (val) ->
@["postThrottled"] = _.throttle(postUnthrottled, val*1000)
# Constructor / Init ---------------------------
constructor: (@options={}) ->
@.deviceId = @.options.deviceId ?= ""
@.accessToken = @.options.accessToken ?= ""
#@.getInterval = @.options.getInterval ?= if Utils.isChrome() then 1.5 else if Utils.isMobile() then 5 else 0
@._postInterval = @.options.postInterval ?= 0
super @.options
# `Post´ method ---------------------------
postUnthrottled = (func, value, callback, deviceId, accessToken) =>
url = "#{baseUrl}/#{deviceId}/#{func}?access_token=#{accessToken}"
xhttp = new XMLHttpRequest
xhttp.onreadystatechange = =>
if xhttp.readyState is 4 and xhttp.status is 200 and callback isnt undefined
#callback(JSON.parse(xhttp.response)) # callback is optional
callback(JSON.parse(xhttp.response).return_value)
xhttp.open 'POST', url, true
xhttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded")
xhttp.send("value=#{value}")
post: (func, value, callback) =>
@.postThrottled(func, value, callback, deviceId = @.deviceId, accessToken = @.accessToken)
# Synonyms:
turn: (func, value, callback) ->
@.post(func, value, callback)
set: (func, value, callback) ->
@.post(func, value, callback)
write: (func, value, callback) ->
@.post(func, value, callback)
# `Get´ method ----------------------------
get: (variable, callback) ->
Utils.domLoadJSON "#{baseUrl}/#{@.deviceId}/#{variable}?access_token=#{@.accessToken}", (error, data) ->
callback(data.result) if data?
# Synonyms:
fetch: (variable, callback) ->
@.get(variable, callback)
query: (variable, callback) ->
@.get(variable, callback)
read: (variable, callback) ->
@.get(variable, callback)
# `Monitor´ method -------------------------
onChange: (func, callback) ->
if func is "connection"
saveConnected = undefined
do connectionUpdate = =>
Utils.domLoadJSON "#{baseUrl}/#{@.deviceId}/?access_token=#{@.accessToken}", (error, data) =>
if data?
if data.connected
@._isConnected = true
@._connectionStatus = "connected"
else
@._isConnected = false
@._connectionStatus = "disconnected"
# Add returned parameters as propterties to the object
@[property[0]] = property[1] for property in _.zip(Object.keys(data),_.map(data))
@._info = data
callback(data) if data.connected isnt saveConnected
saveConnected = data.connected
else
@._isConnected = false
@._connectionStatus = "noInternetConnection"
callback(false) #if saveConnected isnt false
saveConnected = false
#connectionUpdate()
Utils.interval 2, => connectionUpdate()
else
url = "https://api.particle.io/v1/devices/events?access_token=#{@.accessToken}"
source = new EventSource(url)
#console.log "Firebase: Listening to changes made to '#{path}' \n URL: '#{url}'" if @debug
source.addEventListener func, (ev) ->
callback(JSON.parse(ev.data).data)
|
[
{
"context": "dule.exports = (I={}) ->\n defaults I,\n name: \"DUDER\"\n x: 8\n y: 5\n\n img = new Image\n img.src =",
"end": 57,
"score": 0.9920342564582825,
"start": 52,
"tag": "NAME",
"value": "DUDER"
}
] | player.coffee | STRd6/ld33 | 2 | module.exports = (I={}) ->
defaults I,
name: "DUDER"
x: 8
y: 5
img = new Image
img.src = I.url
I: I
draw: (canvas) ->
canvas.drawImage img, I.x * 32, I.y * 32
update: (dt) ->
move: ({x, y}, map) ->
I.x += x
I.y += y
revert = true unless map.passable(I)
if revert
I.x -= x
I.y -= y
map.triggerItems(I)
interact: ->
if I.conversation
showConversation I.conversation.map (data) ->
extend {}, data
| 76104 | module.exports = (I={}) ->
defaults I,
name: "<NAME>"
x: 8
y: 5
img = new Image
img.src = I.url
I: I
draw: (canvas) ->
canvas.drawImage img, I.x * 32, I.y * 32
update: (dt) ->
move: ({x, y}, map) ->
I.x += x
I.y += y
revert = true unless map.passable(I)
if revert
I.x -= x
I.y -= y
map.triggerItems(I)
interact: ->
if I.conversation
showConversation I.conversation.map (data) ->
extend {}, data
| true | module.exports = (I={}) ->
defaults I,
name: "PI:NAME:<NAME>END_PI"
x: 8
y: 5
img = new Image
img.src = I.url
I: I
draw: (canvas) ->
canvas.drawImage img, I.x * 32, I.y * 32
update: (dt) ->
move: ({x, y}, map) ->
I.x += x
I.y += y
revert = true unless map.passable(I)
if revert
I.x -= x
I.y -= y
map.triggerItems(I)
interact: ->
if I.conversation
showConversation I.conversation.map (data) ->
extend {}, data
|
[
{
"context": "interpreteAggResult\n buckets:[\n key: 'EIN'\n doc_count: 40\n ,\n key: 'KOOR",
"end": 745,
"score": 0.6496264934539795,
"start": 742,
"tag": "KEY",
"value": "EIN"
},
{
"context": "'EIN'\n doc_count: 40\n ,\n key: 'KOORD... | spec/config-types/simple-spec.coffee | lxfrdl/irma | 1 | describe "simple facette", ->
Simple =require '../../src/config-types/simple'
f=undefined
beforeEach ->
f = new Simple field:"blah"
it "can construct a filter expression", ->
expect(f.filter("nu,na")).to.eql
terms:
blah:['nu','na']
it "provides an appropriate aggregation expression", ->
expect(f.aggregation()).to.eql
terms:
field: 'blah'
size: 0
it "can limit the number of buckets used for aggregation", ->
f= new Simple
field:"blah"
buckets: 42
expect(f.aggregation()).to.eql
terms:
field: 'blah'
size: 42
it "knows how to interprete aggregation results", ->
interpretation = f.interpreteAggResult
buckets:[
key: 'EIN'
doc_count: 40
,
key: 'KOORD'
doc_count: 5
]
expect(interpretation).to.eql
EIN:40
KOORD:5
| 143583 | describe "simple facette", ->
Simple =require '../../src/config-types/simple'
f=undefined
beforeEach ->
f = new Simple field:"blah"
it "can construct a filter expression", ->
expect(f.filter("nu,na")).to.eql
terms:
blah:['nu','na']
it "provides an appropriate aggregation expression", ->
expect(f.aggregation()).to.eql
terms:
field: 'blah'
size: 0
it "can limit the number of buckets used for aggregation", ->
f= new Simple
field:"blah"
buckets: 42
expect(f.aggregation()).to.eql
terms:
field: 'blah'
size: 42
it "knows how to interprete aggregation results", ->
interpretation = f.interpreteAggResult
buckets:[
key: '<KEY>'
doc_count: 40
,
key: '<KEY>'
doc_count: 5
]
expect(interpretation).to.eql
EIN:40
KOORD:5
| true | describe "simple facette", ->
Simple =require '../../src/config-types/simple'
f=undefined
beforeEach ->
f = new Simple field:"blah"
it "can construct a filter expression", ->
expect(f.filter("nu,na")).to.eql
terms:
blah:['nu','na']
it "provides an appropriate aggregation expression", ->
expect(f.aggregation()).to.eql
terms:
field: 'blah'
size: 0
it "can limit the number of buckets used for aggregation", ->
f= new Simple
field:"blah"
buckets: 42
expect(f.aggregation()).to.eql
terms:
field: 'blah'
size: 42
it "knows how to interprete aggregation results", ->
interpretation = f.interpreteAggResult
buckets:[
key: 'PI:KEY:<KEY>END_PI'
doc_count: 40
,
key: 'PI:KEY:<KEY>END_PI'
doc_count: 5
]
expect(interpretation).to.eql
EIN:40
KOORD:5
|
[
{
"context": "ata: false\n stripTrailingSlash: false\n password: null\n username: null\n verbs:\n 'create' : 'POST'\n ",
"end": 1113,
"score": 0.7740029096603394,
"start": 1109,
"tag": "PASSWORD",
"value": "null"
},
{
"context": "key = \"\"\n $.each obj, (k,v) =>\n key... | public/js/jquery.rest/src/jquery.rest.coffee | elecom/skutools | 440 | 'use strict'
#helpers
error = (msg) ->
throw new Error "ERROR: jquery.rest: #{msg}"
s = (n) -> t = ""; t += " " while n-- >0; t
encode64 = (s) ->
error "You need a polyfill for 'btoa' to use basic auth." unless window.btoa
window.btoa s
stringify = (obj) ->
error "You need a polyfill for 'JSON' to use stringify." unless window.JSON
window.JSON.stringify obj
inheritExtend = (a, b) ->
F = () ->
F.prototype = a
$.extend true, new F(), b
validateOpts = (options) ->
return false unless options and $.isPlainObject options
$.each options, (name) ->
error "Unknown option: '#{name}'" if defaultOpts[name] is `undefined`
null
validateStr = (name, str) ->
error "'#{name}' must be a string" unless 'string' is $.type str
deleteWarning = ->
alert '"delete()" has been deprecated. Please use "destroy()" or "del()" instead.'
#defaults
defaultOpts =
url: ''
cache: 0
request: (resource, options) -> $.ajax(options)
isSingle: false
autoClearCache: true
cachableMethods: ['GET']
methodOverride: false
stringifyData: false
stripTrailingSlash: false
password: null
username: null
verbs:
'create' : 'POST'
'read' : 'GET'
'update' : 'PUT'
'destroy': 'DELETE'
ajax:
dataType: 'json'
#ajax cache with timeouts
class Cache
constructor: (@parent) ->
@c = {}
valid: (date) ->
diff = new Date().getTime() - date.getTime()
return diff <= @parent.opts.cache*1000
key: (obj) ->
key = ""
$.each obj, (k,v) =>
key += k + "=" + (if $.isPlainObject(v) then "{"+@key(v)+"}" else v) + "|"
key
get: (key) ->
result = @c[key]
unless result
return
if @valid result.created
return result.data
return
put: (key, data) ->
@c[key] =
created: new Date()
data: data
clear: (regexp) ->
if regexp
$.each @c, (k) =>
delete @c[k] if k.match regexp
else
@c = {}
#represents one verb Create,Read,...
class Verb
constructor: (@name, @method, options = {}, @parent) ->
validateStr 'name', @name
validateStr 'method', @method
validateOpts options
error "Cannot add Verb: '#{name}' already exists" if @parent[@name]
@method = method.toUpperCase()
#default url to blank
options.url = '' unless options.url
@opts = inheritExtend @parent.opts, options
@root = @parent.root
@custom = !defaultOpts.verbs[@name]
#bind call to this instance and save reference
@call = $.proxy @call, @
@call.instance = @
call: ->
#will execute in the context of the parent resource
{url,data} = @parent.extractUrlData @method, arguments
url += @opts.url or @name if @custom
@parent.ajax.call @, @method, url, data
show: (d) ->
console.log s(d) + @name + ": " + @method
#resource class - represents one set of crud ops
class Resource
constructor: (nameOrUrl, options = {}, parent) ->
validateOpts options
if parent and parent instanceof Resource
@name = nameOrUrl
validateStr 'name', @name
@constructChild parent, options
else
@url = nameOrUrl or ''
validateStr 'url', @url
@constructRoot options
constructRoot: (options) ->
@opts = inheritExtend defaultOpts, options
@root = @
@expectedIds = 0
@urlNoId = @url
@cache = new Cache @
@parent = null
@name = @opts.name or 'ROOT'
constructChild: (@parent, options) ->
validateStr 'name', @name
@error "Invalid parent" unless @parent instanceof Resource
@error "'#{name}' already exists" if @parent[@name]
options.url = '' unless options.url
@opts = inheritExtend @parent.opts, options
#dont use parent `isSingle`
@opts.isSingle = 'isSingle' of options and options.isSingle
@root = @parent.root
@urlNoId = @parent.url + "#{@opts.url or @name}/"
@url = @urlNoId
@expectedIds = @parent.expectedIds
unless @opts.isSingle
@expectedIds += 1
@url += ":ID_#{@expectedIds}/"
#add all verbs defined for this resource
$.each @opts.verbs, $.proxy @addVerb, @
if @destroy
@del = @destroy
@delete = deleteWarning
error: (msg) ->
error "Cannot add Resource: " + msg
add: (name, options) ->
@[name] = new Resource name, options, @
addVerb: (name, method, options) ->
@[name] = new Verb(name, method, options, @).call
show: (d=0)->
error "Plugin Bug! Recursion Fail" if d > 25
console.log(s(d)+@name+": " + @url) if @name
$.each @, (name, fn) ->
fn.instance.show(d+1) if $.type(fn) is 'function' and fn.instance instanceof Verb and name isnt 'del'
$.each @, (name,res) ->
if name isnt "parent" and name isnt "root" and res instanceof Resource
res.show(d+1)
null
toString: ->
@name
extractUrlData: (name, args) ->
ids = []
data = null
params = null
for arg in args
t = $.type(arg)
if t is 'string' or t is 'number'
ids.push(arg)
else if t is 'object' and data is null
data = arg
else if t is 'object' and params is null
params = arg
else
error "Invalid argument: #{arg} (#{t})." +
" Must be strings or ints (IDs) followed by one optional object and one optional query params object."
providedIds = ids.length
canUrl = name isnt 'create'
canUrlNoId = name isnt 'update' and name isnt 'delete'
url = null
url = @url if canUrl and providedIds is @expectedIds
url = @urlNoId if canUrlNoId and providedIds is @expectedIds - 1
if url is null
msg = (@expectedIds - 1) if canUrlNoId
msg = ((if msg then msg+' or ' else '') + @expectedIds) if canUrl
error "Invalid number of ID arguments, required #{msg}, provided #{providedIds}"
for id, i in ids
url = url.replace new RegExp("\/:ID_#{i+1}\/"), "/#{id}/"
url += "?#{$.param params}" if params
{url, data}
ajax: (method, url, data) ->
error "method missing" unless method
error "url missing" unless url
headers = {}
# console.log method, url, data
if @opts.username and @opts.password
encoded = encode64 @opts.username + ":" + @opts.password
headers.Authorization = "Basic #{encoded}"
if data and @opts.stringifyData and method not in ['GET', 'HEAD']
data = stringify data
headers['Content-Type'] = "application/json"
if @opts.methodOverride and method not in ['GET', 'HEAD', 'POST']
headers['X-HTTP-Method-Override'] = method
method = 'POST'
if @opts.stripTrailingSlash
url = url.replace /\/$/, ""
ajaxOpts = { url, type:method, headers }
ajaxOpts.data = data if data
#add this verb's/resource's defaults
ajaxOpts = $.extend true, {}, @opts.ajax, ajaxOpts
useCache = @opts.cache and $.inArray(method, @opts.cachableMethods) >= 0
if useCache
key = @root.cache.key ajaxOpts
req = @root.cache.get key
return req if req
#when method not in cachable methds, clear cache entries matching this url
if @opts.cache and @opts.autoClearCache and $.inArray(method, @opts.cachableMethods) is -1
escapedUrl = url.replace(/([.?*+^$[\]\\(){}|-])/g, "\\$1")
@root.cache.clear(new RegExp(escapedUrl))
req = @opts.request @parent, ajaxOpts
if useCache
req.done => @root.cache.put key, req
return req
# Public API
Resource.defaults = defaultOpts
$.RestClient = Resource
| 171423 | 'use strict'
#helpers
error = (msg) ->
throw new Error "ERROR: jquery.rest: #{msg}"
s = (n) -> t = ""; t += " " while n-- >0; t
encode64 = (s) ->
error "You need a polyfill for 'btoa' to use basic auth." unless window.btoa
window.btoa s
stringify = (obj) ->
error "You need a polyfill for 'JSON' to use stringify." unless window.JSON
window.JSON.stringify obj
inheritExtend = (a, b) ->
F = () ->
F.prototype = a
$.extend true, new F(), b
validateOpts = (options) ->
return false unless options and $.isPlainObject options
$.each options, (name) ->
error "Unknown option: '#{name}'" if defaultOpts[name] is `undefined`
null
validateStr = (name, str) ->
error "'#{name}' must be a string" unless 'string' is $.type str
deleteWarning = ->
alert '"delete()" has been deprecated. Please use "destroy()" or "del()" instead.'
#defaults
defaultOpts =
url: ''
cache: 0
request: (resource, options) -> $.ajax(options)
isSingle: false
autoClearCache: true
cachableMethods: ['GET']
methodOverride: false
stringifyData: false
stripTrailingSlash: false
password: <PASSWORD>
username: null
verbs:
'create' : 'POST'
'read' : 'GET'
'update' : 'PUT'
'destroy': 'DELETE'
ajax:
dataType: 'json'
#ajax cache with timeouts
class Cache
constructor: (@parent) ->
@c = {}
valid: (date) ->
diff = new Date().getTime() - date.getTime()
return diff <= @parent.opts.cache*1000
key: (obj) ->
key = ""
$.each obj, (k,v) =>
key += k + <KEY> + (if $.isPlainObject(v) then "{"+@key(v)+"}" else v) + "<KEY>|"
key
get: (key) ->
result = @c[key]
unless result
return
if @valid result.created
return result.data
return
put: (key, data) ->
@c[key] =
created: new Date()
data: data
clear: (regexp) ->
if regexp
$.each @c, (k) =>
delete @c[k] if k.match regexp
else
@c = {}
#represents one verb Create,Read,...
class Verb
constructor: (@name, @method, options = {}, @parent) ->
validateStr 'name', @name
validateStr 'method', @method
validateOpts options
error "Cannot add Verb: '#{name}' already exists" if @parent[@name]
@method = method.toUpperCase()
#default url to blank
options.url = '' unless options.url
@opts = inheritExtend @parent.opts, options
@root = @parent.root
@custom = !defaultOpts.verbs[@name]
#bind call to this instance and save reference
@call = $.proxy @call, @
@call.instance = @
call: ->
#will execute in the context of the parent resource
{url,data} = @parent.extractUrlData @method, arguments
url += @opts.url or @name if @custom
@parent.ajax.call @, @method, url, data
show: (d) ->
console.log s(d) + @name + ": " + @method
#resource class - represents one set of crud ops
class Resource
constructor: (nameOrUrl, options = {}, parent) ->
validateOpts options
if parent and parent instanceof Resource
@name = nameOrUrl
validateStr 'name', @name
@constructChild parent, options
else
@url = nameOrUrl or ''
validateStr 'url', @url
@constructRoot options
constructRoot: (options) ->
@opts = inheritExtend defaultOpts, options
@root = @
@expectedIds = 0
@urlNoId = @url
@cache = new Cache @
@parent = null
@name = @opts.name or 'ROOT'
constructChild: (@parent, options) ->
validateStr 'name', @name
@error "Invalid parent" unless @parent instanceof Resource
@error "'#{name}' already exists" if @parent[@name]
options.url = '' unless options.url
@opts = inheritExtend @parent.opts, options
#dont use parent `isSingle`
@opts.isSingle = 'isSingle' of options and options.isSingle
@root = @parent.root
@urlNoId = @parent.url + "#{@opts.url or @name}/"
@url = @urlNoId
@expectedIds = @parent.expectedIds
unless @opts.isSingle
@expectedIds += 1
@url += ":ID_#{@expectedIds}/"
#add all verbs defined for this resource
$.each @opts.verbs, $.proxy @addVerb, @
if @destroy
@del = @destroy
@delete = deleteWarning
error: (msg) ->
error "Cannot add Resource: " + msg
add: (name, options) ->
@[name] = new Resource name, options, @
addVerb: (name, method, options) ->
@[name] = new Verb(name, method, options, @).call
show: (d=0)->
error "Plugin Bug! Recursion Fail" if d > 25
console.log(s(d)+@name+": " + @url) if @name
$.each @, (name, fn) ->
fn.instance.show(d+1) if $.type(fn) is 'function' and fn.instance instanceof Verb and name isnt 'del'
$.each @, (name,res) ->
if name isnt "parent" and name isnt "root" and res instanceof Resource
res.show(d+1)
null
toString: ->
@name
extractUrlData: (name, args) ->
ids = []
data = null
params = null
for arg in args
t = $.type(arg)
if t is 'string' or t is 'number'
ids.push(arg)
else if t is 'object' and data is null
data = arg
else if t is 'object' and params is null
params = arg
else
error "Invalid argument: #{arg} (#{t})." +
" Must be strings or ints (IDs) followed by one optional object and one optional query params object."
providedIds = ids.length
canUrl = name isnt 'create'
canUrlNoId = name isnt 'update' and name isnt 'delete'
url = null
url = @url if canUrl and providedIds is @expectedIds
url = @urlNoId if canUrlNoId and providedIds is @expectedIds - 1
if url is null
msg = (@expectedIds - 1) if canUrlNoId
msg = ((if msg then msg+' or ' else '') + @expectedIds) if canUrl
error "Invalid number of ID arguments, required #{msg}, provided #{providedIds}"
for id, i in ids
url = url.replace new RegExp("\/:ID_#{i+1}\/"), "/#{id}/"
url += "?#{$.param params}" if params
{url, data}
ajax: (method, url, data) ->
error "method missing" unless method
error "url missing" unless url
headers = {}
# console.log method, url, data
if @opts.username and @opts.password
encoded = encode64 @opts.username + ":" + @opts.password
headers.Authorization = "Basic #{encoded}"
if data and @opts.stringifyData and method not in ['GET', 'HEAD']
data = stringify data
headers['Content-Type'] = "application/json"
if @opts.methodOverride and method not in ['GET', 'HEAD', 'POST']
headers['X-HTTP-Method-Override'] = method
method = 'POST'
if @opts.stripTrailingSlash
url = url.replace /\/$/, ""
ajaxOpts = { url, type:method, headers }
ajaxOpts.data = data if data
#add this verb's/resource's defaults
ajaxOpts = $.extend true, {}, @opts.ajax, ajaxOpts
useCache = @opts.cache and $.inArray(method, @opts.cachableMethods) >= 0
if useCache
key = @root.cache.key ajaxOpts
req = @root.cache.get key
return req if req
#when method not in cachable methds, clear cache entries matching this url
if @opts.cache and @opts.autoClearCache and $.inArray(method, @opts.cachableMethods) is -1
escapedUrl = url.replace(/([.?*+^$[\]\\(){}|-])/g, "\\$1")
@root.cache.clear(new RegExp(escapedUrl))
req = @opts.request @parent, ajaxOpts
if useCache
req.done => @root.cache.put key, req
return req
# Public API
Resource.defaults = defaultOpts
$.RestClient = Resource
| true | 'use strict'
#helpers
error = (msg) ->
throw new Error "ERROR: jquery.rest: #{msg}"
s = (n) -> t = ""; t += " " while n-- >0; t
encode64 = (s) ->
error "You need a polyfill for 'btoa' to use basic auth." unless window.btoa
window.btoa s
stringify = (obj) ->
error "You need a polyfill for 'JSON' to use stringify." unless window.JSON
window.JSON.stringify obj
inheritExtend = (a, b) ->
F = () ->
F.prototype = a
$.extend true, new F(), b
validateOpts = (options) ->
return false unless options and $.isPlainObject options
$.each options, (name) ->
error "Unknown option: '#{name}'" if defaultOpts[name] is `undefined`
null
validateStr = (name, str) ->
error "'#{name}' must be a string" unless 'string' is $.type str
deleteWarning = ->
alert '"delete()" has been deprecated. Please use "destroy()" or "del()" instead.'
#defaults
defaultOpts =
url: ''
cache: 0
request: (resource, options) -> $.ajax(options)
isSingle: false
autoClearCache: true
cachableMethods: ['GET']
methodOverride: false
stringifyData: false
stripTrailingSlash: false
password: PI:PASSWORD:<PASSWORD>END_PI
username: null
verbs:
'create' : 'POST'
'read' : 'GET'
'update' : 'PUT'
'destroy': 'DELETE'
ajax:
dataType: 'json'
#ajax cache with timeouts
class Cache
constructor: (@parent) ->
@c = {}
valid: (date) ->
diff = new Date().getTime() - date.getTime()
return diff <= @parent.opts.cache*1000
key: (obj) ->
key = ""
$.each obj, (k,v) =>
key += k + PI:KEY:<KEY>END_PI + (if $.isPlainObject(v) then "{"+@key(v)+"}" else v) + "PI:KEY:<KEY>END_PI|"
key
get: (key) ->
result = @c[key]
unless result
return
if @valid result.created
return result.data
return
put: (key, data) ->
@c[key] =
created: new Date()
data: data
clear: (regexp) ->
if regexp
$.each @c, (k) =>
delete @c[k] if k.match regexp
else
@c = {}
#represents one verb Create,Read,...
class Verb
constructor: (@name, @method, options = {}, @parent) ->
validateStr 'name', @name
validateStr 'method', @method
validateOpts options
error "Cannot add Verb: '#{name}' already exists" if @parent[@name]
@method = method.toUpperCase()
#default url to blank
options.url = '' unless options.url
@opts = inheritExtend @parent.opts, options
@root = @parent.root
@custom = !defaultOpts.verbs[@name]
#bind call to this instance and save reference
@call = $.proxy @call, @
@call.instance = @
call: ->
#will execute in the context of the parent resource
{url,data} = @parent.extractUrlData @method, arguments
url += @opts.url or @name if @custom
@parent.ajax.call @, @method, url, data
show: (d) ->
console.log s(d) + @name + ": " + @method
#resource class - represents one set of crud ops
class Resource
constructor: (nameOrUrl, options = {}, parent) ->
validateOpts options
if parent and parent instanceof Resource
@name = nameOrUrl
validateStr 'name', @name
@constructChild parent, options
else
@url = nameOrUrl or ''
validateStr 'url', @url
@constructRoot options
constructRoot: (options) ->
@opts = inheritExtend defaultOpts, options
@root = @
@expectedIds = 0
@urlNoId = @url
@cache = new Cache @
@parent = null
@name = @opts.name or 'ROOT'
constructChild: (@parent, options) ->
validateStr 'name', @name
@error "Invalid parent" unless @parent instanceof Resource
@error "'#{name}' already exists" if @parent[@name]
options.url = '' unless options.url
@opts = inheritExtend @parent.opts, options
#dont use parent `isSingle`
@opts.isSingle = 'isSingle' of options and options.isSingle
@root = @parent.root
@urlNoId = @parent.url + "#{@opts.url or @name}/"
@url = @urlNoId
@expectedIds = @parent.expectedIds
unless @opts.isSingle
@expectedIds += 1
@url += ":ID_#{@expectedIds}/"
#add all verbs defined for this resource
$.each @opts.verbs, $.proxy @addVerb, @
if @destroy
@del = @destroy
@delete = deleteWarning
error: (msg) ->
error "Cannot add Resource: " + msg
add: (name, options) ->
@[name] = new Resource name, options, @
addVerb: (name, method, options) ->
@[name] = new Verb(name, method, options, @).call
show: (d=0)->
error "Plugin Bug! Recursion Fail" if d > 25
console.log(s(d)+@name+": " + @url) if @name
$.each @, (name, fn) ->
fn.instance.show(d+1) if $.type(fn) is 'function' and fn.instance instanceof Verb and name isnt 'del'
$.each @, (name,res) ->
if name isnt "parent" and name isnt "root" and res instanceof Resource
res.show(d+1)
null
toString: ->
@name
extractUrlData: (name, args) ->
ids = []
data = null
params = null
for arg in args
t = $.type(arg)
if t is 'string' or t is 'number'
ids.push(arg)
else if t is 'object' and data is null
data = arg
else if t is 'object' and params is null
params = arg
else
error "Invalid argument: #{arg} (#{t})." +
" Must be strings or ints (IDs) followed by one optional object and one optional query params object."
providedIds = ids.length
canUrl = name isnt 'create'
canUrlNoId = name isnt 'update' and name isnt 'delete'
url = null
url = @url if canUrl and providedIds is @expectedIds
url = @urlNoId if canUrlNoId and providedIds is @expectedIds - 1
if url is null
msg = (@expectedIds - 1) if canUrlNoId
msg = ((if msg then msg+' or ' else '') + @expectedIds) if canUrl
error "Invalid number of ID arguments, required #{msg}, provided #{providedIds}"
for id, i in ids
url = url.replace new RegExp("\/:ID_#{i+1}\/"), "/#{id}/"
url += "?#{$.param params}" if params
{url, data}
ajax: (method, url, data) ->
error "method missing" unless method
error "url missing" unless url
headers = {}
# console.log method, url, data
if @opts.username and @opts.password
encoded = encode64 @opts.username + ":" + @opts.password
headers.Authorization = "Basic #{encoded}"
if data and @opts.stringifyData and method not in ['GET', 'HEAD']
data = stringify data
headers['Content-Type'] = "application/json"
if @opts.methodOverride and method not in ['GET', 'HEAD', 'POST']
headers['X-HTTP-Method-Override'] = method
method = 'POST'
if @opts.stripTrailingSlash
url = url.replace /\/$/, ""
ajaxOpts = { url, type:method, headers }
ajaxOpts.data = data if data
#add this verb's/resource's defaults
ajaxOpts = $.extend true, {}, @opts.ajax, ajaxOpts
useCache = @opts.cache and $.inArray(method, @opts.cachableMethods) >= 0
if useCache
key = @root.cache.key ajaxOpts
req = @root.cache.get key
return req if req
#when method not in cachable methds, clear cache entries matching this url
if @opts.cache and @opts.autoClearCache and $.inArray(method, @opts.cachableMethods) is -1
escapedUrl = url.replace(/([.?*+^$[\]\\(){}|-])/g, "\\$1")
@root.cache.clear(new RegExp(escapedUrl))
req = @opts.request @parent, ajaxOpts
if useCache
req.done => @root.cache.put key, req
return req
# Public API
Resource.defaults = defaultOpts
$.RestClient = Resource
|
[
{
"context": "###\n File: drykup.coffee\n Author: Mark Hahn\n DryCup is a CoffeeScript html generator compati",
"end": 45,
"score": 0.9998826384544373,
"start": 36,
"tag": "NAME",
"value": "Mark Hahn"
},
{
"context": "ced via the MIT license.\n See https://github.com/mark-hahn/dr... | drykup.coffee | mark-hahn/drykup | 1 | ###
File: drykup.coffee
Author: Mark Hahn
DryCup is a CoffeeScript html generator compatible with CoffeeKup but without the magic.
DryKup is open-sourced via the MIT license.
See https://github.com/mark-hahn/drykup
###
# ------------------- Constants stolen from coffeeKup (Release 0.3.2) ---------------------
# ----------------------------- See KOFFEECUP-LICENSE file --------------------------------
# Values available to the `doctype` function inside a template.
# Ex.: `doctype 'strict'`
doctypes =
'default': '<!DOCTYPE html>'
'5': '<!DOCTYPE html>'
'xml': '<?xml version="1.0" encoding="utf-8" ?>'
'transitional': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">'
'strict': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
'frameset': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">'
'1.1': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">'
'basic': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">'
'mobile': '<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">'
'ce': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "ce-html-1.0-transitional.dtd">'
# Private HTML element reference.
# Please mind the gap (1 space at the beginning of each subsequent line).
elements =
# Valid HTML 5 elements requiring a closing tag.
# Note: the `var` element is out for obvious reasons, please use `tag 'var'`.
regular: 'a abbr address article aside audio b bdi bdo blockquote body button
canvas caption cite code colgroup datalist dd del details dfn div dl dt em
fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 head header hgroup
html i iframe ins kbd label legend li map mark menu meter nav noscript object
ol optgroup option output p pre progress q rp rt ruby s samp script section
select small span strong sub summary sup table tbody td textarea tfoot
th thead time title tr u ul video'
# Valid self-closing HTML 5 elements.
void: 'area base br col command embed hr img input keygen link meta param
source track wbr'
obsolete: 'applet acronym bgsound dir frameset noframes isindex listing
nextid noembed plaintext rb strike xmp big blink center font marquee multicol
nobr spacer tt'
obsolete_void: 'basefont frame'
# Create a unique list of element names merging the desired groups.
merge_elements = (args...) ->
result = []
for a in args
for element in elements[a].split ' '
result.push element unless element in result
result
# ------------------- constants for shorthand aliases -------------------------
attrAliases =
w:'width', h:'height', s:'src', src:'src', hr:'href', href:'href', c:'class', db:'data-bind'
i:'id', n:'name', v:'value', m:'method', t:'type'
cp:'cellpadding', b:'border', cs:'colspan', rs:'rowspan', chk:'checked', sel:'selected'
styleValueAliases =
'm:a':'margin:auto', 'ml:a':'margin-left:auto', 'mt:a':'margin-top:auto'
'mr:a':'margin-right:auto', 'mb:a':'margin-bottom:auto'
'c:b':'clear:both', 'f:l':'float:left', 'f:r':'float:right', 'f:n':'float:none'
'fw:b':'font-weight:bold', 'fw:n':'font-weight:normal'
'fs:i':'font-style:italic', 'fs:n':'font-style:normal'
'p:a':'position:absolute', 'p:r':'position:relative', 'p:f':'position:fixed'
'd:n':'display:none', 'd:b':'display:block', 'd:f':'display:fixed'
'ta:l':'text-align:left', 'ta:c':'text-align:center', 'ta:r':'text-align:right'
'o:a':'overflow:auto', 'o:h':'overflow:hidden'
'c:a':'cursor:auto', 'c:p':'cursor:pointer'
'tt:u':'text-transform:uppercase', 'tt:c':'text-transform:capitalize'
'tt:l':'text-transform:lowercase'
'td:n':'text-decoration:none', 'td:u':'text-decoration:underline'
'td:lt':'text-decoration: line-through', 'lh:n':'line-height:normal'
'b:n': 'border:none'
'b:1pxsb' :'border:1px solid black'
'bl:1pxsb':'border-left:1px solid black', 'bt:1pxsb':'border-top:1px solid black'
'br:1pxsb':'border-right:1px solid black', 'bb:1pxsb':'border-bottom:1px solid black'
'b:1pxsg' :'border:1px solid gray'
'bl:1pxsg':'border-left:1px solid gray', 'bt:1pxsg':'border-top:1px solid gray'
'br:1pxsg':'border-right:1px solid gray', 'bb:1pxsg':'border-bottom:1px solid gray'
'zi:a':'z-index:auto', 'zi:i':'z-index:inherit'
'v:v':'visibility:visible', 'v:h':'visibility:hidden'
styleAliases =
l:'left', t:'top', r:'right', w:'width', h:'height', c:'color', bc:'background-color', bot:'bottom'
fs:'font-size', lh:'line-height', zi:'z-index'
b:'border', bl:'border-left', bt:'border-top', br:'border-right', bb:'border-bottom'
m:'margin', ml:'margin-left', mt:'margin-top', mr:'margin-right', mb:'margin-bottom'
p:'padding', pl:'padding-left', pt:'padding-top', pr:'padding-right', pb:'padding-bottom'
v:'visibility', ff:'font-family'
# ---------------------------- alias shorthand expansion code ---------------------------------
whiteSpace = (str) ->
str.replace(/([^~])\+/g, '$1 ').replace(/~\+/g, '+')
expandAttrs = (v = '', styleOnly = false) ->
attrs = {}; styles = {}
v = v.replace /\s+/g, '~`~'
parts = v.split '~`~'
for part in parts
if not (thirds = ///^ ([^=:]*) (=|:) (.*) $///.exec part) then continue
[d, name, sep, value] = thirds
if not styleOnly and sep == '='
if name == 'in'
attrs.id = value
attrs.name = value
else
if (aa = attrAliases[name]) then name = aa
attrs[name] = whiteSpace value
if sep == ':'
if (sva = styleValueAliases[part]) then [name, value] = sva.split ':'
else
if (sa = styleAliases[name]) then name = sa
if name not in ['z-index','opacity'] and
/^(-|\+)?(\d|\.)+$/.test value then value = value + 'px'
else value = whiteSpace value
styles[name] = value
style = ("#{k}:#{v}" for k, v of styles).join '; '
if styleOnly then return style
if style then attrs['style'] = style
attrs
expandStyle = (v) ->
s = ''
while parts = v.match(///^ ([^{]*) \{ ([^}]*) \} ([\s\S]*) $///)
[x, pfx, style, v] = parts
s += pfx + '{' + expandAttrs(style, true) + '}'
s + v
extendX = (newSpecStrs = {}, oldSpecStrs = {}) ->
for key, newSpecStr of newSpecStrs
if not (oldSpecStr = oldSpecStrs[key]) then oldSpecStrs[key] = newSpecStr; continue
specsObj = {}
addSpecStr = (specStr) ->
for spec in (specStr.replace(/\s+/g, '~`~').split '~`~')
if not (specParts = ///^ ([^=:]*(=|:)) (.*) $///.exec spec) then continue
specsObj[specParts[1]] = specParts[3]
null
addSpecStr oldSpecStr
addSpecStr newSpecStr
oldSpecStrs[key] = (nameSep + val for nameSep, val of specsObj).join ' '
oldSpecStrs
# -------------------------------- main drykup code ------------------------------
class Drykup
constructor: (opts = {}) ->
@indent = opts.indent ? ''
@htmlOut = opts.htmlOut ? ''
@expand = opts.expand ? false
resetHtml: (html = '') -> @htmlOut = html
defineGlobalTagFuncs: -> if window then for name, func of @ then window[name] = func
addText: (s) -> if s then @htmlOut += @indent + s + '\n'; ''
attrStr: (obj) ->
attrstr = ''
for name, val of obj
if @expand and name is 'x' then attrstr += @attrStr expandAttrs val
else
vstr = val.toString()
q = (if vstr.indexOf('"') isnt -1 then "'" else '"')
attrstr += ' ' + name + '=' + q + vstr + q
attrstr
normalTag: (tagName, args) ->
attrstr = innertext = ''
func = null
for arg in args
switch typeof arg
when 'undefined','null' then continue
when 'string', 'number' then innertext = arg
when 'function' then func = arg
when 'object' then attrstr += @attrStr arg
else console.log 'DryKup: invalid argument, tag ' + tagName + ', ' + arg.toString()
@htmlOut += @indent + '<' + tagName + attrstr + '>'
if func and tagName isnt 'textarea'
@htmlOut += '\n'
@indent += ' '
@addText innertext
func?()
@indent = @indent[0..-3]
@addText '</' + tagName + '>'
else
@htmlOut += innertext + '</' + tagName + '>' + '\n'
selfClosingTag: (tagName, args) ->
attrstr = ''
for arg in args
if not arg? then continue
if typeof arg is 'object' then attrstr += @attrStr arg
else console.log 'DryKup: invalid argument, tag ' + tagName + ', ' + arg.toString()
@addText '<' + tagName + attrstr + ' />' + '\n'
styleFunc: (str) ->
if typeof str isnt 'string'
# console.log 'DryKup: invalid argument, tag style, ' + str.toString()
return
@addText '<style>' + (if @expand then expandStyle str else str)+'\n' + @indent + '</style>'
extendX: (newSpecStrs = {}, oldSpecStrs = {}) ->
for key, newSpecStr of newSpecStrs
if not (oldSpecStr = oldSpecStrs[key]) then oldSpecStrs[key] = newSpecStr; continue
specsObj = {}
addSpecStr = (specStr) ->
for spec in (specStr.replace(/\s+/g, '~`~').split '~`~')
if not (specParts = ///^ ([^=:]*(=|:)) (.*) $///.exec spec) then continue
specsObj[specParts[1]] = specParts[3]
null
addSpecStr oldSpecStr
addSpecStr newSpecStr
oldSpecStrs[key] = (nameSep + val for nameSep, val of specsObj).join ' '
oldSpecStrs
drykup = (opts) ->
dk = new Drykup(opts)
dk.doctype = (type) -> dk.addText doctypes[type]
dk.text = (s) -> dk.addText s
dk.coffeescript = -> dk.addText 'coffeescript tag not implemented'
dk.style = (s) -> dk.styleFunc s
for tagName in merge_elements 'regular', 'obsolete'
do (tagName) ->
dk[tagName] = (args...) -> dk.normalTag tagName, args
for tagName in merge_elements 'void', 'obsolete_void'
do (tagName) ->
dk[tagName] = (args...) -> dk.selfClosingTag tagName, args
dk
drykup.extendX = extendX
if module?.exports
module.exports = drykup
else
window.drykup = drykup
| 191432 | ###
File: drykup.coffee
Author: <NAME>
DryCup is a CoffeeScript html generator compatible with CoffeeKup but without the magic.
DryKup is open-sourced via the MIT license.
See https://github.com/mark-hahn/drykup
###
# ------------------- Constants stolen from coffeeKup (Release 0.3.2) ---------------------
# ----------------------------- See KOFFEECUP-LICENSE file --------------------------------
# Values available to the `doctype` function inside a template.
# Ex.: `doctype 'strict'`
doctypes =
'default': '<!DOCTYPE html>'
'5': '<!DOCTYPE html>'
'xml': '<?xml version="1.0" encoding="utf-8" ?>'
'transitional': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">'
'strict': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
'frameset': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">'
'1.1': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">'
'basic': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">'
'mobile': '<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">'
'ce': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "ce-html-1.0-transitional.dtd">'
# Private HTML element reference.
# Please mind the gap (1 space at the beginning of each subsequent line).
elements =
# Valid HTML 5 elements requiring a closing tag.
# Note: the `var` element is out for obvious reasons, please use `tag 'var'`.
regular: 'a abbr address article aside audio b bdi bdo blockquote body button
canvas caption cite code colgroup datalist dd del details dfn div dl dt em
fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 head header hgroup
html i iframe ins kbd label legend li map mark menu meter nav noscript object
ol optgroup option output p pre progress q rp rt ruby s samp script section
select small span strong sub summary sup table tbody td textarea tfoot
th thead time title tr u ul video'
# Valid self-closing HTML 5 elements.
void: 'area base br col command embed hr img input keygen link meta param
source track wbr'
obsolete: 'applet acronym bgsound dir frameset noframes isindex listing
nextid noembed plaintext rb strike xmp big blink center font marquee multicol
nobr spacer tt'
obsolete_void: 'basefont frame'
# Create a unique list of element names merging the desired groups.
merge_elements = (args...) ->
result = []
for a in args
for element in elements[a].split ' '
result.push element unless element in result
result
# ------------------- constants for shorthand aliases -------------------------
attrAliases =
w:'width', h:'height', s:'src', src:'src', hr:'href', href:'href', c:'class', db:'data-bind'
i:'id', n:'name', v:'value', m:'method', t:'type'
cp:'cellpadding', b:'border', cs:'colspan', rs:'rowspan', chk:'checked', sel:'selected'
styleValueAliases =
'm:a':'margin:auto', 'ml:a':'margin-left:auto', 'mt:a':'margin-top:auto'
'mr:a':'margin-right:auto', 'mb:a':'margin-bottom:auto'
'c:b':'clear:both', 'f:l':'float:left', 'f:r':'float:right', 'f:n':'float:none'
'fw:b':'font-weight:bold', 'fw:n':'font-weight:normal'
'fs:i':'font-style:italic', 'fs:n':'font-style:normal'
'p:a':'position:absolute', 'p:r':'position:relative', 'p:f':'position:fixed'
'd:n':'display:none', 'd:b':'display:block', 'd:f':'display:fixed'
'ta:l':'text-align:left', 'ta:c':'text-align:center', 'ta:r':'text-align:right'
'o:a':'overflow:auto', 'o:h':'overflow:hidden'
'c:a':'cursor:auto', 'c:p':'cursor:pointer'
'tt:u':'text-transform:uppercase', 'tt:c':'text-transform:capitalize'
'tt:l':'text-transform:lowercase'
'td:n':'text-decoration:none', 'td:u':'text-decoration:underline'
'td:lt':'text-decoration: line-through', 'lh:n':'line-height:normal'
'b:n': 'border:none'
'b:1pxsb' :'border:1px solid black'
'bl:1pxsb':'border-left:1px solid black', 'bt:1pxsb':'border-top:1px solid black'
'br:1pxsb':'border-right:1px solid black', 'bb:1pxsb':'border-bottom:1px solid black'
'b:1pxsg' :'border:1px solid gray'
'bl:1pxsg':'border-left:1px solid gray', 'bt:1pxsg':'border-top:1px solid gray'
'br:1pxsg':'border-right:1px solid gray', 'bb:1pxsg':'border-bottom:1px solid gray'
'zi:a':'z-index:auto', 'zi:i':'z-index:inherit'
'v:v':'visibility:visible', 'v:h':'visibility:hidden'
styleAliases =
l:'left', t:'top', r:'right', w:'width', h:'height', c:'color', bc:'background-color', bot:'bottom'
fs:'font-size', lh:'line-height', zi:'z-index'
b:'border', bl:'border-left', bt:'border-top', br:'border-right', bb:'border-bottom'
m:'margin', ml:'margin-left', mt:'margin-top', mr:'margin-right', mb:'margin-bottom'
p:'padding', pl:'padding-left', pt:'padding-top', pr:'padding-right', pb:'padding-bottom'
v:'visibility', ff:'font-family'
# ---------------------------- alias shorthand expansion code ---------------------------------
whiteSpace = (str) ->
str.replace(/([^~])\+/g, '$1 ').replace(/~\+/g, '+')
expandAttrs = (v = '', styleOnly = false) ->
attrs = {}; styles = {}
v = v.replace /\s+/g, '~`~'
parts = v.split '~`~'
for part in parts
if not (thirds = ///^ ([^=:]*) (=|:) (.*) $///.exec part) then continue
[d, name, sep, value] = thirds
if not styleOnly and sep == '='
if name == 'in'
attrs.id = value
attrs.name = value
else
if (aa = attrAliases[name]) then name = aa
attrs[name] = whiteSpace value
if sep == ':'
if (sva = styleValueAliases[part]) then [name, value] = sva.split ':'
else
if (sa = styleAliases[name]) then name = sa
if name not in ['z-index','opacity'] and
/^(-|\+)?(\d|\.)+$/.test value then value = value + 'px'
else value = whiteSpace value
styles[name] = value
style = ("#{k}:#{v}" for k, v of styles).join '; '
if styleOnly then return style
if style then attrs['style'] = style
attrs
expandStyle = (v) ->
s = ''
while parts = v.match(///^ ([^{]*) \{ ([^}]*) \} ([\s\S]*) $///)
[x, pfx, style, v] = parts
s += pfx + '{' + expandAttrs(style, true) + '}'
s + v
extendX = (newSpecStrs = {}, oldSpecStrs = {}) ->
for key, newSpecStr of newSpecStrs
if not (oldSpecStr = oldSpecStrs[key]) then oldSpecStrs[key] = newSpecStr; continue
specsObj = {}
addSpecStr = (specStr) ->
for spec in (specStr.replace(/\s+/g, '~`~').split '~`~')
if not (specParts = ///^ ([^=:]*(=|:)) (.*) $///.exec spec) then continue
specsObj[specParts[1]] = specParts[3]
null
addSpecStr oldSpecStr
addSpecStr newSpecStr
oldSpecStrs[key] = (nameSep + val for nameSep, val of specsObj).join ' '
oldSpecStrs
# -------------------------------- main drykup code ------------------------------
class Drykup
constructor: (opts = {}) ->
@indent = opts.indent ? ''
@htmlOut = opts.htmlOut ? ''
@expand = opts.expand ? false
resetHtml: (html = '') -> @htmlOut = html
defineGlobalTagFuncs: -> if window then for name, func of @ then window[name] = func
addText: (s) -> if s then @htmlOut += @indent + s + '\n'; ''
attrStr: (obj) ->
attrstr = ''
for name, val of obj
if @expand and name is 'x' then attrstr += @attrStr expandAttrs val
else
vstr = val.toString()
q = (if vstr.indexOf('"') isnt -1 then "'" else '"')
attrstr += ' ' + name + '=' + q + vstr + q
attrstr
normalTag: (tagName, args) ->
attrstr = innertext = ''
func = null
for arg in args
switch typeof arg
when 'undefined','null' then continue
when 'string', 'number' then innertext = arg
when 'function' then func = arg
when 'object' then attrstr += @attrStr arg
else console.log 'DryKup: invalid argument, tag ' + tagName + ', ' + arg.toString()
@htmlOut += @indent + '<' + tagName + attrstr + '>'
if func and tagName isnt 'textarea'
@htmlOut += '\n'
@indent += ' '
@addText innertext
func?()
@indent = @indent[0..-3]
@addText '</' + tagName + '>'
else
@htmlOut += innertext + '</' + tagName + '>' + '\n'
selfClosingTag: (tagName, args) ->
attrstr = ''
for arg in args
if not arg? then continue
if typeof arg is 'object' then attrstr += @attrStr arg
else console.log 'DryKup: invalid argument, tag ' + tagName + ', ' + arg.toString()
@addText '<' + tagName + attrstr + ' />' + '\n'
styleFunc: (str) ->
if typeof str isnt 'string'
# console.log 'DryKup: invalid argument, tag style, ' + str.toString()
return
@addText '<style>' + (if @expand then expandStyle str else str)+'\n' + @indent + '</style>'
extendX: (newSpecStrs = {}, oldSpecStrs = {}) ->
for key, newSpecStr of newSpecStrs
if not (oldSpecStr = oldSpecStrs[key]) then oldSpecStrs[key] = newSpecStr; continue
specsObj = {}
addSpecStr = (specStr) ->
for spec in (specStr.replace(/\s+/g, '~`~').split '~`~')
if not (specParts = ///^ ([^=:]*(=|:)) (.*) $///.exec spec) then continue
specsObj[specParts[1]] = specParts[3]
null
addSpecStr oldSpecStr
addSpecStr newSpecStr
oldSpecStrs[key] = (nameSep + val for nameSep, val of specsObj).join ' '
oldSpecStrs
drykup = (opts) ->
dk = new Drykup(opts)
dk.doctype = (type) -> dk.addText doctypes[type]
dk.text = (s) -> dk.addText s
dk.coffeescript = -> dk.addText 'coffeescript tag not implemented'
dk.style = (s) -> dk.styleFunc s
for tagName in merge_elements 'regular', 'obsolete'
do (tagName) ->
dk[tagName] = (args...) -> dk.normalTag tagName, args
for tagName in merge_elements 'void', 'obsolete_void'
do (tagName) ->
dk[tagName] = (args...) -> dk.selfClosingTag tagName, args
dk
drykup.extendX = extendX
if module?.exports
module.exports = drykup
else
window.drykup = drykup
| true | ###
File: drykup.coffee
Author: PI:NAME:<NAME>END_PI
DryCup is a CoffeeScript html generator compatible with CoffeeKup but without the magic.
DryKup is open-sourced via the MIT license.
See https://github.com/mark-hahn/drykup
###
# ------------------- Constants stolen from coffeeKup (Release 0.3.2) ---------------------
# ----------------------------- See KOFFEECUP-LICENSE file --------------------------------
# Values available to the `doctype` function inside a template.
# Ex.: `doctype 'strict'`
doctypes =
'default': '<!DOCTYPE html>'
'5': '<!DOCTYPE html>'
'xml': '<?xml version="1.0" encoding="utf-8" ?>'
'transitional': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">'
'strict': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
'frameset': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">'
'1.1': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">'
'basic': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">'
'mobile': '<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">'
'ce': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "ce-html-1.0-transitional.dtd">'
# Private HTML element reference.
# Please mind the gap (1 space at the beginning of each subsequent line).
elements =
# Valid HTML 5 elements requiring a closing tag.
# Note: the `var` element is out for obvious reasons, please use `tag 'var'`.
regular: 'a abbr address article aside audio b bdi bdo blockquote body button
canvas caption cite code colgroup datalist dd del details dfn div dl dt em
fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 head header hgroup
html i iframe ins kbd label legend li map mark menu meter nav noscript object
ol optgroup option output p pre progress q rp rt ruby s samp script section
select small span strong sub summary sup table tbody td textarea tfoot
th thead time title tr u ul video'
# Valid self-closing HTML 5 elements.
void: 'area base br col command embed hr img input keygen link meta param
source track wbr'
obsolete: 'applet acronym bgsound dir frameset noframes isindex listing
nextid noembed plaintext rb strike xmp big blink center font marquee multicol
nobr spacer tt'
obsolete_void: 'basefont frame'
# Create a unique list of element names merging the desired groups.
merge_elements = (args...) ->
result = []
for a in args
for element in elements[a].split ' '
result.push element unless element in result
result
# ------------------- constants for shorthand aliases -------------------------
attrAliases =
w:'width', h:'height', s:'src', src:'src', hr:'href', href:'href', c:'class', db:'data-bind'
i:'id', n:'name', v:'value', m:'method', t:'type'
cp:'cellpadding', b:'border', cs:'colspan', rs:'rowspan', chk:'checked', sel:'selected'
styleValueAliases =
'm:a':'margin:auto', 'ml:a':'margin-left:auto', 'mt:a':'margin-top:auto'
'mr:a':'margin-right:auto', 'mb:a':'margin-bottom:auto'
'c:b':'clear:both', 'f:l':'float:left', 'f:r':'float:right', 'f:n':'float:none'
'fw:b':'font-weight:bold', 'fw:n':'font-weight:normal'
'fs:i':'font-style:italic', 'fs:n':'font-style:normal'
'p:a':'position:absolute', 'p:r':'position:relative', 'p:f':'position:fixed'
'd:n':'display:none', 'd:b':'display:block', 'd:f':'display:fixed'
'ta:l':'text-align:left', 'ta:c':'text-align:center', 'ta:r':'text-align:right'
'o:a':'overflow:auto', 'o:h':'overflow:hidden'
'c:a':'cursor:auto', 'c:p':'cursor:pointer'
'tt:u':'text-transform:uppercase', 'tt:c':'text-transform:capitalize'
'tt:l':'text-transform:lowercase'
'td:n':'text-decoration:none', 'td:u':'text-decoration:underline'
'td:lt':'text-decoration: line-through', 'lh:n':'line-height:normal'
'b:n': 'border:none'
'b:1pxsb' :'border:1px solid black'
'bl:1pxsb':'border-left:1px solid black', 'bt:1pxsb':'border-top:1px solid black'
'br:1pxsb':'border-right:1px solid black', 'bb:1pxsb':'border-bottom:1px solid black'
'b:1pxsg' :'border:1px solid gray'
'bl:1pxsg':'border-left:1px solid gray', 'bt:1pxsg':'border-top:1px solid gray'
'br:1pxsg':'border-right:1px solid gray', 'bb:1pxsg':'border-bottom:1px solid gray'
'zi:a':'z-index:auto', 'zi:i':'z-index:inherit'
'v:v':'visibility:visible', 'v:h':'visibility:hidden'
styleAliases =
l:'left', t:'top', r:'right', w:'width', h:'height', c:'color', bc:'background-color', bot:'bottom'
fs:'font-size', lh:'line-height', zi:'z-index'
b:'border', bl:'border-left', bt:'border-top', br:'border-right', bb:'border-bottom'
m:'margin', ml:'margin-left', mt:'margin-top', mr:'margin-right', mb:'margin-bottom'
p:'padding', pl:'padding-left', pt:'padding-top', pr:'padding-right', pb:'padding-bottom'
v:'visibility', ff:'font-family'
# ---------------------------- alias shorthand expansion code ---------------------------------
whiteSpace = (str) ->
str.replace(/([^~])\+/g, '$1 ').replace(/~\+/g, '+')
expandAttrs = (v = '', styleOnly = false) ->
attrs = {}; styles = {}
v = v.replace /\s+/g, '~`~'
parts = v.split '~`~'
for part in parts
if not (thirds = ///^ ([^=:]*) (=|:) (.*) $///.exec part) then continue
[d, name, sep, value] = thirds
if not styleOnly and sep == '='
if name == 'in'
attrs.id = value
attrs.name = value
else
if (aa = attrAliases[name]) then name = aa
attrs[name] = whiteSpace value
if sep == ':'
if (sva = styleValueAliases[part]) then [name, value] = sva.split ':'
else
if (sa = styleAliases[name]) then name = sa
if name not in ['z-index','opacity'] and
/^(-|\+)?(\d|\.)+$/.test value then value = value + 'px'
else value = whiteSpace value
styles[name] = value
style = ("#{k}:#{v}" for k, v of styles).join '; '
if styleOnly then return style
if style then attrs['style'] = style
attrs
expandStyle = (v) ->
s = ''
while parts = v.match(///^ ([^{]*) \{ ([^}]*) \} ([\s\S]*) $///)
[x, pfx, style, v] = parts
s += pfx + '{' + expandAttrs(style, true) + '}'
s + v
extendX = (newSpecStrs = {}, oldSpecStrs = {}) ->
for key, newSpecStr of newSpecStrs
if not (oldSpecStr = oldSpecStrs[key]) then oldSpecStrs[key] = newSpecStr; continue
specsObj = {}
addSpecStr = (specStr) ->
for spec in (specStr.replace(/\s+/g, '~`~').split '~`~')
if not (specParts = ///^ ([^=:]*(=|:)) (.*) $///.exec spec) then continue
specsObj[specParts[1]] = specParts[3]
null
addSpecStr oldSpecStr
addSpecStr newSpecStr
oldSpecStrs[key] = (nameSep + val for nameSep, val of specsObj).join ' '
oldSpecStrs
# -------------------------------- main drykup code ------------------------------
class Drykup
constructor: (opts = {}) ->
@indent = opts.indent ? ''
@htmlOut = opts.htmlOut ? ''
@expand = opts.expand ? false
resetHtml: (html = '') -> @htmlOut = html
defineGlobalTagFuncs: -> if window then for name, func of @ then window[name] = func
addText: (s) -> if s then @htmlOut += @indent + s + '\n'; ''
attrStr: (obj) ->
attrstr = ''
for name, val of obj
if @expand and name is 'x' then attrstr += @attrStr expandAttrs val
else
vstr = val.toString()
q = (if vstr.indexOf('"') isnt -1 then "'" else '"')
attrstr += ' ' + name + '=' + q + vstr + q
attrstr
normalTag: (tagName, args) ->
attrstr = innertext = ''
func = null
for arg in args
switch typeof arg
when 'undefined','null' then continue
when 'string', 'number' then innertext = arg
when 'function' then func = arg
when 'object' then attrstr += @attrStr arg
else console.log 'DryKup: invalid argument, tag ' + tagName + ', ' + arg.toString()
@htmlOut += @indent + '<' + tagName + attrstr + '>'
if func and tagName isnt 'textarea'
@htmlOut += '\n'
@indent += ' '
@addText innertext
func?()
@indent = @indent[0..-3]
@addText '</' + tagName + '>'
else
@htmlOut += innertext + '</' + tagName + '>' + '\n'
selfClosingTag: (tagName, args) ->
attrstr = ''
for arg in args
if not arg? then continue
if typeof arg is 'object' then attrstr += @attrStr arg
else console.log 'DryKup: invalid argument, tag ' + tagName + ', ' + arg.toString()
@addText '<' + tagName + attrstr + ' />' + '\n'
styleFunc: (str) ->
if typeof str isnt 'string'
# console.log 'DryKup: invalid argument, tag style, ' + str.toString()
return
@addText '<style>' + (if @expand then expandStyle str else str)+'\n' + @indent + '</style>'
extendX: (newSpecStrs = {}, oldSpecStrs = {}) ->
for key, newSpecStr of newSpecStrs
if not (oldSpecStr = oldSpecStrs[key]) then oldSpecStrs[key] = newSpecStr; continue
specsObj = {}
addSpecStr = (specStr) ->
for spec in (specStr.replace(/\s+/g, '~`~').split '~`~')
if not (specParts = ///^ ([^=:]*(=|:)) (.*) $///.exec spec) then continue
specsObj[specParts[1]] = specParts[3]
null
addSpecStr oldSpecStr
addSpecStr newSpecStr
oldSpecStrs[key] = (nameSep + val for nameSep, val of specsObj).join ' '
oldSpecStrs
drykup = (opts) ->
dk = new Drykup(opts)
dk.doctype = (type) -> dk.addText doctypes[type]
dk.text = (s) -> dk.addText s
dk.coffeescript = -> dk.addText 'coffeescript tag not implemented'
dk.style = (s) -> dk.styleFunc s
for tagName in merge_elements 'regular', 'obsolete'
do (tagName) ->
dk[tagName] = (args...) -> dk.normalTag tagName, args
for tagName in merge_elements 'void', 'obsolete_void'
do (tagName) ->
dk[tagName] = (args...) -> dk.selfClosingTag tagName, args
dk
drykup.extendX = extendX
if module?.exports
module.exports = drykup
else
window.drykup = drykup
|
[
{
"context": " expect(domainEvent.payload.name).to.be.equal 'John'\n expect(domainEvent.payload.email).to.be.",
"end": 7639,
"score": 0.9994611740112305,
"start": 7635,
"tag": "NAME",
"value": "John"
},
{
"context": " expect(domainEvent.payload.email).to.be.equal 'john@ex... | src/context/command.feature_spec.coffee | efacilitation/eventric | 150 | describe 'Command Feature', ->
it 'should reject with a descriptive error given the context was not initialized yet', ->
someContext = eventric.context 'ExampleContext'
someContext.command 'DoSomething'
.catch (error) ->
expect(error).to.be.an.instanceOf Error
expect(error.message).to.contain 'ExampleContext'
expect(error.message).to.contain 'DoSomething'
it 'should reject with a command not found error given the command has no registered handler', ->
someContext = eventric.context 'Examplecontext'
someContext.initialize()
.then ->
someContext.command 'DoSomething',
id: 42
foo: 'bar'
.catch (error) ->
expect(error).to.be.an.instanceof Error
it 'should call the command handler with the passed in params given the command has a registered handler', ->
someContext = eventric.context 'Examplecontext'
commandHandlerStub = sandbox.stub()
someContext.addCommandHandlers DoSomething: commandHandlerStub
params =
id: 42
foo: 'bar'
someContext.initialize()
.then ->
someContext.command 'DoSomething', params
.then ->
expect(commandHandlerStub).to.have.been.calledWith params
describe 'given a created and initialized example context including an aggregate', ->
exampleContext = null
beforeEach ->
exampleContext = eventric.context 'exampleContext'
# Domain Events
exampleContext.defineDomainEvent 'ExampleCreated', ->
exampleContext.defineDomainEvent 'SomethingHappened', (params) ->
@someId = params.someId
@someProperty = params.someProperty
class ExampleAggregate
create: ->
@$emitDomainEvent 'ExampleCreated'
doSomething: (someId, someProperty) ->
@$emitDomainEvent 'SomethingHappened',
someId: someId
someProperty: someProperty
handleSomethingHappened: (domainEvent) ->
@someId = domainEvent.payload.someId
@someProperty = domainEvent.payload.someProperty
exampleContext.addAggregate 'Example', ExampleAggregate
exampleContext.addCommandHandlers
CreateExample: (params) ->
@$aggregate.create 'Example'
.then (example) ->
example.$save()
DoSomething: (params) ->
@$aggregate.load 'Example', params.aggregateId
.then (example) ->
example.doSomething params.someId, params.someProperty
example.$save()
exampleContext.initialize()
it 'should trigger the correct domain event given one command is sent to the context', (done) ->
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
expect(domainEvent.payload.someId).to.equal 'some-id'
expect(domainEvent.payload.someProperty).to.equal 'some-property'
expect(domainEvent.name).to.equal 'SomethingHappened'
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
.catch done
return
it 'should execute all commands as expected given multiple commands are sent to the context', (done) ->
commandCount = 0
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
commandCount++
if commandCount == 2
expect(commandCount).to.equal 2
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
.catch done
return
it '[bugfix] should return the correct payload given an array at the domain event definition', (done) ->
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
expect(domainEvent.payload).to.deep.equal
someId: 'some-id'
someProperty: ['value-1', 'value-2']
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: ['value-1', 'value-2']
.catch done
return
describe 'given a command handler rejects with an error', ->
dummyError = null
beforeEach ->
dummyError = new Error 'dummy error'
it 'should re-throw an error with a descriptive message given the command handler triggers an error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
new Promise ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWithError'
expect(error.message).to.contain '{"foo":"bar"}'
it 'should re-throw an error with a descriptive message given the command handler throws a synchronous error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWithError'
expect(error.message).to.contain '{"foo":"bar"}'
it 'should make it possible to access the original error message given the command handler triggers an error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
new Promise ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.originalErrorMessage).to.equal 'dummy error'
it 'should throw a generic error given the command handler rejects without an error', ->
exampleContext.addCommandHandlers
CommandWhichRejectsWithoutAnError: (params) ->
new Promise (resolve, reject) ->
reject()
exampleContext.command 'CommandWhichRejectsWithoutAnError', foo: 'bar'
.catch (error) ->
expect(error).to.be.an.instanceOf Error
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWhichRejectsWithoutAnError'
expect(error.message).to.contain '{"foo":"bar"}'
describe 'creating an aggregate', ->
it 'should emit the create domain event after creating an aggregate', (done) ->
exampleContext = eventric.context 'Examplecontext'
exampleContext.defineDomainEvent 'ExampleCreated', (params) ->
@name = params.name
@email = params.email
class Example
create: (params) ->
@$emitDomainEvent 'ExampleCreated', params
exampleContext.addAggregate 'Example', Example
exampleContext.addCommandHandlers
CreateExample: (params) ->
@$aggregate.create 'Example', params
.then (example) ->
example.$save()
exampleContext.subscribeToDomainEvent 'ExampleCreated', (domainEvent) ->
expect(domainEvent.payload.name).to.be.equal 'John'
expect(domainEvent.payload.email).to.be.equal 'john@example.com'
done()
exampleContext.initialize()
.then ->
exampleContext.command 'CreateExample',
name: 'John'
email: 'john@example.com'
.catch done
return
| 109329 | describe 'Command Feature', ->
it 'should reject with a descriptive error given the context was not initialized yet', ->
someContext = eventric.context 'ExampleContext'
someContext.command 'DoSomething'
.catch (error) ->
expect(error).to.be.an.instanceOf Error
expect(error.message).to.contain 'ExampleContext'
expect(error.message).to.contain 'DoSomething'
it 'should reject with a command not found error given the command has no registered handler', ->
someContext = eventric.context 'Examplecontext'
someContext.initialize()
.then ->
someContext.command 'DoSomething',
id: 42
foo: 'bar'
.catch (error) ->
expect(error).to.be.an.instanceof Error
it 'should call the command handler with the passed in params given the command has a registered handler', ->
someContext = eventric.context 'Examplecontext'
commandHandlerStub = sandbox.stub()
someContext.addCommandHandlers DoSomething: commandHandlerStub
params =
id: 42
foo: 'bar'
someContext.initialize()
.then ->
someContext.command 'DoSomething', params
.then ->
expect(commandHandlerStub).to.have.been.calledWith params
describe 'given a created and initialized example context including an aggregate', ->
exampleContext = null
beforeEach ->
exampleContext = eventric.context 'exampleContext'
# Domain Events
exampleContext.defineDomainEvent 'ExampleCreated', ->
exampleContext.defineDomainEvent 'SomethingHappened', (params) ->
@someId = params.someId
@someProperty = params.someProperty
class ExampleAggregate
create: ->
@$emitDomainEvent 'ExampleCreated'
doSomething: (someId, someProperty) ->
@$emitDomainEvent 'SomethingHappened',
someId: someId
someProperty: someProperty
handleSomethingHappened: (domainEvent) ->
@someId = domainEvent.payload.someId
@someProperty = domainEvent.payload.someProperty
exampleContext.addAggregate 'Example', ExampleAggregate
exampleContext.addCommandHandlers
CreateExample: (params) ->
@$aggregate.create 'Example'
.then (example) ->
example.$save()
DoSomething: (params) ->
@$aggregate.load 'Example', params.aggregateId
.then (example) ->
example.doSomething params.someId, params.someProperty
example.$save()
exampleContext.initialize()
it 'should trigger the correct domain event given one command is sent to the context', (done) ->
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
expect(domainEvent.payload.someId).to.equal 'some-id'
expect(domainEvent.payload.someProperty).to.equal 'some-property'
expect(domainEvent.name).to.equal 'SomethingHappened'
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
.catch done
return
it 'should execute all commands as expected given multiple commands are sent to the context', (done) ->
commandCount = 0
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
commandCount++
if commandCount == 2
expect(commandCount).to.equal 2
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
.catch done
return
it '[bugfix] should return the correct payload given an array at the domain event definition', (done) ->
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
expect(domainEvent.payload).to.deep.equal
someId: 'some-id'
someProperty: ['value-1', 'value-2']
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: ['value-1', 'value-2']
.catch done
return
describe 'given a command handler rejects with an error', ->
dummyError = null
beforeEach ->
dummyError = new Error 'dummy error'
it 'should re-throw an error with a descriptive message given the command handler triggers an error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
new Promise ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWithError'
expect(error.message).to.contain '{"foo":"bar"}'
it 'should re-throw an error with a descriptive message given the command handler throws a synchronous error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWithError'
expect(error.message).to.contain '{"foo":"bar"}'
it 'should make it possible to access the original error message given the command handler triggers an error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
new Promise ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.originalErrorMessage).to.equal 'dummy error'
it 'should throw a generic error given the command handler rejects without an error', ->
exampleContext.addCommandHandlers
CommandWhichRejectsWithoutAnError: (params) ->
new Promise (resolve, reject) ->
reject()
exampleContext.command 'CommandWhichRejectsWithoutAnError', foo: 'bar'
.catch (error) ->
expect(error).to.be.an.instanceOf Error
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWhichRejectsWithoutAnError'
expect(error.message).to.contain '{"foo":"bar"}'
describe 'creating an aggregate', ->
it 'should emit the create domain event after creating an aggregate', (done) ->
exampleContext = eventric.context 'Examplecontext'
exampleContext.defineDomainEvent 'ExampleCreated', (params) ->
@name = params.name
@email = params.email
class Example
create: (params) ->
@$emitDomainEvent 'ExampleCreated', params
exampleContext.addAggregate 'Example', Example
exampleContext.addCommandHandlers
CreateExample: (params) ->
@$aggregate.create 'Example', params
.then (example) ->
example.$save()
exampleContext.subscribeToDomainEvent 'ExampleCreated', (domainEvent) ->
expect(domainEvent.payload.name).to.be.equal '<NAME>'
expect(domainEvent.payload.email).to.be.equal '<EMAIL>'
done()
exampleContext.initialize()
.then ->
exampleContext.command 'CreateExample',
name: '<NAME>'
email: '<EMAIL>'
.catch done
return
| true | describe 'Command Feature', ->
it 'should reject with a descriptive error given the context was not initialized yet', ->
someContext = eventric.context 'ExampleContext'
someContext.command 'DoSomething'
.catch (error) ->
expect(error).to.be.an.instanceOf Error
expect(error.message).to.contain 'ExampleContext'
expect(error.message).to.contain 'DoSomething'
it 'should reject with a command not found error given the command has no registered handler', ->
someContext = eventric.context 'Examplecontext'
someContext.initialize()
.then ->
someContext.command 'DoSomething',
id: 42
foo: 'bar'
.catch (error) ->
expect(error).to.be.an.instanceof Error
it 'should call the command handler with the passed in params given the command has a registered handler', ->
someContext = eventric.context 'Examplecontext'
commandHandlerStub = sandbox.stub()
someContext.addCommandHandlers DoSomething: commandHandlerStub
params =
id: 42
foo: 'bar'
someContext.initialize()
.then ->
someContext.command 'DoSomething', params
.then ->
expect(commandHandlerStub).to.have.been.calledWith params
describe 'given a created and initialized example context including an aggregate', ->
exampleContext = null
beforeEach ->
exampleContext = eventric.context 'exampleContext'
# Domain Events
exampleContext.defineDomainEvent 'ExampleCreated', ->
exampleContext.defineDomainEvent 'SomethingHappened', (params) ->
@someId = params.someId
@someProperty = params.someProperty
class ExampleAggregate
create: ->
@$emitDomainEvent 'ExampleCreated'
doSomething: (someId, someProperty) ->
@$emitDomainEvent 'SomethingHappened',
someId: someId
someProperty: someProperty
handleSomethingHappened: (domainEvent) ->
@someId = domainEvent.payload.someId
@someProperty = domainEvent.payload.someProperty
exampleContext.addAggregate 'Example', ExampleAggregate
exampleContext.addCommandHandlers
CreateExample: (params) ->
@$aggregate.create 'Example'
.then (example) ->
example.$save()
DoSomething: (params) ->
@$aggregate.load 'Example', params.aggregateId
.then (example) ->
example.doSomething params.someId, params.someProperty
example.$save()
exampleContext.initialize()
it 'should trigger the correct domain event given one command is sent to the context', (done) ->
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
expect(domainEvent.payload.someId).to.equal 'some-id'
expect(domainEvent.payload.someProperty).to.equal 'some-property'
expect(domainEvent.name).to.equal 'SomethingHappened'
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
.catch done
return
it 'should execute all commands as expected given multiple commands are sent to the context', (done) ->
commandCount = 0
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
commandCount++
if commandCount == 2
expect(commandCount).to.equal 2
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: 'some-property'
.catch done
return
it '[bugfix] should return the correct payload given an array at the domain event definition', (done) ->
exampleContext.subscribeToDomainEvent 'SomethingHappened', (domainEvent) ->
expect(domainEvent.payload).to.deep.equal
someId: 'some-id'
someProperty: ['value-1', 'value-2']
done()
exampleContext.command 'CreateExample'
.then (exampleId) ->
exampleContext.command 'DoSomething',
aggregateId: exampleId
someId: 'some-id'
someProperty: ['value-1', 'value-2']
.catch done
return
describe 'given a command handler rejects with an error', ->
dummyError = null
beforeEach ->
dummyError = new Error 'dummy error'
it 'should re-throw an error with a descriptive message given the command handler triggers an error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
new Promise ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWithError'
expect(error.message).to.contain '{"foo":"bar"}'
it 'should re-throw an error with a descriptive message given the command handler throws a synchronous error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWithError'
expect(error.message).to.contain '{"foo":"bar"}'
it 'should make it possible to access the original error message given the command handler triggers an error', ->
exampleContext.addCommandHandlers
CommandWithError: (params) ->
new Promise ->
throw dummyError
exampleContext.command 'CommandWithError', foo: 'bar'
.catch (error) ->
expect(error).to.equal dummyError
expect(error.originalErrorMessage).to.equal 'dummy error'
it 'should throw a generic error given the command handler rejects without an error', ->
exampleContext.addCommandHandlers
CommandWhichRejectsWithoutAnError: (params) ->
new Promise (resolve, reject) ->
reject()
exampleContext.command 'CommandWhichRejectsWithoutAnError', foo: 'bar'
.catch (error) ->
expect(error).to.be.an.instanceOf Error
expect(error.message).to.contain 'exampleContext'
expect(error.message).to.contain 'CommandWhichRejectsWithoutAnError'
expect(error.message).to.contain '{"foo":"bar"}'
describe 'creating an aggregate', ->
it 'should emit the create domain event after creating an aggregate', (done) ->
exampleContext = eventric.context 'Examplecontext'
exampleContext.defineDomainEvent 'ExampleCreated', (params) ->
@name = params.name
@email = params.email
class Example
create: (params) ->
@$emitDomainEvent 'ExampleCreated', params
exampleContext.addAggregate 'Example', Example
exampleContext.addCommandHandlers
CreateExample: (params) ->
@$aggregate.create 'Example', params
.then (example) ->
example.$save()
exampleContext.subscribeToDomainEvent 'ExampleCreated', (domainEvent) ->
expect(domainEvent.payload.name).to.be.equal 'PI:NAME:<NAME>END_PI'
expect(domainEvent.payload.email).to.be.equal 'PI:EMAIL:<EMAIL>END_PI'
done()
exampleContext.initialize()
.then ->
exampleContext.command 'CreateExample',
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
.catch done
return
|
[
{
"context": "n of birds (whooping cranes).\n\nhttps://github.com/NicMcPhee/whooping-crane-model\n\nCopyright (c) 2015 Nic McPh",
"end": 90,
"score": 0.99922114610672,
"start": 81,
"tag": "USERNAME",
"value": "NicMcPhee"
},
{
"context": "NicMcPhee/whooping-crane-model\n\nCopyright (c... | src/lib/population.coffee | NicMcPhee/whooping-crane-model | 0 | ###
Basic model of a population of birds (whooping cranes).
https://github.com/NicMcPhee/whooping-crane-model
Copyright (c) 2015 Nic McPhee
Licensed under the MIT license.
###
'use strict'
ModelParameters = require './model_parameters'
Bird = require './bird'
# Move shuffle, chunk to a util file
# From https://gist.github.com/ddgromit/859699
shuffle = (a) ->
i = a.length
while --i > 0
j = ~~(Math.random() * (i + 1))
t = a[j]
a[j] = a[i]
a[i] = t
a
# From http://bl.ocks.org/milafrerichs/7301183
chunk = (array, chunkSize) ->
[].concat.apply [], array.map((elem, i) ->
(if i % chunkSize then [] else [array.slice(i, i + chunkSize)])
)
class Population
constructor: (popSize, proportionEarlyNesters = 0.5) ->
@_unpairedBirds =
@makeBird(proportionEarlyNesters) for [0...popSize]
@_pairings = []
makeBird: (proportionEarlyNesters) ->
bird = new Bird(@nestingPreference(proportionEarlyNesters))
bird.rollBackBirthYear()
bird
nestingPreference: (proportionEarlyNesters) ->
if Math.random() < proportionEarlyNesters
Bird.EARLY
else
Bird.LATE
addBird: (bird) ->
bird ?= new Bird()
@_unpairedBirds.push(bird)
birds: -> @_unpairedBirds.concat([].concat.apply([], @_pairings))
unpairedBirds: -> @_unpairedBirds
matingPairs: -> @_pairings
size: -> @_unpairedBirds.length + 2*@_pairings.length
proportionLateNesters: ->
return 0 if @size() == 0
@birds().filter((b) -> b.isLate()).length / @size()
proportionWildBorn: ->
return 0 if @size() == 0
@birds().filter((b) -> b.isWild()).length / @size()
mateUnpairedBirds: ->
toMate = @_unpairedBirds.filter((b) -> b.canMate())
if toMate.length % 2 == 1
toMate = toMate[1..]
shuffle(toMate)
@_unpairedBirds = @_unpairedBirds.filter((b) -> not (b in toMate))
@_pairings = @_pairings.concat(chunk(toMate, 2))
mortalityPass: ->
@_unpairedBirds =
@_unpairedBirds.filter((b) -> b.survives())
survivingPairs = []
for pair in @_pairings
survivors = pair.filter((b) -> b.survives())
if survivors.length == 2
survivingPairs.push(pair)
else if survivors.length == 1
@_unpairedBirds.push(survivors[0])
@_pairings = survivingPairs
capToCarryingCapacity: ->
@mortalityPass() while @size() > ModelParameters.carryingCapacity
module.exports = Population
| 205167 | ###
Basic model of a population of birds (whooping cranes).
https://github.com/NicMcPhee/whooping-crane-model
Copyright (c) 2015 <NAME>
Licensed under the MIT license.
###
'use strict'
ModelParameters = require './model_parameters'
Bird = require './bird'
# Move shuffle, chunk to a util file
# From https://gist.github.com/ddgromit/859699
shuffle = (a) ->
i = a.length
while --i > 0
j = ~~(Math.random() * (i + 1))
t = a[j]
a[j] = a[i]
a[i] = t
a
# From http://bl.ocks.org/milafrerichs/7301183
chunk = (array, chunkSize) ->
[].concat.apply [], array.map((elem, i) ->
(if i % chunkSize then [] else [array.slice(i, i + chunkSize)])
)
class Population
constructor: (popSize, proportionEarlyNesters = 0.5) ->
@_unpairedBirds =
@makeBird(proportionEarlyNesters) for [0...popSize]
@_pairings = []
makeBird: (proportionEarlyNesters) ->
bird = new Bird(@nestingPreference(proportionEarlyNesters))
bird.rollBackBirthYear()
bird
nestingPreference: (proportionEarlyNesters) ->
if Math.random() < proportionEarlyNesters
Bird.EARLY
else
Bird.LATE
addBird: (bird) ->
bird ?= new Bird()
@_unpairedBirds.push(bird)
birds: -> @_unpairedBirds.concat([].concat.apply([], @_pairings))
unpairedBirds: -> @_unpairedBirds
matingPairs: -> @_pairings
size: -> @_unpairedBirds.length + 2*@_pairings.length
proportionLateNesters: ->
return 0 if @size() == 0
@birds().filter((b) -> b.isLate()).length / @size()
proportionWildBorn: ->
return 0 if @size() == 0
@birds().filter((b) -> b.isWild()).length / @size()
mateUnpairedBirds: ->
toMate = @_unpairedBirds.filter((b) -> b.canMate())
if toMate.length % 2 == 1
toMate = toMate[1..]
shuffle(toMate)
@_unpairedBirds = @_unpairedBirds.filter((b) -> not (b in toMate))
@_pairings = @_pairings.concat(chunk(toMate, 2))
mortalityPass: ->
@_unpairedBirds =
@_unpairedBirds.filter((b) -> b.survives())
survivingPairs = []
for pair in @_pairings
survivors = pair.filter((b) -> b.survives())
if survivors.length == 2
survivingPairs.push(pair)
else if survivors.length == 1
@_unpairedBirds.push(survivors[0])
@_pairings = survivingPairs
capToCarryingCapacity: ->
@mortalityPass() while @size() > ModelParameters.carryingCapacity
module.exports = Population
| true | ###
Basic model of a population of birds (whooping cranes).
https://github.com/NicMcPhee/whooping-crane-model
Copyright (c) 2015 PI:NAME:<NAME>END_PI
Licensed under the MIT license.
###
'use strict'
ModelParameters = require './model_parameters'
Bird = require './bird'
# Move shuffle, chunk to a util file
# From https://gist.github.com/ddgromit/859699
shuffle = (a) ->
i = a.length
while --i > 0
j = ~~(Math.random() * (i + 1))
t = a[j]
a[j] = a[i]
a[i] = t
a
# From http://bl.ocks.org/milafrerichs/7301183
chunk = (array, chunkSize) ->
[].concat.apply [], array.map((elem, i) ->
(if i % chunkSize then [] else [array.slice(i, i + chunkSize)])
)
class Population
constructor: (popSize, proportionEarlyNesters = 0.5) ->
@_unpairedBirds =
@makeBird(proportionEarlyNesters) for [0...popSize]
@_pairings = []
makeBird: (proportionEarlyNesters) ->
bird = new Bird(@nestingPreference(proportionEarlyNesters))
bird.rollBackBirthYear()
bird
nestingPreference: (proportionEarlyNesters) ->
if Math.random() < proportionEarlyNesters
Bird.EARLY
else
Bird.LATE
addBird: (bird) ->
bird ?= new Bird()
@_unpairedBirds.push(bird)
birds: -> @_unpairedBirds.concat([].concat.apply([], @_pairings))
unpairedBirds: -> @_unpairedBirds
matingPairs: -> @_pairings
size: -> @_unpairedBirds.length + 2*@_pairings.length
proportionLateNesters: ->
return 0 if @size() == 0
@birds().filter((b) -> b.isLate()).length / @size()
proportionWildBorn: ->
return 0 if @size() == 0
@birds().filter((b) -> b.isWild()).length / @size()
mateUnpairedBirds: ->
toMate = @_unpairedBirds.filter((b) -> b.canMate())
if toMate.length % 2 == 1
toMate = toMate[1..]
shuffle(toMate)
@_unpairedBirds = @_unpairedBirds.filter((b) -> not (b in toMate))
@_pairings = @_pairings.concat(chunk(toMate, 2))
mortalityPass: ->
@_unpairedBirds =
@_unpairedBirds.filter((b) -> b.survives())
survivingPairs = []
for pair in @_pairings
survivors = pair.filter((b) -> b.survives())
if survivors.length == 2
survivingPairs.push(pair)
else if survivors.length == 1
@_unpairedBirds.push(survivors[0])
@_pairings = survivingPairs
capToCarryingCapacity: ->
@mortalityPass() while @size() > ModelParameters.carryingCapacity
module.exports = Population
|
[
{
"context": " @_super()\n @labelPathDidChange()\n\n # TODO(Peter): This is a hack. Some computed don't fire proper",
"end": 1354,
"score": 0.9882214069366455,
"start": 1349,
"tag": "NAME",
"value": "Peter"
},
{
"context": " Ember.run this, @updateDropdownLayout\n\n # TODO(P... | src/select.coffee | eflexsystems/ember-widgets | 0 | get = (object, key) ->
return undefined unless object
return object unless key
object.get?(key) or object[key]
set = (object, key, value) ->
return unless object and key
object.set?(key, value) or object[key] = value;
# The view for each item in the select.
Ember.Widgets.SelectOptionView = Ember.ListItemView.extend
tagName: 'li'
templateName: 'select_item'
layoutName: 'select_item_layout'
classNames: 'ember-select-result-item'
classNameBindings: ['content.isGroupOption:ember-select-group',
'isHighlighted:highlighted']
labelPath: Ember.computed.alias 'controller.optionLabelPath'
isHighlighted: Ember.computed ->
@get('controller.highlighted') is @get('content')
.property 'controller.highlighted', 'content'
labelPathDidChange: Ember.observer ->
labelPath = @get 'labelPath'
# if it is a raw string, the path is just the context
# if labelPath is specified, the path should be context.labelPath
path = if labelPath then "content.#{labelPath}" else 'content'
# We are creating a computed property called label that is an alias of
# 'context.#{labelPath}'
Ember.defineProperty(this, 'label', Ember.computed.alias(path))
@notifyPropertyChange 'label'
, 'content', 'labelPath'
didInsertElement: ->
@_super()
@labelPathDidChange()
# TODO(Peter): This is a hack. Some computed don't fire properly if
# they are dependent on the context. e.g. isHighlighted may not update
# if it is dependent on the context. This seems to fix the issue
updateContext: (context) ->
@_super context
@set 'content', context
click: ->
return if @get('content.isGroupOption')
@set 'controller.selection', @get('content')
@get('controller').userDidSelect @get 'content'
# if there's a selection and the dropdown is unexpanded, we want to
# propagate the click event
# if the dropdown is expanded and we select something, don't propagate
if @get('controller.showDropdown')
@get('controller').send 'hideDropdown'
# return false to prevent propagation
return no
mouseEnter: ->
return if @get('content.isGroupOption')
@set 'controller.highlighted', @get('content')
Ember.Widgets.SelectComponent =
Ember.Component.extend Ember.Widgets.BodyEventListener,
Ember.AddeparMixins.ResizeHandlerMixin,
layoutName: 'select'
classNames: 'ember-select'
attributeBindings: ['tabindex']
classNameBindings: ['showDropdown:open', 'isDropup:dropup']
itemViewClass: 'Ember.Widgets.SelectOptionView'
prompt: 'Select a Value'
disabled: no
# we need to set tabindex so that div responds to key events
highlightedIndex: -1
tabindex: -1
showDropdown: no
dropdownHeight: 300
# Important: rowHeight must be synched with the CSS
rowHeight: 26
# Option to indicate whether we should sort the labels
sortLabels: yes
# If isSelect is true, we will not show the search box
isSelect: no
# Align dropdown-menu above the button
isDropup: no
# Align dropdown-menu to the right of the button
isDropdownMenuPulledRight: no
# Change the icon when necessary
dropdownToggleIcon: 'fa fa-caret-down'
# Change the button class when necessary
buttonClass: 'btn btn-default'
dropdownMenuClass: ''
# The list of options
content: []
selection: null
query: ''
optionLabelPath: ''
optionValuePath: ''
optionGroupPath: ''
optionDefaultPath: ''
# This augments the dropdown to provide a place for adding a select menu that
# possibly says 'create item' or something along that line
selectMenuView: null
updateDropdownLayout: Ember.observer ->
return if @get('state') isnt 'inDOM' or @get('showDropdown') is no
# Render the dropdown in a hidden state to get the size
@$('.js-dropdown-menu').css('visibility', 'hidden');
# Render the dropdown completely into the DOM for offset()
Ember.run.next this, ->
dropdownButton = @$('.js-dropdown-toggle')[0]
dropdownButtonHeight = @$(dropdownButton).outerHeight()
dropdownButtonOffset = @$(dropdownButton).offset()
dropdownMenu = @$('.js-dropdown-menu')[0]
dropdownMenuHeight = @$(dropdownMenu).outerHeight()
dropdownMenuWidth = @$(dropdownMenu).outerWidth()
dropdownMenuOffset = @$(dropdownMenu).offset()
# Only switch from dropUp to dropDown if there's this much extra space
# under where the dropDown would be. This prevents the popup from jiggling
# up and down
dropdownMargin = 15
if @get('isDropup')
dropdownMenuBottom = dropdownButtonOffset.top + dropdownButtonHeight +
dropdownMenuHeight + dropdownMargin
else
dropdownMenuBottom = dropdownMenuOffset.top + dropdownMenuHeight
@set 'isDropup', dropdownMenuBottom > window.innerHeight
@set 'isDropdownMenuPulledRight', dropdownButtonOffset.left +
dropdownMenuWidth + dropdownMargin > window.innerWidth
@$('.js-dropdown-menu').css('visibility', 'visible');
return
, 'showDropdown', 'window.innerHeight'
onResizeEnd: ->
# We need to put this on the run loop, because the resize event came from
# the window. Otherwise, we get a warning when used in the tests. You have
# turned on testing mode, which disables the run-loop's autorun. You
# will need to wrap any code with asynchronous side-effects in an Ember.run
Ember.run this, @updateDropdownLayout
# TODO(Peter): consider calling this optionViewClass?
itemView: Ember.computed ->
itemViewClass = @get 'itemViewClass'
if typeof itemViewClass is 'string'
return Ember.get itemViewClass
itemViewClass
.property 'itemViewClass'
# TODO(Peter): consider calling this selectedOptionViewClass?
selectedItemView: Ember.computed ->
@get('itemView').extend
tagName: 'span'
labelPath: Ember.computed.alias 'controller.optionLabelPath'
context: Ember.computed.alias 'controller.selection'
.property 'itemView'
selectedLabel: Ember.computed ->
get @get('selection'), @get('optionLabelPath')
.property 'selection', 'optionLabelPath'
searchView: Ember.TextField.extend
placeholder: 'Search'
valueBinding: 'parentView.query'
# we want to focus on search input when dropdown is opened. We need to put
# this in a run loop to wait for the event that triggers the showDropdown
# to finishes before trying to focus the input. Otherwise, focus when be
# "stolen" from us.
showDropdownDidChange: Ember.observer ->
Ember.run.next this, -> @$().focus() if @get('state') is 'inDOM'
, 'parentView.showDropdown'
# This is a hack. Ember.ListView doesn't handle case when total height
# is less than height properly
listView: Ember.ListView.extend
style: Ember.computed ->
height = Math.min @get('height'), @get('totalHeight')
"height: #{height}px"
.property('height', 'totalHeight'),
# the list of content that is filtered down based on the query entered
# in the textbox
filteredContent: Ember.computed ->
content = @get 'content'
query = @get 'query'
return [] unless content
filteredContent = @get('content').filter (item) => @matcher(query, item)
return filteredContent unless @get('sortLabels')
_.sortBy filteredContent, (item) => get(item, @get('optionLabelPath'))?.toLowerCase()
.property 'content.@each', 'query', 'optionLabelPath', 'sortLabels'
# the list of content that is grouped by the content in the optionGroupPath
# e.g. {name: 'Addepar', location: 'Mountain View'}
# {name: 'Google', location: 'Mountain View'}
# if we group by location we will get
# Mountain View
# Addepar
# Google
groupedContent: Ember.computed ->
path = @get 'optionGroupPath'
content = @get 'filteredContent'
return content unless path
groups = _.groupBy content, (item) -> get(item, path)
result = Ember.A()
_.keys(groups).sort().forEach (key) ->
result.pushObject Ember.Object.create isGroupOption: yes, name:key
result.pushObjects groups[key]
result
.property 'filteredContent', 'optionGroupPath'
hasNoResults: Ember.computed.empty 'filteredContent'
value: Ember.computed (key, value) ->
if arguments.length is 2 # setter
valuePath = @get 'optionValuePath'
selection = value
selection = @get('content').findProperty(valuePath, value) if valuePath
@set 'selection', selection
value
else # getter
valuePath = @get 'optionValuePath'
selection = @get 'selection'
if valuePath then get(selection, valuePath) else selection
.property 'selection'
didInsertElement: ->
@_super()
@setDefaultSelection()
# It matches the item label with the query. This can be overrideen for better
matcher: (searchText, item) ->
return yes unless searchText
label = get(item, @get('optionLabelPath'))
escapedSearchText = searchText.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
regex = new RegExp(escapedSearchText, 'i')
regex.test(label)
# TODO(Peter): This needs to be rethought
setDefaultSelection: Ember.observer ->
# do not set default selection if selection is defined
return if @get 'selection'
content = @get 'content'
defaultPath = @get 'optionDefaultPath'
return unless content and defaultPath
@set 'selection', content.findProperty(defaultPath)
, 'content.@each'
selectableOptionsDidChange: Ember.observer ->
highlighted = @get('highlighted')
if not @get('selectableOptions').contains(highlighted)
@set 'highlighted', @get('selectableOptions.firstObject')
, 'selectableOptions'
###
# SELECTION RELATED
###
KEY_EVENTS:
8: 'deletePressed'
27: 'escapePressed'
13: 'enterPressed'
38: 'upArrowPressed'
40: 'downArrowPressed'
# All the selectable options - namely everything except for the non-group
# options that are artificially created.
selectableOptions: Ember.computed ->
(@get('groupedContent') or []).filter (item) ->
not get(item, 'isGroupOption')
.property 'groupedContent'
# The option that is currently highlighted.
highlighted: Ember.computed (key, value) ->
content = @get('selectableOptions') or []
value = value or []
if arguments.length is 1 # getter
index = @get 'highlightedIndex'
value = content.objectAt index
else # setter
index = content.indexOf value
@setHighlightedIndex index, yes
value
.property 'selectableOptions', 'highlightedIndex'
bodyClick: -> @send 'hideDropdown'
keyDown: (event) ->
# show dropdown if dropdown is not already showing
return @set('showDropdown', yes) unless @get 'showDropdown'
map = @get 'KEY_EVENTS'
method = map[event.keyCode]
@get(method)?.apply(this, arguments) if method
deletePressed: Ember.K
escapePressed: (event) ->
@send 'hideDropdown'
enterPressed: (event) ->
item = @get 'highlighted'
@set 'selection', item unless Ember.isEmpty(item)
@userDidSelect(item) unless Ember.isEmpty(item)
# in case dropdown doesn't close
@send 'hideDropdown'
# TODO(Peter): HACK the web app somehow reloads when enter is pressed.
event.preventDefault()
upArrowPressed: (event) ->
sel = @get 'highlightedIndex'
index = if event.ctrlKey or event.metaKey then 0 else sel - 1
@setHighlightedIndex index, yes
# we want to prevent the app from scroll when pressing up arrow
event.preventDefault()
downArrowPressed: (event) ->
sel = @get 'highlightedIndex'
clen = @get 'selectableOptions.length'
index = if event.ctrlKey or event.metaKey then clen - 1 else sel + 1
@setHighlightedIndex index, yes
# we want to prevent the app from scroll when pressing down arrow
event.preventDefault()
setHighlightedIndex: (index, ensureVisible) ->
return unless @ensureIndex index
@set 'highlightedIndex', index
@ensureVisible index if ensureVisible
ensureIndex: (index) ->
clen = @get 'selectableOptions.length'
index >= 0 and index < clen
# Scroll the list to make sure the given index is visible.
ensureVisible: (index) ->
$listView = @$('.ember-list-view')
listView = Ember.View.views[$listView.attr('id')]
startIndex = listView._startingIndex()
numRows = listView._childViewCount() - 1
endIndex = startIndex + numRows
item = @get('selectableOptions').objectAt(index)
newIndex = @get('groupedContent').indexOf(item)
if index is 0
$listView.scrollTop 0
else if newIndex < startIndex
$listView.scrollTop newIndex * @get('rowHeight')
else if newIndex >= endIndex
$listView.scrollTop (newIndex - numRows + 1.5) * @get('rowHeight')
#TODO Refactor other parts to use this method to set selection
userDidSelect: (selection) ->
@sendAction 'userSelected', selection
actions:
toggleDropdown: (event) ->
return if @get('disabled')
@toggleProperty 'showDropdown'
hideDropdown: (event) ->
@set 'showDropdown', no
Ember.Handlebars.helper('select-component', Ember.Widgets.SelectComponent)
| 181479 | get = (object, key) ->
return undefined unless object
return object unless key
object.get?(key) or object[key]
set = (object, key, value) ->
return unless object and key
object.set?(key, value) or object[key] = value;
# The view for each item in the select.
Ember.Widgets.SelectOptionView = Ember.ListItemView.extend
tagName: 'li'
templateName: 'select_item'
layoutName: 'select_item_layout'
classNames: 'ember-select-result-item'
classNameBindings: ['content.isGroupOption:ember-select-group',
'isHighlighted:highlighted']
labelPath: Ember.computed.alias 'controller.optionLabelPath'
isHighlighted: Ember.computed ->
@get('controller.highlighted') is @get('content')
.property 'controller.highlighted', 'content'
labelPathDidChange: Ember.observer ->
labelPath = @get 'labelPath'
# if it is a raw string, the path is just the context
# if labelPath is specified, the path should be context.labelPath
path = if labelPath then "content.#{labelPath}" else 'content'
# We are creating a computed property called label that is an alias of
# 'context.#{labelPath}'
Ember.defineProperty(this, 'label', Ember.computed.alias(path))
@notifyPropertyChange 'label'
, 'content', 'labelPath'
didInsertElement: ->
@_super()
@labelPathDidChange()
# TODO(<NAME>): This is a hack. Some computed don't fire properly if
# they are dependent on the context. e.g. isHighlighted may not update
# if it is dependent on the context. This seems to fix the issue
updateContext: (context) ->
@_super context
@set 'content', context
click: ->
return if @get('content.isGroupOption')
@set 'controller.selection', @get('content')
@get('controller').userDidSelect @get 'content'
# if there's a selection and the dropdown is unexpanded, we want to
# propagate the click event
# if the dropdown is expanded and we select something, don't propagate
if @get('controller.showDropdown')
@get('controller').send 'hideDropdown'
# return false to prevent propagation
return no
mouseEnter: ->
return if @get('content.isGroupOption')
@set 'controller.highlighted', @get('content')
Ember.Widgets.SelectComponent =
Ember.Component.extend Ember.Widgets.BodyEventListener,
Ember.AddeparMixins.ResizeHandlerMixin,
layoutName: 'select'
classNames: 'ember-select'
attributeBindings: ['tabindex']
classNameBindings: ['showDropdown:open', 'isDropup:dropup']
itemViewClass: 'Ember.Widgets.SelectOptionView'
prompt: 'Select a Value'
disabled: no
# we need to set tabindex so that div responds to key events
highlightedIndex: -1
tabindex: -1
showDropdown: no
dropdownHeight: 300
# Important: rowHeight must be synched with the CSS
rowHeight: 26
# Option to indicate whether we should sort the labels
sortLabels: yes
# If isSelect is true, we will not show the search box
isSelect: no
# Align dropdown-menu above the button
isDropup: no
# Align dropdown-menu to the right of the button
isDropdownMenuPulledRight: no
# Change the icon when necessary
dropdownToggleIcon: 'fa fa-caret-down'
# Change the button class when necessary
buttonClass: 'btn btn-default'
dropdownMenuClass: ''
# The list of options
content: []
selection: null
query: ''
optionLabelPath: ''
optionValuePath: ''
optionGroupPath: ''
optionDefaultPath: ''
# This augments the dropdown to provide a place for adding a select menu that
# possibly says 'create item' or something along that line
selectMenuView: null
updateDropdownLayout: Ember.observer ->
return if @get('state') isnt 'inDOM' or @get('showDropdown') is no
# Render the dropdown in a hidden state to get the size
@$('.js-dropdown-menu').css('visibility', 'hidden');
# Render the dropdown completely into the DOM for offset()
Ember.run.next this, ->
dropdownButton = @$('.js-dropdown-toggle')[0]
dropdownButtonHeight = @$(dropdownButton).outerHeight()
dropdownButtonOffset = @$(dropdownButton).offset()
dropdownMenu = @$('.js-dropdown-menu')[0]
dropdownMenuHeight = @$(dropdownMenu).outerHeight()
dropdownMenuWidth = @$(dropdownMenu).outerWidth()
dropdownMenuOffset = @$(dropdownMenu).offset()
# Only switch from dropUp to dropDown if there's this much extra space
# under where the dropDown would be. This prevents the popup from jiggling
# up and down
dropdownMargin = 15
if @get('isDropup')
dropdownMenuBottom = dropdownButtonOffset.top + dropdownButtonHeight +
dropdownMenuHeight + dropdownMargin
else
dropdownMenuBottom = dropdownMenuOffset.top + dropdownMenuHeight
@set 'isDropup', dropdownMenuBottom > window.innerHeight
@set 'isDropdownMenuPulledRight', dropdownButtonOffset.left +
dropdownMenuWidth + dropdownMargin > window.innerWidth
@$('.js-dropdown-menu').css('visibility', 'visible');
return
, 'showDropdown', 'window.innerHeight'
onResizeEnd: ->
# We need to put this on the run loop, because the resize event came from
# the window. Otherwise, we get a warning when used in the tests. You have
# turned on testing mode, which disables the run-loop's autorun. You
# will need to wrap any code with asynchronous side-effects in an Ember.run
Ember.run this, @updateDropdownLayout
# TODO(<NAME>): consider calling this optionViewClass?
itemView: Ember.computed ->
itemViewClass = @get 'itemViewClass'
if typeof itemViewClass is 'string'
return Ember.get itemViewClass
itemViewClass
.property 'itemViewClass'
# TODO(<NAME>): consider calling this selectedOptionViewClass?
selectedItemView: Ember.computed ->
@get('itemView').extend
tagName: 'span'
labelPath: Ember.computed.alias 'controller.optionLabelPath'
context: Ember.computed.alias 'controller.selection'
.property 'itemView'
selectedLabel: Ember.computed ->
get @get('selection'), @get('optionLabelPath')
.property 'selection', 'optionLabelPath'
searchView: Ember.TextField.extend
placeholder: 'Search'
valueBinding: 'parentView.query'
# we want to focus on search input when dropdown is opened. We need to put
# this in a run loop to wait for the event that triggers the showDropdown
# to finishes before trying to focus the input. Otherwise, focus when be
# "stolen" from us.
showDropdownDidChange: Ember.observer ->
Ember.run.next this, -> @$().focus() if @get('state') is 'inDOM'
, 'parentView.showDropdown'
# This is a hack. Ember.ListView doesn't handle case when total height
# is less than height properly
listView: Ember.ListView.extend
style: Ember.computed ->
height = Math.min @get('height'), @get('totalHeight')
"height: #{height}px"
.property('height', 'totalHeight'),
# the list of content that is filtered down based on the query entered
# in the textbox
filteredContent: Ember.computed ->
content = @get 'content'
query = @get 'query'
return [] unless content
filteredContent = @get('content').filter (item) => @matcher(query, item)
return filteredContent unless @get('sortLabels')
_.sortBy filteredContent, (item) => get(item, @get('optionLabelPath'))?.toLowerCase()
.property 'content.@each', 'query', 'optionLabelPath', 'sortLabels'
# the list of content that is grouped by the content in the optionGroupPath
# e.g. {name: '<NAME>', location: 'Mountain View'}
# {name: 'Google', location: 'Mountain View'}
# if we group by location we will get
# Mountain View
# <NAME>
# Google
groupedContent: Ember.computed ->
path = @get 'optionGroupPath'
content = @get 'filteredContent'
return content unless path
groups = _.groupBy content, (item) -> get(item, path)
result = Ember.A()
_.keys(groups).sort().forEach (key) ->
result.pushObject Ember.Object.create isGroupOption: yes, name:key
result.pushObjects groups[key]
result
.property 'filteredContent', 'optionGroupPath'
hasNoResults: Ember.computed.empty 'filteredContent'
value: Ember.computed (key, value) ->
if arguments.length is 2 # setter
valuePath = @get 'optionValuePath'
selection = value
selection = @get('content').findProperty(valuePath, value) if valuePath
@set 'selection', selection
value
else # getter
valuePath = @get 'optionValuePath'
selection = @get 'selection'
if valuePath then get(selection, valuePath) else selection
.property 'selection'
didInsertElement: ->
@_super()
@setDefaultSelection()
# It matches the item label with the query. This can be overrideen for better
matcher: (searchText, item) ->
return yes unless searchText
label = get(item, @get('optionLabelPath'))
escapedSearchText = searchText.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
regex = new RegExp(escapedSearchText, 'i')
regex.test(label)
# TODO(<NAME>): This needs to be rethought
setDefaultSelection: Ember.observer ->
# do not set default selection if selection is defined
return if @get 'selection'
content = @get 'content'
defaultPath = @get 'optionDefaultPath'
return unless content and defaultPath
@set 'selection', content.findProperty(defaultPath)
, 'content.@each'
selectableOptionsDidChange: Ember.observer ->
highlighted = @get('highlighted')
if not @get('selectableOptions').contains(highlighted)
@set 'highlighted', @get('selectableOptions.firstObject')
, 'selectableOptions'
###
# SELECTION RELATED
###
KEY_EVENTS:
8: 'deletePressed'
27: 'escapePressed'
13: 'enterPressed'
38: 'upArrowPressed'
40: 'downArrowPressed'
# All the selectable options - namely everything except for the non-group
# options that are artificially created.
selectableOptions: Ember.computed ->
(@get('groupedContent') or []).filter (item) ->
not get(item, 'isGroupOption')
.property 'groupedContent'
# The option that is currently highlighted.
highlighted: Ember.computed (key, value) ->
content = @get('selectableOptions') or []
value = value or []
if arguments.length is 1 # getter
index = @get 'highlightedIndex'
value = content.objectAt index
else # setter
index = content.indexOf value
@setHighlightedIndex index, yes
value
.property 'selectableOptions', 'highlightedIndex'
bodyClick: -> @send 'hideDropdown'
keyDown: (event) ->
# show dropdown if dropdown is not already showing
return @set('showDropdown', yes) unless @get 'showDropdown'
map = @get 'KEY_EVENTS'
method = map[event.keyCode]
@get(method)?.apply(this, arguments) if method
deletePressed: Ember.K
escapePressed: (event) ->
@send 'hideDropdown'
enterPressed: (event) ->
item = @get 'highlighted'
@set 'selection', item unless Ember.isEmpty(item)
@userDidSelect(item) unless Ember.isEmpty(item)
# in case dropdown doesn't close
@send 'hideDropdown'
# TODO(<NAME>): HACK the web app somehow reloads when enter is pressed.
event.preventDefault()
upArrowPressed: (event) ->
sel = @get 'highlightedIndex'
index = if event.ctrlKey or event.metaKey then 0 else sel - 1
@setHighlightedIndex index, yes
# we want to prevent the app from scroll when pressing up arrow
event.preventDefault()
downArrowPressed: (event) ->
sel = @get 'highlightedIndex'
clen = @get 'selectableOptions.length'
index = if event.ctrlKey or event.metaKey then clen - 1 else sel + 1
@setHighlightedIndex index, yes
# we want to prevent the app from scroll when pressing down arrow
event.preventDefault()
setHighlightedIndex: (index, ensureVisible) ->
return unless @ensureIndex index
@set 'highlightedIndex', index
@ensureVisible index if ensureVisible
ensureIndex: (index) ->
clen = @get 'selectableOptions.length'
index >= 0 and index < clen
# Scroll the list to make sure the given index is visible.
ensureVisible: (index) ->
$listView = @$('.ember-list-view')
listView = Ember.View.views[$listView.attr('id')]
startIndex = listView._startingIndex()
numRows = listView._childViewCount() - 1
endIndex = startIndex + numRows
item = @get('selectableOptions').objectAt(index)
newIndex = @get('groupedContent').indexOf(item)
if index is 0
$listView.scrollTop 0
else if newIndex < startIndex
$listView.scrollTop newIndex * @get('rowHeight')
else if newIndex >= endIndex
$listView.scrollTop (newIndex - numRows + 1.5) * @get('rowHeight')
#TODO Refactor other parts to use this method to set selection
userDidSelect: (selection) ->
@sendAction 'userSelected', selection
actions:
toggleDropdown: (event) ->
return if @get('disabled')
@toggleProperty 'showDropdown'
hideDropdown: (event) ->
@set 'showDropdown', no
Ember.Handlebars.helper('select-component', Ember.Widgets.SelectComponent)
| true | get = (object, key) ->
return undefined unless object
return object unless key
object.get?(key) or object[key]
set = (object, key, value) ->
return unless object and key
object.set?(key, value) or object[key] = value;
# The view for each item in the select.
Ember.Widgets.SelectOptionView = Ember.ListItemView.extend
tagName: 'li'
templateName: 'select_item'
layoutName: 'select_item_layout'
classNames: 'ember-select-result-item'
classNameBindings: ['content.isGroupOption:ember-select-group',
'isHighlighted:highlighted']
labelPath: Ember.computed.alias 'controller.optionLabelPath'
isHighlighted: Ember.computed ->
@get('controller.highlighted') is @get('content')
.property 'controller.highlighted', 'content'
labelPathDidChange: Ember.observer ->
labelPath = @get 'labelPath'
# if it is a raw string, the path is just the context
# if labelPath is specified, the path should be context.labelPath
path = if labelPath then "content.#{labelPath}" else 'content'
# We are creating a computed property called label that is an alias of
# 'context.#{labelPath}'
Ember.defineProperty(this, 'label', Ember.computed.alias(path))
@notifyPropertyChange 'label'
, 'content', 'labelPath'
didInsertElement: ->
@_super()
@labelPathDidChange()
# TODO(PI:NAME:<NAME>END_PI): This is a hack. Some computed don't fire properly if
# they are dependent on the context. e.g. isHighlighted may not update
# if it is dependent on the context. This seems to fix the issue
updateContext: (context) ->
@_super context
@set 'content', context
click: ->
return if @get('content.isGroupOption')
@set 'controller.selection', @get('content')
@get('controller').userDidSelect @get 'content'
# if there's a selection and the dropdown is unexpanded, we want to
# propagate the click event
# if the dropdown is expanded and we select something, don't propagate
if @get('controller.showDropdown')
@get('controller').send 'hideDropdown'
# return false to prevent propagation
return no
mouseEnter: ->
return if @get('content.isGroupOption')
@set 'controller.highlighted', @get('content')
Ember.Widgets.SelectComponent =
Ember.Component.extend Ember.Widgets.BodyEventListener,
Ember.AddeparMixins.ResizeHandlerMixin,
layoutName: 'select'
classNames: 'ember-select'
attributeBindings: ['tabindex']
classNameBindings: ['showDropdown:open', 'isDropup:dropup']
itemViewClass: 'Ember.Widgets.SelectOptionView'
prompt: 'Select a Value'
disabled: no
# we need to set tabindex so that div responds to key events
highlightedIndex: -1
tabindex: -1
showDropdown: no
dropdownHeight: 300
# Important: rowHeight must be synched with the CSS
rowHeight: 26
# Option to indicate whether we should sort the labels
sortLabels: yes
# If isSelect is true, we will not show the search box
isSelect: no
# Align dropdown-menu above the button
isDropup: no
# Align dropdown-menu to the right of the button
isDropdownMenuPulledRight: no
# Change the icon when necessary
dropdownToggleIcon: 'fa fa-caret-down'
# Change the button class when necessary
buttonClass: 'btn btn-default'
dropdownMenuClass: ''
# The list of options
content: []
selection: null
query: ''
optionLabelPath: ''
optionValuePath: ''
optionGroupPath: ''
optionDefaultPath: ''
# This augments the dropdown to provide a place for adding a select menu that
# possibly says 'create item' or something along that line
selectMenuView: null
updateDropdownLayout: Ember.observer ->
return if @get('state') isnt 'inDOM' or @get('showDropdown') is no
# Render the dropdown in a hidden state to get the size
@$('.js-dropdown-menu').css('visibility', 'hidden');
# Render the dropdown completely into the DOM for offset()
Ember.run.next this, ->
dropdownButton = @$('.js-dropdown-toggle')[0]
dropdownButtonHeight = @$(dropdownButton).outerHeight()
dropdownButtonOffset = @$(dropdownButton).offset()
dropdownMenu = @$('.js-dropdown-menu')[0]
dropdownMenuHeight = @$(dropdownMenu).outerHeight()
dropdownMenuWidth = @$(dropdownMenu).outerWidth()
dropdownMenuOffset = @$(dropdownMenu).offset()
# Only switch from dropUp to dropDown if there's this much extra space
# under where the dropDown would be. This prevents the popup from jiggling
# up and down
dropdownMargin = 15
if @get('isDropup')
dropdownMenuBottom = dropdownButtonOffset.top + dropdownButtonHeight +
dropdownMenuHeight + dropdownMargin
else
dropdownMenuBottom = dropdownMenuOffset.top + dropdownMenuHeight
@set 'isDropup', dropdownMenuBottom > window.innerHeight
@set 'isDropdownMenuPulledRight', dropdownButtonOffset.left +
dropdownMenuWidth + dropdownMargin > window.innerWidth
@$('.js-dropdown-menu').css('visibility', 'visible');
return
, 'showDropdown', 'window.innerHeight'
onResizeEnd: ->
# We need to put this on the run loop, because the resize event came from
# the window. Otherwise, we get a warning when used in the tests. You have
# turned on testing mode, which disables the run-loop's autorun. You
# will need to wrap any code with asynchronous side-effects in an Ember.run
Ember.run this, @updateDropdownLayout
# TODO(PI:NAME:<NAME>END_PI): consider calling this optionViewClass?
itemView: Ember.computed ->
itemViewClass = @get 'itemViewClass'
if typeof itemViewClass is 'string'
return Ember.get itemViewClass
itemViewClass
.property 'itemViewClass'
# TODO(PI:NAME:<NAME>END_PI): consider calling this selectedOptionViewClass?
selectedItemView: Ember.computed ->
@get('itemView').extend
tagName: 'span'
labelPath: Ember.computed.alias 'controller.optionLabelPath'
context: Ember.computed.alias 'controller.selection'
.property 'itemView'
selectedLabel: Ember.computed ->
get @get('selection'), @get('optionLabelPath')
.property 'selection', 'optionLabelPath'
searchView: Ember.TextField.extend
placeholder: 'Search'
valueBinding: 'parentView.query'
# we want to focus on search input when dropdown is opened. We need to put
# this in a run loop to wait for the event that triggers the showDropdown
# to finishes before trying to focus the input. Otherwise, focus when be
# "stolen" from us.
showDropdownDidChange: Ember.observer ->
Ember.run.next this, -> @$().focus() if @get('state') is 'inDOM'
, 'parentView.showDropdown'
# This is a hack. Ember.ListView doesn't handle case when total height
# is less than height properly
listView: Ember.ListView.extend
style: Ember.computed ->
height = Math.min @get('height'), @get('totalHeight')
"height: #{height}px"
.property('height', 'totalHeight'),
# the list of content that is filtered down based on the query entered
# in the textbox
filteredContent: Ember.computed ->
content = @get 'content'
query = @get 'query'
return [] unless content
filteredContent = @get('content').filter (item) => @matcher(query, item)
return filteredContent unless @get('sortLabels')
_.sortBy filteredContent, (item) => get(item, @get('optionLabelPath'))?.toLowerCase()
.property 'content.@each', 'query', 'optionLabelPath', 'sortLabels'
# the list of content that is grouped by the content in the optionGroupPath
# e.g. {name: 'PI:NAME:<NAME>END_PI', location: 'Mountain View'}
# {name: 'Google', location: 'Mountain View'}
# if we group by location we will get
# Mountain View
# PI:NAME:<NAME>END_PI
# Google
groupedContent: Ember.computed ->
path = @get 'optionGroupPath'
content = @get 'filteredContent'
return content unless path
groups = _.groupBy content, (item) -> get(item, path)
result = Ember.A()
_.keys(groups).sort().forEach (key) ->
result.pushObject Ember.Object.create isGroupOption: yes, name:key
result.pushObjects groups[key]
result
.property 'filteredContent', 'optionGroupPath'
hasNoResults: Ember.computed.empty 'filteredContent'
value: Ember.computed (key, value) ->
if arguments.length is 2 # setter
valuePath = @get 'optionValuePath'
selection = value
selection = @get('content').findProperty(valuePath, value) if valuePath
@set 'selection', selection
value
else # getter
valuePath = @get 'optionValuePath'
selection = @get 'selection'
if valuePath then get(selection, valuePath) else selection
.property 'selection'
didInsertElement: ->
@_super()
@setDefaultSelection()
# It matches the item label with the query. This can be overrideen for better
matcher: (searchText, item) ->
return yes unless searchText
label = get(item, @get('optionLabelPath'))
escapedSearchText = searchText.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
regex = new RegExp(escapedSearchText, 'i')
regex.test(label)
# TODO(PI:NAME:<NAME>END_PI): This needs to be rethought
setDefaultSelection: Ember.observer ->
# do not set default selection if selection is defined
return if @get 'selection'
content = @get 'content'
defaultPath = @get 'optionDefaultPath'
return unless content and defaultPath
@set 'selection', content.findProperty(defaultPath)
, 'content.@each'
selectableOptionsDidChange: Ember.observer ->
highlighted = @get('highlighted')
if not @get('selectableOptions').contains(highlighted)
@set 'highlighted', @get('selectableOptions.firstObject')
, 'selectableOptions'
###
# SELECTION RELATED
###
KEY_EVENTS:
8: 'deletePressed'
27: 'escapePressed'
13: 'enterPressed'
38: 'upArrowPressed'
40: 'downArrowPressed'
# All the selectable options - namely everything except for the non-group
# options that are artificially created.
selectableOptions: Ember.computed ->
(@get('groupedContent') or []).filter (item) ->
not get(item, 'isGroupOption')
.property 'groupedContent'
# The option that is currently highlighted.
highlighted: Ember.computed (key, value) ->
content = @get('selectableOptions') or []
value = value or []
if arguments.length is 1 # getter
index = @get 'highlightedIndex'
value = content.objectAt index
else # setter
index = content.indexOf value
@setHighlightedIndex index, yes
value
.property 'selectableOptions', 'highlightedIndex'
bodyClick: -> @send 'hideDropdown'
keyDown: (event) ->
# show dropdown if dropdown is not already showing
return @set('showDropdown', yes) unless @get 'showDropdown'
map = @get 'KEY_EVENTS'
method = map[event.keyCode]
@get(method)?.apply(this, arguments) if method
deletePressed: Ember.K
escapePressed: (event) ->
@send 'hideDropdown'
enterPressed: (event) ->
item = @get 'highlighted'
@set 'selection', item unless Ember.isEmpty(item)
@userDidSelect(item) unless Ember.isEmpty(item)
# in case dropdown doesn't close
@send 'hideDropdown'
# TODO(PI:NAME:<NAME>END_PI): HACK the web app somehow reloads when enter is pressed.
event.preventDefault()
upArrowPressed: (event) ->
sel = @get 'highlightedIndex'
index = if event.ctrlKey or event.metaKey then 0 else sel - 1
@setHighlightedIndex index, yes
# we want to prevent the app from scroll when pressing up arrow
event.preventDefault()
downArrowPressed: (event) ->
sel = @get 'highlightedIndex'
clen = @get 'selectableOptions.length'
index = if event.ctrlKey or event.metaKey then clen - 1 else sel + 1
@setHighlightedIndex index, yes
# we want to prevent the app from scroll when pressing down arrow
event.preventDefault()
setHighlightedIndex: (index, ensureVisible) ->
return unless @ensureIndex index
@set 'highlightedIndex', index
@ensureVisible index if ensureVisible
ensureIndex: (index) ->
clen = @get 'selectableOptions.length'
index >= 0 and index < clen
# Scroll the list to make sure the given index is visible.
ensureVisible: (index) ->
$listView = @$('.ember-list-view')
listView = Ember.View.views[$listView.attr('id')]
startIndex = listView._startingIndex()
numRows = listView._childViewCount() - 1
endIndex = startIndex + numRows
item = @get('selectableOptions').objectAt(index)
newIndex = @get('groupedContent').indexOf(item)
if index is 0
$listView.scrollTop 0
else if newIndex < startIndex
$listView.scrollTop newIndex * @get('rowHeight')
else if newIndex >= endIndex
$listView.scrollTop (newIndex - numRows + 1.5) * @get('rowHeight')
#TODO Refactor other parts to use this method to set selection
userDidSelect: (selection) ->
@sendAction 'userSelected', selection
actions:
toggleDropdown: (event) ->
return if @get('disabled')
@toggleProperty 'showDropdown'
hideDropdown: (event) ->
@set 'showDropdown', no
Ember.Handlebars.helper('select-component', Ember.Widgets.SelectComponent)
|
[
{
"context": " grunt-smart-assets\n# *\n# *\n# * Copyright (c) 2014 Shapovalov Alexandr\n# * Licensed under the MIT license.\n#\n\"use strict",
"end": 75,
"score": 0.9997130632400513,
"start": 56,
"tag": "NAME",
"value": "Shapovalov Alexandr"
}
] | Gruntfile.coffee | Freezko/grunt-smart-assets | 0 | #
# * grunt-smart-assets
# *
# *
# * Copyright (c) 2014 Shapovalov Alexandr
# * Licensed under the MIT license.
#
"use strict"
module.exports = (grunt) ->
# load all npm grunt tasks
require("load-grunt-tasks") grunt
#require('time-grunt')(grunt)
# Project configuration.
grunt.initConfig
jshint:
all: [
"Gruntfile.js"
"tasks/*.js"
"<%= nodeunit.tests %>"
]
options:
jshintrc: ".jshintrc"
reporter: require("jshint-stylish")
# Before generating any new files, remove any previously-created files.
clean:
tests: ["tmp"]
# Configuration to be run (and then tested).
smart_assets:
compile:
options:
files:
cwd: 'test/test-app/app'
dest: 'test/test-app/dist'
cleanDest: true
streamTasks:
coffee:
from: ['.coffee']
to: '.js'
options:
sourceMap: true,
bare: true
sass:
from: ['.sass', '.scss']
to: '.css'
afterTasks:
autoprefixer:
src: ['**/*.css', '!**/library/**']
options:
map: true
html:
cwd: 'test/test-app/html'
dest: 'test/test-app/html-dest'
src: '*.html'
assetDir: 'test/test-app'
rev: true
# Unit tests.
nodeunit:
tests: ["test/*_test.coffee"]
# Actually load this plugin's task(s).
grunt.loadTasks "tasks"
# Whenever the "test" task is run, first clean the "tmp" dir, then run this
# plugin's task(s), then test the result.
grunt.registerTask "test", [
"smart_assets"
"nodeunit"
]
# By default, lint and run all tests.
grunt.registerTask "default", [
"jshint"
"test"
]
return
| 128789 | #
# * grunt-smart-assets
# *
# *
# * Copyright (c) 2014 <NAME>
# * Licensed under the MIT license.
#
"use strict"
module.exports = (grunt) ->
# load all npm grunt tasks
require("load-grunt-tasks") grunt
#require('time-grunt')(grunt)
# Project configuration.
grunt.initConfig
jshint:
all: [
"Gruntfile.js"
"tasks/*.js"
"<%= nodeunit.tests %>"
]
options:
jshintrc: ".jshintrc"
reporter: require("jshint-stylish")
# Before generating any new files, remove any previously-created files.
clean:
tests: ["tmp"]
# Configuration to be run (and then tested).
smart_assets:
compile:
options:
files:
cwd: 'test/test-app/app'
dest: 'test/test-app/dist'
cleanDest: true
streamTasks:
coffee:
from: ['.coffee']
to: '.js'
options:
sourceMap: true,
bare: true
sass:
from: ['.sass', '.scss']
to: '.css'
afterTasks:
autoprefixer:
src: ['**/*.css', '!**/library/**']
options:
map: true
html:
cwd: 'test/test-app/html'
dest: 'test/test-app/html-dest'
src: '*.html'
assetDir: 'test/test-app'
rev: true
# Unit tests.
nodeunit:
tests: ["test/*_test.coffee"]
# Actually load this plugin's task(s).
grunt.loadTasks "tasks"
# Whenever the "test" task is run, first clean the "tmp" dir, then run this
# plugin's task(s), then test the result.
grunt.registerTask "test", [
"smart_assets"
"nodeunit"
]
# By default, lint and run all tests.
grunt.registerTask "default", [
"jshint"
"test"
]
return
| true | #
# * grunt-smart-assets
# *
# *
# * Copyright (c) 2014 PI:NAME:<NAME>END_PI
# * Licensed under the MIT license.
#
"use strict"
module.exports = (grunt) ->
# load all npm grunt tasks
require("load-grunt-tasks") grunt
#require('time-grunt')(grunt)
# Project configuration.
grunt.initConfig
jshint:
all: [
"Gruntfile.js"
"tasks/*.js"
"<%= nodeunit.tests %>"
]
options:
jshintrc: ".jshintrc"
reporter: require("jshint-stylish")
# Before generating any new files, remove any previously-created files.
clean:
tests: ["tmp"]
# Configuration to be run (and then tested).
smart_assets:
compile:
options:
files:
cwd: 'test/test-app/app'
dest: 'test/test-app/dist'
cleanDest: true
streamTasks:
coffee:
from: ['.coffee']
to: '.js'
options:
sourceMap: true,
bare: true
sass:
from: ['.sass', '.scss']
to: '.css'
afterTasks:
autoprefixer:
src: ['**/*.css', '!**/library/**']
options:
map: true
html:
cwd: 'test/test-app/html'
dest: 'test/test-app/html-dest'
src: '*.html'
assetDir: 'test/test-app'
rev: true
# Unit tests.
nodeunit:
tests: ["test/*_test.coffee"]
# Actually load this plugin's task(s).
grunt.loadTasks "tasks"
# Whenever the "test" task is run, first clean the "tmp" dir, then run this
# plugin's task(s), then test the result.
grunt.registerTask "test", [
"smart_assets"
"nodeunit"
]
# By default, lint and run all tests.
grunt.registerTask "default", [
"jshint"
"test"
]
return
|
[
{
"context": "eactDOMServer.renderToStaticMarkup MyComp name : 'Bender'\n # console.log reactHtml\n expe",
"end": 1860,
"score": 0.9545695781707764,
"start": 1854,
"tag": "NAME",
"value": "Bender"
}
] | test/test_addon.coffee | Meettya/clinch.csbx | 0 | ###
Test suite for addon
###
_ = require 'lodash'
fs = require 'fs'
path = require 'path'
vm = require 'vm'
React = require 'react'
ReactDOMServer = require 'react-dom/server'
CoffeeScript = require 'coffee-script'
lib_path = GLOBAL?.lib_path || ''
fixtures = path.join __dirname, "fixtures"
fixturesOk = path.join fixtures, "component.csbx"
fixturesErrCS = path.join fixtures, "with_coffee_error.csbx"
fixturesErrJSX = path.join fixtures, "with_jsx_error.csbx"
results =
ok : '<div class="message"><p>Hello Bender!!!</p></div>'
# get addon
adon_file = 'addon'
addon_path = path.join lib_path, adon_file
Compiller = require addon_path
READ_OPTIONS = encoding : 'utf8'
describe 'Addon:', ->
describe 'itself', ->
it 'should export "extension" and "processor"', ->
expect(Compiller).to.have.all.keys ['extension', 'processor']
it 'should export string as "extension"', ->
expect(Compiller.extension).to.be.a 'string'
it 'should export function as "processor"', ->
expect(Compiller.processor).to.be.a 'function'
describe 'as addon', ->
it 'should export correct file extension ".csbx"', ->
expect(Compiller.extension).to.equal '.csbx'
it 'should compile correct .csbx file', (done) ->
fs.readFile fixturesOk, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesOk, (err, code) ->
expect(err).to.be.null
expect(code).to.be.a 'string'
# console.log code
# test result
vm.runInNewContext code, sandbox = { React, module:exports:null }
react_comp = sandbox.module.exports
MyComp = React.createFactory react_comp
reactHtml = ReactDOMServer.renderToStaticMarkup MyComp name : 'Bender'
# console.log reactHtml
expect(reactHtml).to.equal results.ok
done()
it 'should return error on incorrect coffee-part file', (done) ->
fs.readFile fixturesErrCS, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesErrCS, (err, code) ->
# console.log err
expect(err).to.be.an.instanceof Error
done()
it 'should return error on incorrect jsx-part file', (done) ->
fs.readFile fixturesErrJSX, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesErrJSX, (err, code) ->
# console.log err
expect(err).to.be.an.instanceof Error
done()
| 177482 | ###
Test suite for addon
###
_ = require 'lodash'
fs = require 'fs'
path = require 'path'
vm = require 'vm'
React = require 'react'
ReactDOMServer = require 'react-dom/server'
CoffeeScript = require 'coffee-script'
lib_path = GLOBAL?.lib_path || ''
fixtures = path.join __dirname, "fixtures"
fixturesOk = path.join fixtures, "component.csbx"
fixturesErrCS = path.join fixtures, "with_coffee_error.csbx"
fixturesErrJSX = path.join fixtures, "with_jsx_error.csbx"
results =
ok : '<div class="message"><p>Hello Bender!!!</p></div>'
# get addon
adon_file = 'addon'
addon_path = path.join lib_path, adon_file
Compiller = require addon_path
READ_OPTIONS = encoding : 'utf8'
describe 'Addon:', ->
describe 'itself', ->
it 'should export "extension" and "processor"', ->
expect(Compiller).to.have.all.keys ['extension', 'processor']
it 'should export string as "extension"', ->
expect(Compiller.extension).to.be.a 'string'
it 'should export function as "processor"', ->
expect(Compiller.processor).to.be.a 'function'
describe 'as addon', ->
it 'should export correct file extension ".csbx"', ->
expect(Compiller.extension).to.equal '.csbx'
it 'should compile correct .csbx file', (done) ->
fs.readFile fixturesOk, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesOk, (err, code) ->
expect(err).to.be.null
expect(code).to.be.a 'string'
# console.log code
# test result
vm.runInNewContext code, sandbox = { React, module:exports:null }
react_comp = sandbox.module.exports
MyComp = React.createFactory react_comp
reactHtml = ReactDOMServer.renderToStaticMarkup MyComp name : '<NAME>'
# console.log reactHtml
expect(reactHtml).to.equal results.ok
done()
it 'should return error on incorrect coffee-part file', (done) ->
fs.readFile fixturesErrCS, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesErrCS, (err, code) ->
# console.log err
expect(err).to.be.an.instanceof Error
done()
it 'should return error on incorrect jsx-part file', (done) ->
fs.readFile fixturesErrJSX, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesErrJSX, (err, code) ->
# console.log err
expect(err).to.be.an.instanceof Error
done()
| true | ###
Test suite for addon
###
_ = require 'lodash'
fs = require 'fs'
path = require 'path'
vm = require 'vm'
React = require 'react'
ReactDOMServer = require 'react-dom/server'
CoffeeScript = require 'coffee-script'
lib_path = GLOBAL?.lib_path || ''
fixtures = path.join __dirname, "fixtures"
fixturesOk = path.join fixtures, "component.csbx"
fixturesErrCS = path.join fixtures, "with_coffee_error.csbx"
fixturesErrJSX = path.join fixtures, "with_jsx_error.csbx"
results =
ok : '<div class="message"><p>Hello Bender!!!</p></div>'
# get addon
adon_file = 'addon'
addon_path = path.join lib_path, adon_file
Compiller = require addon_path
READ_OPTIONS = encoding : 'utf8'
describe 'Addon:', ->
describe 'itself', ->
it 'should export "extension" and "processor"', ->
expect(Compiller).to.have.all.keys ['extension', 'processor']
it 'should export string as "extension"', ->
expect(Compiller.extension).to.be.a 'string'
it 'should export function as "processor"', ->
expect(Compiller.processor).to.be.a 'function'
describe 'as addon', ->
it 'should export correct file extension ".csbx"', ->
expect(Compiller.extension).to.equal '.csbx'
it 'should compile correct .csbx file', (done) ->
fs.readFile fixturesOk, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesOk, (err, code) ->
expect(err).to.be.null
expect(code).to.be.a 'string'
# console.log code
# test result
vm.runInNewContext code, sandbox = { React, module:exports:null }
react_comp = sandbox.module.exports
MyComp = React.createFactory react_comp
reactHtml = ReactDOMServer.renderToStaticMarkup MyComp name : 'PI:NAME:<NAME>END_PI'
# console.log reactHtml
expect(reactHtml).to.equal results.ok
done()
it 'should return error on incorrect coffee-part file', (done) ->
fs.readFile fixturesErrCS, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesErrCS, (err, code) ->
# console.log err
expect(err).to.be.an.instanceof Error
done()
it 'should return error on incorrect jsx-part file', (done) ->
fs.readFile fixturesErrJSX, READ_OPTIONS, (err, data) ->
expect(err).to.be.null
expect(data).to.be.a 'string'
Compiller.processor data, fixturesErrJSX, (err, code) ->
# console.log err
expect(err).to.be.an.instanceof Error
done()
|
[
{
"context": " \"Face generator, inspired by weird faces study by Matthias Dörfelt aka mokafolio.\"\n\n\t# \tparameters: ()->\n\t# \t\tparame",
"end": 266,
"score": 0.9998922944068909,
"start": 250,
"tag": "NAME",
"value": "Matthias Dörfelt"
},
{
"context": "pired by weird faces stud... | coffee/Items/Paths/Shapes/FaceShape.coffee | arthursw/comme-un-dessein-client | 0 |
# class FaceShape extends RShape
# @Shape = P.Path.Rectangle
# @label = 'Face generator'
# # @iconURL = 'static/images/icons/inverted/spiral.png'
# # @iconAlt = 'spiral'
# @description = "Face generator, inspired by weird faces study by Matthias Dörfelt aka mokafolio."
# parameters: ()->
# parameters = super()
# parameters['Parameters'] ?= {}
# parameters['Parameters'].minRadius =
# type: 'slider'
# label: 'Minimum radius'
# min: 0
# max: 100
# default: 0
# parameters['Parameters'].nTurns =
# type: 'slider'
# label: 'Number of turns'
# min: 1
# max: 50
# default: 10
# parameters['Parameters'].nSides =
# type: 'slider'
# label: 'Sides'
# min: 3
# max: 100
# default: 50
# return parameters
# createShape: ()->
# @headShape = @addPath(new P.Path.Ellipse(@rectangle.expand(-20,-10)))
# @headShape.flatten(50)
# for segment in @headShape.segments
# segment.point.x += Math.random()*20
# segment.point.y += Math.random()*5
# segment.handleIn += Math.random()*5
# segment.handleOut += Math.random()*5
# @headShape.smooth()
# nozeShape = Math.random()
# center = @rectangle.center
# width = @rectangle.width
# height = @rectangle.height
# rangeRandMM = (min, max)->
# return min + (max-min)*Math.random()
# rangeRandC = (center, amplitude)->
# return center + amplitude*(Math.random()-0.5)
# # noze
# if nozeShape < 0.333 # two nostrils
# deltaX = 0.1*width + Math.random()*10
# x = center.x - deltaX
# y = center.y + rangeRandC(0, 5)
# position = center.add(x, y)
# size = new P.Size(Math.random()*5, Math.random()*5)
# nozeLeft = @addPath(new P.Path.Ellipse(position, size))
# position += 2*deltaX
# size = new P.Size(Math.random()*5, Math.random()*5)
# nozeRight = @addPath(new P.Path.Ellipse(position, size))
# else if nozeShape < 0.666 # noze toward left
# noze = @addPath()
# noze.add(center)
# noze.add(center.add(Math.random()*15, Math.random()*5))
# noze.add(center.add(0, rangeRandMM(5,10)))
# noze.smooth()
# else # noze toward right
# noze = @addPath()
# noze.add(center)
# noze.add(center.add(-Math.random()*15, Math.random()*5))
# noze.add(center.add(0, rangeRandMM(15,20)))
# noze.smooth()
# # eyes
# deltaX = rangeRandC(0, 0.1*width)
# x = center.x - deltaX
# y = @rectangle.top + width/3 + rangeRandC(0, 10)
# position = new P.Point(x, y)
# size = new P.Size(Math.max(Math.random()*30,deltaX), Math.random()*30)
# eyeLeft = @addPath(new P.Path.Ellipse(position, size))
# position.x += 2*deltaX
# eyeRight = @addPath(new P.Path.Ellipse(position, size))
# eyeRight.position.x += rangeRandC(0, 5)
# eyeLeft.position.x += rangeRandC(0, 5)
# for i in [1 .. eyeLeft.segments.length-1]
# eyeLeft.segments[i].point.x += Math.random()*3
# eyeLeft.segments[i].point.y += Math.random()*3
# eyeRight.segments[i].point.x += Math.random()*3
# eyeRight.segments[i].point.y += Math.random()*3
# return
# R.FaceShape = FaceShape
# R.pathClasses.push(R.FaceShape)
| 43012 |
# class FaceShape extends RShape
# @Shape = P.Path.Rectangle
# @label = 'Face generator'
# # @iconURL = 'static/images/icons/inverted/spiral.png'
# # @iconAlt = 'spiral'
# @description = "Face generator, inspired by weird faces study by <NAME> aka mokafolio."
# parameters: ()->
# parameters = super()
# parameters['Parameters'] ?= {}
# parameters['Parameters'].minRadius =
# type: 'slider'
# label: 'Minimum radius'
# min: 0
# max: 100
# default: 0
# parameters['Parameters'].nTurns =
# type: 'slider'
# label: 'Number of turns'
# min: 1
# max: 50
# default: 10
# parameters['Parameters'].nSides =
# type: 'slider'
# label: 'Sides'
# min: 3
# max: 100
# default: 50
# return parameters
# createShape: ()->
# @headShape = @addPath(new P.Path.Ellipse(@rectangle.expand(-20,-10)))
# @headShape.flatten(50)
# for segment in @headShape.segments
# segment.point.x += Math.random()*20
# segment.point.y += Math.random()*5
# segment.handleIn += Math.random()*5
# segment.handleOut += Math.random()*5
# @headShape.smooth()
# nozeShape = Math.random()
# center = @rectangle.center
# width = @rectangle.width
# height = @rectangle.height
# rangeRandMM = (min, max)->
# return min + (max-min)*Math.random()
# rangeRandC = (center, amplitude)->
# return center + amplitude*(Math.random()-0.5)
# # noze
# if nozeShape < 0.333 # two nostrils
# deltaX = 0.1*width + Math.random()*10
# x = center.x - deltaX
# y = center.y + rangeRandC(0, 5)
# position = center.add(x, y)
# size = new P.Size(Math.random()*5, Math.random()*5)
# nozeLeft = @addPath(new P.Path.Ellipse(position, size))
# position += 2*deltaX
# size = new P.Size(Math.random()*5, Math.random()*5)
# nozeRight = @addPath(new P.Path.Ellipse(position, size))
# else if nozeShape < 0.666 # noze toward left
# noze = @addPath()
# noze.add(center)
# noze.add(center.add(Math.random()*15, Math.random()*5))
# noze.add(center.add(0, rangeRandMM(5,10)))
# noze.smooth()
# else # noze toward right
# noze = @addPath()
# noze.add(center)
# noze.add(center.add(-Math.random()*15, Math.random()*5))
# noze.add(center.add(0, rangeRandMM(15,20)))
# noze.smooth()
# # eyes
# deltaX = rangeRandC(0, 0.1*width)
# x = center.x - deltaX
# y = @rectangle.top + width/3 + rangeRandC(0, 10)
# position = new P.Point(x, y)
# size = new P.Size(Math.max(Math.random()*30,deltaX), Math.random()*30)
# eyeLeft = @addPath(new P.Path.Ellipse(position, size))
# position.x += 2*deltaX
# eyeRight = @addPath(new P.Path.Ellipse(position, size))
# eyeRight.position.x += rangeRandC(0, 5)
# eyeLeft.position.x += rangeRandC(0, 5)
# for i in [1 .. eyeLeft.segments.length-1]
# eyeLeft.segments[i].point.x += Math.random()*3
# eyeLeft.segments[i].point.y += Math.random()*3
# eyeRight.segments[i].point.x += Math.random()*3
# eyeRight.segments[i].point.y += Math.random()*3
# return
# R.FaceShape = FaceShape
# R.pathClasses.push(R.FaceShape)
| true |
# class FaceShape extends RShape
# @Shape = P.Path.Rectangle
# @label = 'Face generator'
# # @iconURL = 'static/images/icons/inverted/spiral.png'
# # @iconAlt = 'spiral'
# @description = "Face generator, inspired by weird faces study by PI:NAME:<NAME>END_PI aka mokafolio."
# parameters: ()->
# parameters = super()
# parameters['Parameters'] ?= {}
# parameters['Parameters'].minRadius =
# type: 'slider'
# label: 'Minimum radius'
# min: 0
# max: 100
# default: 0
# parameters['Parameters'].nTurns =
# type: 'slider'
# label: 'Number of turns'
# min: 1
# max: 50
# default: 10
# parameters['Parameters'].nSides =
# type: 'slider'
# label: 'Sides'
# min: 3
# max: 100
# default: 50
# return parameters
# createShape: ()->
# @headShape = @addPath(new P.Path.Ellipse(@rectangle.expand(-20,-10)))
# @headShape.flatten(50)
# for segment in @headShape.segments
# segment.point.x += Math.random()*20
# segment.point.y += Math.random()*5
# segment.handleIn += Math.random()*5
# segment.handleOut += Math.random()*5
# @headShape.smooth()
# nozeShape = Math.random()
# center = @rectangle.center
# width = @rectangle.width
# height = @rectangle.height
# rangeRandMM = (min, max)->
# return min + (max-min)*Math.random()
# rangeRandC = (center, amplitude)->
# return center + amplitude*(Math.random()-0.5)
# # noze
# if nozeShape < 0.333 # two nostrils
# deltaX = 0.1*width + Math.random()*10
# x = center.x - deltaX
# y = center.y + rangeRandC(0, 5)
# position = center.add(x, y)
# size = new P.Size(Math.random()*5, Math.random()*5)
# nozeLeft = @addPath(new P.Path.Ellipse(position, size))
# position += 2*deltaX
# size = new P.Size(Math.random()*5, Math.random()*5)
# nozeRight = @addPath(new P.Path.Ellipse(position, size))
# else if nozeShape < 0.666 # noze toward left
# noze = @addPath()
# noze.add(center)
# noze.add(center.add(Math.random()*15, Math.random()*5))
# noze.add(center.add(0, rangeRandMM(5,10)))
# noze.smooth()
# else # noze toward right
# noze = @addPath()
# noze.add(center)
# noze.add(center.add(-Math.random()*15, Math.random()*5))
# noze.add(center.add(0, rangeRandMM(15,20)))
# noze.smooth()
# # eyes
# deltaX = rangeRandC(0, 0.1*width)
# x = center.x - deltaX
# y = @rectangle.top + width/3 + rangeRandC(0, 10)
# position = new P.Point(x, y)
# size = new P.Size(Math.max(Math.random()*30,deltaX), Math.random()*30)
# eyeLeft = @addPath(new P.Path.Ellipse(position, size))
# position.x += 2*deltaX
# eyeRight = @addPath(new P.Path.Ellipse(position, size))
# eyeRight.position.x += rangeRandC(0, 5)
# eyeLeft.position.x += rangeRandC(0, 5)
# for i in [1 .. eyeLeft.segments.length-1]
# eyeLeft.segments[i].point.x += Math.random()*3
# eyeLeft.segments[i].point.y += Math.random()*3
# eyeRight.segments[i].point.x += Math.random()*3
# eyeRight.segments[i].point.y += Math.random()*3
# return
# R.FaceShape = FaceShape
# R.pathClasses.push(R.FaceShape)
|
[
{
"context": "represents a complete city\n if key == 'cityID' and currentNode[key]\n result = \n ",
"end": 5781,
"score": 0.9623651504516602,
"start": 5775,
"tag": "KEY",
"value": "cityID"
}
] | source/stringsearchmodule/stringsearchmodule.coffee | JhonnyJason/citysearch-on-socket-sources | 0 |
stringsearchmodule = {name: "stringsearchmodule"}
#log Switch
log = (arg) ->
if allModules.debugmodule.modulesToDebug["stringsearchmodule"]? then console.log "[stringsearchmodule]: " + arg
return
#region internal variables
dataStruct =
rootNode:
cityCount: 0
cityID: 0
cityName: ''
currentEntryToInsert: null
currentCityName: ''
currentSensitiveCityName: ''
currentCityID: 0
currentCharIndex: 0
currentNode: null
initialized: false
resultStruct =
maxResults: 30
results: []
#endregion
##initialization function -> is automatically being called! ONLY RELY ON DOM AND VARIABLES!! NO PLUGINS NO OHTER INITIALIZATIONS!!
stringsearchmodule.initialize = () ->
log "stringsearchmodule.initialize"
#region internal functions
#==========================================================================================
# insert Data functions
#==========================================================================================
# inserts the current entry which at this point should have been set into the datastructure
insertCityEntry = ->
#start off @ 0
dataStruct.currentCharIndex = 0
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
#insert for root entry - cityCount represents the number of cities wich may be reached at this or any child nodes
dataStruct.rootNode.cityCount++
#insert stuff for the next Node
if !dataStruct.rootNode[currentChar]
#create leave(potential node) if there is no appropriate one yet
dataStruct.rootNode[currentChar] = cityCount: 1
else
dataStruct.rootNode[currentChar].cityCount++
#otherwise the city is at or at any child of this leave/node
#we set a new base node for where we continue to insert the parts of the rest of the names
dataStruct.currentNode = dataStruct.rootNode[currentChar]
#insert further nodes
while dataStruct.currentNode
insertNextNode()
#clean datastruct
dataStruct.currentCharIndex = 0
dataStruct.currentNode = null
dataStruct.currentEntryToInsert = null
return
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# is called for every character of the object to insert the node for it's seach path
insertNextNode = ->
dataStruct.currentCharIndex++
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
###
console.log("____________________________________________");
console.log("City Name: " + dataStruct.currentCityName);
console.log("City ID: " + dataStruct.currentCityID);
console.log("current Char index: " + dataStruct.currentCharIndex);
console.log("current Char: " + currentChar);
console.log("- - - - - - - - - - - - - - - - - - - - - - \n");
###
if !currentChar
# we reached the end
dataStruct.currentNode.cityID = dataStruct.currentCityID
dataStruct.currentNode.cityName = dataStruct.currentSensitiveCityName
dataStruct.currentNode = null
#is the break condition to recognize we're through
return
#create new leave(potential node) if there is no appropriate one yet
if !dataStruct.currentNode[currentChar]
dataStruct.currentNode[currentChar] = cityCount: 1
else
dataStruct.currentNode[currentChar].cityCount++
#progress to next node
dataStruct.currentNode = dataStruct.currentNode[currentChar]
return
#==========================================================================================
# search functions
#===========================================================================================
# this will start the loop to go through all relevant nodes
# when this function is called the seachString has been stored @ datastruct.currentCityName
# also the maximum number of search results is stored @ resultStruct.maxResults
retrieveSearchResults = ->
log 'retrieveSearchResults'
#reset all relevant parameters
dataStruct.currentNode = dataStruct.rootNode
dataStruct.currentCharIndex = 0
resultStruct.results = []
#iterating over the searchString
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
while currentChar
if dataStruct.currentNode[currentChar]
dataStruct.currentNode = dataStruct.currentNode[currentChar]
else
#when we donot find any we have no results and may end the search
log 'The search String does not fit any results!'
return
#prepare for next round
dataStruct.currentCharIndex++
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
#now we have gone through the whole searchString we now should have a search Result
if dataStruct.currentNode.cityCount > resultStruct.maxResults
#We have too many search results, so we send back none
log 'there are too many results!'
return
log "we have " + dataStruct.currentNode.cityCount + " results!"
collectSearchResults()
return
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# we have one through the whole searchString and the currentNode is @ the beginning of all
# search results
collectSearchResults = ->
log 'collectSearchResults'
currentNode = dataStruct.currentNode
#start recursive depth first search to retreive all search results
for key of currentNode
# skip loop if the property is from prototype
if currentNode.hasOwnProperty(key)
# console.log 'key: \'' + key + '\' / value: \'' + currentNode[key] + '\''
#if there is a cityID this node represents a complete city
if key == 'cityID' and currentNode[key]
result =
cityID: currentNode.cityID
cityName: currentNode.cityName
resultStruct.results.push result
#every key length 1 is pointing to a child node
if key.length == 1
dataStruct.currentNode = currentNode[key]
collectSearchResults()
return
#endregion
#region exposed functions
stringsearchmodule.doSearch = (searchString, maxResults) ->
log "stringsearchmodule.doSearch " + searchString + ", " + maxResults
dataStruct.currentCityName = searchString
resultStruct.maxResults = maxResults
retrieveSearchResults()
return resultStruct.results
stringsearchmodule.addEntry = (entry) ->
# log "stringsearchmodule.addEntry"
dataStruct.currentEntryToInsert = entry
dataStruct.currentCityID = entry.id
dataStruct.currentCityName = entry.name.toLowerCase() + ', ' + entry.country.toLowerCase()
dataStruct.currentSensitiveCityName = entry.name + ', ' + entry.country
insertCityEntry()
return
#endregion exposed functions
export default stringsearchmodule | 222893 |
stringsearchmodule = {name: "stringsearchmodule"}
#log Switch
log = (arg) ->
if allModules.debugmodule.modulesToDebug["stringsearchmodule"]? then console.log "[stringsearchmodule]: " + arg
return
#region internal variables
dataStruct =
rootNode:
cityCount: 0
cityID: 0
cityName: ''
currentEntryToInsert: null
currentCityName: ''
currentSensitiveCityName: ''
currentCityID: 0
currentCharIndex: 0
currentNode: null
initialized: false
resultStruct =
maxResults: 30
results: []
#endregion
##initialization function -> is automatically being called! ONLY RELY ON DOM AND VARIABLES!! NO PLUGINS NO OHTER INITIALIZATIONS!!
stringsearchmodule.initialize = () ->
log "stringsearchmodule.initialize"
#region internal functions
#==========================================================================================
# insert Data functions
#==========================================================================================
# inserts the current entry which at this point should have been set into the datastructure
insertCityEntry = ->
#start off @ 0
dataStruct.currentCharIndex = 0
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
#insert for root entry - cityCount represents the number of cities wich may be reached at this or any child nodes
dataStruct.rootNode.cityCount++
#insert stuff for the next Node
if !dataStruct.rootNode[currentChar]
#create leave(potential node) if there is no appropriate one yet
dataStruct.rootNode[currentChar] = cityCount: 1
else
dataStruct.rootNode[currentChar].cityCount++
#otherwise the city is at or at any child of this leave/node
#we set a new base node for where we continue to insert the parts of the rest of the names
dataStruct.currentNode = dataStruct.rootNode[currentChar]
#insert further nodes
while dataStruct.currentNode
insertNextNode()
#clean datastruct
dataStruct.currentCharIndex = 0
dataStruct.currentNode = null
dataStruct.currentEntryToInsert = null
return
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# is called for every character of the object to insert the node for it's seach path
insertNextNode = ->
dataStruct.currentCharIndex++
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
###
console.log("____________________________________________");
console.log("City Name: " + dataStruct.currentCityName);
console.log("City ID: " + dataStruct.currentCityID);
console.log("current Char index: " + dataStruct.currentCharIndex);
console.log("current Char: " + currentChar);
console.log("- - - - - - - - - - - - - - - - - - - - - - \n");
###
if !currentChar
# we reached the end
dataStruct.currentNode.cityID = dataStruct.currentCityID
dataStruct.currentNode.cityName = dataStruct.currentSensitiveCityName
dataStruct.currentNode = null
#is the break condition to recognize we're through
return
#create new leave(potential node) if there is no appropriate one yet
if !dataStruct.currentNode[currentChar]
dataStruct.currentNode[currentChar] = cityCount: 1
else
dataStruct.currentNode[currentChar].cityCount++
#progress to next node
dataStruct.currentNode = dataStruct.currentNode[currentChar]
return
#==========================================================================================
# search functions
#===========================================================================================
# this will start the loop to go through all relevant nodes
# when this function is called the seachString has been stored @ datastruct.currentCityName
# also the maximum number of search results is stored @ resultStruct.maxResults
retrieveSearchResults = ->
log 'retrieveSearchResults'
#reset all relevant parameters
dataStruct.currentNode = dataStruct.rootNode
dataStruct.currentCharIndex = 0
resultStruct.results = []
#iterating over the searchString
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
while currentChar
if dataStruct.currentNode[currentChar]
dataStruct.currentNode = dataStruct.currentNode[currentChar]
else
#when we donot find any we have no results and may end the search
log 'The search String does not fit any results!'
return
#prepare for next round
dataStruct.currentCharIndex++
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
#now we have gone through the whole searchString we now should have a search Result
if dataStruct.currentNode.cityCount > resultStruct.maxResults
#We have too many search results, so we send back none
log 'there are too many results!'
return
log "we have " + dataStruct.currentNode.cityCount + " results!"
collectSearchResults()
return
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# we have one through the whole searchString and the currentNode is @ the beginning of all
# search results
collectSearchResults = ->
log 'collectSearchResults'
currentNode = dataStruct.currentNode
#start recursive depth first search to retreive all search results
for key of currentNode
# skip loop if the property is from prototype
if currentNode.hasOwnProperty(key)
# console.log 'key: \'' + key + '\' / value: \'' + currentNode[key] + '\''
#if there is a cityID this node represents a complete city
if key == '<KEY>' and currentNode[key]
result =
cityID: currentNode.cityID
cityName: currentNode.cityName
resultStruct.results.push result
#every key length 1 is pointing to a child node
if key.length == 1
dataStruct.currentNode = currentNode[key]
collectSearchResults()
return
#endregion
#region exposed functions
stringsearchmodule.doSearch = (searchString, maxResults) ->
log "stringsearchmodule.doSearch " + searchString + ", " + maxResults
dataStruct.currentCityName = searchString
resultStruct.maxResults = maxResults
retrieveSearchResults()
return resultStruct.results
stringsearchmodule.addEntry = (entry) ->
# log "stringsearchmodule.addEntry"
dataStruct.currentEntryToInsert = entry
dataStruct.currentCityID = entry.id
dataStruct.currentCityName = entry.name.toLowerCase() + ', ' + entry.country.toLowerCase()
dataStruct.currentSensitiveCityName = entry.name + ', ' + entry.country
insertCityEntry()
return
#endregion exposed functions
export default stringsearchmodule | true |
stringsearchmodule = {name: "stringsearchmodule"}
#log Switch
log = (arg) ->
if allModules.debugmodule.modulesToDebug["stringsearchmodule"]? then console.log "[stringsearchmodule]: " + arg
return
#region internal variables
dataStruct =
rootNode:
cityCount: 0
cityID: 0
cityName: ''
currentEntryToInsert: null
currentCityName: ''
currentSensitiveCityName: ''
currentCityID: 0
currentCharIndex: 0
currentNode: null
initialized: false
resultStruct =
maxResults: 30
results: []
#endregion
##initialization function -> is automatically being called! ONLY RELY ON DOM AND VARIABLES!! NO PLUGINS NO OHTER INITIALIZATIONS!!
stringsearchmodule.initialize = () ->
log "stringsearchmodule.initialize"
#region internal functions
#==========================================================================================
# insert Data functions
#==========================================================================================
# inserts the current entry which at this point should have been set into the datastructure
insertCityEntry = ->
#start off @ 0
dataStruct.currentCharIndex = 0
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
#insert for root entry - cityCount represents the number of cities wich may be reached at this or any child nodes
dataStruct.rootNode.cityCount++
#insert stuff for the next Node
if !dataStruct.rootNode[currentChar]
#create leave(potential node) if there is no appropriate one yet
dataStruct.rootNode[currentChar] = cityCount: 1
else
dataStruct.rootNode[currentChar].cityCount++
#otherwise the city is at or at any child of this leave/node
#we set a new base node for where we continue to insert the parts of the rest of the names
dataStruct.currentNode = dataStruct.rootNode[currentChar]
#insert further nodes
while dataStruct.currentNode
insertNextNode()
#clean datastruct
dataStruct.currentCharIndex = 0
dataStruct.currentNode = null
dataStruct.currentEntryToInsert = null
return
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# is called for every character of the object to insert the node for it's seach path
insertNextNode = ->
dataStruct.currentCharIndex++
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
###
console.log("____________________________________________");
console.log("City Name: " + dataStruct.currentCityName);
console.log("City ID: " + dataStruct.currentCityID);
console.log("current Char index: " + dataStruct.currentCharIndex);
console.log("current Char: " + currentChar);
console.log("- - - - - - - - - - - - - - - - - - - - - - \n");
###
if !currentChar
# we reached the end
dataStruct.currentNode.cityID = dataStruct.currentCityID
dataStruct.currentNode.cityName = dataStruct.currentSensitiveCityName
dataStruct.currentNode = null
#is the break condition to recognize we're through
return
#create new leave(potential node) if there is no appropriate one yet
if !dataStruct.currentNode[currentChar]
dataStruct.currentNode[currentChar] = cityCount: 1
else
dataStruct.currentNode[currentChar].cityCount++
#progress to next node
dataStruct.currentNode = dataStruct.currentNode[currentChar]
return
#==========================================================================================
# search functions
#===========================================================================================
# this will start the loop to go through all relevant nodes
# when this function is called the seachString has been stored @ datastruct.currentCityName
# also the maximum number of search results is stored @ resultStruct.maxResults
retrieveSearchResults = ->
log 'retrieveSearchResults'
#reset all relevant parameters
dataStruct.currentNode = dataStruct.rootNode
dataStruct.currentCharIndex = 0
resultStruct.results = []
#iterating over the searchString
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
while currentChar
if dataStruct.currentNode[currentChar]
dataStruct.currentNode = dataStruct.currentNode[currentChar]
else
#when we donot find any we have no results and may end the search
log 'The search String does not fit any results!'
return
#prepare for next round
dataStruct.currentCharIndex++
currentChar = dataStruct.currentCityName.charAt(dataStruct.currentCharIndex)
#now we have gone through the whole searchString we now should have a search Result
if dataStruct.currentNode.cityCount > resultStruct.maxResults
#We have too many search results, so we send back none
log 'there are too many results!'
return
log "we have " + dataStruct.currentNode.cityCount + " results!"
collectSearchResults()
return
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# we have one through the whole searchString and the currentNode is @ the beginning of all
# search results
collectSearchResults = ->
log 'collectSearchResults'
currentNode = dataStruct.currentNode
#start recursive depth first search to retreive all search results
for key of currentNode
# skip loop if the property is from prototype
if currentNode.hasOwnProperty(key)
# console.log 'key: \'' + key + '\' / value: \'' + currentNode[key] + '\''
#if there is a cityID this node represents a complete city
if key == 'PI:KEY:<KEY>END_PI' and currentNode[key]
result =
cityID: currentNode.cityID
cityName: currentNode.cityName
resultStruct.results.push result
#every key length 1 is pointing to a child node
if key.length == 1
dataStruct.currentNode = currentNode[key]
collectSearchResults()
return
#endregion
#region exposed functions
stringsearchmodule.doSearch = (searchString, maxResults) ->
log "stringsearchmodule.doSearch " + searchString + ", " + maxResults
dataStruct.currentCityName = searchString
resultStruct.maxResults = maxResults
retrieveSearchResults()
return resultStruct.results
stringsearchmodule.addEntry = (entry) ->
# log "stringsearchmodule.addEntry"
dataStruct.currentEntryToInsert = entry
dataStruct.currentCityID = entry.id
dataStruct.currentCityName = entry.name.toLowerCase() + ', ' + entry.country.toLowerCase()
dataStruct.currentSensitiveCityName = entry.name + ', ' + entry.country
insertCityEntry()
return
#endregion exposed functions
export default stringsearchmodule |
[
{
"context": "escript'\n 'javascript'\n 'coffee'\n]\n\nauthor : 'Michal Srb <xixixao@seznam.cz>'\nhomepage : 'http://gkz.githu",
"end": 388,
"score": 0.9998878240585327,
"start": 378,
"tag": "NAME",
"value": "Michal Srb"
},
{
"context": "javascript'\n 'coffee'\n]\n\nauthor : '... | package.coffee | xixixao/prelude-ls | 5 | name : 'prelude-coffee'
version : '0.1.0'
description : "prelude.coffee is a JavaScript functional programming library. It is written for and in CoffeeScript. It is a mutated clone of LiveScript's prelude.js which in turn is based of Haskell's Prelude module."
keywords : [
'library'
'prelude'
'livescript'
'coffeescript'
'javascript'
'coffee'
]
author : 'Michal Srb <xixixao@seznam.cz>'
homepage : 'http://gkz.github.com/prelude-ls/'
bugs : 'https://github.com/gkz/prelude-ls/issues'
licenses : [
type: 'MIT', url: 'https://raw.github.com/gkz/prelude-ls/master/LICENSE'
]
engines : node: '>= 0.8.0'
files : [
'prelude.js'
'prelude-browser.js'
'prelude-browser-min.js'
'README.md'
'LICENSE'
]
main : './prelude.js'
repository: type: 'git', url: 'git://github.com/xixixao/prelude-coffee.git'
| 192077 | name : 'prelude-coffee'
version : '0.1.0'
description : "prelude.coffee is a JavaScript functional programming library. It is written for and in CoffeeScript. It is a mutated clone of LiveScript's prelude.js which in turn is based of Haskell's Prelude module."
keywords : [
'library'
'prelude'
'livescript'
'coffeescript'
'javascript'
'coffee'
]
author : '<NAME> <<EMAIL>>'
homepage : 'http://gkz.github.com/prelude-ls/'
bugs : 'https://github.com/gkz/prelude-ls/issues'
licenses : [
type: 'MIT', url: 'https://raw.github.com/gkz/prelude-ls/master/LICENSE'
]
engines : node: '>= 0.8.0'
files : [
'prelude.js'
'prelude-browser.js'
'prelude-browser-min.js'
'README.md'
'LICENSE'
]
main : './prelude.js'
repository: type: 'git', url: 'git://github.com/xixixao/prelude-coffee.git'
| true | name : 'prelude-coffee'
version : '0.1.0'
description : "prelude.coffee is a JavaScript functional programming library. It is written for and in CoffeeScript. It is a mutated clone of LiveScript's prelude.js which in turn is based of Haskell's Prelude module."
keywords : [
'library'
'prelude'
'livescript'
'coffeescript'
'javascript'
'coffee'
]
author : 'PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>'
homepage : 'http://gkz.github.com/prelude-ls/'
bugs : 'https://github.com/gkz/prelude-ls/issues'
licenses : [
type: 'MIT', url: 'https://raw.github.com/gkz/prelude-ls/master/LICENSE'
]
engines : node: '>= 0.8.0'
files : [
'prelude.js'
'prelude-browser.js'
'prelude-browser-min.js'
'README.md'
'LICENSE'
]
main : './prelude.js'
repository: type: 'git', url: 'git://github.com/xixixao/prelude-coffee.git'
|
[
{
"context": "# Description:\n# I'm Hikakin\n#\n# Commands:\n# hikakin\n#\n# Author:\n# - toshi",
"end": 30,
"score": 0.9969227313995361,
"start": 23,
"tag": "NAME",
"value": "Hikakin"
},
{
"context": "ikakin\n#\n# Commands:\n# hikakin\n#\n# Author:\n# - toshimaru\n\nmodule... | src/scripts/hikakin.coffee | toshimaru/hubot-hikakin | 0 | # Description:
# I'm Hikakin
#
# Commands:
# hikakin
#
# Author:
# - toshimaru
module.exports = (robot) ->
robot.hear /hikakin/i, (msg) ->
msg.send 'http://i.imgur.com/TRLZYyZ.gif'
| 68970 | # Description:
# I'm <NAME>
#
# Commands:
# hikakin
#
# Author:
# - toshimaru
module.exports = (robot) ->
robot.hear /hikakin/i, (msg) ->
msg.send 'http://i.imgur.com/TRLZYyZ.gif'
| true | # Description:
# I'm PI:NAME:<NAME>END_PI
#
# Commands:
# hikakin
#
# Author:
# - toshimaru
module.exports = (robot) ->
robot.hear /hikakin/i, (msg) ->
msg.send 'http://i.imgur.com/TRLZYyZ.gif'
|
[
{
"context": "e next ###\n# Adapted from https://gist.github.com/paulirish/1579671 which derived from\n# http://paulirish.com",
"end": 77,
"score": 0.9995114207267761,
"start": 68,
"tag": "USERNAME",
"value": "paulirish"
},
{
"context": "t-er-animating\n# requestAnimationFrame polyfi... | js/polyfills/raf.coffee | Neilblaze/mojs | 1 | ### istanbul ignore next ###
# Adapted from https://gist.github.com/paulirish/1579671 which derived from
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by Erik Möller.
# Fixes from Paul Irish, Tino Zijdel, Andrew Mao, Klemen Slavič, Darius Bacon
# MIT license
do ->
'use strict'
vendors = [
'webkit'
'moz'
]
i = 0
w = window
while i < vendors.length and !w.requestAnimationFrame
vp = vendors[i]
w.requestAnimationFrame = w[vp + 'RequestAnimationFrame']
cancel = w[vp + 'CancelAnimationFrame']
w.cancelAnimationFrame = cancel or w[vp + 'CancelRequestAnimationFrame']
++i
isOldBrowser = !w.requestAnimationFrame or !w.cancelAnimationFrame
if /iP(ad|hone|od).*OS 6/.test(w.navigator.userAgent) or isOldBrowser
lastTime = 0
w.requestAnimationFrame = (callback) ->
now = Date.now()
nextTime = Math.max(lastTime + 16, now)
setTimeout (->
callback lastTime = nextTime
return
), nextTime - now
w.cancelAnimationFrame = clearTimeout
return
| 38961 | ### istanbul ignore next ###
# Adapted from https://gist.github.com/paulirish/1579671 which derived from
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by <NAME>.
# Fixes from <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
# MIT license
do ->
'use strict'
vendors = [
'webkit'
'moz'
]
i = 0
w = window
while i < vendors.length and !w.requestAnimationFrame
vp = vendors[i]
w.requestAnimationFrame = w[vp + 'RequestAnimationFrame']
cancel = w[vp + 'CancelAnimationFrame']
w.cancelAnimationFrame = cancel or w[vp + 'CancelRequestAnimationFrame']
++i
isOldBrowser = !w.requestAnimationFrame or !w.cancelAnimationFrame
if /iP(ad|hone|od).*OS 6/.test(w.navigator.userAgent) or isOldBrowser
lastTime = 0
w.requestAnimationFrame = (callback) ->
now = Date.now()
nextTime = Math.max(lastTime + 16, now)
setTimeout (->
callback lastTime = nextTime
return
), nextTime - now
w.cancelAnimationFrame = clearTimeout
return
| true | ### istanbul ignore next ###
# Adapted from https://gist.github.com/paulirish/1579671 which derived from
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by PI:NAME:<NAME>END_PI.
# Fixes from PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
# MIT license
do ->
'use strict'
vendors = [
'webkit'
'moz'
]
i = 0
w = window
while i < vendors.length and !w.requestAnimationFrame
vp = vendors[i]
w.requestAnimationFrame = w[vp + 'RequestAnimationFrame']
cancel = w[vp + 'CancelAnimationFrame']
w.cancelAnimationFrame = cancel or w[vp + 'CancelRequestAnimationFrame']
++i
isOldBrowser = !w.requestAnimationFrame or !w.cancelAnimationFrame
if /iP(ad|hone|od).*OS 6/.test(w.navigator.userAgent) or isOldBrowser
lastTime = 0
w.requestAnimationFrame = (callback) ->
now = Date.now()
nextTime = Math.max(lastTime + 16, now)
setTimeout (->
callback lastTime = nextTime
return
), nextTime - now
w.cancelAnimationFrame = clearTimeout
return
|
[
{
"context": "4*364*10\n @_username = @get_chain().user or \"tester_ralph\"\n @_uid = @get_chain().uid or username_to_ui",
"end": 7271,
"score": 0.999404788017273,
"start": 7259,
"tag": "USERNAME",
"value": "tester_ralph"
},
{
"context": "{obj, required}, cb) ->\n userid... | src/forge.iced | keybase/node-test-sigchain | 6 |
{make_esc} = require 'iced-error'
{athrow,akatch,unix_time} = require('iced-utils').util
kbpgp = require 'kbpgp'
proofs = require 'keybase-proofs'
constants = proofs.constants
{prng,createHash} = require 'crypto'
btcjs = require 'keybase-bitcoinjs-lib'
pgp_utils = require('pgp-utils')
{json_stringify_sorted} = pgp_utils.util
#===================================================
UID_HEX_LEN = 32
UID_SUFFIX = "19"
username_to_uid = (un) ->
hashlen = UID_HEX_LEN - 2
return createHash('sha256').update(un).digest('hex').slice(0, hashlen) + UID_SUFFIX
#===================================================
add_client = (arg) ->
arg.client = {
version : "5.2.30"
name : "keybase.io go client"
}
arg
#===================================================
# most of this copy-pasted from keybase-proofs, but I didn't want to
# introduce this code into that repo, since it's only for crafting
# malicious proofs -- MK 2017/4/3
generate_v2_with_corruption = ({links,proof, opts, hooks}, cb) ->
esc = make_esc cb, "generate"
out = null
await proof._v_generate {}, esc defer()
generate_inner_arg = { version : 2 }
hooks.pre_generate_inner? { generate_inner_arg }
await proof.generate_json generate_inner_arg, esc defer s, o
inner = { str : s, obj : o }
hooks.pre_generate_outer? { proof, inner }
await proof.generate_outer { inner }, esc defer outer, outer_unpacked
res = {}
hooks.post_generate_outer? { links, proof, outer, inner, res }
outer = res.outer if res.outer?
await proof.sig_eng.box outer, esc(defer({pgp, raw, armored})), { dohash : true }
hooks.corrupt_box? { inner, outer, pgp, raw, armored }
{short_id, id} = proofs.make_ids raw
out = { inner, outer, pgp, raw, armored, short_id, id, links, outer_unpacked }
hooks.corrupt_ids? out
cb null, out
#===================================================
# Unlike v2 corrupt hooks, these hooks only accept the object, not the
# json str for obj/str consistency.
generate_v1_with_corruption = ({links,proof,opts,hooks}, cb) ->
esc = make_esc cb, "generate"
out = null
opts = version : constants.versions.sig_v1
await proof._v_generate opts, esc defer()
corrupt_reverse_sig = null
if hooks?.corrupt_for_reverse_signature?
await proof.generate_json opts, esc defer _, json_obj
hooks.corrupt_for_reverse_signature {obj : json_obj}
corrupt_json = json_stringify_sorted json_obj
sigeng = proof.get_new_km().make_sig_eng()
await sigeng.box corrupt_json, esc defer {armored, type}
corrupt_reverse_sig = armored
await proof.generate_json opts, esc defer json, json_obj
if hooks?.corrupt_for_reverse_signature?
json_obj.body[hooks.corrupt_key_section].reverse_sig = corrupt_reverse_sig
json = json_stringify_sorted json_obj
inner = { str : json, obj : json_obj }
await proof.sig_eng.box json, esc(defer({pgp, raw, armored})), { dohash : true}
{short_id, id} = proofs.make_ids raw
out = { pgp, json, id, short_id, raw, armored, inner }
cb null, out
#===================================================
generate_proof = ({links, proof, linkdesc}, cb) ->
if (hooks = linkdesc.corrupt_v2_proof_hooks)?
generate_v2_with_corruption { links, proof, opts : {}, hooks }, cb
else if (hooks = linkdesc.corrupt_v1_proof_hooks)? # v1 hooks
generate_v1_with_corruption { links, proof, opts : {}, hooks }, cb
else
proof.generate_versioned { version : linkdesc.version, dohash : true }, cb
#===================================================
class Key
constructor : ({@km, @expire_in, @ctime, @revoked_at}) ->
get_kid : () -> @km.get_ekid().toString 'hex'
#===================================================
SIG_ID_SUFFIX = "0f"
class Link
constructor : ( {@linkdesc, @proof, @generate_res}) ->
inner_payload_json_str : () -> @generate_res.json or @generate_res.inner.str
get_payload_hash : (enc = 'hex') ->
createHash('sha256').update(@generate_res.outer or @inner_payload_json_str()).digest(enc)
get_sig_id : () -> @generate_res.id + SIG_ID_SUFFIX
to_json_full : () -> {
seqno : @proof.seqno
prev : @proof.prev
sig : @generate_res.armored
payload_hash : @get_payload_hash()
sig_id : @get_sig_id()
payload_json : @inner_payload_json_str()
kid: @proof.sig_eng.get_km().get_ekid().toString("hex")
ctime: @proof.ctime
sig_version : @linkdesc.version
}
to_json : () ->
if (@linkdesc.version is 2) and @linkdesc.stubbed then @to_json_stubbed()
else @to_json_full()
to_json_stubbed : () -> {
s2 : @generate_res.outer.toString('base64')
}
#===================================================
class Keyring
constructor : () ->
@bundles = []
@label = {}
to_json : () ->
# A list of bundles allows most callers to just use the first bundle as the
# eldest key. Tests involving a chain reset will need to know what key
# index they want, but they still won't need to hardcode the eldest key.
# Also callers should be computing KIDs themselves, so they don't need a
# map.
@bundles
#===================================================
exports.Forge = class Forge
#-------------------
constructor : ({@chain}) ->
@_keyring = new Keyring
@_links = []
@_link_tab = {}
@_assertions = []
@_time = 0
@_start = null
@_now = null
@_expire_in = 0
@_seqno = 1
@_prev = null
@_username = null
#-------------------
_compute_now : () ->
@_now = unix_time() unless @_now?
@_now
#-------------------
_get_expire_in : ({obj}) -> (obj.expire_in or @_expire_in)
#-------------------
_make_key : ({km, obj}, cb) ->
esc = make_esc cb, "_make_key"
k = new Key { km, ctime : @_compute_now(), expire_in : @_get_expire_in({obj}) }
await km.export_public { regen: true }, esc defer bundle
@_keyring.bundles.push(bundle)
@_keyring.label[obj.label] = k
cb null, k
#-------------------
_compute_time_or_default : (linkdesc, field) ->
if field?
@_compute_time(field)
else
linkdesc.ctime
#-------------------
_compute_time : (o, advance=false) ->
# Only advance time if `advance` argument is true. We want to only
# advance time when processing links' ctime, not every time we
# deal with a time field, so one link advances time at most one
# time.
ret = if typeof(o) is 'string'
if o is 'now' then @_compute_now()
else if not (m = o.match /^([\+-])?(\d+)$/) then null
else if m[1]?
if m[1] == '+'
tmp = @_compute_now() + parseInt(m[2])
@_now = tmp if advance
else
tmp = @_compute_now() - parseInt(m[2])
tmp
else
tmp = parseInt(m[2])
@_now = tmp if advance
tmp
else if typeof(o) isnt 'object' then null
else if o.sum?
sum = 0
for term in o.sum
sum += @_compute_time(term)
sum
else null
throw new Error "bad time: #{JSON.stringify o}" unless ret?
ret
#-------------------
_init : (cb) ->
try
@_start = if (t = @get_chain().ctime)? then @_compute_time(t, true) else @_compute_now()
@_expire_in = @get_chain().expire_in or 60*60*24*364*10
@_username = @get_chain().user or "tester_ralph"
@_uid = @get_chain().uid or username_to_uid @_username
catch e
err = e
cb err
#-------------------
_forge_link : ({linkdesc}, cb) ->
# Compute time at the very beginning of link forging. Other
# parameters of the link might want to use "current time".
linkdesc.ctime = if (t = linkdesc.ctime)? then @_compute_time(t, true) else @_compute_now()
# Use v=1 by default, but allow for v=2 and whatever else
linkdesc.version = if (v = linkdesc.version)? then v else 1
switch linkdesc.type
when 'eldest' then @_forge_eldest_link {linkdesc}, cb
when 'subkey' then @_forge_subkey_link {linkdesc}, cb
when 'sibkey' then @_forge_sibkey_link {linkdesc}, cb
when 'revoke' then @_forge_revoke_link {linkdesc}, cb
when 'track' then @_forge_track_link {linkdesc}, cb
when 'pgp_update' then @_forge_pgp_update_link {linkdesc}, cb
when 'btc' then @_forge_btc_link {linkdesc}, cb
when 'per_user_key' then @_forge_per_user_key {linkdesc}, cb
else cb (new Error "unhandled link type: #{linkdesc.type}"), null
#-------------------
_gen_key : ({obj, required}, cb) ->
userid = obj.userid or @_username
esc = make_esc cb, "_gen_key"
if (typ = obj.key?.gen)?
switch typ
when 'eddsa'
await kbpgp.kb.KeyManager.generate {}, esc defer km
when 'dh'
await kbpgp.kb.EncKeyManager.generate {}, esc defer km
when 'pgp_rsa'
await kbpgp.KeyManager.generate_rsa { userid : userid }, esc defer km
await km.sign {}, esc defer()
when 'pgp_ecc'
t = @_compute_time_or_default obj, obj.key.generated
await kbpgp.KeyManager.generate_ecc { userid : userid, generated: t, expire_in: { primary: obj.key.expire_in } }, esc defer km
await km.sign {}, esc defer()
else
await athrow (new Error "unknown key type: #{typ}"), defer()
else if required
await athrow (new Error "Required to generate key but none found"), defer()
key = null
if km?
await @_make_key {km, obj}, esc defer key
cb null, key
#-------------------
_populate_proof : ({linkdesc, proof}) ->
proof.seqno = linkdesc.seqno or @_seqno++
proof.prev = linkdesc.prev or @_prev
proof.host = "keybase.io"
proof.user =
local :
uid : linkdesc.uid or @_uid
username : linkdesc.username or @_username
proof.seq_type = proofs.constants.seq_types.PUBLIC
proof.ctime = linkdesc.ctime # Was already converted to "real time" in _forge_link
proof.expire_in = @_get_expire_in { obj : linkdesc }
proof.ignore_if_unsupported = linkdesc.ignore_if_unsupported
#-------------------
_forge_eldest_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_eldest_link"
await @_gen_key { obj : linkdesc, required : true }, esc defer key
proof = new proofs.Eldest add_client {
sig_eng : key.km.make_sig_eng()
}
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
@_eldest_kid = key.km.get_ekid().toString 'hex'
cb null
#-------------------
_forge_subkey_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_subkey_link"
await @_gen_key { obj : linkdesc, required : true }, esc defer key
parent = @_keyring.label[(ref = linkdesc.parent)]
unless parent?
err = new Error "Unknown parent '#{ref}' in link '#{linkdesc.label}'"
await athrow err, esc defer()
arg = {
subkm : key.km
sig_eng : parent.km.make_sig_eng()
parent_kid : parent.km.get_ekid().toString 'hex'
eldest_kid : @_eldest_kid
}
proof = new proofs.Subkey add_client arg
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
cb null
#-------------------
_forge_sibkey_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_sibkey_link"
await @_gen_key { obj : linkdesc, required : true }, esc defer key
signer = @_keyring.label[(ref = linkdesc.signer)]
unless signer?
err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
await athrow err, esc defer()
arg = {
sibkm : key.km
sig_eng : signer.km.make_sig_eng()
eldest_kid : @_eldest_kid
}
proof = new proofs.Sibkey add_client arg
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
cb null
#-------------------
_forge_track_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_sibkey_link"
signer = @_keyring.label[(ref = linkdesc.signer)]
unless signer?
err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
await athrow err, esc defer()
proof = new proofs.Track {
eldest_kid : @_eldest_kid
sig_eng : signer.km.make_sig_eng()
track : {"basics":{"id_version":1,"last_id_change":1424384373,"username":"t_doug"},"id":"c4c565570e7e87cafd077509abf5f619","key":{"key_fingerprint":"23f9d8552c5d419976a8efdac11869d5bc47825f","kid":"0101bdda803b93cd728b21c588c77549e5dca960d4bcc589b4b80162ecc82f3c283b0a"},"pgp_keys":[{"key_fingerprint":"23f9d8552c5d419976a8efdac11869d5bc47825f","kid":"0101bdda803b93cd728b21c588c77549e5dca960d4bcc589b4b80162ecc82f3c283b0a"}],"remote_proofs":[],"seq_tail":null}
}
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
cb null
#-------------------
_forge_btc_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_sibkey_link"
signer = @_keyring.label[(ref = linkdesc.signer)]
unless signer?
err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
await athrow err, esc defer()
arg = {
sig_eng : signer.km.make_sig_eng()
cryptocurrency :
type : "bitcoin"
address : (new btcjs.Address prng(20), 0).toBase58Check()
eldest_kid : @_eldest_kid
}
revoke = {}
if linkdesc.revoke?
await @_forge_revoke_section { revoke, linkdesc }, esc defer()
arg.revoke = revoke
proof = new proofs.Cryptocurrency add_client arg
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
cb null
#-------------------
_forge_revoke_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_sibkey_link"
signer = @_keyring.label[(ref = linkdesc.signer)]
unless signer?
err = new Error "Unknown parent '#{ref}' in link '#{linkdesc.label}'"
await athrow err, esc defer()
revoke = {}
args = {
sig_eng : signer.km.make_sig_eng(),
eldest_kid : @_eldest_kid
revoke
}
if (raw = linkdesc.revoke.raw)?
args.revoke = raw
else
await @_forge_revoke_section { linkdesc, revoke }, esc defer()
proof = new proofs.Revoke add_client args
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
cb null
#-------------------
_forge_revoke_section : ({linkdesc, revoke}, cb) ->
err = null
errs = []
if (key = linkdesc.revoke.key)?
unless (revoke.kid = @_keyring.label[key]?.get_kid())?
err = new Error "Cannot find key '#{key}' to revoke in link '#{linkdesc.label}'"
else if (arr = linkdesc.revoke.keys)?
revoke.kids = []
for a in arr
if (k = @_keyring.label[a]?.get_kid())?
revoke.kids.push k
else
errs.push "Failed to find revoke key '#{a}' in link '#{linkdesc.label}'"
else if (label = linkdesc.revoke.sig)?
unless (revoke.sig_id = @_link_tab[label]?.get_sig_id())?
err = new Error "Cannot find sig '#{label}' in link '#{linkdesc.label}'"
else if (sigs = linkdesc.revoke.sigs)?
revoke.sig_ids = []
for label in sigs
if (id = @_link_tab[label]?.get_sig_id())?
revoke.sig_ids.push id
else
errs.push "Failed to find sig '#{label}' in link '#{linkdesc.label}'"
if errs.length
err = new Error errs.join "; "
cb err
#-------------------
_forge_pgp_update_link : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_pgp_update_link"
key = @_keyring.label[linkdesc.pgp_update_key]
proof = new proofs.PGPUpdate {
sig_eng : @_keyring.label[linkdesc.signer].km.make_sig_eng()
pgpkm : key.km
eldest_kid : @_eldest_kid
}
# Remember current ekid to compare after updates. Our updates
# should not change ekid.
old_ekid = key.km.get_ekid()
lifespan = key.km.primary.lifespan
lifespan.expire_in = linkdesc.key_expire_in
lifespan.generated = @_compute_time linkdesc.generated if linkdesc.generated?
if uid = linkdesc.userid
key.km.userids[0] = new kbpgp.opkts.UserID(uid)
key.km.clear_pgp_internal_sigs()
await key.km.sign {}, esc defer()
await @_make_key { obj: linkdesc, km: key.km }, esc defer key
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
unless key.km.get_ekid().equals(old_ekid)
await athrow new Error('update failed : different ekid'), esc defer()
cb null
#-------------------
_forge_per_user_key : ({linkdesc}, cb) ->
esc = make_esc cb, "_forge_per_user_key"
signer = @_keyring.label[(ref = linkdesc.signer)]
await @_gen_key { obj: {label: linkdesc.label + '_enc', key: {gen: 'dh'}}, required: true}, esc defer ekm
await @_gen_key { obj: {label: linkdesc.label + '_sig', key: {gen: 'eddsa'}}, required: true}, esc defer skm
arg =
user :
local :
uid : @_uid
username : @_username
host : "keybase.io"
sig_eng : signer.km.make_sig_eng()
seqno : 0
prev : null
arg.kms =
encryption : ekm.km
signing : skm.km
arg.generation = 1
proof = new proofs.PerUserKey add_client arg
await @_sign_and_commit_link { linkdesc, proof }, esc defer()
cb null
#-------------------
_sign_and_commit_link : ({linkdesc, proof}, cb) ->
esc = make_esc cb, "_sign_and_commit_link"
@_populate_proof { linkdesc, proof }
await generate_proof { links: @_link_tab, proof, linkdesc }, esc defer generate_res
link = new Link { linkdesc, proof, generate_res }
@_prev = link.get_payload_hash()
@_links.push link
@_link_tab[linkdesc.label] = link
cb null
#-------------------
get_chain : () -> @chain
#-------------------
forge : (cb) ->
esc = make_esc cb, "Forge::forge"
await @_init esc defer()
if @chain.keys?
for name, parts of @chain.keys
if parts.gen
await @_gen_key { obj: parts.gen }, esc defer()
else
await kbpgp.KeyManager.import_from_armored_pgp { armored : parts.public }, esc defer km
await km.merge_pgp_private { armored : parts.private }, esc defer()
k = new Key { km, ctime : @_compute_now(), expire_in : @_expire_in }
@_keyring.bundles.push parts.public
@_keyring.label[name] = k
for linkdesc in @get_chain().links
await @_forge_link { linkdesc }, esc defer out
label_kids = {}
for label, key of @_keyring.label
label_kids[label] = key.km.get_ekid().toString "hex"
label_sigs = {}
for label, link of @_link_tab
label_sigs[label] = link.get_sig_id()
ret =
chain : (link.to_json() for link in @_links)
keys : @_keyring.to_json()
uid : @_uid
username : @_username
label_kids : label_kids
label_sigs : label_sigs
cb null, ret
#===================================================
| 158456 |
{make_esc} = require 'iced-error'
{athrow,akatch,unix_time} = require('iced-utils').util
kbpgp = require 'kbpgp'
proofs = require 'keybase-proofs'
constants = proofs.constants
{prng,createHash} = require 'crypto'
btcjs = require 'keybase-bitcoinjs-lib'
pgp_utils = require('pgp-utils')
{json_stringify_sorted} = pgp_utils.util
#===================================================
UID_HEX_LEN = 32
UID_SUFFIX = "19"
username_to_uid = (un) ->
hashlen = UID_HEX_LEN - 2
return createHash('sha256').update(un).digest('hex').slice(0, hashlen) + UID_SUFFIX
#===================================================
add_client = (arg) ->
arg.client = {
version : "5.2.30"
name : "keybase.io go client"
}
arg
#===================================================
# most of this copy-pasted from keybase-proofs, but I didn't want to
# introduce this code into that repo, since it's only for crafting
# malicious proofs -- MK 2017/4/3
generate_v2_with_corruption = ({links,proof, opts, hooks}, cb) ->
esc = make_esc cb, "generate"
out = null
await proof._v_generate {}, esc defer()
generate_inner_arg = { version : 2 }
hooks.pre_generate_inner? { generate_inner_arg }
await proof.generate_json generate_inner_arg, esc defer s, o
inner = { str : s, obj : o }
hooks.pre_generate_outer? { proof, inner }
await proof.generate_outer { inner }, esc defer outer, outer_unpacked
res = {}
hooks.post_generate_outer? { links, proof, outer, inner, res }
outer = res.outer if res.outer?
await proof.sig_eng.box outer, esc(defer({pgp, raw, armored})), { dohash : true }
hooks.corrupt_box? { inner, outer, pgp, raw, armored }
{short_id, id} = proofs.make_ids raw
out = { inner, outer, pgp, raw, armored, short_id, id, links, outer_unpacked }
hooks.corrupt_ids? out
cb null, out
#===================================================
# Unlike v2 corrupt hooks, these hooks only accept the object, not the
# json str for obj/str consistency.
generate_v1_with_corruption = ({links,proof,opts,hooks}, cb) ->
esc = make_esc cb, "generate"
out = null
opts = version : constants.versions.sig_v1
await proof._v_generate opts, esc defer()
corrupt_reverse_sig = null
if hooks?.corrupt_for_reverse_signature?
await proof.generate_json opts, esc defer _, json_obj
hooks.corrupt_for_reverse_signature {obj : json_obj}
corrupt_json = json_stringify_sorted json_obj
sigeng = proof.get_new_km().make_sig_eng()
await sigeng.box corrupt_json, esc defer {armored, type}
corrupt_reverse_sig = armored
await proof.generate_json opts, esc defer json, json_obj
if hooks?.corrupt_for_reverse_signature?
json_obj.body[hooks.corrupt_key_section].reverse_sig = corrupt_reverse_sig
json = json_stringify_sorted json_obj
inner = { str : json, obj : json_obj }
await proof.sig_eng.box json, esc(defer({pgp, raw, armored})), { dohash : true}
{short_id, id} = proofs.make_ids raw
out = { pgp, json, id, short_id, raw, armored, inner }
cb null, out
#===================================================
generate_proof = ({links, proof, linkdesc}, cb) ->
if (hooks = linkdesc.corrupt_v2_proof_hooks)?
generate_v2_with_corruption { links, proof, opts : {}, hooks }, cb
else if (hooks = linkdesc.corrupt_v1_proof_hooks)? # v1 hooks
generate_v1_with_corruption { links, proof, opts : {}, hooks }, cb
else
proof.generate_versioned { version : linkdesc.version, dohash : true }, cb
#===================================================
class Key
constructor : ({@km, @expire_in, @ctime, @revoked_at}) ->
get_kid : () -> @km.get_ekid().toString 'hex'
#===================================================
# Fixed suffix appended to the raw sig id to form the full sig_id.
SIG_ID_SUFFIX = "0f"
# One forged chain link: pairs the linkdesc/proof with the signature
# generation result and renders the server-style JSON for it.
class Link
  constructor : ( {@linkdesc, @proof, @generate_res}) ->
  # Inner payload JSON: the v1 `json` field, falling back to the v2 inner string.
  inner_payload_json_str : () -> @generate_res.json or @generate_res.inner.str
  # sha256 over the v2 outer blob when present, else over the inner JSON.
  get_payload_hash : (enc = 'hex') ->
    createHash('sha256').update(@generate_res.outer or @inner_payload_json_str()).digest(enc)
  get_sig_id : () -> @generate_res.id + SIG_ID_SUFFIX
  # Full (unstubbed) JSON form of the link.
  to_json_full : () -> {
    seqno : @proof.seqno
    prev : @proof.prev
    sig : @generate_res.armored
    payload_hash : @get_payload_hash()
    sig_id : @get_sig_id()
    payload_json : @inner_payload_json_str()
    kid: @proof.sig_eng.get_km().get_ekid().toString("hex")
    ctime: @proof.ctime
    sig_version : @linkdesc.version
  }
  # v2 links marked `stubbed` render only the outer blob.
  to_json : () ->
    if (@linkdesc.version is 2) and @linkdesc.stubbed then @to_json_stubbed()
    else @to_json_full()
  to_json_stubbed : () -> {
    s2 : @generate_res.outer.toString('base64')
  }
#===================================================
# Collects exported public key bundles (in generation order) plus a
# label -> Key map for lookups while forging.
class Keyring
  constructor : () ->
    @bundles = []
    @label = {}
  to_json : () ->
    # A list of bundles allows most callers to just use the first bundle as the
    # eldest key. Tests involving a chain reset will need to know what key
    # index they want, but they still won't need to hardcode the eldest key.
    # Also callers should be computing KIDs themselves, so they don't need a
    # map.
    @bundles
#===================================================
# Forge builds a synthetic Keybase sigchain from a declarative chain
# description (@chain): it generates/imports keys, forges each described
# link (optionally with deliberate corruption hooks), and renders the
# chain, keyring and label maps as JSON for sigchain tests.
exports.Forge = class Forge
  #-------------------
  constructor : ({@chain}) ->
    @_keyring = new Keyring
    @_links = []
    @_link_tab = {}    # label -> Link
    @_assertions = []
    @_time = 0
    @_start = null
    @_now = null       # simulated "current" unix time, lazily initialized
    @_expire_in = 0
    @_seqno = 1
    @_prev = null      # payload hash of the previously committed link
    @_username = null
  #-------------------
  # Lazily capture the current unix time; later calls return the cached
  # (possibly advanced) value so the whole forge run shares one clock.
  _compute_now : () ->
    @_now = unix_time() unless @_now?
    @_now
  #-------------------
  # Per-link expiration override, falling back to the chain default.
  _get_expire_in : ({obj}) -> (obj.expire_in or @_expire_in)
  #-------------------
  # Wrap `km` in a Key, export + record its public bundle, and index it
  # in the keyring under obj.label.
  _make_key : ({km, obj}, cb) ->
    esc = make_esc cb, "_make_key"
    k = new Key { km, ctime : @_compute_now(), expire_in : @_get_expire_in({obj}) }
    await km.export_public { regen: true }, esc defer bundle
    @_keyring.bundles.push(bundle)
    @_keyring.label[obj.label] = k
    cb null, k
  #-------------------
  _compute_time_or_default : (linkdesc, field) ->
    if field?
      @_compute_time(field)
    else
      linkdesc.ctime
  #-------------------
  # Parse a time spec: "now", "+N"/"-N"/"N" strings, or {sum: [terms...]}.
  _compute_time : (o, advance=false) ->
    # Only advance time if `advance` argument is true. We want to only
    # advance time when processing links' ctime, not every time we
    # deal with a time field, so one link advances time at most one
    # time.
    ret = if typeof(o) is 'string'
      if o is 'now' then @_compute_now()
      else if not (m = o.match /^([\+-])?(\d+)$/) then null
      else if m[1]?
        if m[1] == '+'
          tmp = @_compute_now() + parseInt(m[2])
          @_now = tmp if advance
        else
          tmp = @_compute_now() - parseInt(m[2])
        tmp
      else
        tmp = parseInt(m[2])
        @_now = tmp if advance
        tmp
    else if typeof(o) isnt 'object' then null
    else if o.sum?
      sum = 0
      for term in o.sum
        sum += @_compute_time(term)
      sum
    else null
    throw new Error "bad time: #{JSON.stringify o}" unless ret?
    ret
  #-------------------
  # Seed start time, default expiration (~10 years), username and uid
  # from the chain description.
  _init : (cb) ->
    try
      @_start = if (t = @get_chain().ctime)? then @_compute_time(t, true) else @_compute_now()
      @_expire_in = @get_chain().expire_in or 60*60*24*364*10
      @_username = @get_chain().user or "tester_ralph"
      @_uid = @get_chain().uid or username_to_uid @_username
    catch e
      err = e
    cb err
  #-------------------
  # Normalize ctime/version on the linkdesc, then dispatch on link type.
  _forge_link : ({linkdesc}, cb) ->
    # Compute time at the very beginning of link forging. Other
    # parameters of the link might want to use "current time".
    linkdesc.ctime = if (t = linkdesc.ctime)? then @_compute_time(t, true) else @_compute_now()
    # Use v=1 by default, but allow for v=2 and whatever else
    linkdesc.version = if (v = linkdesc.version)? then v else 1
    switch linkdesc.type
      when 'eldest' then @_forge_eldest_link {linkdesc}, cb
      when 'subkey' then @_forge_subkey_link {linkdesc}, cb
      when 'sibkey' then @_forge_sibkey_link {linkdesc}, cb
      when 'revoke' then @_forge_revoke_link {linkdesc}, cb
      when 'track' then @_forge_track_link {linkdesc}, cb
      when 'pgp_update' then @_forge_pgp_update_link {linkdesc}, cb
      when 'btc' then @_forge_btc_link {linkdesc}, cb
      when 'per_user_key' then @_forge_per_user_key {linkdesc}, cb
      else cb (new Error "unhandled link type: #{linkdesc.type}"), null
  #-------------------
  # Generate a key per obj.key.gen ('eddsa' | 'dh' | 'pgp_rsa' | 'pgp_ecc')
  # and register it in the keyring. Errors if `required` and no gen given.
  _gen_key : ({obj, required}, cb) ->
    userid = obj.userid or @_username
    esc = make_esc cb, "_gen_key"
    if (typ = obj.key?.gen)?
      switch typ
        when 'eddsa'
          await kbpgp.kb.KeyManager.generate {}, esc defer km
        when 'dh'
          await kbpgp.kb.EncKeyManager.generate {}, esc defer km
        when 'pgp_rsa'
          await kbpgp.KeyManager.generate_rsa { userid : userid }, esc defer km
          await km.sign {}, esc defer()
        when 'pgp_ecc'
          t = @_compute_time_or_default obj, obj.key.generated
          await kbpgp.KeyManager.generate_ecc { userid : userid, generated: t, expire_in: { primary: obj.key.expire_in } }, esc defer km
          await km.sign {}, esc defer()
        else
          await athrow (new Error "unknown key type: #{typ}"), defer()
    else if required
      await athrow (new Error "Required to generate key but none found"), defer()
    key = null
    if km?
      await @_make_key {km, obj}, esc defer key
    cb null, key
  #-------------------
  # Fill in the common proof fields (seqno, prev, user, ctime, etc.)
  # shared by every link type.
  _populate_proof : ({linkdesc, proof}) ->
    proof.seqno = linkdesc.seqno or @_seqno++
    proof.prev = linkdesc.prev or @_prev
    proof.host = "keybase.io"
    proof.user =
      local :
        uid : linkdesc.uid or @_uid
        username : linkdesc.username or @_username
    proof.seq_type = proofs.constants.seq_types.PUBLIC
    proof.ctime = linkdesc.ctime # Was already converted to "real time" in _forge_link
    proof.expire_in = @_get_expire_in { obj : linkdesc }
    proof.ignore_if_unsupported = linkdesc.ignore_if_unsupported
  #-------------------
  _forge_eldest_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_eldest_link"
    await @_gen_key { obj : linkdesc, required : true }, esc defer key
    proof = new proofs.Eldest add_client {
      sig_eng : key.km.make_sig_eng()
    }
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    @_eldest_kid = key.km.get_ekid().toString 'hex'
    cb null
  #-------------------
  _forge_subkey_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_subkey_link"
    await @_gen_key { obj : linkdesc, required : true }, esc defer key
    parent = @_keyring.label[(ref = linkdesc.parent)]
    unless parent?
      err = new Error "Unknown parent '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    arg = {
      subkm : key.km
      sig_eng : parent.km.make_sig_eng()
      parent_kid : parent.km.get_ekid().toString 'hex'
      eldest_kid : @_eldest_kid
    }
    proof = new proofs.Subkey add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_sibkey_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_sibkey_link"
    await @_gen_key { obj : linkdesc, required : true }, esc defer key
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    arg = {
      sibkm : key.km
      sig_eng : signer.km.make_sig_eng()
      eldest_kid : @_eldest_kid
    }
    proof = new proofs.Sibkey add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_track_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_track_link" # was "_forge_sibkey_link" (copy-paste)
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    proof = new proofs.Track {
      eldest_kid : @_eldest_kid
      sig_eng : signer.km.make_sig_eng()
      # NOTE(review): the key/pgp_keys fields below were mangled by a
      # redaction pass ("<KEY>" markers); restore real fixture fingerprints
      # if any test depends on their contents.
      track : {"basics":{"id_version":1,"last_id_change":1424384373,"username":"t_doug"},"id":"c4c565570e7e87cafd077509abf5f619","key":{"<KEY> <KEY>","<KEY>":"<KEY>"},"pgp_keys":[{"<KEY> <KEY>","<KEY>":"<KEY>"}],"remote_proofs":[],"seq_tail":null}
    }
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_btc_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_btc_link" # was "_forge_sibkey_link" (copy-paste)
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    arg = {
      sig_eng : signer.km.make_sig_eng()
      cryptocurrency :
        type : "bitcoin"
        address : (new btcjs.Address prng(20), 0).toBase58Check()
      eldest_kid : @_eldest_kid
    }
    revoke = {}
    if linkdesc.revoke?
      await @_forge_revoke_section { revoke, linkdesc }, esc defer()
      arg.revoke = revoke
    proof = new proofs.Cryptocurrency add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_revoke_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_revoke_link" # was "_forge_sibkey_link" (copy-paste)
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      # ref comes from linkdesc.signer, so say "signer" (was "parent").
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    revoke = {}
    args = {
      sig_eng : signer.km.make_sig_eng(),
      eldest_kid : @_eldest_kid
      revoke
    }
    if (raw = linkdesc.revoke.raw)?
      args.revoke = raw
    else
      await @_forge_revoke_section { linkdesc, revoke }, esc defer()
    proof = new proofs.Revoke add_client args
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  # Resolve linkdesc.revoke (key | keys | sig | sigs) into kids/sig_ids,
  # mutating `revoke` in place; errors on any unknown label.
  _forge_revoke_section : ({linkdesc, revoke}, cb) ->
    err = null
    errs = []
    if (key = linkdesc.revoke.key)?
      unless (revoke.kid = @_keyring.label[key]?.get_kid())?
        err = new Error "Cannot find key '#{key}' to revoke in link '#{linkdesc.label}'"
    else if (arr = linkdesc.revoke.keys)?
      revoke.kids = []
      for a in arr
        if (k = @_keyring.label[a]?.get_kid())?
          revoke.kids.push k
        else
          errs.push "Failed to find revoke key '#{a}' in link '#{linkdesc.label}'"
    else if (label = linkdesc.revoke.sig)?
      unless (revoke.sig_id = @_link_tab[label]?.get_sig_id())?
        err = new Error "Cannot find sig '#{label}' in link '#{linkdesc.label}'"
    else if (sigs = linkdesc.revoke.sigs)?
      revoke.sig_ids = []
      for label in sigs
        if (id = @_link_tab[label]?.get_sig_id())?
          revoke.sig_ids.push id
        else
          errs.push "Failed to find sig '#{label}' in link '#{linkdesc.label}'"
    if errs.length
      err = new Error errs.join "; "
    cb err
  #-------------------
  # Re-sign an existing PGP key (new expiration / generated time / userid)
  # without changing its ekid, and commit a pgp_update link for it.
  _forge_pgp_update_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_pgp_update_link"
    key = @_keyring.label[linkdesc.pgp_update_key]
    proof = new proofs.PGPUpdate {
      sig_eng : @_keyring.label[linkdesc.signer].km.make_sig_eng()
      pgpkm : key.km
      eldest_kid : @_eldest_kid
    }
    # Remember current ekid to compare after updates. Our updates
    # should not change ekid.
    old_ekid = key.km.get_ekid()
    lifespan = key.km.primary.lifespan
    lifespan.expire_in = linkdesc.key_expire_in
    lifespan.generated = @_compute_time linkdesc.generated if linkdesc.generated?
    if uid = linkdesc.userid
      key.km.userids[0] = new kbpgp.opkts.UserID(uid)
    key.km.clear_pgp_internal_sigs()
    await key.km.sign {}, esc defer()
    await @_make_key { obj: linkdesc, km: key.km }, esc defer key
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    unless key.km.get_ekid().equals(old_ekid)
      await athrow new Error('update failed : different ekid'), esc defer()
    cb null
  #-------------------
  # Generate a per-user key pair (DH encryption + EdDSA signing) and
  # commit a per_user_key link signed by `linkdesc.signer`.
  _forge_per_user_key : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_per_user_key"
    signer = @_keyring.label[(ref = linkdesc.signer)]
    # Guard against a bad signer label, consistent with the other forgers
    # (previously this fell through to a TypeError on make_sig_eng).
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    await @_gen_key { obj: {label: linkdesc.label + '_enc', key: {gen: 'dh'}}, required: true}, esc defer ekm
    # 'eddsa': the literal here had been clobbered by a redaction marker;
    # _gen_key only accepts eddsa/dh/pgp_rsa/pgp_ecc, and the signing half
    # of a per-user key is the NaCl EdDSA key.
    await @_gen_key { obj: {label: linkdesc.label + '_sig', key: {gen: 'eddsa'}}, required: true}, esc defer skm
    arg =
      user :
        local :
          uid : @_uid
          username : @_username
      host : "keybase.io"
      sig_eng : signer.km.make_sig_eng()
      seqno : 0
      prev : null
    arg.kms =
      encryption : ekm.km
      signing : skm.km
    arg.generation = 1
    proof = new proofs.PerUserKey add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  # Populate, sign, and record one link; advances @_prev to its payload hash.
  _sign_and_commit_link : ({linkdesc, proof}, cb) ->
    esc = make_esc cb, "_sign_and_commit_link"
    @_populate_proof { linkdesc, proof }
    await generate_proof { links: @_link_tab, proof, linkdesc }, esc defer generate_res
    link = new Link { linkdesc, proof, generate_res }
    @_prev = link.get_payload_hash()
    @_links.push link
    @_link_tab[linkdesc.label] = link
    cb null
  #-------------------
  get_chain : () -> @chain
  #-------------------
  # Main entry point: init, import/generate chain keys, forge every link,
  # and call back with { chain, keys, uid, username, label_kids, label_sigs }.
  forge : (cb) ->
    esc = make_esc cb, "Forge::forge"
    await @_init esc defer()
    if @chain.keys?
      for name, parts of @chain.keys
        if parts.gen
          await @_gen_key { obj: parts.gen }, esc defer()
        else
          await kbpgp.KeyManager.import_from_armored_pgp { armored : parts.public }, esc defer km
          await km.merge_pgp_private { armored : parts.private }, esc defer()
          k = new Key { km, ctime : @_compute_now(), expire_in : @_expire_in }
          @_keyring.bundles.push parts.public
          @_keyring.label[name] = k
    for linkdesc in @get_chain().links
      await @_forge_link { linkdesc }, esc defer out
    label_kids = {}
    for label, key of @_keyring.label
      label_kids[label] = key.km.get_ekid().toString "hex"
    label_sigs = {}
    for label, link of @_link_tab
      label_sigs[label] = link.get_sig_id()
    ret =
      chain : (link.to_json() for link in @_links)
      keys : @_keyring.to_json()
      uid : @_uid
      username : @_username
      label_kids : label_kids
      label_sigs : label_sigs
    cb null, ret
#===================================================
| true |
{make_esc} = require 'iced-error'
{athrow,akatch,unix_time} = require('iced-utils').util
kbpgp = require 'kbpgp'
proofs = require 'keybase-proofs'
constants = proofs.constants
{prng,createHash} = require 'crypto'
btcjs = require 'keybase-bitcoinjs-lib'
pgp_utils = require('pgp-utils')
{json_stringify_sorted} = pgp_utils.util
#===================================================
UID_HEX_LEN = 32
UID_SUFFIX = "19"
# Derive a deterministic test UID from a username: the first 30 hex chars
# of sha256(username) plus the fixed "19" suffix (UID_HEX_LEN chars total).
username_to_uid = (un) ->
  hashlen = UID_HEX_LEN - 2
  return createHash('sha256').update(un).digest('hex').slice(0, hashlen) + UID_SUFFIX
#===================================================
# Stamp a fixed client version/name onto a proof-constructor argument,
# mimicking what a real client includes; returns the same object.
add_client = (arg) ->
  arg.client = {
    version : "5.2.30"
    name : "keybase.io go client"
  }
  arg
#===================================================
# most of this copy-pasted from keybase-proofs, but I didn't want to
# introduce this code into that repo, since it's only for crafting
# malicious proofs -- MK 2017/4/3
# Generate a v2 signature over `proof`, giving the caller's hooks a chance
# to tamper at each stage: before inner generation, before/after outer
# generation, after boxing, and on the final ids. Calls back with
# { inner, outer, pgp, raw, armored, short_id, id, links, outer_unpacked }.
generate_v2_with_corruption = ({links,proof, opts, hooks}, cb) ->
  esc = make_esc cb, "generate"
  out = null
  await proof._v_generate {}, esc defer()
  generate_inner_arg = { version : 2 }
  hooks.pre_generate_inner? { generate_inner_arg }
  await proof.generate_json generate_inner_arg, esc defer s, o
  inner = { str : s, obj : o }
  hooks.pre_generate_outer? { proof, inner }
  await proof.generate_outer { inner }, esc defer outer, outer_unpacked
  res = {}
  # The hook may replace the outer blob wholesale by setting res.outer.
  hooks.post_generate_outer? { links, proof, outer, inner, res }
  outer = res.outer if res.outer?
  await proof.sig_eng.box outer, esc(defer({pgp, raw, armored})), { dohash : true }
  hooks.corrupt_box? { inner, outer, pgp, raw, armored }
  {short_id, id} = proofs.make_ids raw
  out = { inner, outer, pgp, raw, armored, short_id, id, links, outer_unpacked }
  # Last chance: hook may mutate the assembled result (ids included).
  hooks.corrupt_ids? out
  cb null, out
#===================================================
# Unlike v2 corrupt hooks, these hooks only accept the object, not the
# json str for obj/str consistency.
# Generate a v1 signature over `proof`, optionally corrupting the reverse
# signature via hooks.corrupt_for_reverse_signature. Calls back with
# { pgp, json, id, short_id, raw, armored, inner }.
generate_v1_with_corruption = ({links,proof,opts,hooks}, cb) ->
  esc = make_esc cb, "generate"
  out = null
  # NOTE(review): the incoming `opts` is discarded and rebuilt here; this
  # generator is v1-only, so the version is forced to sig_v1.
  opts = version : constants.versions.sig_v1
  await proof._v_generate opts, esc defer()
  corrupt_reverse_sig = null
  if hooks?.corrupt_for_reverse_signature?
    # First pass: render the inner JSON, let the hook mutate it, then sign
    # the mutated payload with proof.get_new_km()'s sig engine (presumably
    # the link's new key) to produce a bad reverse signature.
    await proof.generate_json opts, esc defer _, json_obj
    hooks.corrupt_for_reverse_signature {obj : json_obj}
    corrupt_json = json_stringify_sorted json_obj
    sigeng = proof.get_new_km().make_sig_eng()
    await sigeng.box corrupt_json, esc defer {armored, type}
    corrupt_reverse_sig = armored
  # Second pass: regenerate the clean JSON, then splice the corrupt
  # reverse_sig (if any) into the hook-chosen key section.
  await proof.generate_json opts, esc defer json, json_obj
  if hooks?.corrupt_for_reverse_signature?
    json_obj.body[hooks.corrupt_key_section].reverse_sig = corrupt_reverse_sig
    json = json_stringify_sorted json_obj
  inner = { str : json, obj : json_obj }
  await proof.sig_eng.box json, esc(defer({pgp, raw, armored})), { dohash : true}
  {short_id, id} = proofs.make_ids raw
  out = { pgp, json, id, short_id, raw, armored, inner }
  cb null, out
#===================================================
# Dispatch proof generation for one link: use the v2 or v1 corruption path
# when the linkdesc requests it, otherwise generate a normal versioned proof.
generate_proof = ({links, proof, linkdesc}, cb) ->
  if (hooks = linkdesc.corrupt_v2_proof_hooks)?
    generate_v2_with_corruption { links, proof, opts : {}, hooks }, cb
  else if (hooks = linkdesc.corrupt_v1_proof_hooks)? # v1 hooks
    generate_v1_with_corruption { links, proof, opts : {}, hooks }, cb
  else
    proof.generate_versioned { version : linkdesc.version, dohash : true }, cb
#===================================================
# Thin wrapper pairing a key manager with chain-relevant metadata
# (ctime, expire_in, revoked_at).
class Key
  constructor : ({@km, @expire_in, @ctime, @revoked_at}) ->
  # KID of this key: the hex-encoded ekid.
  get_kid : () -> @km.get_ekid().toString 'hex'
#===================================================
# Fixed suffix appended to the raw sig id to form the full sig_id.
SIG_ID_SUFFIX = "0f"
# One forged chain link: pairs the linkdesc/proof with the signature
# generation result and renders the server-style JSON for it.
class Link
  constructor : ( {@linkdesc, @proof, @generate_res}) ->
  # Inner payload JSON: the v1 `json` field, falling back to the v2 inner string.
  inner_payload_json_str : () -> @generate_res.json or @generate_res.inner.str
  # sha256 over the v2 outer blob when present, else over the inner JSON.
  get_payload_hash : (enc = 'hex') ->
    createHash('sha256').update(@generate_res.outer or @inner_payload_json_str()).digest(enc)
  get_sig_id : () -> @generate_res.id + SIG_ID_SUFFIX
  # Full (unstubbed) JSON form of the link.
  to_json_full : () -> {
    seqno : @proof.seqno
    prev : @proof.prev
    sig : @generate_res.armored
    payload_hash : @get_payload_hash()
    sig_id : @get_sig_id()
    payload_json : @inner_payload_json_str()
    kid: @proof.sig_eng.get_km().get_ekid().toString("hex")
    ctime: @proof.ctime
    sig_version : @linkdesc.version
  }
  # v2 links marked `stubbed` render only the outer blob.
  to_json : () ->
    if (@linkdesc.version is 2) and @linkdesc.stubbed then @to_json_stubbed()
    else @to_json_full()
  to_json_stubbed : () -> {
    s2 : @generate_res.outer.toString('base64')
  }
#===================================================
# Collects exported public key bundles (in generation order) plus a
# label -> Key map for lookups while forging.
class Keyring
  constructor : () ->
    @bundles = []
    @label = {}
  to_json : () ->
    # A list of bundles allows most callers to just use the first bundle as the
    # eldest key. Tests involving a chain reset will need to know what key
    # index they want, but they still won't need to hardcode the eldest key.
    # Also callers should be computing KIDs themselves, so they don't need a
    # map.
    @bundles
#===================================================
# Forge builds a synthetic Keybase sigchain from a declarative chain
# description (@chain): it generates/imports keys, forges each described
# link (optionally with deliberate corruption hooks), and renders the
# chain, keyring and label maps as JSON for sigchain tests.
exports.Forge = class Forge
  #-------------------
  constructor : ({@chain}) ->
    @_keyring = new Keyring
    @_links = []
    @_link_tab = {}    # label -> Link
    @_assertions = []
    @_time = 0
    @_start = null
    @_now = null       # simulated "current" unix time, lazily initialized
    @_expire_in = 0
    @_seqno = 1
    @_prev = null      # payload hash of the previously committed link
    @_username = null
  #-------------------
  # Lazily capture the current unix time; later calls return the cached
  # (possibly advanced) value so the whole forge run shares one clock.
  _compute_now : () ->
    @_now = unix_time() unless @_now?
    @_now
  #-------------------
  # Per-link expiration override, falling back to the chain default.
  _get_expire_in : ({obj}) -> (obj.expire_in or @_expire_in)
  #-------------------
  # Wrap `km` in a Key, export + record its public bundle, and index it
  # in the keyring under obj.label.
  _make_key : ({km, obj}, cb) ->
    esc = make_esc cb, "_make_key"
    k = new Key { km, ctime : @_compute_now(), expire_in : @_get_expire_in({obj}) }
    await km.export_public { regen: true }, esc defer bundle
    @_keyring.bundles.push(bundle)
    @_keyring.label[obj.label] = k
    cb null, k
  #-------------------
  _compute_time_or_default : (linkdesc, field) ->
    if field?
      @_compute_time(field)
    else
      linkdesc.ctime
  #-------------------
  # Parse a time spec: "now", "+N"/"-N"/"N" strings, or {sum: [terms...]}.
  _compute_time : (o, advance=false) ->
    # Only advance time if `advance` argument is true. We want to only
    # advance time when processing links' ctime, not every time we
    # deal with a time field, so one link advances time at most one
    # time.
    ret = if typeof(o) is 'string'
      if o is 'now' then @_compute_now()
      else if not (m = o.match /^([\+-])?(\d+)$/) then null
      else if m[1]?
        if m[1] == '+'
          tmp = @_compute_now() + parseInt(m[2])
          @_now = tmp if advance
        else
          tmp = @_compute_now() - parseInt(m[2])
        tmp
      else
        tmp = parseInt(m[2])
        @_now = tmp if advance
        tmp
    else if typeof(o) isnt 'object' then null
    else if o.sum?
      sum = 0
      for term in o.sum
        sum += @_compute_time(term)
      sum
    else null
    throw new Error "bad time: #{JSON.stringify o}" unless ret?
    ret
  #-------------------
  # Seed start time, default expiration (~10 years), username and uid
  # from the chain description.
  _init : (cb) ->
    try
      @_start = if (t = @get_chain().ctime)? then @_compute_time(t, true) else @_compute_now()
      @_expire_in = @get_chain().expire_in or 60*60*24*364*10
      @_username = @get_chain().user or "tester_ralph"
      @_uid = @get_chain().uid or username_to_uid @_username
    catch e
      err = e
    cb err
  #-------------------
  # Normalize ctime/version on the linkdesc, then dispatch on link type.
  _forge_link : ({linkdesc}, cb) ->
    # Compute time at the very beginning of link forging. Other
    # parameters of the link might want to use "current time".
    linkdesc.ctime = if (t = linkdesc.ctime)? then @_compute_time(t, true) else @_compute_now()
    # Use v=1 by default, but allow for v=2 and whatever else
    linkdesc.version = if (v = linkdesc.version)? then v else 1
    switch linkdesc.type
      when 'eldest' then @_forge_eldest_link {linkdesc}, cb
      when 'subkey' then @_forge_subkey_link {linkdesc}, cb
      when 'sibkey' then @_forge_sibkey_link {linkdesc}, cb
      when 'revoke' then @_forge_revoke_link {linkdesc}, cb
      when 'track' then @_forge_track_link {linkdesc}, cb
      when 'pgp_update' then @_forge_pgp_update_link {linkdesc}, cb
      when 'btc' then @_forge_btc_link {linkdesc}, cb
      when 'per_user_key' then @_forge_per_user_key {linkdesc}, cb
      else cb (new Error "unhandled link type: #{linkdesc.type}"), null
  #-------------------
  # Generate a key per obj.key.gen ('eddsa' | 'dh' | 'pgp_rsa' | 'pgp_ecc')
  # and register it in the keyring. Errors if `required` and no gen given.
  _gen_key : ({obj, required}, cb) ->
    userid = obj.userid or @_username
    esc = make_esc cb, "_gen_key"
    if (typ = obj.key?.gen)?
      switch typ
        when 'eddsa'
          await kbpgp.kb.KeyManager.generate {}, esc defer km
        when 'dh'
          await kbpgp.kb.EncKeyManager.generate {}, esc defer km
        when 'pgp_rsa'
          await kbpgp.KeyManager.generate_rsa { userid : userid }, esc defer km
          await km.sign {}, esc defer()
        when 'pgp_ecc'
          t = @_compute_time_or_default obj, obj.key.generated
          await kbpgp.KeyManager.generate_ecc { userid : userid, generated: t, expire_in: { primary: obj.key.expire_in } }, esc defer km
          await km.sign {}, esc defer()
        else
          await athrow (new Error "unknown key type: #{typ}"), defer()
    else if required
      await athrow (new Error "Required to generate key but none found"), defer()
    key = null
    if km?
      await @_make_key {km, obj}, esc defer key
    cb null, key
  #-------------------
  # Fill in the common proof fields (seqno, prev, user, ctime, etc.)
  # shared by every link type.
  _populate_proof : ({linkdesc, proof}) ->
    proof.seqno = linkdesc.seqno or @_seqno++
    proof.prev = linkdesc.prev or @_prev
    proof.host = "keybase.io"
    proof.user =
      local :
        uid : linkdesc.uid or @_uid
        username : linkdesc.username or @_username
    proof.seq_type = proofs.constants.seq_types.PUBLIC
    proof.ctime = linkdesc.ctime # Was already converted to "real time" in _forge_link
    proof.expire_in = @_get_expire_in { obj : linkdesc }
    proof.ignore_if_unsupported = linkdesc.ignore_if_unsupported
  #-------------------
  _forge_eldest_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_eldest_link"
    await @_gen_key { obj : linkdesc, required : true }, esc defer key
    proof = new proofs.Eldest add_client {
      sig_eng : key.km.make_sig_eng()
    }
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    @_eldest_kid = key.km.get_ekid().toString 'hex'
    cb null
  #-------------------
  _forge_subkey_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_subkey_link"
    await @_gen_key { obj : linkdesc, required : true }, esc defer key
    parent = @_keyring.label[(ref = linkdesc.parent)]
    unless parent?
      err = new Error "Unknown parent '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    arg = {
      subkm : key.km
      sig_eng : parent.km.make_sig_eng()
      parent_kid : parent.km.get_ekid().toString 'hex'
      eldest_kid : @_eldest_kid
    }
    proof = new proofs.Subkey add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_sibkey_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_sibkey_link"
    await @_gen_key { obj : linkdesc, required : true }, esc defer key
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    arg = {
      sibkm : key.km
      sig_eng : signer.km.make_sig_eng()
      eldest_kid : @_eldest_kid
    }
    proof = new proofs.Sibkey add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_track_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_track_link" # was "_forge_sibkey_link" (copy-paste)
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    proof = new proofs.Track {
      eldest_kid : @_eldest_kid
      sig_eng : signer.km.make_sig_eng()
      # NOTE(review): the key/pgp_keys fields below were mangled by a
      # redaction pass ("PI:KEY" markers); restore real fixture fingerprints
      # if any test depends on their contents.
      track : {"basics":{"id_version":1,"last_id_change":1424384373,"username":"t_doug"},"id":"c4c565570e7e87cafd077509abf5f619","key":{"PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI","PI:KEY:<KEY>END_PI":"PI:KEY:<KEY>END_PI"},"pgp_keys":[{"PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI","PI:KEY:<KEY>END_PI":"PI:KEY:<KEY>END_PI"}],"remote_proofs":[],"seq_tail":null}
    }
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_btc_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_btc_link" # was "_forge_sibkey_link" (copy-paste)
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    arg = {
      sig_eng : signer.km.make_sig_eng()
      cryptocurrency :
        type : "bitcoin"
        address : (new btcjs.Address prng(20), 0).toBase58Check()
      eldest_kid : @_eldest_kid
    }
    revoke = {}
    if linkdesc.revoke?
      await @_forge_revoke_section { revoke, linkdesc }, esc defer()
      arg.revoke = revoke
    proof = new proofs.Cryptocurrency add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  _forge_revoke_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_revoke_link" # was "_forge_sibkey_link" (copy-paste)
    signer = @_keyring.label[(ref = linkdesc.signer)]
    unless signer?
      # ref comes from linkdesc.signer, so say "signer" (was "parent").
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    revoke = {}
    args = {
      sig_eng : signer.km.make_sig_eng(),
      eldest_kid : @_eldest_kid
      revoke
    }
    if (raw = linkdesc.revoke.raw)?
      args.revoke = raw
    else
      await @_forge_revoke_section { linkdesc, revoke }, esc defer()
    proof = new proofs.Revoke add_client args
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  # Resolve linkdesc.revoke (key | keys | sig | sigs) into kids/sig_ids,
  # mutating `revoke` in place; errors on any unknown label.
  _forge_revoke_section : ({linkdesc, revoke}, cb) ->
    err = null
    errs = []
    if (key = linkdesc.revoke.key)?
      unless (revoke.kid = @_keyring.label[key]?.get_kid())?
        err = new Error "Cannot find key '#{key}' to revoke in link '#{linkdesc.label}'"
    else if (arr = linkdesc.revoke.keys)?
      revoke.kids = []
      for a in arr
        if (k = @_keyring.label[a]?.get_kid())?
          revoke.kids.push k
        else
          errs.push "Failed to find revoke key '#{a}' in link '#{linkdesc.label}'"
    else if (label = linkdesc.revoke.sig)?
      unless (revoke.sig_id = @_link_tab[label]?.get_sig_id())?
        err = new Error "Cannot find sig '#{label}' in link '#{linkdesc.label}'"
    else if (sigs = linkdesc.revoke.sigs)?
      revoke.sig_ids = []
      for label in sigs
        if (id = @_link_tab[label]?.get_sig_id())?
          revoke.sig_ids.push id
        else
          errs.push "Failed to find sig '#{label}' in link '#{linkdesc.label}'"
    if errs.length
      err = new Error errs.join "; "
    cb err
  #-------------------
  # Re-sign an existing PGP key (new expiration / generated time / userid)
  # without changing its ekid, and commit a pgp_update link for it.
  _forge_pgp_update_link : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_pgp_update_link"
    key = @_keyring.label[linkdesc.pgp_update_key]
    proof = new proofs.PGPUpdate {
      sig_eng : @_keyring.label[linkdesc.signer].km.make_sig_eng()
      pgpkm : key.km
      eldest_kid : @_eldest_kid
    }
    # Remember current ekid to compare after updates. Our updates
    # should not change ekid.
    old_ekid = key.km.get_ekid()
    lifespan = key.km.primary.lifespan
    lifespan.expire_in = linkdesc.key_expire_in
    lifespan.generated = @_compute_time linkdesc.generated if linkdesc.generated?
    if uid = linkdesc.userid
      key.km.userids[0] = new kbpgp.opkts.UserID(uid)
    key.km.clear_pgp_internal_sigs()
    await key.km.sign {}, esc defer()
    await @_make_key { obj: linkdesc, km: key.km }, esc defer key
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    unless key.km.get_ekid().equals(old_ekid)
      await athrow new Error('update failed : different ekid'), esc defer()
    cb null
  #-------------------
  # Generate a per-user key pair (DH encryption + EdDSA signing) and
  # commit a per_user_key link signed by `linkdesc.signer`.
  _forge_per_user_key : ({linkdesc}, cb) ->
    esc = make_esc cb, "_forge_per_user_key"
    signer = @_keyring.label[(ref = linkdesc.signer)]
    # Guard against a bad signer label, consistent with the other forgers
    # (previously this fell through to a TypeError on make_sig_eng).
    unless signer?
      err = new Error "Unknown signer '#{ref}' in link '#{linkdesc.label}'"
      await athrow err, esc defer()
    await @_gen_key { obj: {label: linkdesc.label + '_enc', key: {gen: 'dh'}}, required: true}, esc defer ekm
    # 'eddsa': the literal here had been clobbered by a redaction marker;
    # _gen_key only accepts eddsa/dh/pgp_rsa/pgp_ecc, and the signing half
    # of a per-user key is the NaCl EdDSA key.
    await @_gen_key { obj: {label: linkdesc.label + '_sig', key: {gen: 'eddsa'}}, required: true}, esc defer skm
    arg =
      user :
        local :
          uid : @_uid
          username : @_username
      host : "keybase.io"
      sig_eng : signer.km.make_sig_eng()
      seqno : 0
      prev : null
    arg.kms =
      encryption : ekm.km
      signing : skm.km
    arg.generation = 1
    proof = new proofs.PerUserKey add_client arg
    await @_sign_and_commit_link { linkdesc, proof }, esc defer()
    cb null
  #-------------------
  # Populate, sign, and record one link; advances @_prev to its payload hash.
  _sign_and_commit_link : ({linkdesc, proof}, cb) ->
    esc = make_esc cb, "_sign_and_commit_link"
    @_populate_proof { linkdesc, proof }
    await generate_proof { links: @_link_tab, proof, linkdesc }, esc defer generate_res
    link = new Link { linkdesc, proof, generate_res }
    @_prev = link.get_payload_hash()
    @_links.push link
    @_link_tab[linkdesc.label] = link
    cb null
  #-------------------
  get_chain : () -> @chain
  #-------------------
  # Main entry point: init, import/generate chain keys, forge every link,
  # and call back with { chain, keys, uid, username, label_kids, label_sigs }.
  forge : (cb) ->
    esc = make_esc cb, "Forge::forge"
    await @_init esc defer()
    if @chain.keys?
      for name, parts of @chain.keys
        if parts.gen
          await @_gen_key { obj: parts.gen }, esc defer()
        else
          await kbpgp.KeyManager.import_from_armored_pgp { armored : parts.public }, esc defer km
          await km.merge_pgp_private { armored : parts.private }, esc defer()
          k = new Key { km, ctime : @_compute_now(), expire_in : @_expire_in }
          @_keyring.bundles.push parts.public
          @_keyring.label[name] = k
    for linkdesc in @get_chain().links
      await @_forge_link { linkdesc }, esc defer out
    label_kids = {}
    for label, key of @_keyring.label
      label_kids[label] = key.km.get_ekid().toString "hex"
    label_sigs = {}
    for label, link of @_link_tab
      label_sigs[label] = link.get_sig_id()
    ret =
      chain : (link.to_json() for link in @_links)
      keys : @_keyring.to_json()
      uid : @_uid
      username : @_username
      label_kids : label_kids
      label_sigs : label_sigs
    cb null, ret
#===================================================
|
[
{
"context": "kg, \"package.json\"), JSON.stringify(\n author: \"Evan Lucas\"\n name: \"version-no-tags-test\"\n version: \"0",
"end": 174,
"score": 0.9998687505722046,
"start": 164,
"tag": "NAME",
"value": "Evan Lucas"
}
] | deps/npm/test/tap/version-no-tags.coffee | lxe/io.coffee | 0 |
# windows fix for locked files
setup = ->
mkdirp.sync pkg
mkdirp.sync cache
fs.writeFileSync path.resolve(pkg, "package.json"), JSON.stringify(
author: "Evan Lucas"
name: "version-no-tags-test"
version: "0.0.0"
description: "Test for git-tag-version flag"
), "utf8"
process.chdir pkg
return
common = require("../common-tap.js")
test = require("tap").test
npm = require("../../")
osenv = require("osenv")
path = require("path")
fs = require("fs")
rimraf = require("rimraf")
mkdirp = require("mkdirp")
which = require("which")
spawn = require("child_process").spawn
pkg = path.resolve(__dirname, "version-no-tags")
cache = path.resolve(pkg, "cache")
test "npm version <semver> without git tag", (t) ->
setup()
npm.load
cache: cache
registry: common.registry
, ->
which "git", (err, git) ->
tagExists = (tag, _cb) ->
child1 = spawn(git, [
"tag"
"-l"
tag
])
out = ""
child1.stdout.on "data", (d) ->
out += d.toString()
return
child1.on "exit", ->
_cb null, Boolean(~out.indexOf(tag))
return
t.ifError err, "git found on system"
child2 = spawn(git, ["init"])
child2.stdout.pipe process.stdout
child2.on "exit", ->
npm.config.set "git-tag-version", false
npm.commands.version ["patch"], (err) ->
return t.fail("Error perform version patch") if err
p = path.resolve(pkg, "package")
testPkg = require(p)
t.fail testPkg.version + " !== \"0.0.1\"" if testPkg.version isnt "0.0.1"
t.equal "0.0.1", testPkg.version
tagExists "v0.0.1", (err, exists) ->
t.ifError err, "tag found to exist"
t.equal exists, false, "git tag DOES exist"
t.pass "git tag does not exist"
t.end()
return
return
return
return
return
return
test "cleanup", (t) ->
process.chdir osenv.tmpdir()
rimraf.sync pkg
t.end()
return
| 180622 |
# windows fix for locked files
setup = ->
mkdirp.sync pkg
mkdirp.sync cache
fs.writeFileSync path.resolve(pkg, "package.json"), JSON.stringify(
author: "<NAME>"
name: "version-no-tags-test"
version: "0.0.0"
description: "Test for git-tag-version flag"
), "utf8"
process.chdir pkg
return
common = require("../common-tap.js")
test = require("tap").test
npm = require("../../")
osenv = require("osenv")
path = require("path")
fs = require("fs")
rimraf = require("rimraf")
mkdirp = require("mkdirp")
which = require("which")
spawn = require("child_process").spawn
pkg = path.resolve(__dirname, "version-no-tags")
cache = path.resolve(pkg, "cache")
test "npm version <semver> without git tag", (t) ->
setup()
npm.load
cache: cache
registry: common.registry
, ->
which "git", (err, git) ->
tagExists = (tag, _cb) ->
child1 = spawn(git, [
"tag"
"-l"
tag
])
out = ""
child1.stdout.on "data", (d) ->
out += d.toString()
return
child1.on "exit", ->
_cb null, Boolean(~out.indexOf(tag))
return
t.ifError err, "git found on system"
child2 = spawn(git, ["init"])
child2.stdout.pipe process.stdout
child2.on "exit", ->
npm.config.set "git-tag-version", false
npm.commands.version ["patch"], (err) ->
return t.fail("Error perform version patch") if err
p = path.resolve(pkg, "package")
testPkg = require(p)
t.fail testPkg.version + " !== \"0.0.1\"" if testPkg.version isnt "0.0.1"
t.equal "0.0.1", testPkg.version
tagExists "v0.0.1", (err, exists) ->
t.ifError err, "tag found to exist"
t.equal exists, false, "git tag DOES exist"
t.pass "git tag does not exist"
t.end()
return
return
return
return
return
return
test "cleanup", (t) ->
process.chdir osenv.tmpdir()
rimraf.sync pkg
t.end()
return
| true |
# windows fix for locked files
setup = ->
mkdirp.sync pkg
mkdirp.sync cache
fs.writeFileSync path.resolve(pkg, "package.json"), JSON.stringify(
author: "PI:NAME:<NAME>END_PI"
name: "version-no-tags-test"
version: "0.0.0"
description: "Test for git-tag-version flag"
), "utf8"
process.chdir pkg
return
common = require("../common-tap.js")
test = require("tap").test
npm = require("../../")
osenv = require("osenv")
path = require("path")
fs = require("fs")
rimraf = require("rimraf")
mkdirp = require("mkdirp")
which = require("which")
spawn = require("child_process").spawn
pkg = path.resolve(__dirname, "version-no-tags")
cache = path.resolve(pkg, "cache")
test "npm version <semver> without git tag", (t) ->
setup()
npm.load
cache: cache
registry: common.registry
, ->
which "git", (err, git) ->
tagExists = (tag, _cb) ->
child1 = spawn(git, [
"tag"
"-l"
tag
])
out = ""
child1.stdout.on "data", (d) ->
out += d.toString()
return
child1.on "exit", ->
_cb null, Boolean(~out.indexOf(tag))
return
t.ifError err, "git found on system"
child2 = spawn(git, ["init"])
child2.stdout.pipe process.stdout
child2.on "exit", ->
npm.config.set "git-tag-version", false
npm.commands.version ["patch"], (err) ->
return t.fail("Error perform version patch") if err
p = path.resolve(pkg, "package")
testPkg = require(p)
t.fail testPkg.version + " !== \"0.0.1\"" if testPkg.version isnt "0.0.1"
t.equal "0.0.1", testPkg.version
tagExists "v0.0.1", (err, exists) ->
t.ifError err, "tag found to exist"
t.equal exists, false, "git tag DOES exist"
t.pass "git tag does not exist"
t.end()
return
return
return
return
return
return
test "cleanup", (t) ->
process.chdir osenv.tmpdir()
rimraf.sync pkg
t.end()
return
|
[
{
"context": "SSL: false\n auth:\n user: connection.user\n pass: connection.password || \"\"\n ",
"end": 6539,
"score": 0.862105667591095,
"start": 6524,
"tag": "USERNAME",
"value": "connection.user"
},
{
"context": " user: connection.us... | lib/existdb.coffee | subugoe/atom-existdb | 10 | EXistTreeView = require './existdb-tree-view'
SymbolsView = require './symbols.js'
ImportsView = require './imports.js'
quickfix = require './quickfix.js'
Config = require './project-config'
{CompositeDisposable, Range, Emitter} = require 'atom'
request = require 'request'
Provider = require "./provider"
Sync = require './sync.js'
util = require "./util"
_path = require 'path'
cp = require 'child_process'
fs = require 'fs-plus';
$ = require 'jquery'
XQUtils = require './xquery-helper'
InScopeVariables = require './var-visitor'
VariableReferences = require './ref-visitor'
COMPILE_MSG_RE = /.*line:?\s(\d+)/i
module.exports = Existdb =
subscriptions: null
projectConfig: null
provider: undefined
# symbolsView: undefined
treeView: undefined
startTagMarker: undefined
endTagMarker: undefined
activate: (@state) ->
console.log "Activating eXistdb"
require('atom-package-deps').install("existdb").then(
() ->
console.log("Initializing provider")
)
@emitter = new Emitter()
@projectConfig = new Config()
@sync = new Sync(@projectConfig);
@sync.on("status", (message) => @updateStatus(message))
@treeView = new EXistTreeView(@state, @projectConfig)
@treeView.on("status", (msg) => @updateStatus(msg))
@provider = new Provider(@projectConfig)
@symbolsView = new SymbolsView(@projectConfig, @)
@importsView = new ImportsView(@projectConfig)
# Events subscribed to in atom's system can be easily cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable()
@tagSubscriptions = new CompositeDisposable()
# @subscriptions.add atom.commands.add 'atom-workspace', 'existdb:sync-project': =>
# p = $('.tree-view .selected').map(() ->
# if this.getPath? then this.getPath() else ''
# ).get()[0]
# console.log("sync: %o", p)
# conf = @projectConfig.getProjectConfig(p)
# @watcherControl.sync(conf) if conf?
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:run': => @run(atom.workspace.getActiveTextEditor())
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:file-symbols': => @gotoFileSymbol()
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:import-module': => @importModule()
@subscriptions.add atom.commands.add 'atom-workspace', 'existdb:toggle-tree-view': => @treeView.toggle()
@subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:rename-variable': @renameVariable
@subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:expand-selection': @expandSelection
@subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:goto-definition': =>
editor = atom.workspace.getActiveTextEditor()
pos = editor.getCursorBufferPosition()
scope = editor.scopeDescriptorForBufferPosition(pos)
if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
ast = editor.getBuffer()._ast
return unless ast?
def = XQUtils.findNode(ast, { line: pos.row, col: pos.column })
if def?
parent = def.getParent
if parent.name == "VarRef" or parent.name == "VarName"
@gotoVarDefinition(parent, editor)
else
def = XQUtils.getFunctionDefinition(editor, pos)
@gotoDefinition(def.signature, editor) if def?
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:deploy-latest': =>
regex = /\.xar$/;
dir = atom.project.getPaths()?[0]
xars = []
fs.traverseTree(dir,
(file) =>
if regex.test(file)
xars.push({ path: file, timestamp: fs.statSync(file).mtime })
(dir) => return true
=>
if xars.length > 0
xars.sort((a, b) -> b.timestamp - a.timestamp)
xar = xars[0]
atom.confirm
message: "Install Package?"
detailedMessage: "Would you like to install package " + _path.relative(dir, xar.path) + '?'
buttons:
Yes: => @treeView.deploy(xar.path)
No: ->
)
@tooltips = new CompositeDisposable
atom.workspace.observeTextEditors((editor) =>
editor.onDidChangeCursorPosition((ev) =>
# return if @editTag(editor, ev)
@markInScopeVars(editor, ev)
)
editor.getBuffer().onDidChange((ev) =>
@closeTag(ev)
# editor.getBuffer()._ast = null
)
)
@emitter.emit("activated")
deactivate: ->
@sync.destroy();
@projectConfig.destroy()
@subscriptions.dispose()
@symbolsView.destroy()
@importsView.destroy()
@treeView.destroy()
@emitter.dispose()
@tooltips.dispose()
@startTagMarker.destroy() if @startTagMarker?
@endTagMarker.destroy() if @endTagMarker?
serialize: ->
if @treeView?
@treeView.serialize()
else
@state
gotoFileSymbol: ->
editor = atom.workspace.getActiveTextEditor()
@symbolsView.show(editor)
importModule: ->
editor = atom.workspace.getActiveTextEditor()
@importsView.show(editor)
run: (editor) ->
collectionPaths = util.getCollectionPaths(editor, @projectConfig)
self = this
notifTimeout =
setTimeout(
-> atom.notifications.addInfo("Running query ..."),
500
)
@updateStatus("Running query ...")
chunk = XQUtils.getText(editor)
connection = @projectConfig.getConnection(editor, @projectConfig.activeServer)
options =
uri: "#{connection.server}/apps/atom-editor/execute"
method: "POST"
qs: { "qu": chunk.text, "base": collectionPaths.basePath, "output": "adaptive", "count": 10 }
strictSSL: false
auth:
user: connection.user
pass: connection.password || ""
sendImmediately: true
request(
options,
(error, response, body) =>
clearTimeout(notifTimeout)
@updateStatus("")
if error? or response.statusCode != 200
html = $.parseXML(body)
message = $(html).find(".description").text()
atom.notifications.addError("Query execution failed: #{$(html).find(".message").text()} (#{status})",
{ detail: message, dismissable: true })
else
promise = atom.workspace.open("query-results", { split: "down", activatePane: false })
promise.then((newEditor) ->
grammar = atom.grammars.grammarForScopeName("text.xml")
newEditor.setGrammar(grammar)
newEditor.setText(body)
elapsed = response.headers["x-elapsed"]
results = response.headers["x-result-count"]
atom.notifications.addSuccess("Query found #{results} results in #{elapsed}s")
)
)
gotoDefinition: (signature, editor) ->
if @gotoLocalDefinition(signature, editor)
return
params = util.modules(@projectConfig, editor, false)
id = editor.getBuffer().getId()
console.log("getting definitions for %s", signature)
if id.startsWith("exist:")
connection = @projectConfig.getConnection(id)
else
connection = @projectConfig.getConnection(editor)
self = this
$.ajax
url: connection.server +
"/apps/atom-editor/atom-autocomplete.xql?signature=" + encodeURIComponent(signature) + "&" +
params.join("&")
username: connection.user
password: connection.password
success: (data) ->
name = if signature.charAt(0) == "$" then signature.substring(1) else signature
for item in data
if item.name == name
path = item.path
if path.indexOf("xmldb:exist://") == 0
path = path.substring(path.indexOf("/db"))
console.log("Loading %s", path)
self.open(editor, path, (newEditor) ->
self.gotoLocalDefinition(name, newEditor)
)
return
gotoLocalDefinition: (signature, editor) ->
signature = if signature.charAt(0) == "$" then signature.substring(1) else signature
for item in util.parseLocalFunctions(editor)
if item.name == signature
editor.scrollToBufferPosition([item.line, 0])
editor.setCursorBufferPosition([item.line, 0])
return true
false
gotoVarDefinition: (reference, editor) ->
varName = XQUtils.getValue(reference)
name = varName.substring(1) if varName.charAt(0) == "$"
def = XQUtils.getVariableDef(name, reference)
if def?
editor.scrollToBufferPosition([def.pos.sl, 0])
editor.setCursorBufferPosition([def.pos.sl, def.pos.sc])
else
varName = if varName.charAt(0) == "$" then varName else "$#{varName}"
@gotoDefinition(varName, editor)
open: (editor, uri, onOpen) ->
if editor.getBuffer()._remote?
if uri.indexOf("xmldb:exist://") == 0
uri = uri.substring(uri.indexOf("/db"))
@treeView.open(path: uri, util.parseURI(editor.getBuffer().getId()).server, onOpen)
else
rootCol = "#{@projectConfig.getConfig(editor).sync.root}/"
xmldbRoot = "xmldb:exist://#{rootCol}"
if uri.indexOf(xmldbRoot) is 0
uri = uri.substring(xmldbRoot.length)
else if uri.indexOf(rootCol) is 0
uri = uri.substring(rootCol.length)
projectPath = atom.project.relativizePath(editor.getPath())[0]
uri = _path.resolve(projectPath, uri)
console.log("opening file: %s", uri)
promise = atom.workspace.open(uri)
promise.then((newEditor) -> onOpen?(newEditor))
updateStatus: (message) ->
if @busySignal?
if @busyLastMessage?
@busyLastMessage.dispose()
if (message && message != '')
@busyLastMessage = @busySignal.reportBusy(message)
closeTag: (ev) ->
editor = atom.workspace.getActiveTextEditor()
return unless editor? and ev.newText == '/' and editor.getBuffer()._ast?
grammar = editor.getGrammar()
return unless grammar.scopeName == "source.xq"
cursorPos = editor.getLastCursor().getBufferPosition()
translatedPos = cursorPos.translate([0, -2])
lastTwo = editor.getTextInBufferRange([translatedPos, cursorPos])
return unless lastTwo == '</'
node = XQUtils.findNode(editor.getBuffer()._ast, { line: ev.oldRange.start.row, col: ev.oldRange.start.column })
return unless node?
constructor = XQUtils.getAncestor("DirElemConstructor", node)
while constructor?
qname = XQUtils.findChild(constructor, "QName")
if qname?
editor.insertText(qname.value + ">")
break
constructor = XQUtils.getAncestor("DirElemConstructor", constructor)
editTag: (editor, ev) =>
reset = =>
# clear markers
@tagSubscriptions.dispose()
@startTagMarker.destroy()
@endTagMarker.destroy()
@startTagMarker = null
@endTagMarker = null
@inTag = false
false
pos = ev.cursor.getBufferPosition()
if @inTag and !(@startTagMarker.getBufferRange().containsPoint(pos) or @endTagMarker.getBufferRange().containsPoint(pos))
reset()
return false if @inTag
return false unless editor.getGrammar().scopeName == "source.xq" and editor.getBuffer()._ast?
return false if editor.hasMultipleCursors()
selRange = editor.getSelectedBufferRange()
return false unless selRange.isEmpty()
self = this
node = XQUtils.findNode(editor.getBuffer()._ast, { line: pos.row, col: pos.column })
return unless node?
if node.name == "QName" and node.getParent?.name == "DirElemConstructor"
tags = XQUtils.findChildren(node.getParent, "QName")
if tags? and tags.length == 2 and tags[0].value == tags[1].value
@inTag = true
@startTagMarker = editor.markBufferRange(new Range([tags[0].pos.sl, tags[0].pos.sc], [tags[0].pos.el, tags[0].pos.ec]))
@endTagMarker = editor.markBufferRange(new Range([tags[1].pos.sl, tags[1].pos.sc], [tags[1].pos.el, tags[1].pos.ec]))
@tagSubscriptions = new CompositeDisposable()
inChange = false
@tagSubscriptions.add(@startTagMarker.onDidChange((ev) =>
return if inChange
newTag = editor.getTextInBufferRange(@startTagMarker.getBufferRange())
# if whitespace was added: starting attribute list: reset
return reset() if /^\w+\s+/.test(newTag)
inChange = true
editor.setTextInBufferRange(@endTagMarker.getBufferRange(), newTag)
inChange = false
))
@tagSubscriptions.add(@endTagMarker.onDidChange((ev) =>
return if inChange
newTag = editor.getTextInBufferRange(@endTagMarker.getBufferRange())
inChange = true
editor.setTextInBufferRange(@startTagMarker.getBufferRange(), newTag)
inChange = false
))
return false
markInScopeVars: (editor, ev) ->
return unless editor.getGrammar().scopeName == "source.xq" and editor.getBuffer()._ast?
selRange = editor.getSelectedBufferRange()
return unless selRange.isEmpty()
for decoration in editor.getDecorations(class: "var-reference")
marker = decoration.getMarker()
marker.destroy()
scope = editor.scopeDescriptorForBufferPosition(ev.newBufferPosition)
if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
ast = editor.getBuffer()._ast
return unless ast?
node = XQUtils.findNode(ast, { line: ev.newBufferPosition.row, col: ev.newBufferPosition.column })
if node?
varName = node.value
parent = node.getParent
if parent.name in ["VarRef", "VarName", "Param"]
scope = XQUtils.getVariableScope(varName, parent)
# it might be a global variable, so scan the entire ast if scope is not set
scope ?= ast
visitor = new VariableReferences(node, scope)
vars = visitor.getReferences()
if vars?
for v in vars when v.name == varName
marker = editor.markBufferRange(v.range, persistent: false)
editor.decorateMarker(marker, type: "highlight", class: "var-reference")
renameVariable: () ->
editor = atom.workspace.getActiveTextEditor()
for decoration in editor.getDecorations(class: "var-reference")
marker = decoration.getMarker()
editor.addSelectionForBufferRange(marker.getBufferRange())
expandSelection: () ->
editor = atom.workspace.getActiveTextEditor()
ast = editor.getBuffer()._ast
return unless ast?
selRange = editor.getSelectedBufferRange()
# try to determine the ast node where the cursor is located
if selRange.isEmpty()
astNode = XQUtils.findNode(ast, { line: selRange.start.row, col: selRange.start.column })
expand = false
else
astNode = XQUtils.findNodeForRange(ast, { line: selRange.start.row, col: selRange.start.column },
{ line: selRange.end.row, col: selRange.end.column })
expand = true
if astNode
if expand
parent = astNode.getParent
while parent and (XQUtils.samePosition(astNode.pos, parent.pos) or parent.name in ["StatementsAndOptionalExpr", "LetBinding", "FunctionDecl"])
parent = parent.getParent
else
parent = astNode
if parent?
if parent.name == "AnnotatedDecl"
p = parent.getParent.children.indexOf(parent)
separator = parent.getParent.children[p + 1]
range = new Range([parent.pos.sl, parent.pos.sc], [separator.pos.el, separator.pos.ec])
else
range = new Range([parent.pos.sl, parent.pos.sc], [parent.pos.el, parent.pos.ec])
editor.setSelectedBufferRange(range)
provide: ->
return @provider
provideHyperclick: ->
self = this
providerName: 'hyperclick-xquery'
getSuggestionForWord: (editor, text, range) ->
return unless editor.getGrammar().scopeName == "source.xq"
scope = editor.scopeDescriptorForBufferPosition(range.start)
if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
ast = editor.getBuffer()._ast
return unless ast?
def = XQUtils.findNode(ast, { line: range.start.row, col: range.start.column })
if def?
parent = def.getParent
if parent.name == "VarRef" or parent.name == "VarName"
return {
range: new Range([parent.pos.sl, parent.pos.sc], [parent.pos.el, parent.pos.ec]),
callback: ->
self.gotoVarDefinition(parent, editor)
}
else
def = XQUtils.getFunctionDefinition(editor, range.end)
if def?
return {
range: def.range,
callback: ->
self.gotoDefinition(def.signature, editor)
}
else
console.log("no function found at cursor position: #{text}")
provideLinter: ->
provider =
name: 'xqlint'
grammarScopes: ['source.xq']
scope: 'file'
lintsOnChange: true
# lintOnFlyInterval: 200,
lint: (textEditor) =>
return @lintOpenFile(textEditor)
lintOpenFile: (editor) ->
chunk = XQUtils.getText(editor)
return [] unless chunk.text.length > 0 and @projectConfig
collectionPaths = util.getCollectionPaths(editor, @projectConfig)
self = this
return new Promise (resolve) ->
messages = []
self.xqlint(editor, chunk, messages)
id = editor.getBuffer().getId()
if id.startsWith("exist:")
connection = self.projectConfig.getConnection(id)
else
connection = self.projectConfig.getConnection(editor, self.treeView.getActiveServer())
$.ajax
type: "PUT"
url: connection.server + "/apps/atom-editor/compile.xql"
dataType: "json"
data: chunk.text
headers:
"X-BasePath": collectionPaths.basePath
contentType: "application/octet-stream"
username: connection.user
password: connection.password
error: (xhr, status) ->
resolve(messages)
success: (data) ->
if data.result == "fail"
error = self.parseErrMsg(data.error)
range = null
if error.line > -1
line = (error.line - chunk.prologOffset) + chunk.offset
text = editor.lineTextForBufferRow(line)
if (text?)
end = text.length
range = new Range(
[line, error.column - 1],
[line, end - 1]
)
if not range?
text = editor.lineTextForBufferRow(0)
range = new Range([0, 0], [0, text.length - 1])
message = {
severity: "error",
excerpt: error.msg,
location: {
position: range,
file: editor.getPath()
},
solutions: quickfix.getSolutions(editor, error.msg, range, self.importsView)
}
messages.push(message)
resolve(messages)
xqlint: (editor, chunk, messages) ->
if !chunk.isSnippet
xqlint = XQUtils.xqlint(editor)
markers = xqlint?.getWarnings()
errors = xqlint?.getErrors()
if errors? and errors.length > 0
console.log("errors: %o", errors)
if markers?
for marker in markers
m = /^\[(.*)].*$/.exec(marker.message)
code = if m.length == 2 then m[1] else ""
message = {
severity: "warning"
location:
file: editor.getPath()
position: new Range([marker.pos.sl, marker.pos.sc], [marker.pos.el, marker.pos.ec])
excerpt: marker.message
}
messages.push(message)
parseErrMsg: (error) ->
if error.line?
msg = error["#text"]
else
msg = error
str = COMPILE_MSG_RE.exec(msg)
line = -1
if str?
line = parseInt(str[1]) - 1
else if error.line
line = parseInt(error.line) - 1
column = error.column || 0
return { line: line, column: parseInt(column), msg: msg }
consumeBusySignal: (api) ->
@busySignal = api
@subscriptions.add(@busySignal)
| 175849 | EXistTreeView = require './existdb-tree-view'
SymbolsView = require './symbols.js'
ImportsView = require './imports.js'
quickfix = require './quickfix.js'
Config = require './project-config'
{CompositeDisposable, Range, Emitter} = require 'atom'
request = require 'request'
Provider = require "./provider"
Sync = require './sync.js'
util = require "./util"
_path = require 'path'
cp = require 'child_process'
fs = require 'fs-plus';
$ = require 'jquery'
XQUtils = require './xquery-helper'
InScopeVariables = require './var-visitor'
VariableReferences = require './ref-visitor'
COMPILE_MSG_RE = /.*line:?\s(\d+)/i
module.exports = Existdb =
subscriptions: null
projectConfig: null
provider: undefined
# symbolsView: undefined
treeView: undefined
startTagMarker: undefined
endTagMarker: undefined
activate: (@state) ->
console.log "Activating eXistdb"
require('atom-package-deps').install("existdb").then(
() ->
console.log("Initializing provider")
)
@emitter = new Emitter()
@projectConfig = new Config()
@sync = new Sync(@projectConfig);
@sync.on("status", (message) => @updateStatus(message))
@treeView = new EXistTreeView(@state, @projectConfig)
@treeView.on("status", (msg) => @updateStatus(msg))
@provider = new Provider(@projectConfig)
@symbolsView = new SymbolsView(@projectConfig, @)
@importsView = new ImportsView(@projectConfig)
# Events subscribed to in atom's system can be easily cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable()
@tagSubscriptions = new CompositeDisposable()
# @subscriptions.add atom.commands.add 'atom-workspace', 'existdb:sync-project': =>
# p = $('.tree-view .selected').map(() ->
# if this.getPath? then this.getPath() else ''
# ).get()[0]
# console.log("sync: %o", p)
# conf = @projectConfig.getProjectConfig(p)
# @watcherControl.sync(conf) if conf?
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:run': => @run(atom.workspace.getActiveTextEditor())
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:file-symbols': => @gotoFileSymbol()
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:import-module': => @importModule()
@subscriptions.add atom.commands.add 'atom-workspace', 'existdb:toggle-tree-view': => @treeView.toggle()
@subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:rename-variable': @renameVariable
@subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:expand-selection': @expandSelection
@subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:goto-definition': =>
editor = atom.workspace.getActiveTextEditor()
pos = editor.getCursorBufferPosition()
scope = editor.scopeDescriptorForBufferPosition(pos)
if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
ast = editor.getBuffer()._ast
return unless ast?
def = XQUtils.findNode(ast, { line: pos.row, col: pos.column })
if def?
parent = def.getParent
if parent.name == "VarRef" or parent.name == "VarName"
@gotoVarDefinition(parent, editor)
else
def = XQUtils.getFunctionDefinition(editor, pos)
@gotoDefinition(def.signature, editor) if def?
@subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:deploy-latest': =>
regex = /\.xar$/;
dir = atom.project.getPaths()?[0]
xars = []
fs.traverseTree(dir,
(file) =>
if regex.test(file)
xars.push({ path: file, timestamp: fs.statSync(file).mtime })
(dir) => return true
=>
if xars.length > 0
xars.sort((a, b) -> b.timestamp - a.timestamp)
xar = xars[0]
atom.confirm
message: "Install Package?"
detailedMessage: "Would you like to install package " + _path.relative(dir, xar.path) + '?'
buttons:
Yes: => @treeView.deploy(xar.path)
No: ->
)
@tooltips = new CompositeDisposable
atom.workspace.observeTextEditors((editor) =>
editor.onDidChangeCursorPosition((ev) =>
# return if @editTag(editor, ev)
@markInScopeVars(editor, ev)
)
editor.getBuffer().onDidChange((ev) =>
@closeTag(ev)
# editor.getBuffer()._ast = null
)
)
@emitter.emit("activated")
deactivate: ->
@sync.destroy();
@projectConfig.destroy()
@subscriptions.dispose()
@symbolsView.destroy()
@importsView.destroy()
@treeView.destroy()
@emitter.dispose()
@tooltips.dispose()
@startTagMarker.destroy() if @startTagMarker?
@endTagMarker.destroy() if @endTagMarker?
serialize: ->
if @treeView?
@treeView.serialize()
else
@state
gotoFileSymbol: ->
editor = atom.workspace.getActiveTextEditor()
@symbolsView.show(editor)
importModule: ->
editor = atom.workspace.getActiveTextEditor()
@importsView.show(editor)
run: (editor) ->
collectionPaths = util.getCollectionPaths(editor, @projectConfig)
self = this
notifTimeout =
setTimeout(
-> atom.notifications.addInfo("Running query ..."),
500
)
@updateStatus("Running query ...")
chunk = XQUtils.getText(editor)
connection = @projectConfig.getConnection(editor, @projectConfig.activeServer)
options =
uri: "#{connection.server}/apps/atom-editor/execute"
method: "POST"
qs: { "qu": chunk.text, "base": collectionPaths.basePath, "output": "adaptive", "count": 10 }
strictSSL: false
auth:
user: connection.user
pass: <PASSWORD> || ""
sendImmediately: true
request(
options,
(error, response, body) =>
clearTimeout(notifTimeout)
@updateStatus("")
if error? or response.statusCode != 200
html = $.parseXML(body)
message = $(html).find(".description").text()
atom.notifications.addError("Query execution failed: #{$(html).find(".message").text()} (#{status})",
{ detail: message, dismissable: true })
else
promise = atom.workspace.open("query-results", { split: "down", activatePane: false })
promise.then((newEditor) ->
grammar = atom.grammars.grammarForScopeName("text.xml")
newEditor.setGrammar(grammar)
newEditor.setText(body)
elapsed = response.headers["x-elapsed"]
results = response.headers["x-result-count"]
atom.notifications.addSuccess("Query found #{results} results in #{elapsed}s")
)
)
gotoDefinition: (signature, editor) ->
if @gotoLocalDefinition(signature, editor)
return
params = util.modules(@projectConfig, editor, false)
id = editor.getBuffer().getId()
console.log("getting definitions for %s", signature)
if id.startsWith("exist:")
connection = @projectConfig.getConnection(id)
else
connection = @projectConfig.getConnection(editor)
self = this
$.ajax
url: connection.server +
"/apps/atom-editor/atom-autocomplete.xql?signature=" + encodeURIComponent(signature) + "&" +
params.join("&")
username: connection.user
password: <PASSWORD>
success: (data) ->
name = if signature.charAt(0) == "$" then signature.substring(1) else signature
for item in data
if item.name == name
path = item.path
if path.indexOf("xmldb:exist://") == 0
path = path.substring(path.indexOf("/db"))
console.log("Loading %s", path)
self.open(editor, path, (newEditor) ->
self.gotoLocalDefinition(name, newEditor)
)
return
gotoLocalDefinition: (signature, editor) ->
signature = if signature.charAt(0) == "$" then signature.substring(1) else signature
for item in util.parseLocalFunctions(editor)
if item.name == signature
editor.scrollToBufferPosition([item.line, 0])
editor.setCursorBufferPosition([item.line, 0])
return true
false
gotoVarDefinition: (reference, editor) ->
varName = XQUtils.getValue(reference)
name = varName.substring(1) if varName.charAt(0) == "$"
def = XQUtils.getVariableDef(name, reference)
if def?
editor.scrollToBufferPosition([def.pos.sl, 0])
editor.setCursorBufferPosition([def.pos.sl, def.pos.sc])
else
varName = if varName.charAt(0) == "$" then varName else "$#{varName}"
@gotoDefinition(varName, editor)
open: (editor, uri, onOpen) ->
if editor.getBuffer()._remote?
if uri.indexOf("xmldb:exist://") == 0
uri = uri.substring(uri.indexOf("/db"))
@treeView.open(path: uri, util.parseURI(editor.getBuffer().getId()).server, onOpen)
else
rootCol = "#{@projectConfig.getConfig(editor).sync.root}/"
xmldbRoot = "xmldb:exist://#{rootCol}"
if uri.indexOf(xmldbRoot) is 0
uri = uri.substring(xmldbRoot.length)
else if uri.indexOf(rootCol) is 0
uri = uri.substring(rootCol.length)
projectPath = atom.project.relativizePath(editor.getPath())[0]
uri = _path.resolve(projectPath, uri)
console.log("opening file: %s", uri)
promise = atom.workspace.open(uri)
promise.then((newEditor) -> onOpen?(newEditor))
updateStatus: (message) ->
if @busySignal?
if @busyLastMessage?
@busyLastMessage.dispose()
if (message && message != '')
@busyLastMessage = @busySignal.reportBusy(message)
closeTag: (ev) ->
editor = atom.workspace.getActiveTextEditor()
return unless editor? and ev.newText == '/' and editor.getBuffer()._ast?
grammar = editor.getGrammar()
return unless grammar.scopeName == "source.xq"
cursorPos = editor.getLastCursor().getBufferPosition()
translatedPos = cursorPos.translate([0, -2])
lastTwo = editor.getTextInBufferRange([translatedPos, cursorPos])
return unless lastTwo == '</'
node = XQUtils.findNode(editor.getBuffer()._ast, { line: ev.oldRange.start.row, col: ev.oldRange.start.column })
return unless node?
constructor = XQUtils.getAncestor("DirElemConstructor", node)
while constructor?
qname = XQUtils.findChild(constructor, "QName")
if qname?
editor.insertText(qname.value + ">")
break
constructor = XQUtils.getAncestor("DirElemConstructor", constructor)
# Mirror-edit XML tags: when the cursor enters the name of an element
# whose start and end tag match, markers are placed on both tags so that
# editing one automatically rewrites the other. Always returns false so
# callers can chain further cursor handling.
# NOTE(review): the call site in activate() is commented out, so this
# feature currently appears disabled — confirm before relying on it.
editTag: (editor, ev) =>
    # Leave "tag editing" mode: drop both markers and their subscriptions
    reset = =>
        # clear markers
        @tagSubscriptions.dispose()
        @startTagMarker.destroy()
        @endTagMarker.destroy()
        @startTagMarker = null
        @endTagMarker = null
        @inTag = false
        false
    pos = ev.cursor.getBufferPosition()
    # Cursor moved outside both tags: stop mirroring
    if @inTag and !(@startTagMarker.getBufferRange().containsPoint(pos) or @endTagMarker.getBufferRange().containsPoint(pos))
        reset()
    return false if @inTag
    return false unless editor.getGrammar().scopeName == "source.xq" and editor.getBuffer()._ast?
    return false if editor.hasMultipleCursors()
    selRange = editor.getSelectedBufferRange()
    return false unless selRange.isEmpty()
    self = this
    node = XQUtils.findNode(editor.getBuffer()._ast, { line: pos.row, col: pos.column })
    return unless node?
    if node.name == "QName" and node.getParent?.name == "DirElemConstructor"
        tags = XQUtils.findChildren(node.getParent, "QName")
        # Only mirror when the element has exactly an open and a close tag
        # and both currently carry the same name
        if tags? and tags.length == 2 and tags[0].value == tags[1].value
            @inTag = true
            @startTagMarker = editor.markBufferRange(new Range([tags[0].pos.sl, tags[0].pos.sc], [tags[0].pos.el, tags[0].pos.ec]))
            @endTagMarker = editor.markBufferRange(new Range([tags[1].pos.sl, tags[1].pos.sc], [tags[1].pos.el, tags[1].pos.ec]))
            @tagSubscriptions = new CompositeDisposable()
            # Guard so the mirrored edit does not re-trigger the other handler
            inChange = false
            @tagSubscriptions.add(@startTagMarker.onDidChange((ev) =>
                return if inChange
                newTag = editor.getTextInBufferRange(@startTagMarker.getBufferRange())
                # if whitespace was added: starting attribute list: reset
                return reset() if /^\w+\s+/.test(newTag)
                inChange = true
                editor.setTextInBufferRange(@endTagMarker.getBufferRange(), newTag)
                inChange = false
            ))
            @tagSubscriptions.add(@endTagMarker.onDidChange((ev) =>
                return if inChange
                newTag = editor.getTextInBufferRange(@endTagMarker.getBufferRange())
                inChange = true
                editor.setTextInBufferRange(@startTagMarker.getBufferRange(), newTag)
                inChange = false
            ))
    return false
# Highlight all in-scope references of the variable under the cursor.
# Called on every cursor move (see activate()): clears previous
# highlights first, then, when the cursor sits on an XQuery variable
# name, decorates every reference found by the VariableReferences visitor.
markInScopeVars: (editor, ev) ->
    return unless editor.getGrammar().scopeName == "source.xq" and editor.getBuffer()._ast?
    selRange = editor.getSelectedBufferRange()
    return unless selRange.isEmpty()
    # Remove the highlights created for the previous cursor position
    for decoration in editor.getDecorations(class: "var-reference")
        marker = decoration.getMarker()
        marker.destroy()
    scope = editor.scopeDescriptorForBufferPosition(ev.newBufferPosition)
    if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
        ast = editor.getBuffer()._ast
        return unless ast?
        node = XQUtils.findNode(ast, { line: ev.newBufferPosition.row, col: ev.newBufferPosition.column })
        if node?
            varName = node.value
            parent = node.getParent
            if parent.name in ["VarRef", "VarName", "Param"]
                scope = XQUtils.getVariableScope(varName, parent)
                # it might be a global variable, so scan the entire ast if scope is not set
                scope ?= ast
                visitor = new VariableReferences(node, scope)
                vars = visitor.getReferences()
                if vars?
                    for v in vars when v.name == varName
                        marker = editor.markBufferRange(v.range, persistent: false)
                        editor.decorateMarker(marker, type: "highlight", class: "var-reference")
# Select every highlighted occurrence of the variable under the cursor
# (as produced by markInScopeVars) so typing renames all of them at once.
renameVariable: ->
    editor = atom.workspace.getActiveTextEditor()
    decorations = editor.getDecorations(class: "var-reference")
    editor.addSelectionForBufferRange(d.getMarker().getBufferRange()) for d in decorations
# Grow the current selection to the enclosing AST node: with an empty
# selection, select the node at the cursor; with an existing selection,
# select its parent node (skipping ancestors that cover the same
# position and pure wrapper nodes).
expandSelection: () ->
    editor = atom.workspace.getActiveTextEditor()
    ast = editor.getBuffer()._ast
    return unless ast?
    selRange = editor.getSelectedBufferRange()
    # try to determine the ast node where the cursor is located
    if selRange.isEmpty()
        astNode = XQUtils.findNode(ast, { line: selRange.start.row, col: selRange.start.column })
        expand = false
    else
        astNode = XQUtils.findNodeForRange(ast, { line: selRange.start.row, col: selRange.start.column },
            { line: selRange.end.row, col: selRange.end.column })
        expand = true
    if astNode
        if expand
            # Climb past ancestors spanning the same range and wrapper nodes
            parent = astNode.getParent
            while parent and (XQUtils.samePosition(astNode.pos, parent.pos) or parent.name in ["StatementsAndOptionalExpr", "LetBinding", "FunctionDecl"])
                parent = parent.getParent
        else
            parent = astNode
        if parent?
            if parent.name == "AnnotatedDecl"
                # Extend the range to include the separator token that
                # follows the declaration in the parent's child list
                p = parent.getParent.children.indexOf(parent)
                separator = parent.getParent.children[p + 1]
                range = new Range([parent.pos.sl, parent.pos.sc], [separator.pos.el, separator.pos.ec])
            else
                range = new Range([parent.pos.sl, parent.pos.sc], [parent.pos.el, parent.pos.ec])
            editor.setSelectedBufferRange(range)
# Autocomplete-plus service entry point: hand out the completion provider.
provide: -> @provider
# Hyperclick service entry point: returns (as an implicit object literal)
# a provider offering "jump to definition" links for variable references
# and function calls in XQuery buffers.
provideHyperclick: ->
    self = this
    providerName: 'hyperclick-xquery'
    getSuggestionForWord: (editor, text, range) ->
        return unless editor.getGrammar().scopeName == "source.xq"
        scope = editor.scopeDescriptorForBufferPosition(range.start)
        if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
            # Word is a variable name: link to the variable definition
            ast = editor.getBuffer()._ast
            return unless ast?
            def = XQUtils.findNode(ast, { line: range.start.row, col: range.start.column })
            if def?
                parent = def.getParent
                if parent.name == "VarRef" or parent.name == "VarName"
                    return {
                        range: new Range([parent.pos.sl, parent.pos.sc], [parent.pos.el, parent.pos.ec]),
                        callback: ->
                            self.gotoVarDefinition(parent, editor)
                    }
        else
            # Otherwise try to resolve a function call at this position
            def = XQUtils.getFunctionDefinition(editor, range.end)
            if def?
                return {
                    range: def.range,
                    callback: ->
                        self.gotoDefinition(def.signature, editor)
                }
            else
                console.log("no function found at cursor position: #{text}")
# Linter service entry point: describes an on-change, file-scoped linter
# for XQuery buffers, backed by lintOpenFile.
provideLinter: ->
    name: 'xqlint'
    grammarScopes: ['source.xq']
    scope: 'file'
    lintsOnChange: true
    lint: (textEditor) => @lintOpenFile(textEditor)
# Lint the editor's content: run the local xqlint checks, then send the
# source to the server for compilation and translate a compile failure
# into a linter "error" message. Resolves to the collected message list;
# returns [] immediately for empty buffers or missing configuration.
lintOpenFile: (editor) ->
    chunk = XQUtils.getText(editor)
    return [] unless chunk.text.length > 0 and @projectConfig
    collectionPaths = util.getCollectionPaths(editor, @projectConfig)
    self = this
    return new Promise (resolve) ->
        messages = []
        # Local static analysis first; warnings are appended to messages
        self.xqlint(editor, chunk, messages)
        id = editor.getBuffer().getId()
        if id.startsWith("exist:")
            connection = self.projectConfig.getConnection(id)
        else
            connection = self.projectConfig.getConnection(editor, self.treeView.getActiveServer())
        $.ajax
            type: "PUT"
            url: connection.server + "/apps/atom-editor/compile.xql"
            dataType: "json"
            data: chunk.text
            headers:
                "X-BasePath": collectionPaths.basePath
            contentType: "application/octet-stream"
            username: connection.user
            # FIX: was a broken placeholder literal; authenticate with the
            # configured connection credentials
            password: connection.password
            error: (xhr, status) ->
                # Server unreachable/failed: report only the local results
                resolve(messages)
            success: (data) ->
                if data.result == "fail"
                    error = self.parseErrMsg(data.error)
                    range = null
                    if error.line > -1
                        # Map the server-reported line back into buffer coordinates
                        line = (error.line - chunk.prologOffset) + chunk.offset
                        text = editor.lineTextForBufferRow(line)
                        if (text?)
                            end = text.length
                            range = new Range(
                                [line, error.column - 1],
                                [line, end - 1]
                            )
                    if not range?
                        # Fall back to flagging the first line of the buffer
                        text = editor.lineTextForBufferRow(0)
                        range = new Range([0, 0], [0, text.length - 1])
                    message = {
                        severity: "error",
                        excerpt: error.msg,
                        location: {
                            position: range,
                            file: editor.getPath()
                        },
                        solutions: quickfix.getSolutions(editor, error.msg, range, self.importsView)
                    }
                    messages.push(message)
                resolve(messages)
# Run the bundled xqlint static analysis over the buffer and append one
# "warning" linter message per reported marker to `messages`. Snippets
# are skipped. Parse errors are only logged here — they are surfaced by
# the server-side compile in lintOpenFile instead.
xqlint: (editor, chunk, messages) ->
    return if chunk.isSnippet
    xqlint = XQUtils.xqlint(editor)
    markers = xqlint?.getWarnings()
    errors = xqlint?.getErrors()
    if errors? and errors.length > 0
        console.log("errors: %o", errors)
    return unless markers?
    for marker in markers
        # Messages usually look like "[CODE] description"; extract the code.
        # FIX: exec() returns null when the message lacks the bracketed
        # prefix — the old `m.length` access threw a TypeError then.
        m = /^\[(.*)].*$/.exec(marker.message)
        code = if m? and m.length == 2 then m[1] else ""
        message = {
            severity: "warning"
            location:
                file: editor.getPath()
                position: new Range([marker.pos.sl, marker.pos.sc], [marker.pos.el, marker.pos.ec])
            excerpt: marker.message
        }
        messages.push(message)
# Parse an error reported by the server into { line, column, msg }.
# `line` is 0-based (-1 when no line could be determined). The server may
# report either a plain string or an object carrying `line`, `column`
# and the message under "#text".
parseErrMsg: (error) ->
    if error.line?
        msg = error["#text"]
    else
        msg = error
    str = COMPILE_MSG_RE.exec(msg)
    line = -1
    if str?
        # FIX: always pass the radix to parseInt (three call sites below)
        line = parseInt(str[1], 10) - 1
    else if error.line
        line = parseInt(error.line, 10) - 1
    column = error.column || 0
    return { line: line, column: parseInt(column, 10), msg: msg }
# Atom service hook: store the busy-signal API so updateStatus can report
# progress, and register it for disposal on package deactivation.
consumeBusySignal: (api) ->
    @busySignal = api
    @subscriptions.add(@busySignal)
| true | EXistTreeView = require './existdb-tree-view'
SymbolsView = require './symbols.js'
ImportsView = require './imports.js'
quickfix = require './quickfix.js'
Config = require './project-config'
{CompositeDisposable, Range, Emitter} = require 'atom'
request = require 'request'
Provider = require "./provider"
Sync = require './sync.js'
util = require "./util"
_path = require 'path'
cp = require 'child_process'
fs = require 'fs-plus';
$ = require 'jquery'
XQUtils = require './xquery-helper'
InScopeVariables = require './var-visitor'
VariableReferences = require './ref-visitor'
COMPILE_MSG_RE = /.*line:?\s(\d+)/i
module.exports = Existdb =
subscriptions: null
projectConfig: null
provider: undefined
# symbolsView: undefined
treeView: undefined
startTagMarker: undefined
endTagMarker: undefined
# Package activation hook. Installs package dependencies, creates the
# project configuration, sync handler, tree view and the autocomplete,
# symbol and import views, then registers commands and editor observers.
# Disposables are collected in @subscriptions for deactivate().
activate: (@state) ->
    console.log "Activating eXistdb"
    require('atom-package-deps').install("existdb").then(
        () ->
            console.log("Initializing provider")
    )
    @emitter = new Emitter()
    @projectConfig = new Config()
    @sync = new Sync(@projectConfig);
    @sync.on("status", (message) => @updateStatus(message))
    @treeView = new EXistTreeView(@state, @projectConfig)
    @treeView.on("status", (msg) => @updateStatus(msg))
    @provider = new Provider(@projectConfig)
    @symbolsView = new SymbolsView(@projectConfig, @)
    @importsView = new ImportsView(@projectConfig)
    # Events subscribed to in atom's system can be easily cleaned up with a CompositeDisposable
    @subscriptions = new CompositeDisposable()
    @tagSubscriptions = new CompositeDisposable()
    # @subscriptions.add atom.commands.add 'atom-workspace', 'existdb:sync-project': =>
    #     p = $('.tree-view .selected').map(() ->
    #         if this.getPath? then this.getPath() else ''
    #     ).get()[0]
    #     console.log("sync: %o", p)
    #     conf = @projectConfig.getProjectConfig(p)
    #     @watcherControl.sync(conf) if conf?
    @subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:run': => @run(atom.workspace.getActiveTextEditor())
    @subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:file-symbols': => @gotoFileSymbol()
    @subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:import-module': => @importModule()
    @subscriptions.add atom.commands.add 'atom-workspace', 'existdb:toggle-tree-view': => @treeView.toggle()
    @subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:rename-variable': @renameVariable
    @subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:expand-selection': @expandSelection
    # Jump to the definition of the variable or function under the cursor
    @subscriptions.add atom.commands.add 'atom-text-editor[data-grammar="source xq"]', 'existdb:goto-definition': =>
        editor = atom.workspace.getActiveTextEditor()
        pos = editor.getCursorBufferPosition()
        scope = editor.scopeDescriptorForBufferPosition(pos)
        if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
            ast = editor.getBuffer()._ast
            return unless ast?
            def = XQUtils.findNode(ast, { line: pos.row, col: pos.column })
            if def?
                parent = def.getParent
                if parent.name == "VarRef" or parent.name == "VarName"
                    @gotoVarDefinition(parent, editor)
        else
            def = XQUtils.getFunctionDefinition(editor, pos)
            @gotoDefinition(def.signature, editor) if def?
    # Find the most recently built .xar package in the project tree and
    # offer to deploy it to the server
    @subscriptions.add atom.commands.add 'atom-text-editor', 'existdb:deploy-latest': =>
        regex = /\.xar$/;
        dir = atom.project.getPaths()?[0]
        xars = []
        fs.traverseTree(dir,
            (file) =>
                if regex.test(file)
                    xars.push({ path: file, timestamp: fs.statSync(file).mtime })
            (dir) => return true
            =>
                if xars.length > 0
                    # newest package first
                    xars.sort((a, b) -> b.timestamp - a.timestamp)
                    xar = xars[0]
                    atom.confirm
                        message: "Install Package?"
                        detailedMessage: "Would you like to install package " + _path.relative(dir, xar.path) + '?'
                        buttons:
                            Yes: => @treeView.deploy(xar.path)
                            No: ->
        )
    @tooltips = new CompositeDisposable
    atom.workspace.observeTextEditors((editor) =>
        editor.onDidChangeCursorPosition((ev) =>
            # return if @editTag(editor, ev)
            @markInScopeVars(editor, ev)
        )
        editor.getBuffer().onDidChange((ev) =>
            @closeTag(ev)
            # editor.getBuffer()._ast = null
        )
    )
    @emitter.emit("activated")
# Package deactivation: tear down views, subscriptions, emitters and any
# tag-edit markers still alive.
deactivate: ->
    @sync.destroy();
    @projectConfig.destroy()
    @subscriptions.dispose()
    @symbolsView.destroy()
    @importsView.destroy()
    @treeView.destroy()
    @emitter.dispose()
    @tooltips.dispose()
    @startTagMarker.destroy() if @startTagMarker?
    @endTagMarker.destroy() if @endTagMarker?
# Package serialization hook: delegate to the tree view when present,
# otherwise hand back the previously restored state unchanged.
serialize: ->
    if @treeView? then @treeView.serialize() else @state
# Show the symbols of the active editor's file in the symbols select list.
gotoFileSymbol: ->
    @symbolsView.show(atom.workspace.getActiveTextEditor())
# Open the dialog for inserting a module import into the active editor.
importModule: ->
    @importsView.show(atom.workspace.getActiveTextEditor())
# Execute the XQuery in the given editor on the configured server and
# show the result in a "query-results" pane. A notification appears if
# the query takes longer than 500ms; on failure the server's error
# description is reported.
run: (editor) ->
    collectionPaths = util.getCollectionPaths(editor, @projectConfig)
    self = this
    # Only notify when the query takes noticeably long
    notifTimeout =
        setTimeout(
            -> atom.notifications.addInfo("Running query ..."),
            500
        )
    @updateStatus("Running query ...")
    chunk = XQUtils.getText(editor)
    connection = @projectConfig.getConnection(editor, @projectConfig.activeServer)
    options =
        uri: "#{connection.server}/apps/atom-editor/execute"
        method: "POST"
        qs: { "qu": chunk.text, "base": collectionPaths.basePath, "output": "adaptive", "count": 10 }
        strictSSL: false
        auth:
            user: connection.user
            # FIX: was a broken redaction placeholder; use the configured password
            pass: connection.password || ""
            sendImmediately: true
    request(
        options,
        (error, response, body) =>
            clearTimeout(notifTimeout)
            @updateStatus("")
            if error? or response.statusCode != 200
                html = $.parseXML(body)
                message = $(html).find(".description").text()
                # FIX: `status` was an undefined name here (it silently
                # resolved to window.status); report the actual HTTP status
                atom.notifications.addError("Query execution failed: #{$(html).find(".message").text()} (#{response?.statusCode})",
                    { detail: message, dismissable: true })
            else
                promise = atom.workspace.open("query-results", { split: "down", activatePane: false })
                promise.then((newEditor) ->
                    grammar = atom.grammars.grammarForScopeName("text.xml")
                    newEditor.setGrammar(grammar)
                    newEditor.setText(body)
                    elapsed = response.headers["x-elapsed"]
                    results = response.headers["x-result-count"]
                    atom.notifications.addSuccess("Query found #{results} results in #{elapsed}s")
                )
    )
# Jump to the definition of the function with the given signature. Tries
# the local file first; otherwise asks the server which module declares
# the function and opens that module at the definition.
gotoDefinition: (signature, editor) ->
    if @gotoLocalDefinition(signature, editor)
        return
    params = util.modules(@projectConfig, editor, false)
    id = editor.getBuffer().getId()
    console.log("getting definitions for %s", signature)
    if id.startsWith("exist:")
        connection = @projectConfig.getConnection(id)
    else
        connection = @projectConfig.getConnection(editor)
    self = this
    $.ajax
        url: connection.server +
            "/apps/atom-editor/atom-autocomplete.xql?signature=" + encodeURIComponent(signature) + "&" +
            params.join("&")
        username: connection.user
        # FIX: was a broken redaction placeholder; authenticate with the
        # configured connection credentials
        password: connection.password
        success: (data) ->
            name = if signature.charAt(0) == "$" then signature.substring(1) else signature
            for item in data
                if item.name == name
                    path = item.path
                    if path.indexOf("xmldb:exist://") == 0
                        path = path.substring(path.indexOf("/db"))
                    console.log("Loading %s", path)
                    self.open(editor, path, (newEditor) ->
                        self.gotoLocalDefinition(name, newEditor)
                    )
                    return
# Scan the current file for a function matching `signature` and move the
# cursor to its declaration. Returns true when found, false otherwise.
gotoLocalDefinition: (signature, editor) ->
    name = if signature.charAt(0) == "$" then signature.substring(1) else signature
    for item in util.parseLocalFunctions(editor) when item.name == name
        editor.scrollToBufferPosition([item.line, 0])
        editor.setCursorBufferPosition([item.line, 0])
        return true
    false
# Jump to the definition of the variable referenced by the given AST
# node. Falls back to a server-side lookup via gotoDefinition when no
# in-scope definition is found.
gotoVarDefinition: (reference, editor) ->
    varName = XQUtils.getValue(reference)
    # FIX: normalize the name in both cases — previously `name` stayed
    # undefined when the reference did not start with "$"
    name = if varName.charAt(0) == "$" then varName.substring(1) else varName
    def = XQUtils.getVariableDef(name, reference)
    if def?
        editor.scrollToBufferPosition([def.pos.sl, 0])
        editor.setCursorBufferPosition([def.pos.sl, def.pos.sc])
    else
        varName = if varName.charAt(0) == "$" then varName else "$#{varName}"
        @gotoDefinition(varName, editor)
open: (editor, uri, onOpen) ->
if editor.getBuffer()._remote?
if uri.indexOf("xmldb:exist://") == 0
uri = uri.substring(uri.indexOf("/db"))
@treeView.open(path: uri, util.parseURI(editor.getBuffer().getId()).server, onOpen)
else
rootCol = "#{@projectConfig.getConfig(editor).sync.root}/"
xmldbRoot = "xmldb:exist://#{rootCol}"
if uri.indexOf(xmldbRoot) is 0
uri = uri.substring(xmldbRoot.length)
else if uri.indexOf(rootCol) is 0
uri = uri.substring(rootCol.length)
projectPath = atom.project.relativizePath(editor.getPath())[0]
uri = _path.resolve(projectPath, uri)
console.log("opening file: %s", uri)
promise = atom.workspace.open(uri)
promise.then((newEditor) -> onOpen?(newEditor))
updateStatus: (message) ->
if @busySignal?
if @busyLastMessage?
@busyLastMessage.dispose()
if (message && message != '')
@busyLastMessage = @busySignal.reportBusy(message)
closeTag: (ev) ->
editor = atom.workspace.getActiveTextEditor()
return unless editor? and ev.newText == '/' and editor.getBuffer()._ast?
grammar = editor.getGrammar()
return unless grammar.scopeName == "source.xq"
cursorPos = editor.getLastCursor().getBufferPosition()
translatedPos = cursorPos.translate([0, -2])
lastTwo = editor.getTextInBufferRange([translatedPos, cursorPos])
return unless lastTwo == '</'
node = XQUtils.findNode(editor.getBuffer()._ast, { line: ev.oldRange.start.row, col: ev.oldRange.start.column })
return unless node?
constructor = XQUtils.getAncestor("DirElemConstructor", node)
while constructor?
qname = XQUtils.findChild(constructor, "QName")
if qname?
editor.insertText(qname.value + ">")
break
constructor = XQUtils.getAncestor("DirElemConstructor", constructor)
editTag: (editor, ev) =>
reset = =>
# clear markers
@tagSubscriptions.dispose()
@startTagMarker.destroy()
@endTagMarker.destroy()
@startTagMarker = null
@endTagMarker = null
@inTag = false
false
pos = ev.cursor.getBufferPosition()
if @inTag and !(@startTagMarker.getBufferRange().containsPoint(pos) or @endTagMarker.getBufferRange().containsPoint(pos))
reset()
return false if @inTag
return false unless editor.getGrammar().scopeName == "source.xq" and editor.getBuffer()._ast?
return false if editor.hasMultipleCursors()
selRange = editor.getSelectedBufferRange()
return false unless selRange.isEmpty()
self = this
node = XQUtils.findNode(editor.getBuffer()._ast, { line: pos.row, col: pos.column })
return unless node?
if node.name == "QName" and node.getParent?.name == "DirElemConstructor"
tags = XQUtils.findChildren(node.getParent, "QName")
if tags? and tags.length == 2 and tags[0].value == tags[1].value
@inTag = true
@startTagMarker = editor.markBufferRange(new Range([tags[0].pos.sl, tags[0].pos.sc], [tags[0].pos.el, tags[0].pos.ec]))
@endTagMarker = editor.markBufferRange(new Range([tags[1].pos.sl, tags[1].pos.sc], [tags[1].pos.el, tags[1].pos.ec]))
@tagSubscriptions = new CompositeDisposable()
inChange = false
@tagSubscriptions.add(@startTagMarker.onDidChange((ev) =>
return if inChange
newTag = editor.getTextInBufferRange(@startTagMarker.getBufferRange())
# if whitespace was added: starting attribute list: reset
return reset() if /^\w+\s+/.test(newTag)
inChange = true
editor.setTextInBufferRange(@endTagMarker.getBufferRange(), newTag)
inChange = false
))
@tagSubscriptions.add(@endTagMarker.onDidChange((ev) =>
return if inChange
newTag = editor.getTextInBufferRange(@endTagMarker.getBufferRange())
inChange = true
editor.setTextInBufferRange(@startTagMarker.getBufferRange(), newTag)
inChange = false
))
return false
markInScopeVars: (editor, ev) ->
return unless editor.getGrammar().scopeName == "source.xq" and editor.getBuffer()._ast?
selRange = editor.getSelectedBufferRange()
return unless selRange.isEmpty()
for decoration in editor.getDecorations(class: "var-reference")
marker = decoration.getMarker()
marker.destroy()
scope = editor.scopeDescriptorForBufferPosition(ev.newBufferPosition)
if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
ast = editor.getBuffer()._ast
return unless ast?
node = XQUtils.findNode(ast, { line: ev.newBufferPosition.row, col: ev.newBufferPosition.column })
if node?
varName = node.value
parent = node.getParent
if parent.name in ["VarRef", "VarName", "Param"]
scope = XQUtils.getVariableScope(varName, parent)
# it might be a global variable, so scan the entire ast if scope is not set
scope ?= ast
visitor = new VariableReferences(node, scope)
vars = visitor.getReferences()
if vars?
for v in vars when v.name == varName
marker = editor.markBufferRange(v.range, persistent: false)
editor.decorateMarker(marker, type: "highlight", class: "var-reference")
renameVariable: () ->
editor = atom.workspace.getActiveTextEditor()
for decoration in editor.getDecorations(class: "var-reference")
marker = decoration.getMarker()
editor.addSelectionForBufferRange(marker.getBufferRange())
expandSelection: () ->
editor = atom.workspace.getActiveTextEditor()
ast = editor.getBuffer()._ast
return unless ast?
selRange = editor.getSelectedBufferRange()
# try to determine the ast node where the cursor is located
if selRange.isEmpty()
astNode = XQUtils.findNode(ast, { line: selRange.start.row, col: selRange.start.column })
expand = false
else
astNode = XQUtils.findNodeForRange(ast, { line: selRange.start.row, col: selRange.start.column },
{ line: selRange.end.row, col: selRange.end.column })
expand = true
if astNode
if expand
parent = astNode.getParent
while parent and (XQUtils.samePosition(astNode.pos, parent.pos) or parent.name in ["StatementsAndOptionalExpr", "LetBinding", "FunctionDecl"])
parent = parent.getParent
else
parent = astNode
if parent?
if parent.name == "AnnotatedDecl"
p = parent.getParent.children.indexOf(parent)
separator = parent.getParent.children[p + 1]
range = new Range([parent.pos.sl, parent.pos.sc], [separator.pos.el, separator.pos.ec])
else
range = new Range([parent.pos.sl, parent.pos.sc], [parent.pos.el, parent.pos.ec])
editor.setSelectedBufferRange(range)
provide: ->
return @provider
provideHyperclick: ->
self = this
providerName: 'hyperclick-xquery'
getSuggestionForWord: (editor, text, range) ->
return unless editor.getGrammar().scopeName == "source.xq"
scope = editor.scopeDescriptorForBufferPosition(range.start)
if scope.getScopesArray().indexOf("meta.definition.variable.name.xquery") > -1
ast = editor.getBuffer()._ast
return unless ast?
def = XQUtils.findNode(ast, { line: range.start.row, col: range.start.column })
if def?
parent = def.getParent
if parent.name == "VarRef" or parent.name == "VarName"
return {
range: new Range([parent.pos.sl, parent.pos.sc], [parent.pos.el, parent.pos.ec]),
callback: ->
self.gotoVarDefinition(parent, editor)
}
else
def = XQUtils.getFunctionDefinition(editor, range.end)
if def?
return {
range: def.range,
callback: ->
self.gotoDefinition(def.signature, editor)
}
else
console.log("no function found at cursor position: #{text}")
provideLinter: ->
provider =
name: 'xqlint'
grammarScopes: ['source.xq']
scope: 'file'
lintsOnChange: true
# lintOnFlyInterval: 200,
lint: (textEditor) =>
return @lintOpenFile(textEditor)
lintOpenFile: (editor) ->
chunk = XQUtils.getText(editor)
return [] unless chunk.text.length > 0 and @projectConfig
collectionPaths = util.getCollectionPaths(editor, @projectConfig)
self = this
return new Promise (resolve) ->
messages = []
self.xqlint(editor, chunk, messages)
id = editor.getBuffer().getId()
if id.startsWith("exist:")
connection = self.projectConfig.getConnection(id)
else
connection = self.projectConfig.getConnection(editor, self.treeView.getActiveServer())
$.ajax
type: "PUT"
url: connection.server + "/apps/atom-editor/compile.xql"
dataType: "json"
data: chunk.text
headers:
"X-BasePath": collectionPaths.basePath
contentType: "application/octet-stream"
username: connection.user
password: PI:PASSWORD:<PASSWORD>END_PI
error: (xhr, status) ->
resolve(messages)
success: (data) ->
if data.result == "fail"
error = self.parseErrMsg(data.error)
range = null
if error.line > -1
line = (error.line - chunk.prologOffset) + chunk.offset
text = editor.lineTextForBufferRow(line)
if (text?)
end = text.length
range = new Range(
[line, error.column - 1],
[line, end - 1]
)
if not range?
text = editor.lineTextForBufferRow(0)
range = new Range([0, 0], [0, text.length - 1])
message = {
severity: "error",
excerpt: error.msg,
location: {
position: range,
file: editor.getPath()
},
solutions: quickfix.getSolutions(editor, error.msg, range, self.importsView)
}
messages.push(message)
resolve(messages)
xqlint: (editor, chunk, messages) ->
if !chunk.isSnippet
xqlint = XQUtils.xqlint(editor)
markers = xqlint?.getWarnings()
errors = xqlint?.getErrors()
if errors? and errors.length > 0
console.log("errors: %o", errors)
if markers?
for marker in markers
m = /^\[(.*)].*$/.exec(marker.message)
code = if m.length == 2 then m[1] else ""
message = {
severity: "warning"
location:
file: editor.getPath()
position: new Range([marker.pos.sl, marker.pos.sc], [marker.pos.el, marker.pos.ec])
excerpt: marker.message
}
messages.push(message)
parseErrMsg: (error) ->
if error.line?
msg = error["#text"]
else
msg = error
str = COMPILE_MSG_RE.exec(msg)
line = -1
if str?
line = parseInt(str[1]) - 1
else if error.line
line = parseInt(error.line) - 1
column = error.column || 0
return { line: line, column: parseInt(column), msg: msg }
consumeBusySignal: (api) ->
@busySignal = api
@subscriptions.add(@busySignal)
|
[
{
"context": " pairs = {}\n for i in [1..10]\n key = 'fixedkey_'+ i\n value = Math.random()\n cache.",
"end": 549,
"score": 0.9259041547775269,
"start": 541,
"tag": "KEY",
"value": "fixedkey"
}
] | test/cache-test.coffee | snowyu/secondary-cache.js | 1 | isEmpty = require('abstract-object/lib/util/isEmpty')
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
Cache = require '../src/cache'
setImmediate = setImmediate || process.nextTick
chai.use(sinonChai)
# Behavioural test-suite for the two-level cache: a "fixed" first-level
# store plus an LRU second level, with optional capacity and expiry.
describe "Cache", ->
    #before (done)->
    #after (done)->
    describe "Unlimited Cache", ->
        cache = Cache()
        # Write 10 fixed entries and 10 LRU entries into `cache`;
        # returns the key->value pairs that were written.
        fillDataTo = (cache)->
            pairs = {}
            for i in [1..10]
                key = 'fixedkey_'+ i
                value = Math.random()
                cache.setFixed key, value
                pairs[key] = value
            for i in [1..10]
                key = 'key_'+ i
                value = Math.random()
                cache.set key, value
                pairs[key] = value
            pairs
        it 'should add a value to cache', ->
            value = Math.random()
            should.not.exist cache.get('key')
            cache.set 'key', value
            cache.get('key').should.be.equal value
        it 'should add to cache with expires', (done)->
            value = Math.random()
            should.not.exist cache.get('expiresKey')
            cache.set 'expiresKey', value, 50
            cache.get('expiresKey').should.be.equal value
            setTimeout ->
                should.not.exist cache.get('expiresKey')
                done()
            , 50
        it 'should update a value to cache', ->
            value = Math.random()
            oldValue = cache.get('key')
            should.exist oldValue
            cache.set 'key', value
            result = cache.get('key')
            result.should.be.equal value
            result.should.be.not.equal oldValue
        it 'should delete key in cache', ->
            cache.del("NotFind").should.be.false
            cache.set 'key', "1"
            cache.del("key").should.be.true
        it 'should clear cache', ->
            pairs = fillDataTo cache
            for k,v of pairs
                cache.get(k).should.be.equal v
            cache.clear()
            for k of pairs
                should.not.exist cache.get(k)
            return
            # NOTE(review): the early `return` above makes the forEach
            # emptiness check below unreachable — confirm this is intended.
            notEmpty = false
            cache.forEach (v,k,cache)->
                notEmpty = true
            notEmpty.should.be.false
        it 'should forEach cache', ->
            pairs = fillDataTo cache
            count = 0
            cache.forEach (v,k,cache)->
                ++count
                v.should.be.equal pairs[k]
            count.should.be.equal Object.keys(pairs).length
        it 'should emit the del event when free cache', ->
            vCache = new Cache()
            pairs = fillDataTo vCache
            count = 0
            vCache.on 'del', (k,v)->
                ++count
                v.should.be.equal pairs[k]
            vCache.free()
            count.should.be.equal Object.keys(pairs).length
        it 'should free cache', ->
            vCache = new Cache()
            pairs = fillDataTo vCache
            vCache.free()
            count = 0
            vCache.forEach (v,k,cache)->
                ++count
            count.should.be.equal 0
    describe "Unlimited Fixed Cache", ->
        cache = Cache()
        it 'should add to the first level fixed cache via .setFixed', ->
            value = Math.random()
            should.not.exist cache.get('key')
            cache.setFixed 'key', value
            cache.get('key').should.be.equal value
            cache.getFixed('key').should.be.equal value
        it 'should add to the first level fixed cache via .set with options.fixed=true', ->
            cache.clear()
            value = Math.random()
            should.not.exist cache.get('key')
            cache.set 'key', value, fixed: true
            cache.get('key').should.be.equal value
            cache.getFixed('key').should.be.equal value
        it 'should set to the first level cache if the key is exists in it', ->
            value = Math.random()
            oldValue = cache.get('key')
            should.exist oldValue
            cache.set 'key', value
            result = cache.get('key')
            result.should.be.equal value
            result.should.be.not.equal oldValue
            result = cache.getLRU 'key'
            should.not.exist result
        it 'should get to the first level cache if the key is exists both in two caches', ->
            value = Math.random()
            oldValue = cache.get('key')
            should.exist oldValue
            cache.getFixed('key').should.be.equal oldValue
            cache.setLRU 'key', value
            result = cache.getLRU 'key'
            result.should.be.equal value
            result = cache.get('key')
            result.should.not.be.equal value
            result.should.be.equal oldValue
        it 'should del to the first level cache', ->
            # first del removes the fixed entry, second removes the LRU entry
            cache.del("key").should.be.true
            cache.hasFixed("key").should.not.be.true
            cache.hasLRU("key").should.be.true
            cache.del("key").should.be.true
            cache.hasFixed("key").should.not.be.true
            cache.hasLRU("key").should.not.be.true
            cache.has("key").should.not.be.true
            cache.del("key").should.be.false
    describe "Fixed Cache with capacity", ->
        cache = Cache fixedCapacity: 2
        it 'should throw error when adding exceed fixed cache capacity', ->
            cache.setFixed 'a', 1
            cache.setFixed 'b', 2
            should.throw cache.setFixed.bind(cache, 'c', 3), /max capacity exceed/
        it 'should add to cache after deleting', ->
            cache.delFixed 'a'
            cache.setFixed 'c', 3
    describe "LRU Cache", ->
        cache = Cache(2)
        it 'should least recently set', ->
            cache.set 'a', 'A'
            cache.set 'b', 'B'
            cache.set 'c', 'C'
            cache.get('c').should.be.equal 'C'
            cache.get('b').should.be.equal 'B'
            should.not.exist cache.get('a')
        it 'should lru recently gotten', ->
            # reading 'a' promotes it, so 'b' is evicted instead
            cache.clear()
            cache.set 'a', 'A'
            cache.set 'b', 'B'
            cache.get 'a'
            cache.set 'c', 'C'
            cache.get('c').should.be.equal 'C'
            cache.get('a').should.be.equal 'A'
            should.not.exist cache.get('b')
        it 'should lru recently gotten 2', ->
            cache.clear()
            cache.set 'a', 'A'
            cache.set 'b', 'B'
            cache.set 'c', 'C'
            cache.get('c').should.be.equal 'C'
            cache.get('b').should.be.equal 'B'
            should.not.exist cache.get('a')
            cache.set 'a', 'A'
            cache.set 'b', 'B'
            cache.get 'a'
            cache.set 'c', 'C'
            cache.get('c').should.be.equal 'C'
            cache.get('a').should.be.equal 'A'
            should.not.exist cache.get('b')
    describe "Events on Fixed Cache", ->
        cache = Cache()
        it 'should listen to "add" event', (done)->
            expected = Math.random()
            cache.on 'add', (key, value)->
                key.should.be.equal 'key'
                value.should.be.equal expected
                done()
            cache.set 'key', expected, fixed: true
            cache.getFixed('key').should.be.equal expected
        it 'should listen to "update" event', (done)->
            newValue = Math.random()
            oldValue = cache.get 'key'
            should.exist oldValue
            cache.on 'update', (key, value, aOldValue)->
                key.should.be.equal 'key'
                value.should.be.equal newValue
                aOldValue.should.be.equal oldValue
                done()
            cache.set 'key', newValue
            oldValue = cache.get 'key'
            oldValue.should.be.equal newValue
            cache.getFixed('key').should.be.equal newValue
        it 'should listen to "del" event', (done)->
            oldValue = cache.get 'key'
            should.exist oldValue
            cache.on 'del', (key, value)->
                key.should.be.equal 'key'
                value.should.be.equal oldValue
                done()
            cache.del 'key'
            cache.has('key').should.be.false
            cache.hasFixed('key').should.be.false
    describe "Events on LRU Cache", ->
        cache = Cache(2)
        it 'should listen to "add" event', (done)->
            expected = Math.random()
            cache.on 'add', (key, value)->
                key.should.be.equal 'key'
                value.should.be.equal expected
                done()
            cache.set 'key', expected
        it 'should listen to "update" event', (done)->
            newValue = Math.random()
            oldValue = cache.get 'key'
            should.exist oldValue
            cache.on 'update', (key, value, aOldValue)->
                key.should.be.equal 'key'
                value.should.be.equal newValue
                aOldValue.should.be.equal oldValue
                done()
            cache.set 'key', newValue
            oldValue = cache.get 'key'
            oldValue.should.be.equal newValue
        it 'should listen to "del" event', (done)->
            oldValue = cache.get 'key'
            should.exist oldValue
            cache.on 'del', (key, value)->
                key.should.be.equal 'key'
                value.should.be.equal oldValue
                done()
            cache.del 'key'
            cache.has('key').should.be.false
    describe "MaxAge(options.expires) Cache", ->
        cache = Cache expires: 50
        it 'should expires all items', (done)->
            pairs = {}
            for i in [1..10]
                key = 'key_'+ i
                value = Math.random()
                cache.set key, value
                pairs[key] = value
            count = 0
            cache.forEach (v,k,cache)->
                ++count
                v.should.be.equal pairs[k]
            count.should.be.equal 10
            setTimeout ->
                # NOTE(review): this local isEmpty shadows the imported
                # helper of the same name — appears intentional
                isEmpty = true
                cache.forEach ->isEmpty=false
                for k,v of pairs
                    should.not.exist cache.get k
                isEmpty.should.be.true
                done()
            , 50
| 114927 | isEmpty = require('abstract-object/lib/util/isEmpty')
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
Cache = require '../src/cache'
setImmediate = setImmediate || process.nextTick
chai.use(sinonChai)
describe "Cache", ->
#before (done)->
#after (done)->
describe "Unlimited Cache", ->
cache = Cache()
fillDataTo = (cache)->
pairs = {}
for i in [1..10]
key = '<KEY>_'+ i
value = Math.random()
cache.setFixed key, value
pairs[key] = value
for i in [1..10]
key = 'key_'+ i
value = Math.random()
cache.set key, value
pairs[key] = value
pairs
it 'should add a value to cache', ->
value = Math.random()
should.not.exist cache.get('key')
cache.set 'key', value
cache.get('key').should.be.equal value
it 'should add to cache with expires', (done)->
value = Math.random()
should.not.exist cache.get('expiresKey')
cache.set 'expiresKey', value, 50
cache.get('expiresKey').should.be.equal value
setTimeout ->
should.not.exist cache.get('expiresKey')
done()
, 50
it 'should update a value to cache', ->
value = Math.random()
oldValue = cache.get('key')
should.exist oldValue
cache.set 'key', value
result = cache.get('key')
result.should.be.equal value
result.should.be.not.equal oldValue
it 'should delete key in cache', ->
cache.del("NotFind").should.be.false
cache.set 'key', "1"
cache.del("key").should.be.true
it 'should clear cache', ->
pairs = fillDataTo cache
for k,v of pairs
cache.get(k).should.be.equal v
cache.clear()
for k of pairs
should.not.exist cache.get(k)
return
notEmpty = false
cache.forEach (v,k,cache)->
notEmpty = true
notEmpty.should.be.false
it 'should forEach cache', ->
pairs = fillDataTo cache
count = 0
cache.forEach (v,k,cache)->
++count
v.should.be.equal pairs[k]
count.should.be.equal Object.keys(pairs).length
it 'should emit the del event when free cache', ->
vCache = new Cache()
pairs = fillDataTo vCache
count = 0
vCache.on 'del', (k,v)->
++count
v.should.be.equal pairs[k]
vCache.free()
count.should.be.equal Object.keys(pairs).length
it 'should free cache', ->
vCache = new Cache()
pairs = fillDataTo vCache
vCache.free()
count = 0
vCache.forEach (v,k,cache)->
++count
count.should.be.equal 0
describe "Unlimited Fixed Cache", ->
cache = Cache()
it 'should add to the first level fixed cache via .setFixed', ->
value = Math.random()
should.not.exist cache.get('key')
cache.setFixed 'key', value
cache.get('key').should.be.equal value
cache.getFixed('key').should.be.equal value
it 'should add to the first level fixed cache via .set with options.fixed=true', ->
cache.clear()
value = Math.random()
should.not.exist cache.get('key')
cache.set 'key', value, fixed: true
cache.get('key').should.be.equal value
cache.getFixed('key').should.be.equal value
it 'should set to the first level cache if the key is exists in it', ->
value = Math.random()
oldValue = cache.get('key')
should.exist oldValue
cache.set 'key', value
result = cache.get('key')
result.should.be.equal value
result.should.be.not.equal oldValue
result = cache.getLRU 'key'
should.not.exist result
it 'should get to the first level cache if the key is exists both in two caches', ->
value = Math.random()
oldValue = cache.get('key')
should.exist oldValue
cache.getFixed('key').should.be.equal oldValue
cache.setLRU 'key', value
result = cache.getLRU 'key'
result.should.be.equal value
result = cache.get('key')
result.should.not.be.equal value
result.should.be.equal oldValue
it 'should del to the first level cache', ->
cache.del("key").should.be.true
cache.hasFixed("key").should.not.be.true
cache.hasLRU("key").should.be.true
cache.del("key").should.be.true
cache.hasFixed("key").should.not.be.true
cache.hasLRU("key").should.not.be.true
cache.has("key").should.not.be.true
cache.del("key").should.be.false
describe "Fixed Cache with capacity", ->
cache = Cache fixedCapacity: 2
it 'should throw error when adding exceed fixed cache capacity', ->
cache.setFixed 'a', 1
cache.setFixed 'b', 2
should.throw cache.setFixed.bind(cache, 'c', 3), /max capacity exceed/
it 'should add to cache after deleting', ->
cache.delFixed 'a'
cache.setFixed 'c', 3
describe "LRU Cache", ->
cache = Cache(2)
it 'should least recently set', ->
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('b').should.be.equal 'B'
should.not.exist cache.get('a')
it 'should lru recently gotten', ->
cache.clear()
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.get 'a'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('a').should.be.equal 'A'
should.not.exist cache.get('b')
it 'should lru recently gotten 2', ->
cache.clear()
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('b').should.be.equal 'B'
should.not.exist cache.get('a')
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.get 'a'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('a').should.be.equal 'A'
should.not.exist cache.get('b')
describe "Events on Fixed Cache", ->
cache = Cache()
it 'should listen to "add" event', (done)->
expected = Math.random()
cache.on 'add', (key, value)->
key.should.be.equal 'key'
value.should.be.equal expected
done()
cache.set 'key', expected, fixed: true
cache.getFixed('key').should.be.equal expected
it 'should listen to "update" event', (done)->
newValue = Math.random()
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'update', (key, value, aOldValue)->
key.should.be.equal 'key'
value.should.be.equal newValue
aOldValue.should.be.equal oldValue
done()
cache.set 'key', newValue
oldValue = cache.get 'key'
oldValue.should.be.equal newValue
cache.getFixed('key').should.be.equal newValue
it 'should listen to "del" event', (done)->
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'del', (key, value)->
key.should.be.equal 'key'
value.should.be.equal oldValue
done()
cache.del 'key'
cache.has('key').should.be.false
cache.hasFixed('key').should.be.false
describe "Events on LRU Cache", ->
cache = Cache(2)
it 'should listen to "add" event', (done)->
expected = Math.random()
cache.on 'add', (key, value)->
key.should.be.equal 'key'
value.should.be.equal expected
done()
cache.set 'key', expected
it 'should listen to "update" event', (done)->
newValue = Math.random()
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'update', (key, value, aOldValue)->
key.should.be.equal 'key'
value.should.be.equal newValue
aOldValue.should.be.equal oldValue
done()
cache.set 'key', newValue
oldValue = cache.get 'key'
oldValue.should.be.equal newValue
it 'should listen to "del" event', (done)->
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'del', (key, value)->
key.should.be.equal 'key'
value.should.be.equal oldValue
done()
cache.del 'key'
cache.has('key').should.be.false
describe "MaxAge(options.expires) Cache", ->
cache = Cache expires: 50
it 'should expires all items', (done)->
pairs = {}
for i in [1..10]
key = 'key_'+ i
value = Math.random()
cache.set key, value
pairs[key] = value
count = 0
cache.forEach (v,k,cache)->
++count
v.should.be.equal pairs[k]
count.should.be.equal 10
setTimeout ->
isEmpty = true
cache.forEach ->isEmpty=false
for k,v of pairs
should.not.exist cache.get k
isEmpty.should.be.true
done()
, 50
| true | isEmpty = require('abstract-object/lib/util/isEmpty')
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
Cache = require '../src/cache'
setImmediate = setImmediate || process.nextTick
chai.use(sinonChai)
describe "Cache", ->
#before (done)->
#after (done)->
describe "Unlimited Cache", ->
cache = Cache()
fillDataTo = (cache)->
pairs = {}
for i in [1..10]
key = 'PI:KEY:<KEY>END_PI_'+ i
value = Math.random()
cache.setFixed key, value
pairs[key] = value
for i in [1..10]
key = 'key_'+ i
value = Math.random()
cache.set key, value
pairs[key] = value
pairs
it 'should add a value to cache', ->
value = Math.random()
should.not.exist cache.get('key')
cache.set 'key', value
cache.get('key').should.be.equal value
it 'should add to cache with expires', (done)->
value = Math.random()
should.not.exist cache.get('expiresKey')
cache.set 'expiresKey', value, 50
cache.get('expiresKey').should.be.equal value
setTimeout ->
should.not.exist cache.get('expiresKey')
done()
, 50
it 'should update a value to cache', ->
value = Math.random()
oldValue = cache.get('key')
should.exist oldValue
cache.set 'key', value
result = cache.get('key')
result.should.be.equal value
result.should.be.not.equal oldValue
it 'should delete key in cache', ->
cache.del("NotFind").should.be.false
cache.set 'key', "1"
cache.del("key").should.be.true
it 'should clear cache', ->
pairs = fillDataTo cache
for k,v of pairs
cache.get(k).should.be.equal v
cache.clear()
for k of pairs
should.not.exist cache.get(k)
return
notEmpty = false
cache.forEach (v,k,cache)->
notEmpty = true
notEmpty.should.be.false
it 'should forEach cache', ->
pairs = fillDataTo cache
count = 0
cache.forEach (v,k,cache)->
++count
v.should.be.equal pairs[k]
count.should.be.equal Object.keys(pairs).length
it 'should emit the del event when free cache', ->
vCache = new Cache()
pairs = fillDataTo vCache
count = 0
vCache.on 'del', (k,v)->
++count
v.should.be.equal pairs[k]
vCache.free()
count.should.be.equal Object.keys(pairs).length
it 'should free cache', ->
vCache = new Cache()
pairs = fillDataTo vCache
vCache.free()
count = 0
vCache.forEach (v,k,cache)->
++count
count.should.be.equal 0
describe "Unlimited Fixed Cache", ->
cache = Cache()
it 'should add to the first level fixed cache via .setFixed', ->
value = Math.random()
should.not.exist cache.get('key')
cache.setFixed 'key', value
cache.get('key').should.be.equal value
cache.getFixed('key').should.be.equal value
it 'should add to the first level fixed cache via .set with options.fixed=true', ->
cache.clear()
value = Math.random()
should.not.exist cache.get('key')
cache.set 'key', value, fixed: true
cache.get('key').should.be.equal value
cache.getFixed('key').should.be.equal value
it 'should set to the first level cache if the key is exists in it', ->
value = Math.random()
oldValue = cache.get('key')
should.exist oldValue
cache.set 'key', value
result = cache.get('key')
result.should.be.equal value
result.should.be.not.equal oldValue
result = cache.getLRU 'key'
should.not.exist result
it 'should get to the first level cache if the key is exists both in two caches', ->
value = Math.random()
oldValue = cache.get('key')
should.exist oldValue
cache.getFixed('key').should.be.equal oldValue
cache.setLRU 'key', value
result = cache.getLRU 'key'
result.should.be.equal value
result = cache.get('key')
result.should.not.be.equal value
result.should.be.equal oldValue
it 'should del to the first level cache', ->
cache.del("key").should.be.true
cache.hasFixed("key").should.not.be.true
cache.hasLRU("key").should.be.true
cache.del("key").should.be.true
cache.hasFixed("key").should.not.be.true
cache.hasLRU("key").should.not.be.true
cache.has("key").should.not.be.true
cache.del("key").should.be.false
describe "Fixed Cache with capacity", ->
cache = Cache fixedCapacity: 2
it 'should throw error when adding exceed fixed cache capacity', ->
cache.setFixed 'a', 1
cache.setFixed 'b', 2
should.throw cache.setFixed.bind(cache, 'c', 3), /max capacity exceed/
it 'should add to cache after deleting', ->
cache.delFixed 'a'
cache.setFixed 'c', 3
describe "LRU Cache", ->
cache = Cache(2)
it 'should least recently set', ->
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('b').should.be.equal 'B'
should.not.exist cache.get('a')
it 'should lru recently gotten', ->
cache.clear()
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.get 'a'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('a').should.be.equal 'A'
should.not.exist cache.get('b')
it 'should lru recently gotten 2', ->
cache.clear()
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('b').should.be.equal 'B'
should.not.exist cache.get('a')
cache.set 'a', 'A'
cache.set 'b', 'B'
cache.get 'a'
cache.set 'c', 'C'
cache.get('c').should.be.equal 'C'
cache.get('a').should.be.equal 'A'
should.not.exist cache.get('b')
describe "Events on Fixed Cache", ->
cache = Cache()
it 'should listen to "add" event', (done)->
expected = Math.random()
cache.on 'add', (key, value)->
key.should.be.equal 'key'
value.should.be.equal expected
done()
cache.set 'key', expected, fixed: true
cache.getFixed('key').should.be.equal expected
it 'should listen to "update" event', (done)->
newValue = Math.random()
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'update', (key, value, aOldValue)->
key.should.be.equal 'key'
value.should.be.equal newValue
aOldValue.should.be.equal oldValue
done()
cache.set 'key', newValue
oldValue = cache.get 'key'
oldValue.should.be.equal newValue
cache.getFixed('key').should.be.equal newValue
it 'should listen to "del" event', (done)->
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'del', (key, value)->
key.should.be.equal 'key'
value.should.be.equal oldValue
done()
cache.del 'key'
cache.has('key').should.be.false
cache.hasFixed('key').should.be.false
describe "Events on LRU Cache", ->
cache = Cache(2)
it 'should listen to "add" event', (done)->
expected = Math.random()
cache.on 'add', (key, value)->
key.should.be.equal 'key'
value.should.be.equal expected
done()
cache.set 'key', expected
it 'should listen to "update" event', (done)->
newValue = Math.random()
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'update', (key, value, aOldValue)->
key.should.be.equal 'key'
value.should.be.equal newValue
aOldValue.should.be.equal oldValue
done()
cache.set 'key', newValue
oldValue = cache.get 'key'
oldValue.should.be.equal newValue
it 'should listen to "del" event', (done)->
oldValue = cache.get 'key'
should.exist oldValue
cache.on 'del', (key, value)->
key.should.be.equal 'key'
value.should.be.equal oldValue
done()
cache.del 'key'
cache.has('key').should.be.false
describe "MaxAge(options.expires) Cache", ->
cache = Cache expires: 50
it 'should expires all items', (done)->
pairs = {}
for i in [1..10]
key = 'key_'+ i
value = Math.random()
cache.set key, value
pairs[key] = value
count = 0
cache.forEach (v,k,cache)->
++count
v.should.be.equal pairs[k]
count.should.be.equal 10
setTimeout ->
isEmpty = true
cache.forEach ->isEmpty=false
for k,v of pairs
should.not.exist cache.get k
isEmpty.should.be.true
done()
, 50
|
[
{
"context": "aders).to.deep.equal({\n\t\t\t'Authorization': 'Bearer secretkey'\n\t\t\t'Content-Type': 'application/x-ndjson'\n\t\t\t'Co",
"end": 1865,
"score": 0.5225074887275696,
"start": 1856,
"tag": "KEY",
"value": "secretkey"
}
] | test/12-logger.spec.coffee | hippolyt/resin-supervisor | 0 | https = require 'https'
stream = require 'stream'
zlib = require 'zlib'
Promise = require 'bluebird'
m = require 'mochainon'
{ expect } = m.chai
{ stub } = m.sinon
{ Logger } = require '../src/logger'
describe 'Logger', ->
beforeEach ->
@_req = new stream.PassThrough()
@_req.flushHeaders = m.sinon.spy()
@_req.end = m.sinon.spy()
@_req.body = ''
@_req
.pipe(zlib.createGunzip())
.on 'data', (chunk) =>
@_req.body += chunk
stub(https, 'request').returns(@_req)
@fakeEventTracker = {
track: m.sinon.spy()
}
@logger = new Logger({eventTracker: @fakeEventTracker})
@logger.init({
apiEndpoint: 'https://example.com'
uuid: 'deadbeef'
deviceApiKey: 'secretkey'
offlineMode: false
enableLogs: true
})
afterEach ->
https.request.restore()
it 'waits the grace period before sending any logs', ->
clock = m.sinon.useFakeTimers()
@logger.log({message: 'foobar', serviceId: 15})
clock.tick(4999)
clock.restore()
Promise.delay(10)
.then =>
expect(@_req.body).to.equal('')
it 'tears down the connection after inactivity', ->
clock = m.sinon.useFakeTimers()
@logger.log({message: 'foobar', serviceId: 15})
clock.tick(61000)
clock.restore()
Promise.delay(10)
.then =>
expect(@_req.end.calledOnce).to.be.true
it 'sends logs as gzipped ndjson', ->
clock = m.sinon.useFakeTimers()
@logger.log({ message: 'foobar', serviceId: 15 })
@logger.log({ timestamp: 1337, message: 'foobar', serviceId: 15 })
@logger.log({ message: 'foobar' }) # shold be ignored
clock.tick(10000)
clock.restore()
expect(https.request.calledOnce).to.be.true
opts = https.request.firstCall.args[0]
expect(opts.href).to.equal('https://example.com/device/v2/deadbeef/log-stream')
expect(opts.method).to.equal('POST')
expect(opts.headers).to.deep.equal({
'Authorization': 'Bearer secretkey'
'Content-Type': 'application/x-ndjson'
'Content-Encoding': 'gzip'
})
# small delay for the streams to propagate data
Promise.delay(10)
.then =>
lines = @_req.body.split('\n')
expect(lines.length).to.equal(3)
expect(lines[2]).to.equal('')
msg = JSON.parse(lines[0])
expect(msg).to.deep.equal({ timestamp: 0, message: 'foobar', serviceId: 15 })
msg = JSON.parse(lines[1])
expect(msg).to.deep.equal({ timestamp: 1337, message: 'foobar', serviceId: 15 })
it 'allows logging system messages which are also reported to the eventTracker', ->
clock = m.sinon.useFakeTimers()
@logger.logSystemMessage('Hello there!', { someProp: 'someVal' }, 'Some event name')
clock.tick(10000)
clock.restore()
Promise.delay(10)
.then =>
expect(@fakeEventTracker.track).to.be.calledWith('Some event name', { someProp: 'someVal' })
lines = @_req.body.split('\n')
expect(lines.length).to.equal(2)
expect(lines[1]).to.equal('')
msg = JSON.parse(lines[0])
expect(msg).to.deep.equal({ message: 'Hello there!', timestamp: 0, isSystem: true })
it 'should support non-tty log lines', ->
message = '\u0001\u0000\u0000\u0000\u0000\u0000\u0000?2018-09-21T12:37:09.819134000Z this is the message'
buffer = Buffer.from(message)
expect(Logger.extractContainerMessage(buffer)).to.deep.equal({
message: 'this is the message',
timestamp: 1537533429819
})
| 129054 | https = require 'https'
stream = require 'stream'
zlib = require 'zlib'
Promise = require 'bluebird'
m = require 'mochainon'
{ expect } = m.chai
{ stub } = m.sinon
{ Logger } = require '../src/logger'
describe 'Logger', ->
beforeEach ->
@_req = new stream.PassThrough()
@_req.flushHeaders = m.sinon.spy()
@_req.end = m.sinon.spy()
@_req.body = ''
@_req
.pipe(zlib.createGunzip())
.on 'data', (chunk) =>
@_req.body += chunk
stub(https, 'request').returns(@_req)
@fakeEventTracker = {
track: m.sinon.spy()
}
@logger = new Logger({eventTracker: @fakeEventTracker})
@logger.init({
apiEndpoint: 'https://example.com'
uuid: 'deadbeef'
deviceApiKey: 'secretkey'
offlineMode: false
enableLogs: true
})
afterEach ->
https.request.restore()
it 'waits the grace period before sending any logs', ->
clock = m.sinon.useFakeTimers()
@logger.log({message: 'foobar', serviceId: 15})
clock.tick(4999)
clock.restore()
Promise.delay(10)
.then =>
expect(@_req.body).to.equal('')
it 'tears down the connection after inactivity', ->
clock = m.sinon.useFakeTimers()
@logger.log({message: 'foobar', serviceId: 15})
clock.tick(61000)
clock.restore()
Promise.delay(10)
.then =>
expect(@_req.end.calledOnce).to.be.true
it 'sends logs as gzipped ndjson', ->
clock = m.sinon.useFakeTimers()
@logger.log({ message: 'foobar', serviceId: 15 })
@logger.log({ timestamp: 1337, message: 'foobar', serviceId: 15 })
@logger.log({ message: 'foobar' }) # shold be ignored
clock.tick(10000)
clock.restore()
expect(https.request.calledOnce).to.be.true
opts = https.request.firstCall.args[0]
expect(opts.href).to.equal('https://example.com/device/v2/deadbeef/log-stream')
expect(opts.method).to.equal('POST')
expect(opts.headers).to.deep.equal({
'Authorization': 'Bearer <KEY>'
'Content-Type': 'application/x-ndjson'
'Content-Encoding': 'gzip'
})
# small delay for the streams to propagate data
Promise.delay(10)
.then =>
lines = @_req.body.split('\n')
expect(lines.length).to.equal(3)
expect(lines[2]).to.equal('')
msg = JSON.parse(lines[0])
expect(msg).to.deep.equal({ timestamp: 0, message: 'foobar', serviceId: 15 })
msg = JSON.parse(lines[1])
expect(msg).to.deep.equal({ timestamp: 1337, message: 'foobar', serviceId: 15 })
it 'allows logging system messages which are also reported to the eventTracker', ->
clock = m.sinon.useFakeTimers()
@logger.logSystemMessage('Hello there!', { someProp: 'someVal' }, 'Some event name')
clock.tick(10000)
clock.restore()
Promise.delay(10)
.then =>
expect(@fakeEventTracker.track).to.be.calledWith('Some event name', { someProp: 'someVal' })
lines = @_req.body.split('\n')
expect(lines.length).to.equal(2)
expect(lines[1]).to.equal('')
msg = JSON.parse(lines[0])
expect(msg).to.deep.equal({ message: 'Hello there!', timestamp: 0, isSystem: true })
it 'should support non-tty log lines', ->
message = '\u0001\u0000\u0000\u0000\u0000\u0000\u0000?2018-09-21T12:37:09.819134000Z this is the message'
buffer = Buffer.from(message)
expect(Logger.extractContainerMessage(buffer)).to.deep.equal({
message: 'this is the message',
timestamp: 1537533429819
})
| true | https = require 'https'
stream = require 'stream'
zlib = require 'zlib'
Promise = require 'bluebird'
m = require 'mochainon'
{ expect } = m.chai
{ stub } = m.sinon
{ Logger } = require '../src/logger'
describe 'Logger', ->
beforeEach ->
@_req = new stream.PassThrough()
@_req.flushHeaders = m.sinon.spy()
@_req.end = m.sinon.spy()
@_req.body = ''
@_req
.pipe(zlib.createGunzip())
.on 'data', (chunk) =>
@_req.body += chunk
stub(https, 'request').returns(@_req)
@fakeEventTracker = {
track: m.sinon.spy()
}
@logger = new Logger({eventTracker: @fakeEventTracker})
@logger.init({
apiEndpoint: 'https://example.com'
uuid: 'deadbeef'
deviceApiKey: 'secretkey'
offlineMode: false
enableLogs: true
})
afterEach ->
https.request.restore()
it 'waits the grace period before sending any logs', ->
clock = m.sinon.useFakeTimers()
@logger.log({message: 'foobar', serviceId: 15})
clock.tick(4999)
clock.restore()
Promise.delay(10)
.then =>
expect(@_req.body).to.equal('')
it 'tears down the connection after inactivity', ->
clock = m.sinon.useFakeTimers()
@logger.log({message: 'foobar', serviceId: 15})
clock.tick(61000)
clock.restore()
Promise.delay(10)
.then =>
expect(@_req.end.calledOnce).to.be.true
it 'sends logs as gzipped ndjson', ->
clock = m.sinon.useFakeTimers()
@logger.log({ message: 'foobar', serviceId: 15 })
@logger.log({ timestamp: 1337, message: 'foobar', serviceId: 15 })
@logger.log({ message: 'foobar' }) # shold be ignored
clock.tick(10000)
clock.restore()
expect(https.request.calledOnce).to.be.true
opts = https.request.firstCall.args[0]
expect(opts.href).to.equal('https://example.com/device/v2/deadbeef/log-stream')
expect(opts.method).to.equal('POST')
expect(opts.headers).to.deep.equal({
'Authorization': 'Bearer PI:KEY:<KEY>END_PI'
'Content-Type': 'application/x-ndjson'
'Content-Encoding': 'gzip'
})
# small delay for the streams to propagate data
Promise.delay(10)
.then =>
lines = @_req.body.split('\n')
expect(lines.length).to.equal(3)
expect(lines[2]).to.equal('')
msg = JSON.parse(lines[0])
expect(msg).to.deep.equal({ timestamp: 0, message: 'foobar', serviceId: 15 })
msg = JSON.parse(lines[1])
expect(msg).to.deep.equal({ timestamp: 1337, message: 'foobar', serviceId: 15 })
it 'allows logging system messages which are also reported to the eventTracker', ->
clock = m.sinon.useFakeTimers()
@logger.logSystemMessage('Hello there!', { someProp: 'someVal' }, 'Some event name')
clock.tick(10000)
clock.restore()
Promise.delay(10)
.then =>
expect(@fakeEventTracker.track).to.be.calledWith('Some event name', { someProp: 'someVal' })
lines = @_req.body.split('\n')
expect(lines.length).to.equal(2)
expect(lines[1]).to.equal('')
msg = JSON.parse(lines[0])
expect(msg).to.deep.equal({ message: 'Hello there!', timestamp: 0, isSystem: true })
it 'should support non-tty log lines', ->
message = '\u0001\u0000\u0000\u0000\u0000\u0000\u0000?2018-09-21T12:37:09.819134000Z this is the message'
buffer = Buffer.from(message)
expect(Logger.extractContainerMessage(buffer)).to.deep.equal({
message: 'this is the message',
timestamp: 1537533429819
})
|
[
{
"context": "sers);\n users = [\n {\n name: 'Lia Lugo'\n avatar: 'svg-1'\n content:",
"end": 193,
"score": 0.9998815059661865,
"start": 185,
"tag": "NAME",
"value": "Lia Lugo"
},
{
"context": "ncashire.'\n }\n {\n n... | app/scripts/layout/userservice.coffee | bionikspoon/gem-store | 0 | 'use strict'
class UserService
@$inject = [ '$q' ]
constructor: ($q)->
@q = $q
loadAllUsers: ->
@q.when(users);
users = [
{
name: 'Lia Lugo'
avatar: 'svg-1'
content: 'I love cheese, especially airedale queso. Cheese and biscuits halloumi cauliflower cheese cottage cheese swiss boursin fondue caerphilly. Cow port-salut camembert de normandie macaroni cheese feta who moved my cheese babybel boursin. Red leicester roquefort boursin squirty cheese jarlsberg blue castello caerphilly chalk and cheese. Lancashire.'
}
{
name: 'George Duke'
avatar: 'svg-2'
content: 'Zombie ipsum reversus ab viral inferno, nam rick grimes malum cerebro. De carne lumbering animata corpora quaeritis. Summus brains sit, morbo vel maleficia? De apocalypsi gorger omero undead survivor dictum mauris.'
}
{
name: 'Gener Delosreyes'
avatar: 'svg-3'
content: "Raw denim pour-over readymade Etsy Pitchfork. Four dollar toast pickled locavore bitters McSweeney's blog. Try-hard art party Shoreditch selfies. Odd Future butcher VHS, disrupt pop-up Thundercats chillwave vinyl jean shorts taxidermy master cleanse letterpress Wes Anderson mustache Helvetica. Schlitz bicycle rights chillwave irony lumberhungry Kickstarter next level sriracha typewriter Intelligentsia, migas kogi heirloom tousled. Disrupt 3 wolf moon lomo four loko. Pug mlkshk fanny pack literally hoodie bespoke, put a bird on it Marfa messenger bag kogi VHS."
}
{
name: 'Lawrence Ray'
avatar: 'svg-4'
content: 'Scratch the furniture spit up on light gray carpet instead of adjacent linoleum so eat a plant, kill a hand pelt around the house and up and down stairs chasing phantoms run in circles, or claw drapes. Always hungry pelt around the house and up and down stairs chasing phantoms.'
}
{
name: 'Ernesto Urbina'
avatar: 'svg-5'
content: 'Webtwo ipsum dolor sit amet, eskobo chumby doostang bebo. Bubbli greplin stypi prezi mzinga heroku wakoopa, shopify airbnb dogster dopplr gooru jumo, reddit plickers edmodo stypi zillow etsy.'
}
{
name: 'Gani Ferrer'
avatar: 'svg-6'
content: "Lebowski ipsum yeah? What do you think happens when you get rad? You turn in your library card? Get a new driver's license? Stop being awesome? Dolor sit amet, consectetur adipiscing elit praesent ac magna justo pellentesque ac lectus. You don't go out and make a living dressed like that in the middle of a weekday. Quis elit blandit fringilla a ut turpis praesent felis ligula, malesuada suscipit malesuada."
}
]
###*
# @ngdoc service
# @name gemStoreApp.UserService
# @description
# # UserService
# Service in the gemStoreApp.
###
angular.module 'layoutModule'
.service 'UserService', UserService
# AngularJS will instantiate a singleton by calling "new" on this function
| 164763 | 'use strict'
class UserService
@$inject = [ '$q' ]
constructor: ($q)->
@q = $q
loadAllUsers: ->
@q.when(users);
users = [
{
name: '<NAME>'
avatar: 'svg-1'
content: 'I love cheese, especially airedale queso. Cheese and biscuits halloumi cauliflower cheese cottage cheese swiss boursin fondue caerphilly. Cow port-salut camembert de normandie macaroni cheese feta who moved my cheese babybel boursin. Red leicester roquefort boursin squirty cheese jarlsberg blue castello caerphilly chalk and cheese. Lancashire.'
}
{
name: '<NAME>'
avatar: 'svg-2'
content: 'Zombie ipsum reversus ab viral inferno, nam rick grimes malum cerebro. De carne lumbering animata corpora quaeritis. Summus brains sit, morbo vel maleficia? De apocalypsi gorger omero undead survivor dictum mauris.'
}
{
name: '<NAME>'
avatar: 'svg-3'
content: "Raw denim pour-over readymade Etsy Pitchfork. Four dollar toast pickled locavore bitters McSweeney's blog. Try-hard art party Shoreditch selfies. Odd Future butcher VHS, disrupt pop-up Thundercats chillwave vinyl jean shorts taxidermy master cleanse letterpress Wes Anderson mustache Helvetica. Schlitz bicycle rights chillwave irony lumberhungry Kickstarter next level sriracha typewriter Intelligentsia, migas kogi heirloom tousled. Disrupt 3 wolf moon lomo four loko. Pug mlkshk fanny pack literally hoodie bespoke, put a bird on it Marfa messenger bag kogi VHS."
}
{
name: '<NAME>'
avatar: 'svg-4'
content: 'Scratch the furniture spit up on light gray carpet instead of adjacent linoleum so eat a plant, kill a hand pelt around the house and up and down stairs chasing phantoms run in circles, or claw drapes. Always hungry pelt around the house and up and down stairs chasing phantoms.'
}
{
name: '<NAME>'
avatar: 'svg-5'
content: 'Webtwo ipsum dolor sit amet, eskobo chumby doostang bebo. Bubbli greplin stypi prezi mzinga heroku wakoopa, shopify airbnb dogster dopplr gooru jumo, reddit plickers edmodo stypi zillow etsy.'
}
{
name: '<NAME>'
avatar: 'svg-6'
content: "Lebowski ipsum yeah? What do you think happens when you get rad? You turn in your library card? Get a new driver's license? Stop being awesome? Dolor sit amet, consectetur adipiscing elit praesent ac magna justo pellentesque ac lectus. You don't go out and make a living dressed like that in the middle of a weekday. Quis elit blandit fringilla a ut turpis praesent felis ligula, malesuada suscipit malesuada."
}
]
###*
# @ngdoc service
# @name gemStoreApp.UserService
# @description
# # UserService
# Service in the gemStoreApp.
###
angular.module 'layoutModule'
.service 'UserService', UserService
# AngularJS will instantiate a singleton by calling "new" on this function
| true | 'use strict'
class UserService
@$inject = [ '$q' ]
constructor: ($q)->
@q = $q
loadAllUsers: ->
@q.when(users);
users = [
{
name: 'PI:NAME:<NAME>END_PI'
avatar: 'svg-1'
content: 'I love cheese, especially airedale queso. Cheese and biscuits halloumi cauliflower cheese cottage cheese swiss boursin fondue caerphilly. Cow port-salut camembert de normandie macaroni cheese feta who moved my cheese babybel boursin. Red leicester roquefort boursin squirty cheese jarlsberg blue castello caerphilly chalk and cheese. Lancashire.'
}
{
name: 'PI:NAME:<NAME>END_PI'
avatar: 'svg-2'
content: 'Zombie ipsum reversus ab viral inferno, nam rick grimes malum cerebro. De carne lumbering animata corpora quaeritis. Summus brains sit, morbo vel maleficia? De apocalypsi gorger omero undead survivor dictum mauris.'
}
{
name: 'PI:NAME:<NAME>END_PI'
avatar: 'svg-3'
content: "Raw denim pour-over readymade Etsy Pitchfork. Four dollar toast pickled locavore bitters McSweeney's blog. Try-hard art party Shoreditch selfies. Odd Future butcher VHS, disrupt pop-up Thundercats chillwave vinyl jean shorts taxidermy master cleanse letterpress Wes Anderson mustache Helvetica. Schlitz bicycle rights chillwave irony lumberhungry Kickstarter next level sriracha typewriter Intelligentsia, migas kogi heirloom tousled. Disrupt 3 wolf moon lomo four loko. Pug mlkshk fanny pack literally hoodie bespoke, put a bird on it Marfa messenger bag kogi VHS."
}
{
name: 'PI:NAME:<NAME>END_PI'
avatar: 'svg-4'
content: 'Scratch the furniture spit up on light gray carpet instead of adjacent linoleum so eat a plant, kill a hand pelt around the house and up and down stairs chasing phantoms run in circles, or claw drapes. Always hungry pelt around the house and up and down stairs chasing phantoms.'
}
{
name: 'PI:NAME:<NAME>END_PI'
avatar: 'svg-5'
content: 'Webtwo ipsum dolor sit amet, eskobo chumby doostang bebo. Bubbli greplin stypi prezi mzinga heroku wakoopa, shopify airbnb dogster dopplr gooru jumo, reddit plickers edmodo stypi zillow etsy.'
}
{
name: 'PI:NAME:<NAME>END_PI'
avatar: 'svg-6'
content: "Lebowski ipsum yeah? What do you think happens when you get rad? You turn in your library card? Get a new driver's license? Stop being awesome? Dolor sit amet, consectetur adipiscing elit praesent ac magna justo pellentesque ac lectus. You don't go out and make a living dressed like that in the middle of a weekday. Quis elit blandit fringilla a ut turpis praesent felis ligula, malesuada suscipit malesuada."
}
]
###*
# @ngdoc service
# @name gemStoreApp.UserService
# @description
# # UserService
# Service in the gemStoreApp.
###
angular.module 'layoutModule'
.service 'UserService', UserService
# AngularJS will instantiate a singleton by calling "new" on this function
|
[
{
"context": ", \"#fff\", \"#000\"]\n new Element\n key: \"testElement\"\n stage: cacheMode\n [\n new",
"end": 1575,
"score": 0.9369153380393982,
"start": 1564,
"tag": "USERNAME",
"value": "testElement"
},
{
"context": "olor: \"#f00\"\n new... | test/tests/Art.Engine/Core/Drawing/CachingAndStaging/CacheDraw.coffee | Imikimi-LLC/art.engine | 4 | Foundation = require 'art-foundation'
Atomic = require 'art-atomic'
Canvas = require 'art-canvas'
Engine = require 'art-engine'
StateEpochTestHelper = require '../../StateEpochTestHelper'
{compareDownsampledRedChannel} = require "../../CoreHelper"
{point, matrix, Matrix, rect} = Atomic
{m, clone,inspect, nextTick, eq, log, isFunction} = Foundation
{Element} = Engine.Core
{RectangleElement, BitmapElement, TextElement, ShadowElement} = Engine
imageDataEqual = (a, b) ->
a = a.data
b = b.data
if a.length == b.length
for av, i in a when av != b[i]
return false
true
else
false
doPropChangeTest = (resetsCache, testName, propChangeFunction, wrapperElement) ->
wrapperElement.onNextReady ->
testElement = wrapperElement.find("testElement")[0]
throw new Error "testElement not found" unless testElement
wrapperElement.toBitmapWithInfo {}
.then (firstRendered) ->
firstCache = testElement._drawCacheBitmap
firstImageData = firstCache.getImageData()
assert.eq true, !!firstCache
propChangeFunction testElement
wrapperElement.toBitmapBasic {}
.then (bitmap) ->
secondCache = testElement._drawCacheBitmap
log {bitmap, testName, firstCache, secondCache}
secondImageData = secondCache.getImageData()
assert.eq resetsCache, !imageDataEqual firstImageData, secondImageData
newPropChangeTestElements = (cacheMode = true)->
new Element
size: point 100, 50
[
new RectangleElement colors: ["#000", "#fff", "#000"]
new Element
key: "testElement"
stage: cacheMode
[
new RectangleElement color: "#f00"
new RectangleElement key: "testChild", color: "#ff0", padding: 10
]
]
propChangeTest = (resetsCache, propName, propValue, cacheMode = true)->
testName = "changing #{propName} " + if resetsCache
"DOES reset cache"
else
"does NOT reset cache"
propChangeFunction = if isFunction propValue then propValue else (el) -> el[propName] = propValue
test testName, ->
wrapperElement = newPropChangeTestElements cacheMode
doPropChangeTest resetsCache, testName, propChangeFunction, wrapperElement
{stateEpochTest} = StateEpochTestHelper
module.exports = Engine.Config.config.drawCacheEnabled && suite:
true: ->
test "cacheDraw: true caches on next draw-cycle", ->
drawCount = 0
el = new Element
cacheDraw: true
size: point 100, 50
drawOrder: rectangle: ->
drawCount++
rect()
[new Element draw: "red"]
el.toBitmapWithInfo()
.then (rendered) ->
firstCached = el._drawCacheBitmap
assert.eq true, !!result = el._drawCacheBitmap
assert.eq drawCount, 2
el.toBitmapBasic()
.then -> el.toBitmapBasic()
.then ->
assert.eq el._drawCacheBitmap, firstCached
assert.eq drawCount, 2
test "regression", ->
drawCount = 0
e = new Element
size: ps: 1
cacheDraw: true
new Element
drawOrder: [
"blue"
(target, elementToTargetMatrix, currentDrawArea, currentPath, element) ->
drawCount++
]
size: 50
e.toBitmapBasic()
.then (bitmap) ->
log {bitmap}
assert.eq drawCount, 2
e.toBitmapBasic()
.then ->
assert.eq drawCount, 2
do ->
test testName = "cacheDraw: true, no change, then setting cacheDraw = false resets cache", ->
wrapperElement =
new Element
size: point 100, 50
[
new RectangleElement colors: ["#000", "#fff", "#000"]
new Element
key: "testElement"
cacheDraw: true
[
new RectangleElement color: "#f00"
new RectangleElement key: "testChild", color: "#ff0", padding: 10
]
]
wrapperElement.toBitmapWithInfo {}
.then (rendered) ->
testElement = wrapperElement.find("testElement")[0]
assert.eq true, !!testElement._drawCacheBitmap
testElement.cacheDraw = false
wrapperElement.toBitmapWithInfo {}
.then (rendered) ->
log
result: rendered
test: testName
assert.eq false, !!testElement._drawCacheBitmap
tools: ->
test "flushCache", ->
el = new Element
cacheDraw: true
size: 100
draw: "red"
el.toBitmapWithInfo()
.then ->
Engine.validateCacheByteSize()
assert.gt Engine.getCacheInfo().cacheByteSize, 0
Engine.flushCache()
Engine.validateCacheByteSize()
assert.eq Engine.getCacheInfo().cacheByteSize, 0
partialInitialDraw: ->
test "move Element doesn't redraw whole screen", ->
el = new Element
size: 4
clip: true
cachedEl = new Element
location: 2
cacheDraw: true
new RectangleElement color: "#800"
el.toBitmapWithInfo {}
.then ->
compareDownsampledRedChannel "partialRedraw_initialDraw", cachedEl._drawCacheBitmap, [
8, 8, 0, 0
8, 8, 0, 0
0, 0, 0, 0
0, 0, 0, 0
]
assert.eq cachedEl._dirtyDrawAreas, [rect(2, 0, 2, 4), rect 0, 2, 2, 2]
propChanges: ->
propChangeTest false, "opacity", .5
# propChangeTest false, "visible", false
propChangeTest false, "compositeMode", "add"
propChangeTest false, "location", 10
propChangeTest false, "scale", .5
propChangeTest false, "angle", Math.PI/4
propChangeTest false, "axis", .5
propChangeTest true, "size", ps: .5
propChangeTest true, "child's color", (el) -> el.find("testChild")[0].color = "#f0f"
propChangeTest false, "elementToParentMatrix", (el) -> el.elementToParentMatrix = Matrix.translate(el.currentSize.ccNeg).rotate(Math.PI/6).translate(el.currentSize.cc)
stagingBitmaps:
getNeedsStagingBitmap: ->
testNsb = (needsIt, name, tester) ->
test "#{if needsIt then 'NEEDED' else 'NOT NEEDED'} when #{name}", ->
tester()
.onNextReady (e) -> assert.eq e.getNeedsStagingBitmap(e.elementToParentMatrix), needsIt, "getNeedsStagingBitmap() should be #{needsIt}"
testNsb false, "default", -> new Element()
testNsb false, "ONLY clip", -> new Element clip: true
testNsb false, "ONLY rotation", -> new Element angle: .1
testNsb false, "ONLY has Children", -> new Element {}, new Element()
testNsb false, "ONLY opacity < 1", -> new Element opacity: .9
testNsb false, "ONLY compositeMode: 'add'", -> new Element compositeMode: 'add'
testNsb true, "isMask", -> new Element isMask: true
testNsb true, "clip AND rotation", -> new Element clip: true, angle: .1
testNsb true, "has Children AND opacity < 1", -> new Element opacity: .9, new Element()
testNsb true, "has Children AND compositeMode: 'add'", -> new Element compositeMode: 'add', new Element()
testNsb true, "childRequiresParentStagingBitmap", -> new Element {}, new ShadowElement
# test "clipping with rotation", ->
# new Element
drawing: ->
test "staging bitmap should persist across two immediate draws", ->
standardTextProps = textProps =
fontSize: 16
fontFamily: "sans-serif"
color: "#fffc"
align: .5
size: ps: 1
padding: 10
standardShadowProps =
color: "#0007"
blur: 20
offset: y: 5
e = new Element
size: 100
clip: true
parent = new Element
axis: .5
location: ps: .5
[
new RectangleElement color: "green", shadow: standardShadowProps
needsStagingElement = new Element
clip: true
[new TextElement m standardTextProps, text: "hi!"]
]
initialStagingBitmapsCreated = Element.stats.stagingBitmapsCreated
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
parent.angle = (Math.PI/180) * -5
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
assert.eq Element.stats.stagingBitmapsCreated, initialStagingBitmapsCreated + 1, "test 1"
parent.angle = (Math.PI/180) * -10
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
assert.eq Element.stats.stagingBitmapsCreated, initialStagingBitmapsCreated + 1, "test 2"
| 196563 | Foundation = require 'art-foundation'
Atomic = require 'art-atomic'
Canvas = require 'art-canvas'
Engine = require 'art-engine'
StateEpochTestHelper = require '../../StateEpochTestHelper'
{compareDownsampledRedChannel} = require "../../CoreHelper"
{point, matrix, Matrix, rect} = Atomic
{m, clone,inspect, nextTick, eq, log, isFunction} = Foundation
{Element} = Engine.Core
{RectangleElement, BitmapElement, TextElement, ShadowElement} = Engine
imageDataEqual = (a, b) ->
a = a.data
b = b.data
if a.length == b.length
for av, i in a when av != b[i]
return false
true
else
false
doPropChangeTest = (resetsCache, testName, propChangeFunction, wrapperElement) ->
wrapperElement.onNextReady ->
testElement = wrapperElement.find("testElement")[0]
throw new Error "testElement not found" unless testElement
wrapperElement.toBitmapWithInfo {}
.then (firstRendered) ->
firstCache = testElement._drawCacheBitmap
firstImageData = firstCache.getImageData()
assert.eq true, !!firstCache
propChangeFunction testElement
wrapperElement.toBitmapBasic {}
.then (bitmap) ->
secondCache = testElement._drawCacheBitmap
log {bitmap, testName, firstCache, secondCache}
secondImageData = secondCache.getImageData()
assert.eq resetsCache, !imageDataEqual firstImageData, secondImageData
newPropChangeTestElements = (cacheMode = true)->
new Element
size: point 100, 50
[
new RectangleElement colors: ["#000", "#fff", "#000"]
new Element
key: "testElement"
stage: cacheMode
[
new RectangleElement color: "#f00"
new RectangleElement key: "testChild", color: "#ff0", padding: 10
]
]
propChangeTest = (resetsCache, propName, propValue, cacheMode = true)->
testName = "changing #{propName} " + if resetsCache
"DOES reset cache"
else
"does NOT reset cache"
propChangeFunction = if isFunction propValue then propValue else (el) -> el[propName] = propValue
test testName, ->
wrapperElement = newPropChangeTestElements cacheMode
doPropChangeTest resetsCache, testName, propChangeFunction, wrapperElement
{stateEpochTest} = StateEpochTestHelper
module.exports = Engine.Config.config.drawCacheEnabled && suite:
true: ->
test "cacheDraw: true caches on next draw-cycle", ->
drawCount = 0
el = new Element
cacheDraw: true
size: point 100, 50
drawOrder: rectangle: ->
drawCount++
rect()
[new Element draw: "red"]
el.toBitmapWithInfo()
.then (rendered) ->
firstCached = el._drawCacheBitmap
assert.eq true, !!result = el._drawCacheBitmap
assert.eq drawCount, 2
el.toBitmapBasic()
.then -> el.toBitmapBasic()
.then ->
assert.eq el._drawCacheBitmap, firstCached
assert.eq drawCount, 2
test "regression", ->
drawCount = 0
e = new Element
size: ps: 1
cacheDraw: true
new Element
drawOrder: [
"blue"
(target, elementToTargetMatrix, currentDrawArea, currentPath, element) ->
drawCount++
]
size: 50
e.toBitmapBasic()
.then (bitmap) ->
log {bitmap}
assert.eq drawCount, 2
e.toBitmapBasic()
.then ->
assert.eq drawCount, 2
do ->
test testName = "cacheDraw: true, no change, then setting cacheDraw = false resets cache", ->
wrapperElement =
new Element
size: point 100, 50
[
new RectangleElement colors: ["#000", "#fff", "#000"]
new Element
key: "<KEY>"
cacheDraw: true
[
new RectangleElement color: "#f00"
new RectangleElement key: "testChild", color: "#ff0", padding: 10
]
]
wrapperElement.toBitmapWithInfo {}
.then (rendered) ->
testElement = wrapperElement.find("testElement")[0]
assert.eq true, !!testElement._drawCacheBitmap
testElement.cacheDraw = false
wrapperElement.toBitmapWithInfo {}
.then (rendered) ->
log
result: rendered
test: testName
assert.eq false, !!testElement._drawCacheBitmap
tools: ->
test "flushCache", ->
el = new Element
cacheDraw: true
size: 100
draw: "red"
el.toBitmapWithInfo()
.then ->
Engine.validateCacheByteSize()
assert.gt Engine.getCacheInfo().cacheByteSize, 0
Engine.flushCache()
Engine.validateCacheByteSize()
assert.eq Engine.getCacheInfo().cacheByteSize, 0
partialInitialDraw: ->
test "move Element doesn't redraw whole screen", ->
el = new Element
size: 4
clip: true
cachedEl = new Element
location: 2
cacheDraw: true
new RectangleElement color: "#800"
el.toBitmapWithInfo {}
.then ->
compareDownsampledRedChannel "partialRedraw_initialDraw", cachedEl._drawCacheBitmap, [
8, 8, 0, 0
8, 8, 0, 0
0, 0, 0, 0
0, 0, 0, 0
]
assert.eq cachedEl._dirtyDrawAreas, [rect(2, 0, 2, 4), rect 0, 2, 2, 2]
propChanges: ->
propChangeTest false, "opacity", .5
# propChangeTest false, "visible", false
propChangeTest false, "compositeMode", "add"
propChangeTest false, "location", 10
propChangeTest false, "scale", .5
propChangeTest false, "angle", Math.PI/4
propChangeTest false, "axis", .5
propChangeTest true, "size", ps: .5
propChangeTest true, "child's color", (el) -> el.find("testChild")[0].color = "#f0f"
propChangeTest false, "elementToParentMatrix", (el) -> el.elementToParentMatrix = Matrix.translate(el.currentSize.ccNeg).rotate(Math.PI/6).translate(el.currentSize.cc)
stagingBitmaps:
getNeedsStagingBitmap: ->
testNsb = (needsIt, name, tester) ->
test "#{if needsIt then 'NEEDED' else 'NOT NEEDED'} when #{name}", ->
tester()
.onNextReady (e) -> assert.eq e.getNeedsStagingBitmap(e.elementToParentMatrix), needsIt, "getNeedsStagingBitmap() should be #{needsIt}"
testNsb false, "default", -> new Element()
testNsb false, "ONLY clip", -> new Element clip: true
testNsb false, "ONLY rotation", -> new Element angle: .1
testNsb false, "ONLY has Children", -> new Element {}, new Element()
testNsb false, "ONLY opacity < 1", -> new Element opacity: .9
testNsb false, "ONLY compositeMode: 'add'", -> new Element compositeMode: 'add'
testNsb true, "isMask", -> new Element isMask: true
testNsb true, "clip AND rotation", -> new Element clip: true, angle: .1
testNsb true, "has Children AND opacity < 1", -> new Element opacity: .9, new Element()
testNsb true, "has Children AND compositeMode: 'add'", -> new Element compositeMode: 'add', new Element()
testNsb true, "childRequiresParentStagingBitmap", -> new Element {}, new ShadowElement
# test "clipping with rotation", ->
# new Element
drawing: ->
test "staging bitmap should persist across two immediate draws", ->
standardTextProps = textProps =
fontSize: 16
fontFamily: "sans-serif"
color: "#fffc"
align: .5
size: ps: 1
padding: 10
standardShadowProps =
color: "#0007"
blur: 20
offset: y: 5
e = new Element
size: 100
clip: true
parent = new Element
axis: .5
location: ps: .5
[
new RectangleElement color: "green", shadow: standardShadowProps
needsStagingElement = new Element
clip: true
[new TextElement m standardTextProps, text: "hi!"]
]
initialStagingBitmapsCreated = Element.stats.stagingBitmapsCreated
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
parent.angle = (Math.PI/180) * -5
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
assert.eq Element.stats.stagingBitmapsCreated, initialStagingBitmapsCreated + 1, "test 1"
parent.angle = (Math.PI/180) * -10
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
assert.eq Element.stats.stagingBitmapsCreated, initialStagingBitmapsCreated + 1, "test 2"
| true | Foundation = require 'art-foundation'
Atomic = require 'art-atomic'
Canvas = require 'art-canvas'
Engine = require 'art-engine'
StateEpochTestHelper = require '../../StateEpochTestHelper'
{compareDownsampledRedChannel} = require "../../CoreHelper"
{point, matrix, Matrix, rect} = Atomic
{m, clone,inspect, nextTick, eq, log, isFunction} = Foundation
{Element} = Engine.Core
{RectangleElement, BitmapElement, TextElement, ShadowElement} = Engine
imageDataEqual = (a, b) ->
a = a.data
b = b.data
if a.length == b.length
for av, i in a when av != b[i]
return false
true
else
false
doPropChangeTest = (resetsCache, testName, propChangeFunction, wrapperElement) ->
wrapperElement.onNextReady ->
testElement = wrapperElement.find("testElement")[0]
throw new Error "testElement not found" unless testElement
wrapperElement.toBitmapWithInfo {}
.then (firstRendered) ->
firstCache = testElement._drawCacheBitmap
firstImageData = firstCache.getImageData()
assert.eq true, !!firstCache
propChangeFunction testElement
wrapperElement.toBitmapBasic {}
.then (bitmap) ->
secondCache = testElement._drawCacheBitmap
log {bitmap, testName, firstCache, secondCache}
secondImageData = secondCache.getImageData()
assert.eq resetsCache, !imageDataEqual firstImageData, secondImageData
newPropChangeTestElements = (cacheMode = true)->
new Element
size: point 100, 50
[
new RectangleElement colors: ["#000", "#fff", "#000"]
new Element
key: "testElement"
stage: cacheMode
[
new RectangleElement color: "#f00"
new RectangleElement key: "testChild", color: "#ff0", padding: 10
]
]
propChangeTest = (resetsCache, propName, propValue, cacheMode = true)->
testName = "changing #{propName} " + if resetsCache
"DOES reset cache"
else
"does NOT reset cache"
propChangeFunction = if isFunction propValue then propValue else (el) -> el[propName] = propValue
test testName, ->
wrapperElement = newPropChangeTestElements cacheMode
doPropChangeTest resetsCache, testName, propChangeFunction, wrapperElement
{stateEpochTest} = StateEpochTestHelper
module.exports = Engine.Config.config.drawCacheEnabled && suite:
true: ->
test "cacheDraw: true caches on next draw-cycle", ->
drawCount = 0
el = new Element
cacheDraw: true
size: point 100, 50
drawOrder: rectangle: ->
drawCount++
rect()
[new Element draw: "red"]
el.toBitmapWithInfo()
.then (rendered) ->
firstCached = el._drawCacheBitmap
assert.eq true, !!result = el._drawCacheBitmap
assert.eq drawCount, 2
el.toBitmapBasic()
.then -> el.toBitmapBasic()
.then ->
assert.eq el._drawCacheBitmap, firstCached
assert.eq drawCount, 2
test "regression", ->
drawCount = 0
e = new Element
size: ps: 1
cacheDraw: true
new Element
drawOrder: [
"blue"
(target, elementToTargetMatrix, currentDrawArea, currentPath, element) ->
drawCount++
]
size: 50
e.toBitmapBasic()
.then (bitmap) ->
log {bitmap}
assert.eq drawCount, 2
e.toBitmapBasic()
.then ->
assert.eq drawCount, 2
do ->
test testName = "cacheDraw: true, no change, then setting cacheDraw = false resets cache", ->
wrapperElement =
new Element
size: point 100, 50
[
new RectangleElement colors: ["#000", "#fff", "#000"]
new Element
key: "PI:KEY:<KEY>END_PI"
cacheDraw: true
[
new RectangleElement color: "#f00"
new RectangleElement key: "testChild", color: "#ff0", padding: 10
]
]
wrapperElement.toBitmapWithInfo {}
.then (rendered) ->
testElement = wrapperElement.find("testElement")[0]
assert.eq true, !!testElement._drawCacheBitmap
testElement.cacheDraw = false
wrapperElement.toBitmapWithInfo {}
.then (rendered) ->
log
result: rendered
test: testName
assert.eq false, !!testElement._drawCacheBitmap
tools: ->
test "flushCache", ->
el = new Element
cacheDraw: true
size: 100
draw: "red"
el.toBitmapWithInfo()
.then ->
Engine.validateCacheByteSize()
assert.gt Engine.getCacheInfo().cacheByteSize, 0
Engine.flushCache()
Engine.validateCacheByteSize()
assert.eq Engine.getCacheInfo().cacheByteSize, 0
partialInitialDraw: ->
test "move Element doesn't redraw whole screen", ->
el = new Element
size: 4
clip: true
cachedEl = new Element
location: 2
cacheDraw: true
new RectangleElement color: "#800"
el.toBitmapWithInfo {}
.then ->
compareDownsampledRedChannel "partialRedraw_initialDraw", cachedEl._drawCacheBitmap, [
8, 8, 0, 0
8, 8, 0, 0
0, 0, 0, 0
0, 0, 0, 0
]
assert.eq cachedEl._dirtyDrawAreas, [rect(2, 0, 2, 4), rect 0, 2, 2, 2]
propChanges: ->
propChangeTest false, "opacity", .5
# propChangeTest false, "visible", false
propChangeTest false, "compositeMode", "add"
propChangeTest false, "location", 10
propChangeTest false, "scale", .5
propChangeTest false, "angle", Math.PI/4
propChangeTest false, "axis", .5
propChangeTest true, "size", ps: .5
propChangeTest true, "child's color", (el) -> el.find("testChild")[0].color = "#f0f"
propChangeTest false, "elementToParentMatrix", (el) -> el.elementToParentMatrix = Matrix.translate(el.currentSize.ccNeg).rotate(Math.PI/6).translate(el.currentSize.cc)
stagingBitmaps:
getNeedsStagingBitmap: ->
testNsb = (needsIt, name, tester) ->
test "#{if needsIt then 'NEEDED' else 'NOT NEEDED'} when #{name}", ->
tester()
.onNextReady (e) -> assert.eq e.getNeedsStagingBitmap(e.elementToParentMatrix), needsIt, "getNeedsStagingBitmap() should be #{needsIt}"
testNsb false, "default", -> new Element()
testNsb false, "ONLY clip", -> new Element clip: true
testNsb false, "ONLY rotation", -> new Element angle: .1
testNsb false, "ONLY has Children", -> new Element {}, new Element()
testNsb false, "ONLY opacity < 1", -> new Element opacity: .9
testNsb false, "ONLY compositeMode: 'add'", -> new Element compositeMode: 'add'
testNsb true, "isMask", -> new Element isMask: true
testNsb true, "clip AND rotation", -> new Element clip: true, angle: .1
testNsb true, "has Children AND opacity < 1", -> new Element opacity: .9, new Element()
testNsb true, "has Children AND compositeMode: 'add'", -> new Element compositeMode: 'add', new Element()
testNsb true, "childRequiresParentStagingBitmap", -> new Element {}, new ShadowElement
# test "clipping with rotation", ->
# new Element
drawing: ->
test "staging bitmap should persist across two immediate draws", ->
standardTextProps = textProps =
fontSize: 16
fontFamily: "sans-serif"
color: "#fffc"
align: .5
size: ps: 1
padding: 10
standardShadowProps =
color: "#0007"
blur: 20
offset: y: 5
e = new Element
size: 100
clip: true
parent = new Element
axis: .5
location: ps: .5
[
new RectangleElement color: "green", shadow: standardShadowProps
needsStagingElement = new Element
clip: true
[new TextElement m standardTextProps, text: "hi!"]
]
initialStagingBitmapsCreated = Element.stats.stagingBitmapsCreated
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
parent.angle = (Math.PI/180) * -5
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
assert.eq Element.stats.stagingBitmapsCreated, initialStagingBitmapsCreated + 1, "test 1"
parent.angle = (Math.PI/180) * -10
e.toBitmapWithInfo()
.then ({bitmap}) ->
log clone {bitmap, stagingBitmapsCreated: Element.stats.stagingBitmapsCreated}
assert.eq Element.stats.stagingBitmapsCreated, initialStagingBitmapsCreated + 1, "test 2"
|
[
{
"context": "#\t> File Name: app.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@gmail.com\n#\t> Created Time: W",
"end": 40,
"score": 0.9968626499176025,
"start": 38,
"tag": "USERNAME",
"value": "LY"
},
{
"context": "#\t> File Name: app.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@g... | server/db/app.coffee | wiiliamking/miac-website | 0 | # > File Name: app.coffee
# > Author: LY
# > Mail: ly.franky@gmail.com
# > Created Time: Wednesday, November 19, 2014 AM10:41:51 CST
###
* import package's module that would use
###
express = require 'express'
path = require 'path'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
favicon = require 'static-favicon'
busbody = require "connect-busboy"
session = require 'express-session'
logger = require 'morgan'
moment = require 'moment'
multer = require 'multer'
###
* import module that would use writen by laiy
###
db = require './db/db.coffee'
config = require './config.coffee'
util = require './common/util.coffee'
###
* import route module
###
indexRoute = require './routes/index.coffee'
registerRoute = require './routes/register.coffee'
logRoute = require './routes/log.coffee'
aboutRoute = require './routes/about.coffee'
articleRoute = require './routes/article.coffee'
messageRoute = require './routes/message.coffee'
worksRoute = require './routes/works.coffee'
discussRoute = require './routes/discuss.coffee'
userRoute = require './routes/user.coffee'
albumRoute = require './routes/album.coffee'
###
* create a application using MVC frame 'Express'
###
app = express()
###
* use modules
###
app.locals.moment = moment
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use busbody { immediate: true }
app.use express.static(path.join(__dirname, '/views'))
app.use cookieParser()
app.use session {
secret: config.SECRET_KEY
resave: yes
saveUninitialized: yes
}
app.use util.setLocalsUser
app.use multer({ dest: './views/assets/img/user' })
###
* set views
###
app.set 'views', path.join(__dirname, 'views')
app.set 'view engine', 'jade'
###
* use routes' handler
###
app.use '/', indexRoute
app.use '/register', registerRoute
app.use '/log', logRoute
app.use '/about', aboutRoute
app.use '/article', articleRoute
app.use '/message', messageRoute
app.use '/works', worksRoute
app.use '/discuss', discussRoute
app.use '/user', userRoute
app.use '/album', albumRoute
###
* init database
###
db.init()
###
* let server listening at port 2333
###
app.listen 2333
module.exports = app
| 196431 | # > File Name: app.coffee
# > Author: LY
# > Mail: <EMAIL>
# > Created Time: Wednesday, November 19, 2014 AM10:41:51 CST
###
* import package's module that would use
###
express = require 'express'
path = require 'path'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
favicon = require 'static-favicon'
busbody = require "connect-busboy"
session = require 'express-session'
logger = require 'morgan'
moment = require 'moment'
multer = require 'multer'
###
* import module that would use writen by laiy
###
db = require './db/db.coffee'
config = require './config.coffee'
util = require './common/util.coffee'
###
* import route module
###
indexRoute = require './routes/index.coffee'
registerRoute = require './routes/register.coffee'
logRoute = require './routes/log.coffee'
aboutRoute = require './routes/about.coffee'
articleRoute = require './routes/article.coffee'
messageRoute = require './routes/message.coffee'
worksRoute = require './routes/works.coffee'
discussRoute = require './routes/discuss.coffee'
userRoute = require './routes/user.coffee'
albumRoute = require './routes/album.coffee'
###
* create a application using MVC frame 'Express'
###
app = express()
###
* use modules
###
app.locals.moment = moment
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use busbody { immediate: true }
app.use express.static(path.join(__dirname, '/views'))
app.use cookieParser()
app.use session {
secret: config.SECRET_KEY
resave: yes
saveUninitialized: yes
}
app.use util.setLocalsUser
app.use multer({ dest: './views/assets/img/user' })
###
* set views
###
app.set 'views', path.join(__dirname, 'views')
app.set 'view engine', 'jade'
###
* use routes' handler
###
app.use '/', indexRoute
app.use '/register', registerRoute
app.use '/log', logRoute
app.use '/about', aboutRoute
app.use '/article', articleRoute
app.use '/message', messageRoute
app.use '/works', worksRoute
app.use '/discuss', discussRoute
app.use '/user', userRoute
app.use '/album', albumRoute
###
* init database
###
db.init()
###
* let server listening at port 2333
###
app.listen 2333
module.exports = app
| true | # > File Name: app.coffee
# > Author: LY
# > Mail: PI:EMAIL:<EMAIL>END_PI
# > Created Time: Wednesday, November 19, 2014 AM10:41:51 CST
###
* import package's module that would use
###
express = require 'express'
path = require 'path'
bodyParser = require 'body-parser'
cookieParser = require 'cookie-parser'
favicon = require 'static-favicon'
busbody = require "connect-busboy"
session = require 'express-session'
logger = require 'morgan'
moment = require 'moment'
multer = require 'multer'
###
* import module that would use writen by laiy
###
db = require './db/db.coffee'
config = require './config.coffee'
util = require './common/util.coffee'
###
* import route module
###
indexRoute = require './routes/index.coffee'
registerRoute = require './routes/register.coffee'
logRoute = require './routes/log.coffee'
aboutRoute = require './routes/about.coffee'
articleRoute = require './routes/article.coffee'
messageRoute = require './routes/message.coffee'
worksRoute = require './routes/works.coffee'
discussRoute = require './routes/discuss.coffee'
userRoute = require './routes/user.coffee'
albumRoute = require './routes/album.coffee'
###
* create a application using MVC frame 'Express'
###
app = express()
###
* use modules
###
app.locals.moment = moment
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use busbody { immediate: true }
app.use express.static(path.join(__dirname, '/views'))
app.use cookieParser()
app.use session {
secret: config.SECRET_KEY
resave: yes
saveUninitialized: yes
}
app.use util.setLocalsUser
app.use multer({ dest: './views/assets/img/user' })
###
* set views
###
app.set 'views', path.join(__dirname, 'views')
app.set 'view engine', 'jade'
###
* use routes' handler
###
app.use '/', indexRoute
app.use '/register', registerRoute
app.use '/log', logRoute
app.use '/about', aboutRoute
app.use '/article', articleRoute
app.use '/message', messageRoute
app.use '/works', worksRoute
app.use '/discuss', discussRoute
app.use '/user', userRoute
app.use '/album', albumRoute
###
* init database
###
db.init()
###
* let server listening at port 2333
###
app.listen 2333
module.exports = app
|
[
{
"context": "ction\n model = new Stem.Model\n name: \"John\"\n collection.add model\n\n it \"should allow",
"end": 210,
"score": 0.9998304843902588,
"start": 206,
"tag": "NAME",
"value": "John"
},
{
"context": "on.bind \"*\", starCallback\n\n model.set name:... | source/javascripts/specs/collection.js.coffee | petalmd/stem | 0 | describe "Collection", ->
describe "a simple collectcion", ->
collection = null
model = null
beforeEach ->
collection = new Stem.Collection
model = new Stem.Model
name: "John"
collection.add model
it "should allow fetching its models by index", ->
expect(collection.at(0)).toBe model
it "should trigger its models events", ->
changeCallback = sinon.spy()
starCallback = sinon.spy()
collection.bind "change:name", changeCallback
collection.bind "*", starCallback
model.set name: "Peter"
expect(changeCallback).toHaveBeenCalledWith collection, model, "name", "Peter"
expect(starCallback).toHaveBeenCalledWith "change:name", collection, model, "name", "Peter"
it "should allow to reset its model array with a new array", ->
models = [
new Stem.Model
name: "Robert"
new Stem.Model
name: "Georges"
]
collection.reset models
expect(collection.size()).toEqual 2
expect(collection.at(0).get "name").toEqual "Robert"
expect(collection.at(1).get "name").toEqual "Georges"
it "should squash all arguments passed to add and reset", ->
models = [
new Stem.Model
name: "Robert"
new Stem.Model
name: "Georges"
]
collection.reset()
collection.add models
expect(collection.size()).toEqual 2
collection.reset()
collection.add.apply collection, models
expect(collection.size()).toEqual 2
collection.reset models
expect(collection.size()).toEqual 2
collection.reset.apply collection, models
expect(collection.size()).toEqual 2
it "should implement underscore.js's collection helpers", ->
expect(collection['map']).toBeDefined()
expect(collection['select']).toBeDefined()
describe "events", ->
it "should trigger an event when a model is added", ->
callback = sinon.spy()
collection = new Stem.Collection
collection.bind "add", callback
model = new Stem.Model {name: "John"}
collection.add model
expect(callback).toHaveBeenCalledWith collection, model
it "should trigger an event when it is reset", ->
callback = sinon.spy()
collection = new Stem.Collection
collection.bind "reset", callback
collection.reset()
expect(callback).toHaveBeenCalledWith collection
it "should trigger add events and a reset event when it is reset with new models", ->
addCallback = sinon.spy()
resetCallback = sinon.spy()
collection = new Stem.Collection
collection.bind "add", addCallback
collection.bind "reset", resetCallback
collection.reset {name: "John"}, {name: "Peter"}
expect(addCallback).toHaveBeenCalledTwice()
expect(resetCallback).toHaveBeenCalledOnce()
describe "adding and removing", ->
it "should allow to create a new Stem.Model subclass from attributes when adding (wrapped constructor)", ->
class SampleModel extends Stem.Model
class SampleCollection extends Stem.Collection
model: -> SampleModel
collection = new SampleCollection
name: "Peter"
expect(collection.size()).toEqual 1
expect(collection.at(0) instanceof SampleModel).toBeTruthy()
expect(collection.at(0).get "name").toEqual "Peter"
it "should allow to create a new Stem.Model subclass from attributes when adding (direct constructor)", ->
class SampleModel extends Stem.Model
class SampleCollection extends Stem.Collection
model: SampleModel
collection = new SampleCollection
name: "Peter"
expect(collection.size()).toEqual 1
expect(collection.at(0) instanceof SampleModel).toBeTruthy()
expect(collection.at(0).get "name").toEqual "Peter"
it "should allow removing", ->
collection = new Stem.Collection
model = new Stem.Model {name: "John"}
collection.add model
expect(collection.indexOf(model)).toEqual 0
collection.remove model
expect(collection.indexOf(model)).toEqual -1
it "should allow polymorphism", ->
class ModelA extends Stem.Model
class ModelB extends Stem.Model
class PolymorphicCollection extends Stem.Collection
model: (attributes) ->
if attributes["model"] == "ModelA" then ModelA else ModelB
collection = new PolymorphicCollection [
{model: "ModelA", name: "Peter"},
{model: "ModelB", name: "John"}
]
expect(collection.size()).toEqual 2
expect(collection.at(0) instanceof ModelA).toBeTruthy()
expect(collection.at(1) instanceof ModelB).toBeTruthy()
expect(collection.at(0).get "name").toEqual "Peter"
expect(collection.at(1).get "name").toEqual "John" | 62985 | describe "Collection", ->
describe "a simple collectcion", ->
collection = null
model = null
beforeEach ->
collection = new Stem.Collection
model = new Stem.Model
name: "<NAME>"
collection.add model
it "should allow fetching its models by index", ->
expect(collection.at(0)).toBe model
it "should trigger its models events", ->
changeCallback = sinon.spy()
starCallback = sinon.spy()
collection.bind "change:name", changeCallback
collection.bind "*", starCallback
model.set name: "<NAME>"
expect(changeCallback).toHaveBeenCalledWith collection, model, "name", "<NAME>"
expect(starCallback).toHaveBeenCalledWith "change:name", collection, model, "name", "<NAME>"
it "should allow to reset its model array with a new array", ->
models = [
new Stem.Model
name: "<NAME>"
new Stem.Model
name: "<NAME>"
]
collection.reset models
expect(collection.size()).toEqual 2
expect(collection.at(0).get "name").toEqual "<NAME>"
expect(collection.at(1).get "name").toEqual "<NAME>"
it "should squash all arguments passed to add and reset", ->
models = [
new Stem.Model
name: "<NAME>"
new Stem.Model
name: "<NAME>"
]
collection.reset()
collection.add models
expect(collection.size()).toEqual 2
collection.reset()
collection.add.apply collection, models
expect(collection.size()).toEqual 2
collection.reset models
expect(collection.size()).toEqual 2
collection.reset.apply collection, models
expect(collection.size()).toEqual 2
it "should implement underscore.js's collection helpers", ->
expect(collection['map']).toBeDefined()
expect(collection['select']).toBeDefined()
describe "events", ->
it "should trigger an event when a model is added", ->
callback = sinon.spy()
collection = new Stem.Collection
collection.bind "add", callback
model = new Stem.Model {name: "<NAME>"}
collection.add model
expect(callback).toHaveBeenCalledWith collection, model
it "should trigger an event when it is reset", ->
callback = sinon.spy()
collection = new Stem.Collection
collection.bind "reset", callback
collection.reset()
expect(callback).toHaveBeenCalledWith collection
it "should trigger add events and a reset event when it is reset with new models", ->
addCallback = sinon.spy()
resetCallback = sinon.spy()
collection = new Stem.Collection
collection.bind "add", addCallback
collection.bind "reset", resetCallback
collection.reset {name: "<NAME>"}, {name: "<NAME>"}
expect(addCallback).toHaveBeenCalledTwice()
expect(resetCallback).toHaveBeenCalledOnce()
describe "adding and removing", ->
it "should allow to create a new Stem.Model subclass from attributes when adding (wrapped constructor)", ->
class SampleModel extends Stem.Model
class SampleCollection extends Stem.Collection
model: -> SampleModel
collection = new SampleCollection
name: "<NAME>"
expect(collection.size()).toEqual 1
expect(collection.at(0) instanceof SampleModel).toBeTruthy()
expect(collection.at(0).get "name").toEqual "<NAME>"
it "should allow to create a new Stem.Model subclass from attributes when adding (direct constructor)", ->
class SampleModel extends Stem.Model
class SampleCollection extends Stem.Collection
model: SampleModel
collection = new SampleCollection
name: "<NAME>"
expect(collection.size()).toEqual 1
expect(collection.at(0) instanceof SampleModel).toBeTruthy()
expect(collection.at(0).get "name").toEqual "<NAME>"
it "should allow removing", ->
collection = new Stem.Collection
model = new Stem.Model {name: "<NAME>"}
collection.add model
expect(collection.indexOf(model)).toEqual 0
collection.remove model
expect(collection.indexOf(model)).toEqual -1
it "should allow polymorphism", ->
class ModelA extends Stem.Model
class ModelB extends Stem.Model
class PolymorphicCollection extends Stem.Collection
model: (attributes) ->
if attributes["model"] == "ModelA" then ModelA else ModelB
collection = new PolymorphicCollection [
{model: "ModelA", name: "<NAME>"},
{model: "ModelB", name: "<NAME>"}
]
expect(collection.size()).toEqual 2
expect(collection.at(0) instanceof ModelA).toBeTruthy()
expect(collection.at(1) instanceof ModelB).toBeTruthy()
expect(collection.at(0).get "name").toEqual "<NAME>"
expect(collection.at(1).get "name").toEqual "<NAME>" | true | describe "Collection", ->
describe "a simple collectcion", ->
collection = null
model = null
beforeEach ->
collection = new Stem.Collection
model = new Stem.Model
name: "PI:NAME:<NAME>END_PI"
collection.add model
it "should allow fetching its models by index", ->
expect(collection.at(0)).toBe model
it "should trigger its models events", ->
changeCallback = sinon.spy()
starCallback = sinon.spy()
collection.bind "change:name", changeCallback
collection.bind "*", starCallback
model.set name: "PI:NAME:<NAME>END_PI"
expect(changeCallback).toHaveBeenCalledWith collection, model, "name", "PI:NAME:<NAME>END_PI"
expect(starCallback).toHaveBeenCalledWith "change:name", collection, model, "name", "PI:NAME:<NAME>END_PI"
it "should allow to reset its model array with a new array", ->
models = [
new Stem.Model
name: "PI:NAME:<NAME>END_PI"
new Stem.Model
name: "PI:NAME:<NAME>END_PI"
]
collection.reset models
expect(collection.size()).toEqual 2
expect(collection.at(0).get "name").toEqual "PI:NAME:<NAME>END_PI"
expect(collection.at(1).get "name").toEqual "PI:NAME:<NAME>END_PI"
it "should squash all arguments passed to add and reset", ->
models = [
new Stem.Model
name: "PI:NAME:<NAME>END_PI"
new Stem.Model
name: "PI:NAME:<NAME>END_PI"
]
collection.reset()
collection.add models
expect(collection.size()).toEqual 2
collection.reset()
collection.add.apply collection, models
expect(collection.size()).toEqual 2
collection.reset models
expect(collection.size()).toEqual 2
collection.reset.apply collection, models
expect(collection.size()).toEqual 2
it "should implement underscore.js's collection helpers", ->
expect(collection['map']).toBeDefined()
expect(collection['select']).toBeDefined()
describe "events", ->
it "should trigger an event when a model is added", ->
callback = sinon.spy()
collection = new Stem.Collection
collection.bind "add", callback
model = new Stem.Model {name: "PI:NAME:<NAME>END_PI"}
collection.add model
expect(callback).toHaveBeenCalledWith collection, model
it "should trigger an event when it is reset", ->
callback = sinon.spy()
collection = new Stem.Collection
collection.bind "reset", callback
collection.reset()
expect(callback).toHaveBeenCalledWith collection
it "should trigger add events and a reset event when it is reset with new models", ->
addCallback = sinon.spy()
resetCallback = sinon.spy()
collection = new Stem.Collection
collection.bind "add", addCallback
collection.bind "reset", resetCallback
collection.reset {name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}
expect(addCallback).toHaveBeenCalledTwice()
expect(resetCallback).toHaveBeenCalledOnce()
describe "adding and removing", ->
it "should allow to create a new Stem.Model subclass from attributes when adding (wrapped constructor)", ->
class SampleModel extends Stem.Model
class SampleCollection extends Stem.Collection
model: -> SampleModel
collection = new SampleCollection
name: "PI:NAME:<NAME>END_PI"
expect(collection.size()).toEqual 1
expect(collection.at(0) instanceof SampleModel).toBeTruthy()
expect(collection.at(0).get "name").toEqual "PI:NAME:<NAME>END_PI"
it "should allow to create a new Stem.Model subclass from attributes when adding (direct constructor)", ->
class SampleModel extends Stem.Model
class SampleCollection extends Stem.Collection
model: SampleModel
collection = new SampleCollection
name: "PI:NAME:<NAME>END_PI"
expect(collection.size()).toEqual 1
expect(collection.at(0) instanceof SampleModel).toBeTruthy()
expect(collection.at(0).get "name").toEqual "PI:NAME:<NAME>END_PI"
it "should allow removing", ->
collection = new Stem.Collection
model = new Stem.Model {name: "PI:NAME:<NAME>END_PI"}
collection.add model
expect(collection.indexOf(model)).toEqual 0
collection.remove model
expect(collection.indexOf(model)).toEqual -1
it "should allow polymorphism", ->
class ModelA extends Stem.Model
class ModelB extends Stem.Model
class PolymorphicCollection extends Stem.Collection
model: (attributes) ->
if attributes["model"] == "ModelA" then ModelA else ModelB
collection = new PolymorphicCollection [
{model: "ModelA", name: "PI:NAME:<NAME>END_PI"},
{model: "ModelB", name: "PI:NAME:<NAME>END_PI"}
]
expect(collection.size()).toEqual 2
expect(collection.at(0) instanceof ModelA).toBeTruthy()
expect(collection.at(1) instanceof ModelB).toBeTruthy()
expect(collection.at(0).get "name").toEqual "PI:NAME:<NAME>END_PI"
expect(collection.at(1).get "name").toEqual "PI:NAME:<NAME>END_PI" |
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999151229858398,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/input-handler.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @InputHandler
@CANCEL = 'cancel'
@SUBMIT = 'submit'
@KEY_ENTER = 13
@KEY_ESC = 27
@textarea: (callback) =>
(event) =>
if event.keyCode == @KEY_ESC
type = @CANCEL
else if event.keyCode == @KEY_ENTER && !event.shiftKey && osu.isDesktop()
event.preventDefault()
type = @SUBMIT
callback?(type, event)
| 119661 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @InputHandler
@CANCEL = 'cancel'
@SUBMIT = 'submit'
@KEY_ENTER = 13
@KEY_ESC = 27
@textarea: (callback) =>
(event) =>
if event.keyCode == @KEY_ESC
type = @CANCEL
else if event.keyCode == @KEY_ENTER && !event.shiftKey && osu.isDesktop()
event.preventDefault()
type = @SUBMIT
callback?(type, event)
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @InputHandler
@CANCEL = 'cancel'
@SUBMIT = 'submit'
@KEY_ENTER = 13
@KEY_ESC = 27
@textarea: (callback) =>
(event) =>
if event.keyCode == @KEY_ESC
type = @CANCEL
else if event.keyCode == @KEY_ENTER && !event.shiftKey && osu.isDesktop()
event.preventDefault()
type = @SUBMIT
callback?(type, event)
|
[
{
"context": "orts.rfc_2144_B1 = (T,cb) ->\n key = Buffer.from \"0123456712345678234567893456789A\", \"hex\"\n plaintext = Buffer.from \"0123456789ABCD",
"end": 323,
"score": 0.9997613430023193,
"start": 291,
"tag": "KEY",
"value": "0123456712345678234567893456789A"
},
{
"context":... | test/files/cast5.iced | samkenxstream/kbpgp | 464 | {CAST5} = require '../../lib/openpgp/cast5'
{WordArray} = require 'triplesec'
{encrypt,decrypt} = require '../../lib/openpgp/cfb'
buf_to_words = (b) -> (WordArray.from_buffer b).words
words_to_buf = (w) -> (new WordArray w).to_buffer()
exports.rfc_2144_B1 = (T,cb) ->
key = Buffer.from "0123456712345678234567893456789A", "hex"
plaintext = Buffer.from "0123456789ABCDEF", "hex"
ciphertext = Buffer.from "238B4FE5847E44B2", "hex"
cast5 = new CAST5 WordArray.from_buffer key
out_wa = cast5.encryptBlock buf_to_words plaintext
out_buf = words_to_buf out_wa
T.equal out_buf.toString('hex'), ciphertext.toString('hex'), 'encryption worked'
pt2_wa = cast5.decryptBlock out_wa
pt2_buf = words_to_buf pt2_wa
T.equal plaintext.toString('hex'), pt2_buf.toString('hex'), 'decryption worked'
cb()
exports.cfb = (T,cb) ->
key = Buffer.from "583d18c32d8857a627ea3e86d6feada8", "hex"
iv = Buffer.from 'fe40e836b0e9b193', 'hex'
dat = "i8xDA+KyfRK5q6c2h5YHgt+6LQOJsQB2TP98obYZJO8DAR02EyJPRTuA4sVsOJQGbbzC+6mYhAwYT2w21Qx9rfbH85kw6M/68O8WGTqb5cH418+Ff/jK9211+a4CQGJTjZKCUkRTWB08mDiniFp3c5ohkFjJ/542DR31PyFr7Qc="
plaintext = Buffer.from dat, "base64"
ciphertext = encrypt { block_cipher_class : CAST5, key, iv, plaintext }
pt2 = decrypt { block_cipher_class : CAST5, key, iv, ciphertext }
T.equal pt2.toString('base64'), dat, "in and out with cfb"
cb()
| 96047 | {CAST5} = require '../../lib/openpgp/cast5'
{WordArray} = require 'triplesec'
{encrypt,decrypt} = require '../../lib/openpgp/cfb'
buf_to_words = (b) -> (WordArray.from_buffer b).words
words_to_buf = (w) -> (new WordArray w).to_buffer()
exports.rfc_2144_B1 = (T,cb) ->
key = Buffer.from "<KEY>", "hex"
plaintext = Buffer.from "0123456789ABCDEF", "hex"
ciphertext = Buffer.from "238B4FE5847E44B2", "hex"
cast5 = new CAST5 WordArray.from_buffer key
out_wa = cast5.encryptBlock buf_to_words plaintext
out_buf = words_to_buf out_wa
T.equal out_buf.toString('hex'), ciphertext.toString('hex'), 'encryption worked'
pt2_wa = cast5.decryptBlock out_wa
pt2_buf = words_to_buf pt2_wa
T.equal plaintext.toString('hex'), pt2_buf.toString('hex'), 'decryption worked'
cb()
exports.cfb = (T,cb) ->
key = Buffer.from "<KEY>", "hex"
iv = Buffer.from 'fe40e836b0e9b193', 'hex'
dat = "<KEY>ohkFj<KEY>/<KEY>
plaintext = Buffer.from dat, "base64"
ciphertext = encrypt { block_cipher_class : CAST5, key, iv, plaintext }
pt2 = decrypt { block_cipher_class : CAST5, key, iv, ciphertext }
T.equal pt2.toString('base64'), dat, "in and out with cfb"
cb()
| true | {CAST5} = require '../../lib/openpgp/cast5'
{WordArray} = require 'triplesec'
{encrypt,decrypt} = require '../../lib/openpgp/cfb'
buf_to_words = (b) -> (WordArray.from_buffer b).words
words_to_buf = (w) -> (new WordArray w).to_buffer()
exports.rfc_2144_B1 = (T,cb) ->
key = Buffer.from "PI:KEY:<KEY>END_PI", "hex"
plaintext = Buffer.from "0123456789ABCDEF", "hex"
ciphertext = Buffer.from "238B4FE5847E44B2", "hex"
cast5 = new CAST5 WordArray.from_buffer key
out_wa = cast5.encryptBlock buf_to_words plaintext
out_buf = words_to_buf out_wa
T.equal out_buf.toString('hex'), ciphertext.toString('hex'), 'encryption worked'
pt2_wa = cast5.decryptBlock out_wa
pt2_buf = words_to_buf pt2_wa
T.equal plaintext.toString('hex'), pt2_buf.toString('hex'), 'decryption worked'
cb()
exports.cfb = (T,cb) ->
key = Buffer.from "PI:KEY:<KEY>END_PI", "hex"
iv = Buffer.from 'fe40e836b0e9b193', 'hex'
dat = "PI:KEY:<KEY>END_PIohkFjPI:KEY:<KEY>END_PI/PI:KEY:<KEY>END_PI
plaintext = Buffer.from dat, "base64"
ciphertext = encrypt { block_cipher_class : CAST5, key, iv, plaintext }
pt2 = decrypt { block_cipher_class : CAST5, key, iv, ciphertext }
T.equal pt2.toString('base64'), dat, "in and out with cfb"
cb()
|
[
{
"context": "io.com\n\nCopyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>\n\nLicensed under the Apache License, Version 2.0 ",
"end": 194,
"score": 0.9999223351478577,
"start": 178,
"tag": "EMAIL",
"value": "info@chaibio.com"
}
] | frontend/javascripts/app/directives/v2/controllers/inline-exp-name-editor.controller.js.coffee | MakerButt/chaipcr | 1 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.controller 'InlineExpNameEditorCtrl', [
'$scope'
'focus'
'Experiment'
'$stateParams'
'expName'
'Protocol'
'Status'
'$timeout'
($scope, focus, Experiment, $stateParams, expName, Protocol, Status, $timeout) ->
if !Experiment.getCurrentExperiment()
Experiment.get {id: $stateParams.id}, (data) ->
Experiment.setCurrentExperiment data.experiment
$scope.experiment = data.experiment
$scope.experimentOrig = angular.copy data.experiment
else
$scope.experiment = Experiment.getCurrentExperiment()
$scope.experimentOrig = angular.copy $scope.experiment
$scope.editExpNameMode = false
$scope.ori_experiment_name = ''
$scope.adjustTextWidth = () ->
$timeout (() ->
field_width = Math.max(document.getElementById('inline_exp_name_plat').offsetWidth + 20, 150)
field_width = Math.min(document.getElementsByClassName('inline-exp-name')[0].offsetWidth - 110, field_width)
angular.element(document.getElementById('inline_exp_name_field')).css('width', (field_width) + 'px')
), 10
return
$scope.focusExpName = ->
field_width = Math.max(document.getElementById('inline_exp_name_plat').offsetWidth + 20, 150)
field_width = Math.min(document.getElementsByClassName('inline-exp-name')[0].offsetWidth - 110, field_width)
angular.element(document.getElementById('inline_exp_name_field')).css('width', (field_width) + 'px')
$scope.ori_experiment_name = $scope.experiment.name
$scope.removeMessages()
$scope.editExpNameMode = true
focus('editExpNameMode')
document.getElementById('inline_exp_name_field').select()
$scope.cancelExpName = ->
$scope.editModeOff()
$scope.experiment.name = $scope.ori_experiment_name
$scope.removeMessages = ->
$scope.successName = null
$scope.errors = null
$scope.editModeOff = ->
angular.element(document.getElementById('inline_exp_name_plat')).css('width', 'auto')
$scope.editExpNameMode = false
$scope.saveExperiment = (exp)->
return if $scope.expForm.$invalid
promise = Experiment.update({id: $scope.experiment.id}, experiment: $scope.experiment).$promise
promise.then ->
$scope.ori_experiment_name = $scope.experiment.name
expName.updateName($scope.experiment.name)
$timeout (() ->
$scope.successName = null
), 2000
promise.catch (resp) ->
$scope.errors = resp.data.errors
$scope.experiment = angular.copy $scope.experimentOrig
$scope.experiment.name = $scope.ori_experiment_name
promise.finally ->
if !$scope.errors
$scope.editModeOff()
$scope.$on 'window:resize', ->
$scope.adjustTextWidth()
]
| 79666 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.controller 'InlineExpNameEditorCtrl', [
'$scope'
'focus'
'Experiment'
'$stateParams'
'expName'
'Protocol'
'Status'
'$timeout'
($scope, focus, Experiment, $stateParams, expName, Protocol, Status, $timeout) ->
if !Experiment.getCurrentExperiment()
Experiment.get {id: $stateParams.id}, (data) ->
Experiment.setCurrentExperiment data.experiment
$scope.experiment = data.experiment
$scope.experimentOrig = angular.copy data.experiment
else
$scope.experiment = Experiment.getCurrentExperiment()
$scope.experimentOrig = angular.copy $scope.experiment
$scope.editExpNameMode = false
$scope.ori_experiment_name = ''
$scope.adjustTextWidth = () ->
$timeout (() ->
field_width = Math.max(document.getElementById('inline_exp_name_plat').offsetWidth + 20, 150)
field_width = Math.min(document.getElementsByClassName('inline-exp-name')[0].offsetWidth - 110, field_width)
angular.element(document.getElementById('inline_exp_name_field')).css('width', (field_width) + 'px')
), 10
return
$scope.focusExpName = ->
field_width = Math.max(document.getElementById('inline_exp_name_plat').offsetWidth + 20, 150)
field_width = Math.min(document.getElementsByClassName('inline-exp-name')[0].offsetWidth - 110, field_width)
angular.element(document.getElementById('inline_exp_name_field')).css('width', (field_width) + 'px')
$scope.ori_experiment_name = $scope.experiment.name
$scope.removeMessages()
$scope.editExpNameMode = true
focus('editExpNameMode')
document.getElementById('inline_exp_name_field').select()
$scope.cancelExpName = ->
$scope.editModeOff()
$scope.experiment.name = $scope.ori_experiment_name
$scope.removeMessages = ->
$scope.successName = null
$scope.errors = null
$scope.editModeOff = ->
angular.element(document.getElementById('inline_exp_name_plat')).css('width', 'auto')
$scope.editExpNameMode = false
$scope.saveExperiment = (exp)->
return if $scope.expForm.$invalid
promise = Experiment.update({id: $scope.experiment.id}, experiment: $scope.experiment).$promise
promise.then ->
$scope.ori_experiment_name = $scope.experiment.name
expName.updateName($scope.experiment.name)
$timeout (() ->
$scope.successName = null
), 2000
promise.catch (resp) ->
$scope.errors = resp.data.errors
$scope.experiment = angular.copy $scope.experimentOrig
$scope.experiment.name = $scope.ori_experiment_name
promise.finally ->
if !$scope.errors
$scope.editModeOff()
$scope.$on 'window:resize', ->
$scope.adjustTextWidth()
]
| true | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.ChaiBioTech.ngApp.controller 'InlineExpNameEditorCtrl', [
'$scope'
'focus'
'Experiment'
'$stateParams'
'expName'
'Protocol'
'Status'
'$timeout'
($scope, focus, Experiment, $stateParams, expName, Protocol, Status, $timeout) ->
if !Experiment.getCurrentExperiment()
Experiment.get {id: $stateParams.id}, (data) ->
Experiment.setCurrentExperiment data.experiment
$scope.experiment = data.experiment
$scope.experimentOrig = angular.copy data.experiment
else
$scope.experiment = Experiment.getCurrentExperiment()
$scope.experimentOrig = angular.copy $scope.experiment
$scope.editExpNameMode = false
$scope.ori_experiment_name = ''
$scope.adjustTextWidth = () ->
$timeout (() ->
field_width = Math.max(document.getElementById('inline_exp_name_plat').offsetWidth + 20, 150)
field_width = Math.min(document.getElementsByClassName('inline-exp-name')[0].offsetWidth - 110, field_width)
angular.element(document.getElementById('inline_exp_name_field')).css('width', (field_width) + 'px')
), 10
return
$scope.focusExpName = ->
field_width = Math.max(document.getElementById('inline_exp_name_plat').offsetWidth + 20, 150)
field_width = Math.min(document.getElementsByClassName('inline-exp-name')[0].offsetWidth - 110, field_width)
angular.element(document.getElementById('inline_exp_name_field')).css('width', (field_width) + 'px')
$scope.ori_experiment_name = $scope.experiment.name
$scope.removeMessages()
$scope.editExpNameMode = true
focus('editExpNameMode')
document.getElementById('inline_exp_name_field').select()
$scope.cancelExpName = ->
$scope.editModeOff()
$scope.experiment.name = $scope.ori_experiment_name
$scope.removeMessages = ->
$scope.successName = null
$scope.errors = null
$scope.editModeOff = ->
angular.element(document.getElementById('inline_exp_name_plat')).css('width', 'auto')
$scope.editExpNameMode = false
$scope.saveExperiment = (exp)->
return if $scope.expForm.$invalid
promise = Experiment.update({id: $scope.experiment.id}, experiment: $scope.experiment).$promise
promise.then ->
$scope.ori_experiment_name = $scope.experiment.name
expName.updateName($scope.experiment.name)
$timeout (() ->
$scope.successName = null
), 2000
promise.catch (resp) ->
$scope.errors = resp.data.errors
$scope.experiment = angular.copy $scope.experimentOrig
$scope.experiment.name = $scope.ori_experiment_name
promise.finally ->
if !$scope.errors
$scope.editModeOff()
$scope.$on 'window:resize', ->
$scope.adjustTextWidth()
]
|
[
{
"context": "til\n\n The MIT License (MIT)\n\n Copyright (c) 2014 Yasuhiro Okuno\n\n Permission is hereby granted, free of charge, ",
"end": 88,
"score": 0.9998679757118225,
"start": 74,
"tag": "NAME",
"value": "Yasuhiro Okuno"
}
] | coffee_lib/crowdutil/subcmd/create-user.coffee | koma75/crowdutil | 1 | ###
@license
crowdutil
The MIT License (MIT)
Copyright (c) 2014 Yasuhiro Okuno
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
crhelp = require '../helper/crhelper'
help = require '../helper/helper'
isOptOK = (opts) ->
rc = true
if(
!help.opIsType(opts, '-f', 'string') ||
!help.isName(opts['-f'][0], false)
)
rc = false
logger.error 'first name not valid'
console.log 'E, first name not valid'
if(
!help.opIsType(opts, '-l', 'string') ||
!help.isName(opts['-l'][0], false)
)
rc = false
logger.error 'last name not supplied'
console.log 'E, last name not supplied'
if(
!help.opIsType(opts, '-d', 'string') ||
!help.isName(opts['-d'][0], true)
)
logger.info 'disp name not supplied'
console.log 'I, disp name not supplied'
opts['-d'] = []
opts['-d'][0] = opts['-f'][0] +
' ' + opts['-l'][0]
if(
!help.opIsType(opts, '-e', 'string') ||
!help.isEmail(opts['-e'][0])
)
rc = false
logger.error 'email not supplied or invalid'
console.log 'E, email not supplied or invalid'
if(
!help.opIsType(opts, '-u', 'string') ||
!help.isName(opts['-u'][0], false)
)
rc = false
logger.error 'uid not supplied'
console.log 'E, uid not supplied'
if(
!help.opIsType(opts, 'p', 'string') ||
!help.isPass(opts['-p'][0])
)
logger.info 'password not supplied. using a random password.'
console.log 'I, password not supplied. using a random password.'
opts['-p'] = []
opts['-p'][0] = help.randPass()
console.log "I, using a random password: #{opts['-p'][0]}"
return rc
###
###
exports.run = (options) ->
logger.trace 'running : create-user\n\n\n'
logger.debug options
if !isOptOK(options)
return
logger.debug 'creating user with:\n' + JSON.stringify(options,null,2)
crowd = options['crowd']
crowd.user.create(
options['-f'][0],
options['-l'][0],
options['-d'][0],
options['-e'][0],
options['-u'][0],
options['-p'][0],
(err) ->
if err
logger.error err.message
console.log "E, user creation failed. See the log for details"
else
# check if user really was created
crhelp.findUser(crowd, {
uid: options['-u'][0]
}, (err, res) ->
if err
console.log "W, user creation returned success but could not be found."
console.log "W, Confirm at the Crowd admin console for assurance."
logger.warn err.message
return
logger.info JSON.stringify(res)
console.log "I, user created successfully:"
console.log JSON.stringify(res,null,2)
)
)
| 24514 | ###
@license
crowdutil
The MIT License (MIT)
Copyright (c) 2014 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
crhelp = require '../helper/crhelper'
help = require '../helper/helper'
isOptOK = (opts) ->
rc = true
if(
!help.opIsType(opts, '-f', 'string') ||
!help.isName(opts['-f'][0], false)
)
rc = false
logger.error 'first name not valid'
console.log 'E, first name not valid'
if(
!help.opIsType(opts, '-l', 'string') ||
!help.isName(opts['-l'][0], false)
)
rc = false
logger.error 'last name not supplied'
console.log 'E, last name not supplied'
if(
!help.opIsType(opts, '-d', 'string') ||
!help.isName(opts['-d'][0], true)
)
logger.info 'disp name not supplied'
console.log 'I, disp name not supplied'
opts['-d'] = []
opts['-d'][0] = opts['-f'][0] +
' ' + opts['-l'][0]
if(
!help.opIsType(opts, '-e', 'string') ||
!help.isEmail(opts['-e'][0])
)
rc = false
logger.error 'email not supplied or invalid'
console.log 'E, email not supplied or invalid'
if(
!help.opIsType(opts, '-u', 'string') ||
!help.isName(opts['-u'][0], false)
)
rc = false
logger.error 'uid not supplied'
console.log 'E, uid not supplied'
if(
!help.opIsType(opts, 'p', 'string') ||
!help.isPass(opts['-p'][0])
)
logger.info 'password not supplied. using a random password.'
console.log 'I, password not supplied. using a random password.'
opts['-p'] = []
opts['-p'][0] = help.randPass()
console.log "I, using a random password: #{opts['-p'][0]}"
return rc
###
###
exports.run = (options) ->
logger.trace 'running : create-user\n\n\n'
logger.debug options
if !isOptOK(options)
return
logger.debug 'creating user with:\n' + JSON.stringify(options,null,2)
crowd = options['crowd']
crowd.user.create(
options['-f'][0],
options['-l'][0],
options['-d'][0],
options['-e'][0],
options['-u'][0],
options['-p'][0],
(err) ->
if err
logger.error err.message
console.log "E, user creation failed. See the log for details"
else
# check if user really was created
crhelp.findUser(crowd, {
uid: options['-u'][0]
}, (err, res) ->
if err
console.log "W, user creation returned success but could not be found."
console.log "W, Confirm at the Crowd admin console for assurance."
logger.warn err.message
return
logger.info JSON.stringify(res)
console.log "I, user created successfully:"
console.log JSON.stringify(res,null,2)
)
)
| true | ###
@license
crowdutil
The MIT License (MIT)
Copyright (c) 2014 PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
crhelp = require '../helper/crhelper'
help = require '../helper/helper'
isOptOK = (opts) ->
rc = true
if(
!help.opIsType(opts, '-f', 'string') ||
!help.isName(opts['-f'][0], false)
)
rc = false
logger.error 'first name not valid'
console.log 'E, first name not valid'
if(
!help.opIsType(opts, '-l', 'string') ||
!help.isName(opts['-l'][0], false)
)
rc = false
logger.error 'last name not supplied'
console.log 'E, last name not supplied'
if(
!help.opIsType(opts, '-d', 'string') ||
!help.isName(opts['-d'][0], true)
)
logger.info 'disp name not supplied'
console.log 'I, disp name not supplied'
opts['-d'] = []
opts['-d'][0] = opts['-f'][0] +
' ' + opts['-l'][0]
if(
!help.opIsType(opts, '-e', 'string') ||
!help.isEmail(opts['-e'][0])
)
rc = false
logger.error 'email not supplied or invalid'
console.log 'E, email not supplied or invalid'
if(
!help.opIsType(opts, '-u', 'string') ||
!help.isName(opts['-u'][0], false)
)
rc = false
logger.error 'uid not supplied'
console.log 'E, uid not supplied'
if(
!help.opIsType(opts, 'p', 'string') ||
!help.isPass(opts['-p'][0])
)
logger.info 'password not supplied. using a random password.'
console.log 'I, password not supplied. using a random password.'
opts['-p'] = []
opts['-p'][0] = help.randPass()
console.log "I, using a random password: #{opts['-p'][0]}"
return rc
###
###
exports.run = (options) ->
logger.trace 'running : create-user\n\n\n'
logger.debug options
if !isOptOK(options)
return
logger.debug 'creating user with:\n' + JSON.stringify(options,null,2)
crowd = options['crowd']
crowd.user.create(
options['-f'][0],
options['-l'][0],
options['-d'][0],
options['-e'][0],
options['-u'][0],
options['-p'][0],
(err) ->
if err
logger.error err.message
console.log "E, user creation failed. See the log for details"
else
# check if user really was created
crhelp.findUser(crowd, {
uid: options['-u'][0]
}, (err, res) ->
if err
console.log "W, user creation returned success but could not be found."
console.log "W, Confirm at the Crowd admin console for assurance."
logger.warn err.message
return
logger.info JSON.stringify(res)
console.log "I, user created successfully:"
console.log JSON.stringify(res,null,2)
)
)
|
[
{
"context": "# # Handle github timeline\n#\n# Copyright (c) 2013 JeongHoon Byun aka \"Outsider\", <http://blog.outsider.ne.kr/>\n# L",
"end": 64,
"score": 0.9998775124549866,
"start": 50,
"tag": "NAME",
"value": "JeongHoon Byun"
},
{
"context": "meline\n#\n# Copyright (c) 2013 Jeong... | src/timeline.coffee | uppalapatisujitha/CodingConventionofCommitHistory | 421 | # # Handle github timeline
#
# Copyright (c) 2013 JeongHoon Byun aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
restler = require 'restler'
path = require 'path'
fs = require 'fs'
helpers = require './helpers'
logger = helpers.logger
githubHost = 'https://api.github.com'
# github.json contained token should be in .tokens directory
# ex: { "cliendId": "", "clientSecret": "" }
# WARRING: MUST NOT commit github.json file
tokenPath = path.resolve "#{__dirname}", "../.tokens"
token = JSON.parse(fs.readFileSync "#{tokenPath}/github.json", 'utf8')
postfix = "?client_id=#{token.cliendId}&client_secret=#{token.clientSecret}"
tl = module.exports =
getCommitUrls: (timeline) ->
# GET /repos/:owner/:repo/commits/:sha
timeline = JSON.parse timeline if 'string' is helpers.extractType timeline
repo = timeline.repository
"/repos/#{repo.owner}/#{repo.name}/commits/#{sha[0]}" for sha in timeline.payload.shas
getCommitInfo: (url, callback) ->
restler.get(generateApiUrl url)
.on 'success', (data, res) ->
#'x-ratelimit-limit': '5000',
#'x-ratelimit-remaining': '4986',
logger.info "github api limit: #{res.headers['x-ratelimit-remaining']}" if res.headers['x-ratelimit-remaining'] % 50 is 0
logger.error "github api limit: #{res.headers['x-ratelimit-remaining']}" if res.headers['x-ratelimit-remaining'] < '10'
callback null, data, res
.on 'fail', (data, res) ->
callback data
.on 'complete', (err) ->
callback err if err instanceof Error
checkApiLimit: (callback) ->
restler.get(generateApiUrl "/users/whatever")
.on 'success', (data, res) ->
logger.debug "API rate ramained #{res.headers['x-ratelimit-remaining']}"
callback res.headers['x-ratelimit-remaining']
.on 'fail', (data, res) ->
logger.debug "API rate ramained #{res.headers['x-ratelimit-remaining']}"
callback res.headers['x-ratelimit-remaining']
# private
generateApiUrl = (url) ->
"#{githubHost}#{url}#{postfix}"
| 21891 | # # Handle github timeline
#
# Copyright (c) 2013 <NAME> aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
restler = require 'restler'
path = require 'path'
fs = require 'fs'
helpers = require './helpers'
logger = helpers.logger
githubHost = 'https://api.github.com'
# github.json contained token should be in .tokens directory
# ex: { "cliendId": "", "clientSecret": "" }
# WARRING: MUST NOT commit github.json file
tokenPath = path.resolve "#{__dirname}", "../.tokens"
token = JSON.parse(fs.readFileSync "#{tokenPath}/github.json", 'utf8')
postfix = "?client_id=#{token.cliendId}&client_secret=#{token.clientSecret}"
tl = module.exports =
getCommitUrls: (timeline) ->
# GET /repos/:owner/:repo/commits/:sha
timeline = JSON.parse timeline if 'string' is helpers.extractType timeline
repo = timeline.repository
"/repos/#{repo.owner}/#{repo.name}/commits/#{sha[0]}" for sha in timeline.payload.shas
getCommitInfo: (url, callback) ->
restler.get(generateApiUrl url)
.on 'success', (data, res) ->
#'x-ratelimit-limit': '5000',
#'x-ratelimit-remaining': '4986',
logger.info "github api limit: #{res.headers['x-ratelimit-remaining']}" if res.headers['x-ratelimit-remaining'] % 50 is 0
logger.error "github api limit: #{res.headers['x-ratelimit-remaining']}" if res.headers['x-ratelimit-remaining'] < '10'
callback null, data, res
.on 'fail', (data, res) ->
callback data
.on 'complete', (err) ->
callback err if err instanceof Error
checkApiLimit: (callback) ->
restler.get(generateApiUrl "/users/whatever")
.on 'success', (data, res) ->
logger.debug "API rate ramained #{res.headers['x-ratelimit-remaining']}"
callback res.headers['x-ratelimit-remaining']
.on 'fail', (data, res) ->
logger.debug "API rate ramained #{res.headers['x-ratelimit-remaining']}"
callback res.headers['x-ratelimit-remaining']
# private
generateApiUrl = (url) ->
"#{githubHost}#{url}#{postfix}"
| true | # # Handle github timeline
#
# Copyright (c) 2013 PI:NAME:<NAME>END_PI aka "Outsider", <http://blog.outsider.ne.kr/>
# Licensed under the MIT license.
# <http://outsider.mit-license.org/>
restler = require 'restler'
path = require 'path'
fs = require 'fs'
helpers = require './helpers'
logger = helpers.logger
githubHost = 'https://api.github.com'
# github.json contained token should be in .tokens directory
# ex: { "cliendId": "", "clientSecret": "" }
# WARRING: MUST NOT commit github.json file
tokenPath = path.resolve "#{__dirname}", "../.tokens"
token = JSON.parse(fs.readFileSync "#{tokenPath}/github.json", 'utf8')
postfix = "?client_id=#{token.cliendId}&client_secret=#{token.clientSecret}"
tl = module.exports =
getCommitUrls: (timeline) ->
# GET /repos/:owner/:repo/commits/:sha
timeline = JSON.parse timeline if 'string' is helpers.extractType timeline
repo = timeline.repository
"/repos/#{repo.owner}/#{repo.name}/commits/#{sha[0]}" for sha in timeline.payload.shas
getCommitInfo: (url, callback) ->
restler.get(generateApiUrl url)
.on 'success', (data, res) ->
#'x-ratelimit-limit': '5000',
#'x-ratelimit-remaining': '4986',
logger.info "github api limit: #{res.headers['x-ratelimit-remaining']}" if res.headers['x-ratelimit-remaining'] % 50 is 0
logger.error "github api limit: #{res.headers['x-ratelimit-remaining']}" if res.headers['x-ratelimit-remaining'] < '10'
callback null, data, res
.on 'fail', (data, res) ->
callback data
.on 'complete', (err) ->
callback err if err instanceof Error
checkApiLimit: (callback) ->
restler.get(generateApiUrl "/users/whatever")
.on 'success', (data, res) ->
logger.debug "API rate ramained #{res.headers['x-ratelimit-remaining']}"
callback res.headers['x-ratelimit-remaining']
.on 'fail', (data, res) ->
logger.debug "API rate ramained #{res.headers['x-ratelimit-remaining']}"
callback res.headers['x-ratelimit-remaining']
# private
generateApiUrl = (url) ->
"#{githubHost}#{url}#{postfix}"
|
[
{
"context": "o'\n 'lib/enviro'\n], ($, Enviro) ->\n\n ENV_KEY = 'StahkPhotos-Env'\n\n EnvElement = (env) ->\n \"\"\"<div id=\"StahkPh",
"end": 81,
"score": 0.9988880753517151,
"start": 66,
"tag": "KEY",
"value": "StahkPhotos-Env"
}
] | src/frontend/js/setup.coffee | geekjuice/StahkPhotos | 8 | define [
'zepto'
'lib/enviro'
], ($, Enviro) ->
ENV_KEY = 'StahkPhotos-Env'
EnvElement = (env) ->
"""<div id="StahkPhotos-Env">#{env}</div>"""
Setup = ->
switch
when Enviro.isQA(ENV_KEY)
$('body').append EnvElement('QA')
when Enviro.isLocal(ENV_KEY)
$('body').append EnvElement('Local')
| 157340 | define [
'zepto'
'lib/enviro'
], ($, Enviro) ->
ENV_KEY = '<KEY>'
EnvElement = (env) ->
"""<div id="StahkPhotos-Env">#{env}</div>"""
Setup = ->
switch
when Enviro.isQA(ENV_KEY)
$('body').append EnvElement('QA')
when Enviro.isLocal(ENV_KEY)
$('body').append EnvElement('Local')
| true | define [
'zepto'
'lib/enviro'
], ($, Enviro) ->
ENV_KEY = 'PI:KEY:<KEY>END_PI'
EnvElement = (env) ->
"""<div id="StahkPhotos-Env">#{env}</div>"""
Setup = ->
switch
when Enviro.isQA(ENV_KEY)
$('body').append EnvElement('QA')
when Enviro.isLocal(ENV_KEY)
$('body').append EnvElement('Local')
|
[
{
"context": "ature\n geometry: point1\n name: 'Platform'\n payloadFeature.setStyle balloonStyle\n ",
"end": 3305,
"score": 0.9949459433555603,
"start": 3297,
"tag": "NAME",
"value": "Platform"
}
] | app/assets/javascripts/tracker_new/tracker_new.js.coffee | AdlerFarHorizons/FarHorizonsApp | 1 | hostname = window.location.hostname
port = window.location.port
image_port = $('#data').data( 'imagePort' )
image_root = "http://" + hostname + ":" + image_port + "/"
start_lat = 41.1000322 # degrees
start_lon = -87.9167100 # degrees
start_zoom = 11
max_zoom = 16
# Enable pusher logging - don't include this in production
#Pusher.log = (message) ->
# if window.console and window.console.log
# window.console.log message
# return
pusher = new Pusher('c33f8eaa482b900bae75',
wsHost: hostname
wsPort: '8080'
wssPort: '8080'
enabledTransports: [
'ws'
'flash'
]
)
strokeRed = new ol.style.Stroke( color: 'red', width: 2 )
strokeBlue = new ol.style.Stroke(color: 'blue', width: 1, lineDash: [5,20] )
strokeCyan = new ol.style.Stroke( color: 'cyan', width: 2 )
fillRed = new ol.style.Fill(color: 'red')
fillBlue = new ol.style.Fill(color: 'blue')
balloonStyle = new ol.style.Style
image: new ol.style.Icon
anchor: [ 0.5, 1 ]
anchorXUnits: 'fraction'
anchorYUnits: 'fraction'
opacity: 1.00
scale: 1.25
src: image_root + 'balloon.png'
crossStyle = new ol.style.Style
image: new ol.style.RegularShape
stroke: strokeRed
points: 4
radius: 10
radius2: 0
angle: 0
deltaLineStyle = new ol.style.Style
stroke: strokeBlue
trackLineStyle = new ol.style.Style
stroke: strokeCyan
# Server data
platforms = $('#data').data('platforms')
platformIds = ( x.id for x in platforms )
platformIdents = ( x.ident for x in platforms )
platformBeacons = $('#data').data('beacons')
urlPlatformTracks = '/platform_tracks/'
urlVehicleLoc = '/chase_vehicle_location/'
thisVehicleId = $('#data').data( 'thisVehicle' ).id
thisVehicleIdent = $('#data').data( 'thisVehicle' ).ident
locDev = $('#data').data( 'locDev' )
# platformTracks is an array of hashes, one for each platform,
# where key:values are track idents:point array associated with that platform.
# platformTrack lines is similarly structured where the values
# are ol3 LineString objects
platformTracks = []
platformTrackLines = []
vectorSource = new ol.source.Vector( features: [] )
for x in platformIds
platformTracks.push( {} )
platformTrackLines.push( {} )
for x, index in platformTracks
for y in platformBeacons[index]
x[y.ident] = []
platformTrackLines[index][y.ident] =
new ol.geom.LineString( [] )
vectorSource.addFeature(
new ol.Feature(
ident: y.ident,
geometry: platformTrackLines[index][y.ident]
style: trackLineStyle
)
)
beaconRcvrs = $('#data').data('beaconRcvrs')
# Vector layer objects
point1 = null
point2 = null
deltaLine = null
payloadFeature = null
chaseFeature = null
deltaFeature = null
featuresLayer = new ol.layer.Vector( source: vectorSource )
for platformId, index in platformIds
pIndex = index # 'index' doesn't pass into function properly
$.getJSON urlPlatformTracks + platformId, (data) ->
console.log "get tracks: data:", data
if data
for track, num in data
ident = track.ident
temp = track.points[ track.points.length - 1 ]
point1 = new ol.geom.Point(
ol.proj.transform( [ temp.lon, temp.lat ], "EPSG:4326", "EPSG:3857")
)
payloadFeature = new ol.Feature
geometry: point1
name: 'Platform'
payloadFeature.setStyle balloonStyle
vectorSource.addFeature( payloadFeature )
platformTracks[pIndex][ident] = track
for point in track.points
platformTrackLines[pIndex][ident].appendCoordinate(
ol.proj.transform([ point.lon, point.lat ], "EPSG:4326", "EPSG:3857")
)
temp = point
console.log "platformTrackLines:", platformTrackLines
console.log "vectorSource features:", vectorSource.getFeatures()
satLayer = new ol.layer.Tile(
source: new ol.source.XYZ(
url: 'http://' + hostname + ':' + image_port + '/tiles/aerial/{z}/{x}/{y}.png'
layer: "sat"
maxZoom: max_zoom
)
visible: false
)
streetLayer = new ol.layer.Tile(
source: new ol.source.XYZ(
url: 'http://' + hostname + ':' + image_port + '/tiles/streets/{z}/{x}/{y}.png'
layer: "street"
maxZoom: max_zoom
)
visible: true
)
map = new ol.Map(
interactions: ol.interaction.defaults().extend([new ol.interaction.Select(style: new ol.style.Style(image: new ol.style.Circle(
radius: 5
fill: new ol.style.Fill(color: "#FF0000")
)))])
target: "map"
layers: [streetLayer, satLayer]
view: new ol.View(
center: ol.proj.transform([ start_lon, start_lat ], "EPSG:4326", "EPSG:3857")
zoom: start_zoom
)
)
map.addLayer featuresLayer
#Styles:
blueStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "blue")
stroke: new ol.style.Stroke(
color: "blue"
width: 3
)
)]
greenStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "#00FF00")
stroke: new ol.style.Stroke(
color: "#00FF00"
width: 3
)
)]
redStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "#FF0000")
stroke: new ol.style.Stroke(
color: "#FF0000"
width: 3
)
)]
#ol3 API docs has only has a title, but nothing in the body for styles
#pass for now
#there are actually functions to get and trace location of the mobile device (probably with GPS enabled) on map with setTracking(), and stuff like getAltitude(
#The following zoom slider actually works!
zoomSlider = new ol.control.ZoomSlider()
map.addControl zoomSlider
#-----------------
locChannel = pusher.subscribe( 'ChaseVehicle_' + thisVehicleIdent )
locChannel.bind 'new_point', (data) ->
point = [ data.message.lon, data.message.lat ]
if not point2
point2 = new ol.geom.Point( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
else
point2.setCoordinates( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
if not chaseFeature
chaseFeature = new ol.Feature
geometry: point2
name: 'Chase Vehicle'
chaseFeature.setStyle crossStyle
vectorSource.addFeature( chaseFeature )
if not deltaFeature
if point1
deltaLine = new ol.geom.LineString(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
deltaFeature = new ol.Feature( geometry: deltaLine )
deltaFeature.setStyle deltaLineStyle
vectorSource.addFeature( deltaFeature )
else
deltaLine.setCoordinates(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
return
console.log "platformTrackLines:", platformTrackLines
platformChannels = []
for id, index in platformIds
pIndex = index
platformChannels.push(
pusher.subscribe( 'Platform_' + id ).bind 'new_point', (data) ->
point = [ data.message.lon, data.message.lat ]
ident = data.message.ident
if not point1
point1 = new ol.geom.Point( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
else
point1.setCoordinates( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
if not payloadFeature
payloadFeature = new ol.Feature
geometry: point1
name: 'Platform'
payloadFeature.setStyle balloonStyle
vectorSource.addFeature( payloadFeature )
if not deltaFeature
if point2
deltaLine = new ol.geom.LineString(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
deltaFeature = new ol.Feature( geometry: deltaLine )
deltaFeature.setStyle deltaLineStyle
vectorSource.addFeature( deltaFeature )
platformTrackLines[pIndex][ident].appendCoordinate(
ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
return
)
isSim = false
for rcvr in beaconRcvrs
isSim = true if rcvr.driver[-3...] is 'sim'
isSim = true if locDev.driver[-3...] is 'sim'
$('#control #locSimStart').click ->
if isSim
#vehicleLayer.destroyFeatures()
thisVehicleLastTime = null
#thisVehiclePoint = new OpenLayers.Geometry.Point( null, null )
thisVehicleLocation = null
speedup = $("#simSpeed input[type='radio']:checked").val()
url = 'http://' + hostname + ':' + port + '/location_devices/start/' + locDev.id + '/' + speedup
$.post url, (data) ->
$('#control #locDriver').html(data)
$('#control #locSimEnd').click ->
url = 'http://' + hostname + ':' + port + '/location_devices/stop/' + locDev.id
$.post url, (data) ->
$('#control #locDriver').html(data)
$('#control #beaconSimStart').click ->
if isSim
platformLocations = ( [] for x in platformIds )
speedup = $("#simSpeed input[type='radio']:checked").val()
for rcvr in beaconRcvrs
url = 'http://' + hostname + ':' + port + '/beacon_receivers/start/' + rcvr.id + '/' + speedup
$.post url, (data) ->
$('#control #beaconDriver').html(data)
$('#control #beaconSimEnd').click ->
for rcvr in beaconRcvrs
url = 'http://' + hostname + ':' + port + '/beacon_receivers/stop/' + rcvr.id
$.post url, (data) ->
$('#control #beaconDriver').html(data)
| 202725 | hostname = window.location.hostname
port = window.location.port
image_port = $('#data').data( 'imagePort' )
image_root = "http://" + hostname + ":" + image_port + "/"
start_lat = 41.1000322 # degrees
start_lon = -87.9167100 # degrees
start_zoom = 11
max_zoom = 16
# Enable pusher logging - don't include this in production
#Pusher.log = (message) ->
# if window.console and window.console.log
# window.console.log message
# return
pusher = new Pusher('c33f8eaa482b900bae75',
wsHost: hostname
wsPort: '8080'
wssPort: '8080'
enabledTransports: [
'ws'
'flash'
]
)
strokeRed = new ol.style.Stroke( color: 'red', width: 2 )
strokeBlue = new ol.style.Stroke(color: 'blue', width: 1, lineDash: [5,20] )
strokeCyan = new ol.style.Stroke( color: 'cyan', width: 2 )
fillRed = new ol.style.Fill(color: 'red')
fillBlue = new ol.style.Fill(color: 'blue')
balloonStyle = new ol.style.Style
image: new ol.style.Icon
anchor: [ 0.5, 1 ]
anchorXUnits: 'fraction'
anchorYUnits: 'fraction'
opacity: 1.00
scale: 1.25
src: image_root + 'balloon.png'
crossStyle = new ol.style.Style
image: new ol.style.RegularShape
stroke: strokeRed
points: 4
radius: 10
radius2: 0
angle: 0
deltaLineStyle = new ol.style.Style
stroke: strokeBlue
trackLineStyle = new ol.style.Style
stroke: strokeCyan
# Server data
platforms = $('#data').data('platforms')
platformIds = ( x.id for x in platforms )
platformIdents = ( x.ident for x in platforms )
platformBeacons = $('#data').data('beacons')
urlPlatformTracks = '/platform_tracks/'
urlVehicleLoc = '/chase_vehicle_location/'
thisVehicleId = $('#data').data( 'thisVehicle' ).id
thisVehicleIdent = $('#data').data( 'thisVehicle' ).ident
locDev = $('#data').data( 'locDev' )
# platformTracks is an array of hashes, one for each platform,
# where key:values are track idents:point array associated with that platform.
# platformTrack lines is similarly structured where the values
# are ol3 LineString objects
platformTracks = []
platformTrackLines = []
vectorSource = new ol.source.Vector( features: [] )
for x in platformIds
platformTracks.push( {} )
platformTrackLines.push( {} )
for x, index in platformTracks
for y in platformBeacons[index]
x[y.ident] = []
platformTrackLines[index][y.ident] =
new ol.geom.LineString( [] )
vectorSource.addFeature(
new ol.Feature(
ident: y.ident,
geometry: platformTrackLines[index][y.ident]
style: trackLineStyle
)
)
beaconRcvrs = $('#data').data('beaconRcvrs')
# Vector layer objects
point1 = null
point2 = null
deltaLine = null
payloadFeature = null
chaseFeature = null
deltaFeature = null
featuresLayer = new ol.layer.Vector( source: vectorSource )
for platformId, index in platformIds
pIndex = index # 'index' doesn't pass into function properly
$.getJSON urlPlatformTracks + platformId, (data) ->
console.log "get tracks: data:", data
if data
for track, num in data
ident = track.ident
temp = track.points[ track.points.length - 1 ]
point1 = new ol.geom.Point(
ol.proj.transform( [ temp.lon, temp.lat ], "EPSG:4326", "EPSG:3857")
)
payloadFeature = new ol.Feature
geometry: point1
name: '<NAME>'
payloadFeature.setStyle balloonStyle
vectorSource.addFeature( payloadFeature )
platformTracks[pIndex][ident] = track
for point in track.points
platformTrackLines[pIndex][ident].appendCoordinate(
ol.proj.transform([ point.lon, point.lat ], "EPSG:4326", "EPSG:3857")
)
temp = point
console.log "platformTrackLines:", platformTrackLines
console.log "vectorSource features:", vectorSource.getFeatures()
satLayer = new ol.layer.Tile(
source: new ol.source.XYZ(
url: 'http://' + hostname + ':' + image_port + '/tiles/aerial/{z}/{x}/{y}.png'
layer: "sat"
maxZoom: max_zoom
)
visible: false
)
streetLayer = new ol.layer.Tile(
source: new ol.source.XYZ(
url: 'http://' + hostname + ':' + image_port + '/tiles/streets/{z}/{x}/{y}.png'
layer: "street"
maxZoom: max_zoom
)
visible: true
)
map = new ol.Map(
interactions: ol.interaction.defaults().extend([new ol.interaction.Select(style: new ol.style.Style(image: new ol.style.Circle(
radius: 5
fill: new ol.style.Fill(color: "#FF0000")
)))])
target: "map"
layers: [streetLayer, satLayer]
view: new ol.View(
center: ol.proj.transform([ start_lon, start_lat ], "EPSG:4326", "EPSG:3857")
zoom: start_zoom
)
)
map.addLayer featuresLayer
#Styles:
blueStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "blue")
stroke: new ol.style.Stroke(
color: "blue"
width: 3
)
)]
greenStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "#00FF00")
stroke: new ol.style.Stroke(
color: "#00FF00"
width: 3
)
)]
redStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "#FF0000")
stroke: new ol.style.Stroke(
color: "#FF0000"
width: 3
)
)]
#ol3 API docs has only has a title, but nothing in the body for styles
#pass for now
#there are actually functions to get and trace location of the mobile device (probably with GPS enabled) on map with setTracking(), and stuff like getAltitude(
#The following zoom slider actually works!
zoomSlider = new ol.control.ZoomSlider()
map.addControl zoomSlider
#-----------------
locChannel = pusher.subscribe( 'ChaseVehicle_' + thisVehicleIdent )
locChannel.bind 'new_point', (data) ->
point = [ data.message.lon, data.message.lat ]
if not point2
point2 = new ol.geom.Point( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
else
point2.setCoordinates( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
if not chaseFeature
chaseFeature = new ol.Feature
geometry: point2
name: 'Chase Vehicle'
chaseFeature.setStyle crossStyle
vectorSource.addFeature( chaseFeature )
if not deltaFeature
if point1
deltaLine = new ol.geom.LineString(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
deltaFeature = new ol.Feature( geometry: deltaLine )
deltaFeature.setStyle deltaLineStyle
vectorSource.addFeature( deltaFeature )
else
deltaLine.setCoordinates(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
return
console.log "platformTrackLines:", platformTrackLines
platformChannels = []
for id, index in platformIds
pIndex = index
platformChannels.push(
pusher.subscribe( 'Platform_' + id ).bind 'new_point', (data) ->
point = [ data.message.lon, data.message.lat ]
ident = data.message.ident
if not point1
point1 = new ol.geom.Point( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
else
point1.setCoordinates( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
if not payloadFeature
payloadFeature = new ol.Feature
geometry: point1
name: 'Platform'
payloadFeature.setStyle balloonStyle
vectorSource.addFeature( payloadFeature )
if not deltaFeature
if point2
deltaLine = new ol.geom.LineString(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
deltaFeature = new ol.Feature( geometry: deltaLine )
deltaFeature.setStyle deltaLineStyle
vectorSource.addFeature( deltaFeature )
platformTrackLines[pIndex][ident].appendCoordinate(
ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
return
)
isSim = false
for rcvr in beaconRcvrs
isSim = true if rcvr.driver[-3...] is 'sim'
isSim = true if locDev.driver[-3...] is 'sim'
$('#control #locSimStart').click ->
if isSim
#vehicleLayer.destroyFeatures()
thisVehicleLastTime = null
#thisVehiclePoint = new OpenLayers.Geometry.Point( null, null )
thisVehicleLocation = null
speedup = $("#simSpeed input[type='radio']:checked").val()
url = 'http://' + hostname + ':' + port + '/location_devices/start/' + locDev.id + '/' + speedup
$.post url, (data) ->
$('#control #locDriver').html(data)
$('#control #locSimEnd').click ->
url = 'http://' + hostname + ':' + port + '/location_devices/stop/' + locDev.id
$.post url, (data) ->
$('#control #locDriver').html(data)
$('#control #beaconSimStart').click ->
if isSim
platformLocations = ( [] for x in platformIds )
speedup = $("#simSpeed input[type='radio']:checked").val()
for rcvr in beaconRcvrs
url = 'http://' + hostname + ':' + port + '/beacon_receivers/start/' + rcvr.id + '/' + speedup
$.post url, (data) ->
$('#control #beaconDriver').html(data)
$('#control #beaconSimEnd').click ->
for rcvr in beaconRcvrs
url = 'http://' + hostname + ':' + port + '/beacon_receivers/stop/' + rcvr.id
$.post url, (data) ->
$('#control #beaconDriver').html(data)
| true | hostname = window.location.hostname
port = window.location.port
image_port = $('#data').data( 'imagePort' )
image_root = "http://" + hostname + ":" + image_port + "/"
start_lat = 41.1000322 # degrees
start_lon = -87.9167100 # degrees
start_zoom = 11
max_zoom = 16
# Enable pusher logging - don't include this in production
#Pusher.log = (message) ->
# if window.console and window.console.log
# window.console.log message
# return
pusher = new Pusher('c33f8eaa482b900bae75',
wsHost: hostname
wsPort: '8080'
wssPort: '8080'
enabledTransports: [
'ws'
'flash'
]
)
strokeRed = new ol.style.Stroke( color: 'red', width: 2 )
strokeBlue = new ol.style.Stroke(color: 'blue', width: 1, lineDash: [5,20] )
strokeCyan = new ol.style.Stroke( color: 'cyan', width: 2 )
fillRed = new ol.style.Fill(color: 'red')
fillBlue = new ol.style.Fill(color: 'blue')
balloonStyle = new ol.style.Style
image: new ol.style.Icon
anchor: [ 0.5, 1 ]
anchorXUnits: 'fraction'
anchorYUnits: 'fraction'
opacity: 1.00
scale: 1.25
src: image_root + 'balloon.png'
crossStyle = new ol.style.Style
image: new ol.style.RegularShape
stroke: strokeRed
points: 4
radius: 10
radius2: 0
angle: 0
deltaLineStyle = new ol.style.Style
stroke: strokeBlue
trackLineStyle = new ol.style.Style
stroke: strokeCyan
# Server data
platforms = $('#data').data('platforms')
platformIds = ( x.id for x in platforms )
platformIdents = ( x.ident for x in platforms )
platformBeacons = $('#data').data('beacons')
urlPlatformTracks = '/platform_tracks/'
urlVehicleLoc = '/chase_vehicle_location/'
thisVehicleId = $('#data').data( 'thisVehicle' ).id
thisVehicleIdent = $('#data').data( 'thisVehicle' ).ident
locDev = $('#data').data( 'locDev' )
# platformTracks is an array of hashes, one for each platform,
# where key:values are track idents:point array associated with that platform.
# platformTrack lines is similarly structured where the values
# are ol3 LineString objects
platformTracks = []
platformTrackLines = []
vectorSource = new ol.source.Vector( features: [] )
for x in platformIds
platformTracks.push( {} )
platformTrackLines.push( {} )
for x, index in platformTracks
for y in platformBeacons[index]
x[y.ident] = []
platformTrackLines[index][y.ident] =
new ol.geom.LineString( [] )
vectorSource.addFeature(
new ol.Feature(
ident: y.ident,
geometry: platformTrackLines[index][y.ident]
style: trackLineStyle
)
)
beaconRcvrs = $('#data').data('beaconRcvrs')
# Vector layer objects
point1 = null
point2 = null
deltaLine = null
payloadFeature = null
chaseFeature = null
deltaFeature = null
featuresLayer = new ol.layer.Vector( source: vectorSource )
for platformId, index in platformIds
pIndex = index # 'index' doesn't pass into function properly
$.getJSON urlPlatformTracks + platformId, (data) ->
console.log "get tracks: data:", data
if data
for track, num in data
ident = track.ident
temp = track.points[ track.points.length - 1 ]
point1 = new ol.geom.Point(
ol.proj.transform( [ temp.lon, temp.lat ], "EPSG:4326", "EPSG:3857")
)
payloadFeature = new ol.Feature
geometry: point1
name: 'PI:NAME:<NAME>END_PI'
payloadFeature.setStyle balloonStyle
vectorSource.addFeature( payloadFeature )
platformTracks[pIndex][ident] = track
for point in track.points
platformTrackLines[pIndex][ident].appendCoordinate(
ol.proj.transform([ point.lon, point.lat ], "EPSG:4326", "EPSG:3857")
)
temp = point
console.log "platformTrackLines:", platformTrackLines
console.log "vectorSource features:", vectorSource.getFeatures()
satLayer = new ol.layer.Tile(
source: new ol.source.XYZ(
url: 'http://' + hostname + ':' + image_port + '/tiles/aerial/{z}/{x}/{y}.png'
layer: "sat"
maxZoom: max_zoom
)
visible: false
)
streetLayer = new ol.layer.Tile(
source: new ol.source.XYZ(
url: 'http://' + hostname + ':' + image_port + '/tiles/streets/{z}/{x}/{y}.png'
layer: "street"
maxZoom: max_zoom
)
visible: true
)
map = new ol.Map(
interactions: ol.interaction.defaults().extend([new ol.interaction.Select(style: new ol.style.Style(image: new ol.style.Circle(
radius: 5
fill: new ol.style.Fill(color: "#FF0000")
)))])
target: "map"
layers: [streetLayer, satLayer]
view: new ol.View(
center: ol.proj.transform([ start_lon, start_lat ], "EPSG:4326", "EPSG:3857")
zoom: start_zoom
)
)
map.addLayer featuresLayer
#Styles:
blueStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "blue")
stroke: new ol.style.Stroke(
color: "blue"
width: 3
)
)]
greenStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "#00FF00")
stroke: new ol.style.Stroke(
color: "#00FF00"
width: 3
)
)]
redStyle = [new ol.style.Style(
fill: new ol.style.Fill(color: "#FF0000")
stroke: new ol.style.Stroke(
color: "#FF0000"
width: 3
)
)]
#ol3 API docs has only has a title, but nothing in the body for styles
#pass for now
#there are actually functions to get and trace location of the mobile device (probably with GPS enabled) on map with setTracking(), and stuff like getAltitude(
#The following zoom slider actually works!
zoomSlider = new ol.control.ZoomSlider()
map.addControl zoomSlider
#-----------------
locChannel = pusher.subscribe( 'ChaseVehicle_' + thisVehicleIdent )
locChannel.bind 'new_point', (data) ->
point = [ data.message.lon, data.message.lat ]
if not point2
point2 = new ol.geom.Point( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
else
point2.setCoordinates( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
if not chaseFeature
chaseFeature = new ol.Feature
geometry: point2
name: 'Chase Vehicle'
chaseFeature.setStyle crossStyle
vectorSource.addFeature( chaseFeature )
if not deltaFeature
if point1
deltaLine = new ol.geom.LineString(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
deltaFeature = new ol.Feature( geometry: deltaLine )
deltaFeature.setStyle deltaLineStyle
vectorSource.addFeature( deltaFeature )
else
deltaLine.setCoordinates(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
return
console.log "platformTrackLines:", platformTrackLines
platformChannels = []
for id, index in platformIds
pIndex = index
platformChannels.push(
pusher.subscribe( 'Platform_' + id ).bind 'new_point', (data) ->
point = [ data.message.lon, data.message.lat ]
ident = data.message.ident
if not point1
point1 = new ol.geom.Point( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
else
point1.setCoordinates( ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
if not payloadFeature
payloadFeature = new ol.Feature
geometry: point1
name: 'Platform'
payloadFeature.setStyle balloonStyle
vectorSource.addFeature( payloadFeature )
if not deltaFeature
if point2
deltaLine = new ol.geom.LineString(
[ point2.getCoordinates(), point1.getCoordinates() ]
)
deltaFeature = new ol.Feature( geometry: deltaLine )
deltaFeature.setStyle deltaLineStyle
vectorSource.addFeature( deltaFeature )
platformTrackLines[pIndex][ident].appendCoordinate(
ol.proj.transform( point, "EPSG:4326", "EPSG:3857") )
return
)
isSim = false
for rcvr in beaconRcvrs
isSim = true if rcvr.driver[-3...] is 'sim'
isSim = true if locDev.driver[-3...] is 'sim'
$('#control #locSimStart').click ->
if isSim
#vehicleLayer.destroyFeatures()
thisVehicleLastTime = null
#thisVehiclePoint = new OpenLayers.Geometry.Point( null, null )
thisVehicleLocation = null
speedup = $("#simSpeed input[type='radio']:checked").val()
url = 'http://' + hostname + ':' + port + '/location_devices/start/' + locDev.id + '/' + speedup
$.post url, (data) ->
$('#control #locDriver').html(data)
$('#control #locSimEnd').click ->
url = 'http://' + hostname + ':' + port + '/location_devices/stop/' + locDev.id
$.post url, (data) ->
$('#control #locDriver').html(data)
$('#control #beaconSimStart').click ->
if isSim
platformLocations = ( [] for x in platformIds )
speedup = $("#simSpeed input[type='radio']:checked").val()
for rcvr in beaconRcvrs
url = 'http://' + hostname + ':' + port + '/beacon_receivers/start/' + rcvr.id + '/' + speedup
$.post url, (data) ->
$('#control #beaconDriver').html(data)
$('#control #beaconSimEnd').click ->
for rcvr in beaconRcvrs
url = 'http://' + hostname + ':' + port + '/beacon_receivers/stop/' + rcvr.id
$.post url, (data) ->
$('#control #beaconDriver').html(data)
|
[
{
"context": "qiniu = require 'qiniu'\n\nqiniu.conf.ACCESS_KEY = 'BLESER2yUuhlxiHIwS6XXgS2gJarm9Ucoqbu5eE3'\nqiniu.conf.SECRET_KEY = 'CYWrIoTG-ZPzL-AFtwou6rF",
"end": 113,
"score": 0.9994227886199951,
"start": 73,
"tag": "KEY",
"value": "BLESER2yUuhlxiHIwS6XXgS2gJarm9Ucoqbu5eE3"
},
{
... | qiniu/qiniutest1.coffee | rankun203/ModernWebStudy | 0 | #!/usr/bin/env coffee
qiniu = require 'qiniu'
qiniu.conf.ACCESS_KEY = 'BLESER2yUuhlxiHIwS6XXgS2gJarm9Ucoqbu5eE3'
qiniu.conf.SECRET_KEY = 'CYWrIoTG-ZPzL-AFtwou6rFmxDqWMQ5NxOYhFFC9'
| 122669 | #!/usr/bin/env coffee
qiniu = require 'qiniu'
qiniu.conf.ACCESS_KEY = '<KEY>'
qiniu.conf.SECRET_KEY = '<KEY>'
| true | #!/usr/bin/env coffee
qiniu = require 'qiniu'
qiniu.conf.ACCESS_KEY = 'PI:KEY:<KEY>END_PI'
qiniu.conf.SECRET_KEY = 'PI:KEY:<KEY>END_PI'
|
[
{
"context": "base\"\n \t\"server\"\n ]\n\n homepage: \"\"\n\n author: \"Joseph Gentle <josephg@gmail.com>\"\n\n dependencies:\n # Trans",
"end": 435,
"score": 0.9998772740364075,
"start": 422,
"tag": "NAME",
"value": "Joseph Gentle"
},
{
"context": "r\"\n ]\n\n homepage: \... | package.coffee | daredevildave/ShareJS | 1 | # Package.json file in CoffeeScript
# Nicer to write and you can have comments
# Compile with "cake package"
module.exports =
name: "share"
# Change version with "cake [-V newversion] bump"
version: "0.6.1"
description: "A database for concurrent document editing"
keywords: [
"operational transformation"
"ot"
"concurrent"
"collaborative"
"database"
"server"
]
homepage: ""
author: "Joseph Gentle <josephg@gmail.com>"
dependencies:
# Transports
sockjs: ">= 0.3.1"
"socket.io": "~0.8"
"socket.io-client": "~0.8"
browserchannel: "*"
ws: "*"
# Server works with v1 or v2
connect: "<3.x"
# CouchDB Backend
request: ">= 2.1.1"
# Prevent upgrade failures like v1.3. Bump this when tested.
"coffee-script": "<1.6"
# Useragent hashing
hat: "*"
# Developer dependencies
devDependencies:
# Example server
express: "~ 3.x"
optimist: ">= 0.2.4"
# Tests
nodeunit: "*"
# Unixy shell stuff for Cakefile
shelljs: "*"
# Javascript compression
"uglify-js": "~1"
# SockJS
"websocket": "*"
engine: "node >= 0.6"
# Main file to execute
main: "index.js"
# Binaries to install
bin:
sharejs: "bin/sharejs"
"sharejs-exampleserver": "bin/exampleserver"
scripts:
build: "cake build"
test: "cake test"
prepublish: "cake webclient"
licenses: [
type: "BSD"
url: "http://www.freebsd.org/copyright/freebsd-license.html"
]
repository:
type: "git"
url: "http://github.com/josephg/sharejs.git"
| 105888 | # Package.json file in CoffeeScript
# Nicer to write and you can have comments
# Compile with "cake package"
module.exports =
name: "share"
# Change version with "cake [-V newversion] bump"
version: "0.6.1"
description: "A database for concurrent document editing"
keywords: [
"operational transformation"
"ot"
"concurrent"
"collaborative"
"database"
"server"
]
homepage: ""
author: "<NAME> <<EMAIL>>"
dependencies:
# Transports
sockjs: ">= 0.3.1"
"socket.io": "~0.8"
"socket.io-client": "~0.8"
browserchannel: "*"
ws: "*"
# Server works with v1 or v2
connect: "<3.x"
# CouchDB Backend
request: ">= 2.1.1"
# Prevent upgrade failures like v1.3. Bump this when tested.
"coffee-script": "<1.6"
# Useragent hashing
hat: "*"
# Developer dependencies
devDependencies:
# Example server
express: "~ 3.x"
optimist: ">= 0.2.4"
# Tests
nodeunit: "*"
# Unixy shell stuff for Cakefile
shelljs: "*"
# Javascript compression
"uglify-js": "~1"
# SockJS
"websocket": "*"
engine: "node >= 0.6"
# Main file to execute
main: "index.js"
# Binaries to install
bin:
sharejs: "bin/sharejs"
"sharejs-exampleserver": "bin/exampleserver"
scripts:
build: "cake build"
test: "cake test"
prepublish: "cake webclient"
licenses: [
type: "BSD"
url: "http://www.freebsd.org/copyright/freebsd-license.html"
]
repository:
type: "git"
url: "http://github.com/josephg/sharejs.git"
| true | # Package.json file in CoffeeScript
# Nicer to write and you can have comments
# Compile with "cake package"
module.exports =
name: "share"
# Change version with "cake [-V newversion] bump"
version: "0.6.1"
description: "A database for concurrent document editing"
keywords: [
"operational transformation"
"ot"
"concurrent"
"collaborative"
"database"
"server"
]
homepage: ""
author: "PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>"
dependencies:
# Transports
sockjs: ">= 0.3.1"
"socket.io": "~0.8"
"socket.io-client": "~0.8"
browserchannel: "*"
ws: "*"
# Server works with v1 or v2
connect: "<3.x"
# CouchDB Backend
request: ">= 2.1.1"
# Prevent upgrade failures like v1.3. Bump this when tested.
"coffee-script": "<1.6"
# Useragent hashing
hat: "*"
# Developer dependencies
devDependencies:
# Example server
express: "~ 3.x"
optimist: ">= 0.2.4"
# Tests
nodeunit: "*"
# Unixy shell stuff for Cakefile
shelljs: "*"
# Javascript compression
"uglify-js": "~1"
# SockJS
"websocket": "*"
engine: "node >= 0.6"
# Main file to execute
main: "index.js"
# Binaries to install
bin:
sharejs: "bin/sharejs"
"sharejs-exampleserver": "bin/exampleserver"
scripts:
build: "cake build"
test: "cake test"
prepublish: "cake webclient"
licenses: [
type: "BSD"
url: "http://www.freebsd.org/copyright/freebsd-license.html"
]
repository:
type: "git"
url: "http://github.com/josephg/sharejs.git"
|
[
{
"context": "=\n usernameField: 'email',\n passwordField: 'password'\n\n localStrategyCallback = (email, password, don",
"end": 702,
"score": 0.9991461634635925,
"start": 694,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "email: profile.emails[0].value\n us... | src/config/passport.coffee | thelucyclub/ticketman-tests | 81 |
mongoose = require('mongoose')
LocalStrategy = require('passport-local').Strategy
TwitterStrategy = require('passport-twitter').Strategy
FacebookStrategy = require('passport-facebook').Strategy
GitHubStrategy = require('passport-github').Strategy
GoogleStrategy = require('passport-google-oauth').OAuth2Strategy
User = mongoose.model('User')
module.exports = (passport, config)->
# require('./initializer')
# serialize sessions
passport.serializeUser (user, done)-> done(null, user.id)
passport.deserializeUser (id, done)->
User.findOne({ _id: id }, (err, user)-> done(err, user))
# use local strategy
localStrategyConfig =
usernameField: 'email',
passwordField: 'password'
localStrategyCallback = (email, password, done)->
User.findOne { email: email }, (err, user)->
return done(err) if (err)
return done(null, false, { message: 'Unknown user' }) unless user
return done(null, false, { message: 'Invalid password' }) unless user.authenticate(password)
return done(null, user)
return
passport.use new LocalStrategy localStrategyConfig, localStrategyCallback
# use twitter strategy
#twitterSetting =
#consumerKey: config.twitter.clientID
#consumerSecret: config.twitter.clientSecret
#callbackURL: config.twitter.callbackURL
twitterStrategyConfig =
consumerKey: config.twitter.clientID
consumerSecret: config.twitter.clientSecret
callbackURL: config.twitter.callbackURL
twitterStrategyCallback = (token, tokenSecret, profile, done)->
User.findOne {'twitter.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
username: profile.username
provider: 'twitter'
twitter: profile._json
user.save (err)->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new TwitterStrategy twitterStrategyConfig, twitterStrategyCallback
# use facebook strategy
facebookStrategyConfig =
clientID: config.facebook.clientID
clientSecret: config.facebook.clientSecret
callbackURL: config.facebook.callbackURL
facebookStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne {'facebook.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'facebook'
facebook: profile._json
user.save (err) ->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new FacebookStrategy facebookStrategyConfig, facebookStrategyCallback
# use github strategy
gitHubStrategyConfig =
clientID: config.github.clientID
clientSecret: config.github.clientSecret
callbackURL: config.github.callbackUR
gitHubStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne { 'github.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'github'
github: profile._json
user.save (err) ->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new GitHubStrategy gitHubStrategyConfig, gitHubStrategyCallback
# use google strategy
googleStrategyConfig =
clientID: config.google.clientID
clientSecret: config.google.clientSecret
callbackURL: config.google.callbackURL
googleStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne {'google.id': profile.id}, (err, user)->
unless user
# make a new google profile without key start with $
new_profile = {}
new_profile.id = profile.id
new_profile.displayName = profile.displayName
new_profile.emails = profile.emails
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'google'
google: new_profile._json
user.save (err)->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new GoogleStrategy googleStrategyConfig, googleStrategyCallback
| 624 |
mongoose = require('mongoose')
LocalStrategy = require('passport-local').Strategy
TwitterStrategy = require('passport-twitter').Strategy
FacebookStrategy = require('passport-facebook').Strategy
GitHubStrategy = require('passport-github').Strategy
GoogleStrategy = require('passport-google-oauth').OAuth2Strategy
User = mongoose.model('User')
module.exports = (passport, config)->
# require('./initializer')
# serialize sessions
passport.serializeUser (user, done)-> done(null, user.id)
passport.deserializeUser (id, done)->
User.findOne({ _id: id }, (err, user)-> done(err, user))
# use local strategy
localStrategyConfig =
usernameField: 'email',
passwordField: '<PASSWORD>'
localStrategyCallback = (email, password, done)->
User.findOne { email: email }, (err, user)->
return done(err) if (err)
return done(null, false, { message: 'Unknown user' }) unless user
return done(null, false, { message: 'Invalid password' }) unless user.authenticate(password)
return done(null, user)
return
passport.use new LocalStrategy localStrategyConfig, localStrategyCallback
# use twitter strategy
#twitterSetting =
#consumerKey: config.twitter.clientID
#consumerSecret: config.twitter.clientSecret
#callbackURL: config.twitter.callbackURL
twitterStrategyConfig =
consumerKey: config.twitter.clientID
consumerSecret: config.twitter.clientSecret
callbackURL: config.twitter.callbackURL
twitterStrategyCallback = (token, tokenSecret, profile, done)->
User.findOne {'twitter.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
username: profile.username
provider: 'twitter'
twitter: profile._json
user.save (err)->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new TwitterStrategy twitterStrategyConfig, twitterStrategyCallback
# use facebook strategy
facebookStrategyConfig =
clientID: config.facebook.clientID
clientSecret: config.facebook.clientSecret
callbackURL: config.facebook.callbackURL
facebookStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne {'facebook.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'facebook'
facebook: profile._json
user.save (err) ->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new FacebookStrategy facebookStrategyConfig, facebookStrategyCallback
# use github strategy
gitHubStrategyConfig =
clientID: config.github.clientID
clientSecret: config.github.clientSecret
callbackURL: config.github.callbackUR
gitHubStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne { 'github.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'github'
github: profile._json
user.save (err) ->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new GitHubStrategy gitHubStrategyConfig, gitHubStrategyCallback
# use google strategy
googleStrategyConfig =
clientID: config.google.clientID
clientSecret: config.google.clientSecret
callbackURL: config.google.callbackURL
googleStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne {'google.id': profile.id}, (err, user)->
unless user
# make a new google profile without key start with $
new_profile = {}
new_profile.id = profile.id
new_profile.displayName = profile.displayName
new_profile.emails = profile.emails
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'google'
google: new_profile._json
user.save (err)->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new GoogleStrategy googleStrategyConfig, googleStrategyCallback
| true |
mongoose = require('mongoose')
LocalStrategy = require('passport-local').Strategy
TwitterStrategy = require('passport-twitter').Strategy
FacebookStrategy = require('passport-facebook').Strategy
GitHubStrategy = require('passport-github').Strategy
GoogleStrategy = require('passport-google-oauth').OAuth2Strategy
User = mongoose.model('User')
module.exports = (passport, config)->
# require('./initializer')
# serialize sessions
passport.serializeUser (user, done)-> done(null, user.id)
passport.deserializeUser (id, done)->
User.findOne({ _id: id }, (err, user)-> done(err, user))
# use local strategy
localStrategyConfig =
usernameField: 'email',
passwordField: 'PI:PASSWORD:<PASSWORD>END_PI'
localStrategyCallback = (email, password, done)->
User.findOne { email: email }, (err, user)->
return done(err) if (err)
return done(null, false, { message: 'Unknown user' }) unless user
return done(null, false, { message: 'Invalid password' }) unless user.authenticate(password)
return done(null, user)
return
passport.use new LocalStrategy localStrategyConfig, localStrategyCallback
# use twitter strategy
#twitterSetting =
#consumerKey: config.twitter.clientID
#consumerSecret: config.twitter.clientSecret
#callbackURL: config.twitter.callbackURL
twitterStrategyConfig =
consumerKey: config.twitter.clientID
consumerSecret: config.twitter.clientSecret
callbackURL: config.twitter.callbackURL
twitterStrategyCallback = (token, tokenSecret, profile, done)->
User.findOne {'twitter.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
username: profile.username
provider: 'twitter'
twitter: profile._json
user.save (err)->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new TwitterStrategy twitterStrategyConfig, twitterStrategyCallback
# use facebook strategy
facebookStrategyConfig =
clientID: config.facebook.clientID
clientSecret: config.facebook.clientSecret
callbackURL: config.facebook.callbackURL
facebookStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne {'facebook.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'facebook'
facebook: profile._json
user.save (err) ->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new FacebookStrategy facebookStrategyConfig, facebookStrategyCallback
# use github strategy
gitHubStrategyConfig =
clientID: config.github.clientID
clientSecret: config.github.clientSecret
callbackURL: config.github.callbackUR
gitHubStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne { 'github.id': profile.id }, (err, user)->
return done(err) if err
unless user
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'github'
github: profile._json
user.save (err) ->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new GitHubStrategy gitHubStrategyConfig, gitHubStrategyCallback
# use google strategy
googleStrategyConfig =
clientID: config.google.clientID
clientSecret: config.google.clientSecret
callbackURL: config.google.callbackURL
googleStrategyCallback = (accessToken, refreshToken, profile, done)->
User.findOne {'google.id': profile.id}, (err, user)->
unless user
# make a new google profile without key start with $
new_profile = {}
new_profile.id = profile.id
new_profile.displayName = profile.displayName
new_profile.emails = profile.emails
user = new User
name: profile.displayName
email: profile.emails[0].value
username: profile.username
provider: 'google'
google: new_profile._json
user.save (err)->
if err? then console.log err
return done(err, user)
else
return done(err, user)
return
return
passport.use new GoogleStrategy googleStrategyConfig, googleStrategyCallback
|
[
{
"context": "ord = process.env.CALIPER_ADMIN_PASSWORD or randomstring.generate()\napi_key = process.env.CALIPER_API_KEY or random",
"end": 732,
"score": 0.5967064499855042,
"start": 717,
"tag": "PASSWORD",
"value": "string.generate"
},
{
"context": "message: \"Incorrect Username\" u... | src/app.coffee | ksmirenko/breakpad-server-openshift | 0 | path = require 'path'
express = require 'express'
expressSession = require 'express-session'
methodOverride = require 'method-override'
bodyParser = require 'body-parser'
passport = require 'passport'
passportLocal = require 'passport-local'
randomstring = require 'randomstring'
# local requirements
reader = require './reader'
saver = require './saver'
Database = require './database'
SymbolDatabase = require './symbol-database'
WebHook = require './webhook'
symbols = require './symbols'
GitHub = require 'github-releases'
# setting up security keys
secret_session_string = process.env.OPENSHIFT_SECRET_TOKEN or randomstring.generate()
secret_admin_password = process.env.CALIPER_ADMIN_PASSWORD or randomstring.generate()
api_key = process.env.CALIPER_API_KEY or randomstring.generate()
# basic auth
localStrategy = new passportLocal.Strategy (username, password, callback) ->
return callback null, false, message: "Incorrect Username" unless username is "admin"
return callback null, false, message: "Incorrect Password" unless username is "admin" and password is secret_admin_password
return callback null, user: "this is the user object"
passport.use localStrategy
passport.serializeUser (user, callback) ->
callback null, user
passport.deserializeUser (user, callback) ->
callback null, user
# simple function to check if user is logged in
isLoggedIn = (req, res, next) ->
return next() if req.isAuthenticated()
res.redirect("/login_page")
# server setup and startup
app = express()
db = new Database
symbDb = new SymbolDatabase
webhook = new WebHook(symbDb)
startServer = () ->
ipaddress = process.env.OPENSHIFT_NODEJS_IP ? "127.0.0.1"
port = process.env.OPENSHIFT_NODEJS_PORT ? 80
app.listen port, ipaddress
console.log "Caliper started!"
console.log "Listening on port #{port}"
console.log "Using admin password: #{secret_admin_password}"
console.log "Using api_key: #{api_key}"
console.log "Using provided OpenShift server token" if process.env.OPENSHIFT_SECRET_TOKEN
console.log "Using randomly generated server token: #{secret_session_string}" if not process.env.OPENSHIFT_SECRET_TOKEN
db.on 'load', ->
console.log "crash db ready"
symbDb.on 'load', ->
console.log "symb db ready"
startServer()
app.set 'views', path.resolve(__dirname, '..', 'views')
app.set 'view engine', 'jade'
app.use bodyParser.json()
app.use bodyParser.urlencoded(extended : true)
app.use methodOverride()
app.use (err, req, res, next) ->
res.send 500, "Bad things happened:<br/> #{err.message}"
app.on 'error', (err)->
console.log "Whoops #{err}"
# set up session variables; this is needed for AUTH
app.use expressSession(secret: secret_session_string, resave: true, saveUninitialized: true)
app.use passport.initialize()
app.use passport.session()
# server logic
app.post '/webhook', (req, res, next) ->
webhook.onRequest req
console.log 'webhook requested', req.body.repository.full_name
res.end()
app.get '/fetch', (req, res, next) ->
return next "Invalid key" if req.query.key != api_key
github = new GitHub
repo: req.query.project
token: process.env.OPENSHIFT_SECRET_TOKEN
processRel = (rel, rest) ->
console.log "Processing symbols from #{rel.name}..."
webhook.downloadAssets {'repository': {'full_name': req.query.project}, 'release': rel}, (err)->
if err?
console.log "Failed to process #{rel.name}: #{err}" if err?
return
console.log "Processing symbols from #{rel.name}: Done..."
return if rest.length == 0
rel = rest.pop()
processRel rel, rest
github.getReleases {}, (err, rels)->
return next err if err?
return next "Error fetching releases from #{req.query.project}" if !rels?
rel = rels.pop()
processRel rel, rels
res.end()
app.post '/crash_upload', (req, res, next) ->
console.log "Crash upload request received."
saver.saveRequest req, db, (err, filename) ->
return next err if err?
console.log 'saved', filename
res.send path.basename(filename)
res.end()
# handle the symbol upload post command.
app.post '/symbol_upload', (req, res, next) ->
console.log "Symbol upload request received."
return symbols.saveSymbols req, symbDb, (error, destination) ->
if error?
console.log "Error saving symbol!"
console.log error
return next error
console.log "Saved symbol: #{destination}"
return res.end()
root =
if process.env.CALIPER_SERVER_ROOT?
"#{process.env.CALIPER_SERVER_ROOT}/"
else
''
app.post "/login", passport.authenticate("local", successRedirect:"/#{root}", failureRedirect:"/login_page")
app.get "/login_page", (req, res, next) ->
res.render 'login', {menu:'login', title: 'Login'}
app.get "/#{root}", isLoggedIn, (req, res, next) ->
res.render 'index', {menu: 'crash', title: 'Crash Reports', records: db.getAllRecords()}
app.get "/#{root}view/:id", isLoggedIn, (req, res, next) ->
db.restoreRecord req.params.id, (err, record) ->
return next err if err?
reader.getStackTraceFromRecord record, (err, report) ->
return next err if err?
fields = record.fields
res.render 'view', {menu: 'crash', title: 'Crash Report', report, fields}
app.get "/#{root}symbol/", isLoggedIn, (req, res, next) ->
res.render 'symbols', {menu: 'symbol', title: 'Symbols', symbols: symbDb.getAllRecords()}
| 193754 | path = require 'path'
express = require 'express'
expressSession = require 'express-session'
methodOverride = require 'method-override'
bodyParser = require 'body-parser'
passport = require 'passport'
passportLocal = require 'passport-local'
randomstring = require 'randomstring'
# local requirements
reader = require './reader'
saver = require './saver'
Database = require './database'
SymbolDatabase = require './symbol-database'
WebHook = require './webhook'
symbols = require './symbols'
GitHub = require 'github-releases'
# setting up security keys
secret_session_string = process.env.OPENSHIFT_SECRET_TOKEN or randomstring.generate()
secret_admin_password = process.env.CALIPER_ADMIN_PASSWORD or random<PASSWORD>()
api_key = process.env.CALIPER_API_KEY or randomstring.generate()
# basic auth
localStrategy = new passportLocal.Strategy (username, password, callback) ->
return callback null, false, message: "Incorrect Username" unless username is "admin"
return callback null, false, message: "Incorrect Password" unless username is "admin" and password is <PASSWORD>
return callback null, user: "this is the user object"
passport.use localStrategy
passport.serializeUser (user, callback) ->
callback null, user
passport.deserializeUser (user, callback) ->
callback null, user
# simple function to check if user is logged in
isLoggedIn = (req, res, next) ->
return next() if req.isAuthenticated()
res.redirect("/login_page")
# server setup and startup
app = express()
db = new Database
symbDb = new SymbolDatabase
webhook = new WebHook(symbDb)
startServer = () ->
ipaddress = process.env.OPENSHIFT_NODEJS_IP ? "127.0.0.1"
port = process.env.OPENSHIFT_NODEJS_PORT ? 80
app.listen port, ipaddress
console.log "Caliper started!"
console.log "Listening on port #{port}"
console.log "Using admin password: #{secret_admin_password}"
console.log "Using api_key: #{api_key}"
console.log "Using provided OpenShift server token" if process.env.OPENSHIFT_SECRET_TOKEN
console.log "Using randomly generated server token: #{secret_session_string}" if not process.env.OPENSHIFT_SECRET_TOKEN
db.on 'load', ->
console.log "crash db ready"
symbDb.on 'load', ->
console.log "symb db ready"
startServer()
app.set 'views', path.resolve(__dirname, '..', 'views')
app.set 'view engine', 'jade'
app.use bodyParser.json()
app.use bodyParser.urlencoded(extended : true)
app.use methodOverride()
app.use (err, req, res, next) ->
res.send 500, "Bad things happened:<br/> #{err.message}"
app.on 'error', (err)->
console.log "Whoops #{err}"
# set up session variables; this is needed for AUTH
app.use expressSession(secret: secret_session_string, resave: true, saveUninitialized: true)
app.use passport.initialize()
app.use passport.session()
# server logic
app.post '/webhook', (req, res, next) ->
webhook.onRequest req
console.log 'webhook requested', req.body.repository.full_name
res.end()
app.get '/fetch', (req, res, next) ->
return next "Invalid key" if req.query.key != api_key
github = new GitHub
repo: req.query.project
token: process.env.OPENSHIFT_SECRET_TOKEN
processRel = (rel, rest) ->
console.log "Processing symbols from #{rel.name}..."
webhook.downloadAssets {'repository': {'full_name': req.query.project}, 'release': rel}, (err)->
if err?
console.log "Failed to process #{rel.name}: #{err}" if err?
return
console.log "Processing symbols from #{rel.name}: Done..."
return if rest.length == 0
rel = rest.pop()
processRel rel, rest
github.getReleases {}, (err, rels)->
return next err if err?
return next "Error fetching releases from #{req.query.project}" if !rels?
rel = rels.pop()
processRel rel, rels
res.end()
app.post '/crash_upload', (req, res, next) ->
console.log "Crash upload request received."
saver.saveRequest req, db, (err, filename) ->
return next err if err?
console.log 'saved', filename
res.send path.basename(filename)
res.end()
# handle the symbol upload post command.
app.post '/symbol_upload', (req, res, next) ->
console.log "Symbol upload request received."
return symbols.saveSymbols req, symbDb, (error, destination) ->
if error?
console.log "Error saving symbol!"
console.log error
return next error
console.log "Saved symbol: #{destination}"
return res.end()
root =
if process.env.CALIPER_SERVER_ROOT?
"#{process.env.CALIPER_SERVER_ROOT}/"
else
''
app.post "/login", passport.authenticate("local", successRedirect:"/#{root}", failureRedirect:"/login_page")
app.get "/login_page", (req, res, next) ->
res.render 'login', {menu:'login', title: 'Login'}
app.get "/#{root}", isLoggedIn, (req, res, next) ->
res.render 'index', {menu: 'crash', title: 'Crash Reports', records: db.getAllRecords()}
app.get "/#{root}view/:id", isLoggedIn, (req, res, next) ->
db.restoreRecord req.params.id, (err, record) ->
return next err if err?
reader.getStackTraceFromRecord record, (err, report) ->
return next err if err?
fields = record.fields
res.render 'view', {menu: 'crash', title: 'Crash Report', report, fields}
app.get "/#{root}symbol/", isLoggedIn, (req, res, next) ->
res.render 'symbols', {menu: 'symbol', title: 'Symbols', symbols: symbDb.getAllRecords()}
| true | path = require 'path'
express = require 'express'
expressSession = require 'express-session'
methodOverride = require 'method-override'
bodyParser = require 'body-parser'
passport = require 'passport'
passportLocal = require 'passport-local'
randomstring = require 'randomstring'
# local requirements
reader = require './reader'
saver = require './saver'
Database = require './database'
SymbolDatabase = require './symbol-database'
WebHook = require './webhook'
symbols = require './symbols'
GitHub = require 'github-releases'
# setting up security keys
secret_session_string = process.env.OPENSHIFT_SECRET_TOKEN or randomstring.generate()
secret_admin_password = process.env.CALIPER_ADMIN_PASSWORD or randomPI:PASSWORD:<PASSWORD>END_PI()
api_key = process.env.CALIPER_API_KEY or randomstring.generate()
# basic auth
localStrategy = new passportLocal.Strategy (username, password, callback) ->
return callback null, false, message: "Incorrect Username" unless username is "admin"
return callback null, false, message: "Incorrect Password" unless username is "admin" and password is PI:PASSWORD:<PASSWORD>END_PI
return callback null, user: "this is the user object"
passport.use localStrategy
passport.serializeUser (user, callback) ->
callback null, user
passport.deserializeUser (user, callback) ->
callback null, user
# simple function to check if user is logged in
isLoggedIn = (req, res, next) ->
return next() if req.isAuthenticated()
res.redirect("/login_page")
# server setup and startup
app = express()
db = new Database
symbDb = new SymbolDatabase
webhook = new WebHook(symbDb)
startServer = () ->
ipaddress = process.env.OPENSHIFT_NODEJS_IP ? "127.0.0.1"
port = process.env.OPENSHIFT_NODEJS_PORT ? 80
app.listen port, ipaddress
console.log "Caliper started!"
console.log "Listening on port #{port}"
console.log "Using admin password: #{secret_admin_password}"
console.log "Using api_key: #{api_key}"
console.log "Using provided OpenShift server token" if process.env.OPENSHIFT_SECRET_TOKEN
console.log "Using randomly generated server token: #{secret_session_string}" if not process.env.OPENSHIFT_SECRET_TOKEN
db.on 'load', ->
console.log "crash db ready"
symbDb.on 'load', ->
console.log "symb db ready"
startServer()
app.set 'views', path.resolve(__dirname, '..', 'views')
app.set 'view engine', 'jade'
app.use bodyParser.json()
app.use bodyParser.urlencoded(extended : true)
app.use methodOverride()
app.use (err, req, res, next) ->
res.send 500, "Bad things happened:<br/> #{err.message}"
app.on 'error', (err)->
console.log "Whoops #{err}"
# set up session variables; this is needed for AUTH
app.use expressSession(secret: secret_session_string, resave: true, saveUninitialized: true)
app.use passport.initialize()
app.use passport.session()
# server logic
app.post '/webhook', (req, res, next) ->
webhook.onRequest req
console.log 'webhook requested', req.body.repository.full_name
res.end()
app.get '/fetch', (req, res, next) ->
return next "Invalid key" if req.query.key != api_key
github = new GitHub
repo: req.query.project
token: process.env.OPENSHIFT_SECRET_TOKEN
processRel = (rel, rest) ->
console.log "Processing symbols from #{rel.name}..."
webhook.downloadAssets {'repository': {'full_name': req.query.project}, 'release': rel}, (err)->
if err?
console.log "Failed to process #{rel.name}: #{err}" if err?
return
console.log "Processing symbols from #{rel.name}: Done..."
return if rest.length == 0
rel = rest.pop()
processRel rel, rest
github.getReleases {}, (err, rels)->
return next err if err?
return next "Error fetching releases from #{req.query.project}" if !rels?
rel = rels.pop()
processRel rel, rels
res.end()
app.post '/crash_upload', (req, res, next) ->
console.log "Crash upload request received."
saver.saveRequest req, db, (err, filename) ->
return next err if err?
console.log 'saved', filename
res.send path.basename(filename)
res.end()
# handle the symbol upload post command.
app.post '/symbol_upload', (req, res, next) ->
console.log "Symbol upload request received."
return symbols.saveSymbols req, symbDb, (error, destination) ->
if error?
console.log "Error saving symbol!"
console.log error
return next error
console.log "Saved symbol: #{destination}"
return res.end()
root =
if process.env.CALIPER_SERVER_ROOT?
"#{process.env.CALIPER_SERVER_ROOT}/"
else
''
app.post "/login", passport.authenticate("local", successRedirect:"/#{root}", failureRedirect:"/login_page")
app.get "/login_page", (req, res, next) ->
res.render 'login', {menu:'login', title: 'Login'}
app.get "/#{root}", isLoggedIn, (req, res, next) ->
res.render 'index', {menu: 'crash', title: 'Crash Reports', records: db.getAllRecords()}
app.get "/#{root}view/:id", isLoggedIn, (req, res, next) ->
db.restoreRecord req.params.id, (err, record) ->
return next err if err?
reader.getStackTraceFromRecord record, (err, report) ->
return next err if err?
fields = record.fields
res.render 'view', {menu: 'crash', title: 'Crash Report', report, fields}
app.get "/#{root}symbol/", isLoggedIn, (req, res, next) ->
res.render 'symbols', {menu: 'symbol', title: 'Symbols', symbols: symbDb.getAllRecords()}
|
[
{
"context": "ne) ->\n @uuid = uuid.v1()\n @rawToken = 'akuma'\n @token = bcrypt.hashSync(@rawToken, 8)\n ",
"end": 1383,
"score": 0.9242126941680908,
"start": 1378,
"tag": "PASSWORD",
"value": "akuma"
},
{
"context": "getDevice.yields null\n @sut @uuid, {name... | test/lib/updateDevice-spec.coffee | Christopheraburns/projecttelemetry | 0 | _ = require 'lodash'
uuid = require 'node-uuid'
bcrypt = require 'bcrypt'
TestDatabase = require '../test-database'
describe 'Update Device', ->
beforeEach (done) ->
@sut = require '../../lib/updateDevice'
@getDevice = sinon.stub()
@clearCache = sinon.stub()
@getGeo = sinon.stub()
@getGeo.yields null, null
TestDatabase.open (error, database) =>
@database = database
@devices = @database.devices
@dependencies = {database: @database, getDevice: @getDevice, clearCache: @clearCache, getGeo: @getGeo}
done error
afterEach ->
@database.close?()
it 'should be a function', ->
expect(@sut).to.be.a 'function'
describe 'when called with nothing', ->
beforeEach (done) ->
@getDevice.yields null
storeError = (@error, device) => done()
@sut null, null, storeError, @dependencies
it 'should call its callback with an error', ->
expect(@error).to.exist
describe 'when called with a uuid that doesn\'t exist', ->
beforeEach (done) ->
@getDevice.yields null
storeError = (@error, device) => done()
@sut 'not-real', null, storeError, @dependencies
it 'should call its callback with an error', ->
expect(@error).to.exist
describe 'when a device exists', ->
beforeEach (done) ->
@uuid = uuid.v1()
@rawToken = 'akuma'
@token = bcrypt.hashSync(@rawToken, 8)
@originalDevice = {uuid: @uuid, name: 'hadoken', token : @token, online :true}
@devices.insert _.clone(@originalDevice), done
describe 'when updateDevice is called', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'shakunetsu'}, done, @dependencies
it 'should call clearCache with uuid', ->
expect(@clearCache).to.be.calledWith @uuid
describe 'when update is called with that uuid and different name', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'shakunetsu'}, done, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) ->
done error if error?
expect(device.name).to.equal 'shakunetsu'
done()
describe 'when update is called with that uuid and the same name', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'hadoken'}, done, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) ->
done error if error?
expect(device.name).to.equal 'hadoken'
done()
describe 'when update is called with that uuid and the same name', ->
beforeEach (done) ->
@getDevice.yields null, {foo: 'bar'}
storeDevice = (@error, @device) => done()
@sut @uuid, {name: 'hadoken'}, storeDevice, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
return done error if error?
expect(device.name).to.equal 'hadoken'
done()
it 'should call the callback with the updated device', ->
expect(@device.foo).to.equal 'bar'
describe 'when updated with a token', ->
beforeEach (done) ->
@getDevice.yields null
@device = { name: 'ken masters', token : 'masters ken' }
@sut @uuid, @device, done, @dependencies
it 'should update a hash of the token', (done) ->
@database.devices.findOne { uuid: @uuid }, (error, storeDevice) =>
return done error if error?
expect(bcrypt.compareSync(@device.token, storeDevice.token)).to.be.true
done()
describe 'when updated without a token', ->
beforeEach (done) ->
@getDevice.yields null
@device = { name: 'shin akuma' }
@sut @uuid, @device, done, @dependencies
it 'should not update the token', (done) ->
@database.devices.findOne { uuid: @uuid }, (error, storeDevice) =>
return done error if error?
expect(bcrypt.compareSync(@rawToken, storeDevice.token)).to.be.true
done()
describe 'when updated with an online of "false"', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: 'false'}, done, @dependencies
it 'should create a device with an online of true', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.online).to.be.true
done()
describe 'when updated with an online of false', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: false}, done, @dependencies
it 'should create a device with an online of true', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.online).to.be.false
done()
describe 'when updated without a timestamp', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {}, done, @dependencies
it 'should create a timestamp', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.timestamp).to.exist
done()
describe 'when updated without geo', ->
beforeEach (done) ->
@getDevice.yields null
@getGeo.yields null, {foo: 'bar'}
@sut @uuid, {ipAddress: '127.0.0.1'}, done, @dependencies
it 'should add a geo', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.geo).to.exist
done()
describe 'when a device exists with online = true', ->
beforeEach (done) ->
@uuid = uuid.v1()
oneHour = 60 * 60 * 1000
@date = new Date(Date.now() - oneHour)
@devices.insert {uuid: @uuid, online: true, onlineSince: @date}, done
describe 'when called without online', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {}, done, @dependencies
it 'should not modify online', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
done error if error?
expect(device.online).to.be.true
done()
it 'should not modify onlineSince', (done) ->
@devices.findOne { uuid: @uuid }, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
describe 'when updated with online = true', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, { online : true }, done, @dependencies
it 'should not modify online', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
describe 'when called with an online of false', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: false}, done, @dependencies
it 'should not modify onlineSince', (done) ->
@devices.findOne { uuid: @uuid }, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
| 92072 | _ = require 'lodash'
uuid = require 'node-uuid'
bcrypt = require 'bcrypt'
TestDatabase = require '../test-database'
describe 'Update Device', ->
beforeEach (done) ->
@sut = require '../../lib/updateDevice'
@getDevice = sinon.stub()
@clearCache = sinon.stub()
@getGeo = sinon.stub()
@getGeo.yields null, null
TestDatabase.open (error, database) =>
@database = database
@devices = @database.devices
@dependencies = {database: @database, getDevice: @getDevice, clearCache: @clearCache, getGeo: @getGeo}
done error
afterEach ->
@database.close?()
it 'should be a function', ->
expect(@sut).to.be.a 'function'
describe 'when called with nothing', ->
beforeEach (done) ->
@getDevice.yields null
storeError = (@error, device) => done()
@sut null, null, storeError, @dependencies
it 'should call its callback with an error', ->
expect(@error).to.exist
describe 'when called with a uuid that doesn\'t exist', ->
beforeEach (done) ->
@getDevice.yields null
storeError = (@error, device) => done()
@sut 'not-real', null, storeError, @dependencies
it 'should call its callback with an error', ->
expect(@error).to.exist
describe 'when a device exists', ->
beforeEach (done) ->
@uuid = uuid.v1()
@rawToken = '<PASSWORD>'
@token = bcrypt.hashSync(@rawToken, 8)
@originalDevice = {uuid: @uuid, name: 'hadoken', token : @token, online :true}
@devices.insert _.clone(@originalDevice), done
describe 'when updateDevice is called', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'shakunetsu'}, done, @dependencies
it 'should call clearCache with uuid', ->
expect(@clearCache).to.be.calledWith @uuid
describe 'when update is called with that uuid and different name', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: '<NAME>'}, done, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) ->
done error if error?
expect(device.name).to.equal 'sh<NAME>'
done()
describe 'when update is called with that uuid and the same name', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: '<NAME>'}, done, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) ->
done error if error?
expect(device.name).to.equal 'hadoken'
done()
describe 'when update is called with that uuid and the same name', ->
beforeEach (done) ->
@getDevice.yields null, {foo: 'bar'}
storeDevice = (@error, @device) => done()
@sut @uuid, {name: '<NAME>'}, storeDevice, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
return done error if error?
expect(device.name).to.equal 'hadoken'
done()
it 'should call the callback with the updated device', ->
expect(@device.foo).to.equal 'bar'
describe 'when updated with a token', ->
beforeEach (done) ->
@getDevice.yields null
@device = { name: '<NAME>', token : '<PASSWORD>' }
@sut @uuid, @device, done, @dependencies
it 'should update a hash of the token', (done) ->
@database.devices.findOne { uuid: @uuid }, (error, storeDevice) =>
return done error if error?
expect(bcrypt.compareSync(@device.token, storeDevice.token)).to.be.true
done()
describe 'when updated without a token', ->
beforeEach (done) ->
@getDevice.yields null
@device = { name: '<NAME>' }
@sut @uuid, @device, done, @dependencies
it 'should not update the token', (done) ->
@database.devices.findOne { uuid: @uuid }, (error, storeDevice) =>
return done error if error?
expect(bcrypt.compareSync(@rawToken, storeDevice.token)).to.be.true
done()
describe 'when updated with an online of "false"', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: 'false'}, done, @dependencies
it 'should create a device with an online of true', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.online).to.be.true
done()
describe 'when updated with an online of false', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: false}, done, @dependencies
it 'should create a device with an online of true', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.online).to.be.false
done()
describe 'when updated without a timestamp', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {}, done, @dependencies
it 'should create a timestamp', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.timestamp).to.exist
done()
describe 'when updated without geo', ->
beforeEach (done) ->
@getDevice.yields null
@getGeo.yields null, {foo: 'bar'}
@sut @uuid, {ipAddress: '127.0.0.1'}, done, @dependencies
it 'should add a geo', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.geo).to.exist
done()
describe 'when a device exists with online = true', ->
beforeEach (done) ->
@uuid = uuid.v1()
oneHour = 60 * 60 * 1000
@date = new Date(Date.now() - oneHour)
@devices.insert {uuid: @uuid, online: true, onlineSince: @date}, done
describe 'when called without online', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {}, done, @dependencies
it 'should not modify online', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
done error if error?
expect(device.online).to.be.true
done()
it 'should not modify onlineSince', (done) ->
@devices.findOne { uuid: @uuid }, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
describe 'when updated with online = true', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, { online : true }, done, @dependencies
it 'should not modify online', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
describe 'when called with an online of false', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: false}, done, @dependencies
it 'should not modify onlineSince', (done) ->
@devices.findOne { uuid: @uuid }, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
| true | _ = require 'lodash'
uuid = require 'node-uuid'
bcrypt = require 'bcrypt'
TestDatabase = require '../test-database'
describe 'Update Device', ->
beforeEach (done) ->
@sut = require '../../lib/updateDevice'
@getDevice = sinon.stub()
@clearCache = sinon.stub()
@getGeo = sinon.stub()
@getGeo.yields null, null
TestDatabase.open (error, database) =>
@database = database
@devices = @database.devices
@dependencies = {database: @database, getDevice: @getDevice, clearCache: @clearCache, getGeo: @getGeo}
done error
afterEach ->
@database.close?()
it 'should be a function', ->
expect(@sut).to.be.a 'function'
describe 'when called with nothing', ->
beforeEach (done) ->
@getDevice.yields null
storeError = (@error, device) => done()
@sut null, null, storeError, @dependencies
it 'should call its callback with an error', ->
expect(@error).to.exist
describe 'when called with a uuid that doesn\'t exist', ->
beforeEach (done) ->
@getDevice.yields null
storeError = (@error, device) => done()
@sut 'not-real', null, storeError, @dependencies
it 'should call its callback with an error', ->
expect(@error).to.exist
describe 'when a device exists', ->
beforeEach (done) ->
@uuid = uuid.v1()
@rawToken = 'PI:PASSWORD:<PASSWORD>END_PI'
@token = bcrypt.hashSync(@rawToken, 8)
@originalDevice = {uuid: @uuid, name: 'hadoken', token : @token, online :true}
@devices.insert _.clone(@originalDevice), done
describe 'when updateDevice is called', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'shakunetsu'}, done, @dependencies
it 'should call clearCache with uuid', ->
expect(@clearCache).to.be.calledWith @uuid
describe 'when update is called with that uuid and different name', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'PI:NAME:<NAME>END_PI'}, done, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) ->
done error if error?
expect(device.name).to.equal 'shPI:NAME:<NAME>END_PI'
done()
describe 'when update is called with that uuid and the same name', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {name: 'PI:NAME:<NAME>END_PI'}, done, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) ->
done error if error?
expect(device.name).to.equal 'hadoken'
done()
describe 'when update is called with that uuid and the same name', ->
beforeEach (done) ->
@getDevice.yields null, {foo: 'bar'}
storeDevice = (@error, @device) => done()
@sut @uuid, {name: 'PI:NAME:<NAME>END_PI'}, storeDevice, @dependencies
it 'should update the record', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
return done error if error?
expect(device.name).to.equal 'hadoken'
done()
it 'should call the callback with the updated device', ->
expect(@device.foo).to.equal 'bar'
describe 'when updated with a token', ->
beforeEach (done) ->
@getDevice.yields null
@device = { name: 'PI:NAME:<NAME>END_PI', token : 'PI:PASSWORD:<PASSWORD>END_PI' }
@sut @uuid, @device, done, @dependencies
it 'should update a hash of the token', (done) ->
@database.devices.findOne { uuid: @uuid }, (error, storeDevice) =>
return done error if error?
expect(bcrypt.compareSync(@device.token, storeDevice.token)).to.be.true
done()
describe 'when updated without a token', ->
beforeEach (done) ->
@getDevice.yields null
@device = { name: 'PI:NAME:<NAME>END_PI' }
@sut @uuid, @device, done, @dependencies
it 'should not update the token', (done) ->
@database.devices.findOne { uuid: @uuid }, (error, storeDevice) =>
return done error if error?
expect(bcrypt.compareSync(@rawToken, storeDevice.token)).to.be.true
done()
describe 'when updated with an online of "false"', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: 'false'}, done, @dependencies
it 'should create a device with an online of true', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.online).to.be.true
done()
describe 'when updated with an online of false', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: false}, done, @dependencies
it 'should create a device with an online of true', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.online).to.be.false
done()
describe 'when updated without a timestamp', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {}, done, @dependencies
it 'should create a timestamp', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.timestamp).to.exist
done()
describe 'when updated without geo', ->
beforeEach (done) ->
@getDevice.yields null
@getGeo.yields null, {foo: 'bar'}
@sut @uuid, {ipAddress: '127.0.0.1'}, done, @dependencies
it 'should add a geo', (done) ->
@devices.findOne {}, (error, device) =>
expect(device.geo).to.exist
done()
describe 'when a device exists with online = true', ->
beforeEach (done) ->
@uuid = uuid.v1()
oneHour = 60 * 60 * 1000
@date = new Date(Date.now() - oneHour)
@devices.insert {uuid: @uuid, online: true, onlineSince: @date}, done
describe 'when called without online', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {}, done, @dependencies
it 'should not modify online', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
done error if error?
expect(device.online).to.be.true
done()
it 'should not modify onlineSince', (done) ->
@devices.findOne { uuid: @uuid }, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
describe 'when updated with online = true', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, { online : true }, done, @dependencies
it 'should not modify online', (done) ->
@devices.findOne {uuid: @uuid}, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
describe 'when called with an online of false', ->
beforeEach (done) ->
@getDevice.yields null
@sut @uuid, {online: false}, done, @dependencies
it 'should not modify onlineSince', (done) ->
@devices.findOne { uuid: @uuid }, (error, device) =>
done error if error?
onlineSinceTime = device.onlineSince.getTime()
expect(onlineSinceTime).to.equal @date.getTime()
done()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.