entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "key: 'comment'\n\npatterns: [\n\n # Matches block comments\n #\n #",
"end": 13,
"score": 0.9832372069358826,
"start": 6,
"tag": "KEY",
"value": "comment"
}
] | grammars/repositories/partials/comment-grammar.cson | andrewcarver/atom-language-asciidoc | 45 | key: 'comment'
patterns: [
# Matches block comments
#
# Examples
#
# ////
# A multi-line comment.
#
# Notice it's a delimited block.
# ////
#
name: 'comment.block.asciidoc'
begin: '^(/{4,})$'
patterns: [
include: '#inlines'
]
end: '^\\1$'
,
# Matches single line comments
#
# // A single-line comment
#
name: 'comment.inline.asciidoc'
match: '^\/{2}([^\/].*)?$'
]
| 195680 | key: '<KEY>'
patterns: [
# Matches block comments
#
# Examples
#
# ////
# A multi-line comment.
#
# Notice it's a delimited block.
# ////
#
name: 'comment.block.asciidoc'
begin: '^(/{4,})$'
patterns: [
include: '#inlines'
]
end: '^\\1$'
,
# Matches single line comments
#
# // A single-line comment
#
name: 'comment.inline.asciidoc'
match: '^\/{2}([^\/].*)?$'
]
| true | key: 'PI:KEY:<KEY>END_PI'
patterns: [
# Matches block comments
#
# Examples
#
# ////
# A multi-line comment.
#
# Notice it's a delimited block.
# ////
#
name: 'comment.block.asciidoc'
begin: '^(/{4,})$'
patterns: [
include: '#inlines'
]
end: '^\\1$'
,
# Matches single line comments
#
# // A single-line comment
#
name: 'comment.inline.asciidoc'
match: '^\/{2}([^\/].*)?$'
]
|
[
{
"context": "- List people who are off tomorrow\n#\n# Author:\n# Jeff Sault (jeff.sault@smartpipesolutions.com)\n#\nmodule.expo",
"end": 693,
"score": 0.9996628761291504,
"start": 683,
"tag": "NAME",
"value": "Jeff Sault"
},
{
"context": " who are off tomorrow\n#\n# Author:\n# Jeff Sault (jeff.sault@smartpipesolutions.com)\n#\nmodule.exports = (robot) ->\n robot.respond /(",
"end": 728,
"score": 0.9999359846115112,
"start": 695,
"tag": "EMAIL",
"value": "jeff.sault@smartpipesolutions.com"
}
] | src/officecalendar.coffee | potchin/hubot-holidaycalendar | 0 | # Description:
# Bot which can load your office holiday/vacation days from an iCal calendar URL.
# Easily find out who is on vacation. Accepts human-readable relative days (like tomorrow, next monday etc)
#
# Configuration:
# HUBOT_HOLIDAYCALENDAR_ICAL_URL - url of ical file containing your holidays
#
# Commands:
# hubot "holidays|off work|vacation" - List people who are off today
# hubot "holidays tomorrow" - List people who are off tomorrow
# hubot "holidays friday" - List people who are off friday this week
# hubot "holidays next friday" - List people who are friday next week
# hubot "holidays next week" - List people who are off tomorrow
#
# Author:
# Jeff Sault (jeff.sault@smartpipesolutions.com)
#
module.exports = (robot) ->
robot.respond /(?:holidays|off work|vacation)(.*)/i, (msg) ->
fuzzywhentolook = "today"
whentolook = new Date()
if msg.match[1]
fuzzywhentolook = msg.match[1].trim()
daysofweek = ["sunday","monday","tuesday","wednesday","thursday","friday","saturday"]
#this function takes a day of week and optional number of weeks
#it returns a date object for the result. defaults to next week
Date::getNextWeekDay = (d, w=1) ->
if d
next = this
next.setDate @getDate() - @getDay() + (w*7) + d
return next
return
if fuzzywhentolook == 'today'
#not really needed
whentolook = new Date()
else if fuzzywhentolook == 'tomorrow'
whentolook = whentolook.setDate(whentolook.getDate() + 1);
else if fuzzywhentolook == 'yesterday'
whentolook = whentolook.setDate(whentolook.getDate() - 1);
else if fuzzywhentolook == 'this week'
now = new Date()
whentolook = now.getNextWeekDay(1, w=-1)
now = new Date()
whentolookend = now.getNextWeekDay(5, w=0)
else if fuzzywhentolook == 'last week'
now = new Date()
whentolook = now.getNextWeekDay(1, w=-2)
now = new Date()
whentolookend = now.getNextWeekDay(5, w=-1)
else if fuzzywhentolook == 'next week'
now = new Date()
whentolook = now.getNextWeekDay(1)
now = new Date()
whentolookend = now.getNextWeekDay(5)
else if fuzzywhentolook.substring(0, 5) == "next "
wantedday = fuzzywhentolook.replace /next /, ""
wanteddaynum = daysofweek.indexOf(wantedday.toLowerCase());
now = new Date()
whentolook = now.getNextWeekDay(wanteddaynum)
else if fuzzywhentolook.toLowerCase() in daysofweek
wanteddaynum = daysofweek.indexOf(fuzzywhentolook.toLowerCase());
now = new Date()
whentolook = now.getNextWeekDay(wanteddaynum,0)
else
msg.send "No idea when #{fuzzywhentolook} is. Sorry!"
return
if not whentolookend?
console.log "Setting end date same as start date"
whentolookend = whentolook
console.log "Looking for events from #{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()}"
ical = require('ical')
verbiage = []
ical.fromURL process.env.HUBOT_HOLIDAYCALENDAR_ICAL_URL, {}, (err, data) ->
for k, v of data
if data.hasOwnProperty(k)
eventlist = data[k]
for cal of data
for _, event of data[cal]
if event.type == 'VEVENT'
eventstartdate = new Date(event.start)
eventenddate = new Date(event.end)
if (eventstartdate < whentolook and eventenddate > whentolookend) \
or (eventstartdate > whentolook and eventstartdate < whentolookend) \
or (eventenddate > whentolook and eventenddate < whentolookend)
verbiage.push "\t\t#{event.summary} from #{new Date(event.start).toDateString()}, returning on #{new Date(event.end).toDateString()}"
if verbiage.length > 0
msg.send "Holidays #{fuzzywhentolook} (#{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()})\n"+verbiage.join("\n")
else
msg.send "No holidays found for #{fuzzywhentolook} (#{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()})"
# This is what an event looks like...
# 'c6381ae2-42b1-42ac-aeb1-0271f8197665':
# { type: 'VEVENT',
# params: [],
# description: 'Created By : someone',
# end: { 2017-06-09T23:00:00.000Z tz: undefined },
# dtstamp: '20170724T141449Z',
# start: { 2017-06-08T23:00:00.000Z tz: undefined },
# sequence: '0',
# summary: 'bah - Holiday',
# uid: 'c6381ae2-42b1-42ac-aeb1-0271f8197665' }, | 31482 | # Description:
# Bot which can load your office holiday/vacation days from an iCal calendar URL.
# Easily find out who is on vacation. Accepts human-readable relative days (like tomorrow, next monday etc)
#
# Configuration:
# HUBOT_HOLIDAYCALENDAR_ICAL_URL - url of ical file containing your holidays
#
# Commands:
# hubot "holidays|off work|vacation" - List people who are off today
# hubot "holidays tomorrow" - List people who are off tomorrow
# hubot "holidays friday" - List people who are off friday this week
# hubot "holidays next friday" - List people who are friday next week
# hubot "holidays next week" - List people who are off tomorrow
#
# Author:
# <NAME> (<EMAIL>)
#
module.exports = (robot) ->
robot.respond /(?:holidays|off work|vacation)(.*)/i, (msg) ->
fuzzywhentolook = "today"
whentolook = new Date()
if msg.match[1]
fuzzywhentolook = msg.match[1].trim()
daysofweek = ["sunday","monday","tuesday","wednesday","thursday","friday","saturday"]
#this function takes a day of week and optional number of weeks
#it returns a date object for the result. defaults to next week
Date::getNextWeekDay = (d, w=1) ->
if d
next = this
next.setDate @getDate() - @getDay() + (w*7) + d
return next
return
if fuzzywhentolook == 'today'
#not really needed
whentolook = new Date()
else if fuzzywhentolook == 'tomorrow'
whentolook = whentolook.setDate(whentolook.getDate() + 1);
else if fuzzywhentolook == 'yesterday'
whentolook = whentolook.setDate(whentolook.getDate() - 1);
else if fuzzywhentolook == 'this week'
now = new Date()
whentolook = now.getNextWeekDay(1, w=-1)
now = new Date()
whentolookend = now.getNextWeekDay(5, w=0)
else if fuzzywhentolook == 'last week'
now = new Date()
whentolook = now.getNextWeekDay(1, w=-2)
now = new Date()
whentolookend = now.getNextWeekDay(5, w=-1)
else if fuzzywhentolook == 'next week'
now = new Date()
whentolook = now.getNextWeekDay(1)
now = new Date()
whentolookend = now.getNextWeekDay(5)
else if fuzzywhentolook.substring(0, 5) == "next "
wantedday = fuzzywhentolook.replace /next /, ""
wanteddaynum = daysofweek.indexOf(wantedday.toLowerCase());
now = new Date()
whentolook = now.getNextWeekDay(wanteddaynum)
else if fuzzywhentolook.toLowerCase() in daysofweek
wanteddaynum = daysofweek.indexOf(fuzzywhentolook.toLowerCase());
now = new Date()
whentolook = now.getNextWeekDay(wanteddaynum,0)
else
msg.send "No idea when #{fuzzywhentolook} is. Sorry!"
return
if not whentolookend?
console.log "Setting end date same as start date"
whentolookend = whentolook
console.log "Looking for events from #{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()}"
ical = require('ical')
verbiage = []
ical.fromURL process.env.HUBOT_HOLIDAYCALENDAR_ICAL_URL, {}, (err, data) ->
for k, v of data
if data.hasOwnProperty(k)
eventlist = data[k]
for cal of data
for _, event of data[cal]
if event.type == 'VEVENT'
eventstartdate = new Date(event.start)
eventenddate = new Date(event.end)
if (eventstartdate < whentolook and eventenddate > whentolookend) \
or (eventstartdate > whentolook and eventstartdate < whentolookend) \
or (eventenddate > whentolook and eventenddate < whentolookend)
verbiage.push "\t\t#{event.summary} from #{new Date(event.start).toDateString()}, returning on #{new Date(event.end).toDateString()}"
if verbiage.length > 0
msg.send "Holidays #{fuzzywhentolook} (#{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()})\n"+verbiage.join("\n")
else
msg.send "No holidays found for #{fuzzywhentolook} (#{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()})"
# This is what an event looks like...
# 'c6381ae2-42b1-42ac-aeb1-0271f8197665':
# { type: 'VEVENT',
# params: [],
# description: 'Created By : someone',
# end: { 2017-06-09T23:00:00.000Z tz: undefined },
# dtstamp: '20170724T141449Z',
# start: { 2017-06-08T23:00:00.000Z tz: undefined },
# sequence: '0',
# summary: 'bah - Holiday',
# uid: 'c6381ae2-42b1-42ac-aeb1-0271f8197665' }, | true | # Description:
# Bot which can load your office holiday/vacation days from an iCal calendar URL.
# Easily find out who is on vacation. Accepts human-readable relative days (like tomorrow, next monday etc)
#
# Configuration:
# HUBOT_HOLIDAYCALENDAR_ICAL_URL - url of ical file containing your holidays
#
# Commands:
# hubot "holidays|off work|vacation" - List people who are off today
# hubot "holidays tomorrow" - List people who are off tomorrow
# hubot "holidays friday" - List people who are off friday this week
# hubot "holidays next friday" - List people who are friday next week
# hubot "holidays next week" - List people who are off tomorrow
#
# Author:
# PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
#
module.exports = (robot) ->
robot.respond /(?:holidays|off work|vacation)(.*)/i, (msg) ->
fuzzywhentolook = "today"
whentolook = new Date()
if msg.match[1]
fuzzywhentolook = msg.match[1].trim()
daysofweek = ["sunday","monday","tuesday","wednesday","thursday","friday","saturday"]
#this function takes a day of week and optional number of weeks
#it returns a date object for the result. defaults to next week
Date::getNextWeekDay = (d, w=1) ->
if d
next = this
next.setDate @getDate() - @getDay() + (w*7) + d
return next
return
if fuzzywhentolook == 'today'
#not really needed
whentolook = new Date()
else if fuzzywhentolook == 'tomorrow'
whentolook = whentolook.setDate(whentolook.getDate() + 1);
else if fuzzywhentolook == 'yesterday'
whentolook = whentolook.setDate(whentolook.getDate() - 1);
else if fuzzywhentolook == 'this week'
now = new Date()
whentolook = now.getNextWeekDay(1, w=-1)
now = new Date()
whentolookend = now.getNextWeekDay(5, w=0)
else if fuzzywhentolook == 'last week'
now = new Date()
whentolook = now.getNextWeekDay(1, w=-2)
now = new Date()
whentolookend = now.getNextWeekDay(5, w=-1)
else if fuzzywhentolook == 'next week'
now = new Date()
whentolook = now.getNextWeekDay(1)
now = new Date()
whentolookend = now.getNextWeekDay(5)
else if fuzzywhentolook.substring(0, 5) == "next "
wantedday = fuzzywhentolook.replace /next /, ""
wanteddaynum = daysofweek.indexOf(wantedday.toLowerCase());
now = new Date()
whentolook = now.getNextWeekDay(wanteddaynum)
else if fuzzywhentolook.toLowerCase() in daysofweek
wanteddaynum = daysofweek.indexOf(fuzzywhentolook.toLowerCase());
now = new Date()
whentolook = now.getNextWeekDay(wanteddaynum,0)
else
msg.send "No idea when #{fuzzywhentolook} is. Sorry!"
return
if not whentolookend?
console.log "Setting end date same as start date"
whentolookend = whentolook
console.log "Looking for events from #{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()}"
ical = require('ical')
verbiage = []
ical.fromURL process.env.HUBOT_HOLIDAYCALENDAR_ICAL_URL, {}, (err, data) ->
for k, v of data
if data.hasOwnProperty(k)
eventlist = data[k]
for cal of data
for _, event of data[cal]
if event.type == 'VEVENT'
eventstartdate = new Date(event.start)
eventenddate = new Date(event.end)
if (eventstartdate < whentolook and eventenddate > whentolookend) \
or (eventstartdate > whentolook and eventstartdate < whentolookend) \
or (eventenddate > whentolook and eventenddate < whentolookend)
verbiage.push "\t\t#{event.summary} from #{new Date(event.start).toDateString()}, returning on #{new Date(event.end).toDateString()}"
if verbiage.length > 0
msg.send "Holidays #{fuzzywhentolook} (#{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()})\n"+verbiage.join("\n")
else
msg.send "No holidays found for #{fuzzywhentolook} (#{new Date(whentolook).toDateString()} to #{new Date(whentolookend).toDateString()})"
# This is what an event looks like...
# 'c6381ae2-42b1-42ac-aeb1-0271f8197665':
# { type: 'VEVENT',
# params: [],
# description: 'Created By : someone',
# end: { 2017-06-09T23:00:00.000Z tz: undefined },
# dtstamp: '20170724T141449Z',
# start: { 2017-06-08T23:00:00.000Z tz: undefined },
# sequence: '0',
# summary: 'bah - Holiday',
# uid: 'c6381ae2-42b1-42ac-aeb1-0271f8197665' }, |
[
{
"context": "e of query\n field = schema[key]\n key = \"_id\" if key == \"id\"\n if _.isRegExp(value)\n ",
"end": 1917,
"score": 0.5522149205207825,
"start": 1915,
"tag": "KEY",
"value": "\"_"
}
] | src/tower/server/store/mongodb/serialization.coffee | vjsingh/tower | 1 | # @module
Tower.Store.MongoDB.Serialization =
serializeModel: (attributes) ->
return attributes if attributes instanceof Tower.Model
klass = Tower.constant(@className)
attributes.id ||= attributes._id
delete attributes._id
model = new klass(attributes)
model
generateId: ->
new @constructor.database.bson_serializer.ObjectID()
# tags: [1, 2] == $set: tags: [1, 2]
# createdAt: Date == $set: createdAt: mongodate
serializeAttributesForUpdate: (attributes) ->
result = {}
schema = @schema()
for key, value of attributes
continue if key == "id" && value == undefined || value == null
operator = @constructor.atomicModifiers[key]
if operator
key = operator
result[key] ||= {}
for _key, _value of value
result[key][_key] = @encode schema[_key], _value, operator
else
result["$set"] ||= {}
result["$set"][key] = @encode schema[key], value
result
serializeAttributesForCreate: (record) ->
result = {}
schema = @schema()
attributes = @deserializeModel(record)
for key, value of attributes
continue if key == "id" && value == undefined || value == null
realKey = if key == "id" then "_id" else key
operator = @constructor.atomicModifiers[key]
unless operator
result[realKey] = @encode schema[key], value
result
deserializeAttributes: (attributes) ->
schema = @schema()
for key, value of attributes
field = schema[key]
attributes[key] = @decode field, value if field
attributes
# title: "santa"
# createdAt: "<": new Date()
serializeConditions: (criteria) ->
schema = @schema()
result = {}
query = @deserializeModel(criteria.conditions())
for key, value of query
field = schema[key]
key = "_id" if key == "id"
if _.isRegExp(value)
result[key] = value
else if _.isBaseObject(value)
result[key] = {}
for _key, _value of value
operator = @constructor.queryOperators[_key]
if operator == "$eq"
result[key] = @encode field, _value, _key
else
_key = operator if operator
if _key == "$in"
_value = _.castArray(_value)
result[key][_key] = @encode field, _value, _key
else
result[key] = @encode field, value
result
# batchSize
# hint
# explain
serializeOptions: (criteria) ->
limit = criteria.get('limit')
sort = criteria.get('order')
offset = criteria.get('offset')
options = {}
options.limit = limit if limit
if sort.length
options.sort = _.map sort, (set) ->
[
if set[0] == "id" then "_id" else set[0],
if set[1] == 'asc' then 1 else -1
]
options.skip = offset if offset
options
encode: (field, value, operation) ->
return value unless field
method = @["encode#{field.encodingType}"]
value = method.call(@, value, operation) if method
value = [value] if operation == "$in" && !_.isArray(value)
value
decode: (field, value, operation) ->
return value unless field
method = @["decode#{field.type}"]
value = method.call(@, value) if method
value
encodeString: (value) ->
if value then value.toString() else value
encodeOrder: (value) ->
encodeDate: (value) ->
# if Tower.Support.config.useTimeZone
time = require('moment')
switch typeof(value)
when "string"
time.parse(value)
when Date
time.local(value.year, value.month, value.day, value.hour, value.min, value.sec)
when Array
time.local(value)
else
value
encodeGeo: (value) ->
# [lng, lat]
[value.lng, value.lat].reverse()
decodeGeo: (value) ->
return value unless value
lat: value[1], lng: value[0]
decodeDate: (value) ->
value
encodeBoolean: (value) ->
if @constructor.booleans.hasOwnProperty(value)
@constructor.booleans[value]
else
throw new Error("#{value.toString()} is not a boolean")
encodeArray: (value, operation) ->
unless operation || value == null || _.isArray(value)
throw new Error("Value is not Array")
value
encodeFloat: (value) ->
return null if _.isBlank(value)
try
parseFloat(value)
catch error
value
encodeInteger: (value) ->
return null if !value && value != 0
if value.toString().match(/(^[-+]?[0-9]+$)|(\.0+)$/) then parseInt(value) else parseFloat(value)
encodeLocalized: (value) ->
object = {}
object[I18n.locale] = value.toString()
decodeLocalized: (value) ->
value[I18n.locale]
encodeNilClass: (value) ->
null
decodeNilClass: (value) ->
null
# to mongo
encodeId: (value) ->
return value unless value
if _.isArray(value)
result = []
for item, i in value
try
id = @_encodeId(item)
result[i] = id
catch error
id
return result
else
@_encodeId(value)
# @todo need to figure out a better way to do this.
_encodeId: (value) ->
return value if typeof value == 'number'
try
@constructor.database.bson_serializer.ObjectID(value)
catch error
value
# from mongo
decodeId: (value) ->
value.toString()
module.exports = Tower.Store.MongoDB.Serialization
| 154163 | # @module
Tower.Store.MongoDB.Serialization =
serializeModel: (attributes) ->
return attributes if attributes instanceof Tower.Model
klass = Tower.constant(@className)
attributes.id ||= attributes._id
delete attributes._id
model = new klass(attributes)
model
generateId: ->
new @constructor.database.bson_serializer.ObjectID()
# tags: [1, 2] == $set: tags: [1, 2]
# createdAt: Date == $set: createdAt: mongodate
serializeAttributesForUpdate: (attributes) ->
result = {}
schema = @schema()
for key, value of attributes
continue if key == "id" && value == undefined || value == null
operator = @constructor.atomicModifiers[key]
if operator
key = operator
result[key] ||= {}
for _key, _value of value
result[key][_key] = @encode schema[_key], _value, operator
else
result["$set"] ||= {}
result["$set"][key] = @encode schema[key], value
result
serializeAttributesForCreate: (record) ->
result = {}
schema = @schema()
attributes = @deserializeModel(record)
for key, value of attributes
continue if key == "id" && value == undefined || value == null
realKey = if key == "id" then "_id" else key
operator = @constructor.atomicModifiers[key]
unless operator
result[realKey] = @encode schema[key], value
result
deserializeAttributes: (attributes) ->
schema = @schema()
for key, value of attributes
field = schema[key]
attributes[key] = @decode field, value if field
attributes
# title: "santa"
# createdAt: "<": new Date()
serializeConditions: (criteria) ->
schema = @schema()
result = {}
query = @deserializeModel(criteria.conditions())
for key, value of query
field = schema[key]
key = <KEY>id" if key == "id"
if _.isRegExp(value)
result[key] = value
else if _.isBaseObject(value)
result[key] = {}
for _key, _value of value
operator = @constructor.queryOperators[_key]
if operator == "$eq"
result[key] = @encode field, _value, _key
else
_key = operator if operator
if _key == "$in"
_value = _.castArray(_value)
result[key][_key] = @encode field, _value, _key
else
result[key] = @encode field, value
result
# batchSize
# hint
# explain
serializeOptions: (criteria) ->
limit = criteria.get('limit')
sort = criteria.get('order')
offset = criteria.get('offset')
options = {}
options.limit = limit if limit
if sort.length
options.sort = _.map sort, (set) ->
[
if set[0] == "id" then "_id" else set[0],
if set[1] == 'asc' then 1 else -1
]
options.skip = offset if offset
options
encode: (field, value, operation) ->
return value unless field
method = @["encode#{field.encodingType}"]
value = method.call(@, value, operation) if method
value = [value] if operation == "$in" && !_.isArray(value)
value
decode: (field, value, operation) ->
return value unless field
method = @["decode#{field.type}"]
value = method.call(@, value) if method
value
encodeString: (value) ->
if value then value.toString() else value
encodeOrder: (value) ->
encodeDate: (value) ->
# if Tower.Support.config.useTimeZone
time = require('moment')
switch typeof(value)
when "string"
time.parse(value)
when Date
time.local(value.year, value.month, value.day, value.hour, value.min, value.sec)
when Array
time.local(value)
else
value
encodeGeo: (value) ->
# [lng, lat]
[value.lng, value.lat].reverse()
decodeGeo: (value) ->
return value unless value
lat: value[1], lng: value[0]
decodeDate: (value) ->
value
encodeBoolean: (value) ->
if @constructor.booleans.hasOwnProperty(value)
@constructor.booleans[value]
else
throw new Error("#{value.toString()} is not a boolean")
encodeArray: (value, operation) ->
unless operation || value == null || _.isArray(value)
throw new Error("Value is not Array")
value
encodeFloat: (value) ->
return null if _.isBlank(value)
try
parseFloat(value)
catch error
value
encodeInteger: (value) ->
return null if !value && value != 0
if value.toString().match(/(^[-+]?[0-9]+$)|(\.0+)$/) then parseInt(value) else parseFloat(value)
encodeLocalized: (value) ->
object = {}
object[I18n.locale] = value.toString()
decodeLocalized: (value) ->
value[I18n.locale]
encodeNilClass: (value) ->
null
decodeNilClass: (value) ->
null
# to mongo
encodeId: (value) ->
return value unless value
if _.isArray(value)
result = []
for item, i in value
try
id = @_encodeId(item)
result[i] = id
catch error
id
return result
else
@_encodeId(value)
# @todo need to figure out a better way to do this.
_encodeId: (value) ->
return value if typeof value == 'number'
try
@constructor.database.bson_serializer.ObjectID(value)
catch error
value
# from mongo
decodeId: (value) ->
value.toString()
module.exports = Tower.Store.MongoDB.Serialization
| true | # @module
Tower.Store.MongoDB.Serialization =
serializeModel: (attributes) ->
return attributes if attributes instanceof Tower.Model
klass = Tower.constant(@className)
attributes.id ||= attributes._id
delete attributes._id
model = new klass(attributes)
model
generateId: ->
new @constructor.database.bson_serializer.ObjectID()
# tags: [1, 2] == $set: tags: [1, 2]
# createdAt: Date == $set: createdAt: mongodate
serializeAttributesForUpdate: (attributes) ->
result = {}
schema = @schema()
for key, value of attributes
continue if key == "id" && value == undefined || value == null
operator = @constructor.atomicModifiers[key]
if operator
key = operator
result[key] ||= {}
for _key, _value of value
result[key][_key] = @encode schema[_key], _value, operator
else
result["$set"] ||= {}
result["$set"][key] = @encode schema[key], value
result
serializeAttributesForCreate: (record) ->
result = {}
schema = @schema()
attributes = @deserializeModel(record)
for key, value of attributes
continue if key == "id" && value == undefined || value == null
realKey = if key == "id" then "_id" else key
operator = @constructor.atomicModifiers[key]
unless operator
result[realKey] = @encode schema[key], value
result
deserializeAttributes: (attributes) ->
schema = @schema()
for key, value of attributes
field = schema[key]
attributes[key] = @decode field, value if field
attributes
# title: "santa"
# createdAt: "<": new Date()
serializeConditions: (criteria) ->
schema = @schema()
result = {}
query = @deserializeModel(criteria.conditions())
for key, value of query
field = schema[key]
key = PI:KEY:<KEY>END_PIid" if key == "id"
if _.isRegExp(value)
result[key] = value
else if _.isBaseObject(value)
result[key] = {}
for _key, _value of value
operator = @constructor.queryOperators[_key]
if operator == "$eq"
result[key] = @encode field, _value, _key
else
_key = operator if operator
if _key == "$in"
_value = _.castArray(_value)
result[key][_key] = @encode field, _value, _key
else
result[key] = @encode field, value
result
# batchSize
# hint
# explain
serializeOptions: (criteria) ->
limit = criteria.get('limit')
sort = criteria.get('order')
offset = criteria.get('offset')
options = {}
options.limit = limit if limit
if sort.length
options.sort = _.map sort, (set) ->
[
if set[0] == "id" then "_id" else set[0],
if set[1] == 'asc' then 1 else -1
]
options.skip = offset if offset
options
encode: (field, value, operation) ->
return value unless field
method = @["encode#{field.encodingType}"]
value = method.call(@, value, operation) if method
value = [value] if operation == "$in" && !_.isArray(value)
value
decode: (field, value, operation) ->
return value unless field
method = @["decode#{field.type}"]
value = method.call(@, value) if method
value
encodeString: (value) ->
if value then value.toString() else value
encodeOrder: (value) ->
encodeDate: (value) ->
# if Tower.Support.config.useTimeZone
time = require('moment')
switch typeof(value)
when "string"
time.parse(value)
when Date
time.local(value.year, value.month, value.day, value.hour, value.min, value.sec)
when Array
time.local(value)
else
value
encodeGeo: (value) ->
# [lng, lat]
[value.lng, value.lat].reverse()
decodeGeo: (value) ->
return value unless value
lat: value[1], lng: value[0]
decodeDate: (value) ->
value
encodeBoolean: (value) ->
if @constructor.booleans.hasOwnProperty(value)
@constructor.booleans[value]
else
throw new Error("#{value.toString()} is not a boolean")
encodeArray: (value, operation) ->
unless operation || value == null || _.isArray(value)
throw new Error("Value is not Array")
value
encodeFloat: (value) ->
return null if _.isBlank(value)
try
parseFloat(value)
catch error
value
encodeInteger: (value) ->
return null if !value && value != 0
if value.toString().match(/(^[-+]?[0-9]+$)|(\.0+)$/) then parseInt(value) else parseFloat(value)
encodeLocalized: (value) ->
object = {}
object[I18n.locale] = value.toString()
decodeLocalized: (value) ->
value[I18n.locale]
encodeNilClass: (value) ->
null
decodeNilClass: (value) ->
null
# to mongo
encodeId: (value) ->
return value unless value
if _.isArray(value)
result = []
for item, i in value
try
id = @_encodeId(item)
result[i] = id
catch error
id
return result
else
@_encodeId(value)
# @todo need to figure out a better way to do this.
_encodeId: (value) ->
return value if typeof value == 'number'
try
@constructor.database.bson_serializer.ObjectID(value)
catch error
value
# from mongo
decodeId: (value) ->
value.toString()
module.exports = Tower.Store.MongoDB.Serialization
|
[
{
"context": "=================================\n# Copyright 2014 Hatio, Lab.\n# Licensed under The MIT License\n# http",
"end": 63,
"score": 0.6974778771400452,
"start": 62,
"tag": "NAME",
"value": "H"
}
] | src/property.coffee | heartyoh/dou | 1 | # ==========================================
# Copyright 2014 Hatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./utils'
'./compose'
'./event'
], (utils, compose, event) ->
"use strict"
set = (key, val)->
return this if !key
if arguments.length > 1 && typeof(arguments[0]) is 'string'
attrs = {}
attrs[key] = val
return @set attrs
@attrs || (@attrs = {})
attrs = key
after = {}
before = {}
(before[key] = val) for own key, val of @attrs
utils.push @attrs, attrs
for own key, val of @attrs
if val isnt before[key]
after[key] = val
else
delete before[key]
if Object.keys(after).length isnt 0
@trigger 'change', this, before, after
return this
get = (attr) ->
if @attrs then @attrs[attr] else undefined
getAll = ->
if @attrs then utils.clone(@attrs) else {}
->
# plugin dependency : event.withEvent
compose.mixin this, event.withEvent
@set = set
@get = get
@getAll = getAll
| 168072 | # ==========================================
# Copyright 2014 <NAME>atio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./utils'
'./compose'
'./event'
], (utils, compose, event) ->
"use strict"
set = (key, val)->
return this if !key
if arguments.length > 1 && typeof(arguments[0]) is 'string'
attrs = {}
attrs[key] = val
return @set attrs
@attrs || (@attrs = {})
attrs = key
after = {}
before = {}
(before[key] = val) for own key, val of @attrs
utils.push @attrs, attrs
for own key, val of @attrs
if val isnt before[key]
after[key] = val
else
delete before[key]
if Object.keys(after).length isnt 0
@trigger 'change', this, before, after
return this
get = (attr) ->
if @attrs then @attrs[attr] else undefined
getAll = ->
if @attrs then utils.clone(@attrs) else {}
->
# plugin dependency : event.withEvent
compose.mixin this, event.withEvent
@set = set
@get = get
@getAll = getAll
| true | # ==========================================
# Copyright 2014 PI:NAME:<NAME>END_PIatio, Lab.
# Licensed under The MIT License
# http://opensource.org/licenses/MIT
# ==========================================
define [
'./utils'
'./compose'
'./event'
], (utils, compose, event) ->
"use strict"
set = (key, val)->
return this if !key
if arguments.length > 1 && typeof(arguments[0]) is 'string'
attrs = {}
attrs[key] = val
return @set attrs
@attrs || (@attrs = {})
attrs = key
after = {}
before = {}
(before[key] = val) for own key, val of @attrs
utils.push @attrs, attrs
for own key, val of @attrs
if val isnt before[key]
after[key] = val
else
delete before[key]
if Object.keys(after).length isnt 0
@trigger 'change', this, before, after
return this
get = (attr) ->
if @attrs then @attrs[attr] else undefined
getAll = ->
if @attrs then utils.clone(@attrs) else {}
->
# plugin dependency : event.withEvent
compose.mixin this, event.withEvent
@set = set
@get = get
@getAll = getAll
|
[
{
"context": "####################################\n\nVaultKey = \"hubot-deploy-github-secret\"\n\nclass ApiTokenVerifier\n constructor: (token) -",
"end": 282,
"score": 0.9978340268135071,
"start": 256,
"tag": "KEY",
"value": "hubot-deploy-github-secret"
},
{
"context": "constructor: () ->\n @subnets = [ new Address4(\"192.30.252.0/22\"), new Address4(\"140.82.112.0/20\") ]\n\n ipIsVa",
"end": 771,
"score": 0.9996808171272278,
"start": 759,
"tag": "IP_ADDRESS",
"value": "192.30.252.0"
},
{
"context": " [ new Address4(\"192.30.252.0/22\"), new Address4(\"140.82.112.0/20\") ]\n\n ipIsValid: (ipAddress) ->\n address =",
"end": 804,
"score": 0.9996947646141052,
"start": 792,
"tag": "IP_ADDRESS",
"value": "140.82.112.0"
}
] | src/models/verifiers.coffee | travis-ci/hubot-deploy | 2 | Path = require "path"
Octonode = require "octonode"
Address4 = require("ip-address").Address4
ApiConfig = require(Path.join(__dirname, "api_config")).ApiConfig
###########################################################################
VaultKey = "hubot-deploy-github-secret"
class ApiTokenVerifier
constructor: (token) ->
@token = token?.trim()
@config = new ApiConfig(@token, null)
@api = Octonode.client(@config.token, {hostname: @config.hostname})
valid: (cb) ->
@api.get "/user", (err, status, data, headers) ->
scopes = headers?['x-oauth-scopes']
if scopes?.indexOf('repo') >= 0
cb(true)
else
cb(false)
class GitHubWebHookIpVerifier
constructor: () ->
@subnets = [ new Address4("192.30.252.0/22"), new Address4("140.82.112.0/20") ]
ipIsValid: (ipAddress) ->
address = new Address4("#{ipAddress}/24")
for subnet in @subnets
return true if address.isInSubnet(subnet)
false
exports.VaultKey = VaultKey
exports.ApiTokenVerifier = ApiTokenVerifier
exports.GitHubWebHookIpVerifier = GitHubWebHookIpVerifier
| 177600 | Path = require "path"
Octonode = require "octonode"
Address4 = require("ip-address").Address4
ApiConfig = require(Path.join(__dirname, "api_config")).ApiConfig
###########################################################################
VaultKey = "<KEY>"
class ApiTokenVerifier
constructor: (token) ->
@token = token?.trim()
@config = new ApiConfig(@token, null)
@api = Octonode.client(@config.token, {hostname: @config.hostname})
valid: (cb) ->
@api.get "/user", (err, status, data, headers) ->
scopes = headers?['x-oauth-scopes']
if scopes?.indexOf('repo') >= 0
cb(true)
else
cb(false)
class GitHubWebHookIpVerifier
constructor: () ->
@subnets = [ new Address4("172.16.58.3/22"), new Address4("192.168.127.12/20") ]
ipIsValid: (ipAddress) ->
address = new Address4("#{ipAddress}/24")
for subnet in @subnets
return true if address.isInSubnet(subnet)
false
exports.VaultKey = VaultKey
exports.ApiTokenVerifier = ApiTokenVerifier
exports.GitHubWebHookIpVerifier = GitHubWebHookIpVerifier
| true | Path = require "path"
Octonode = require "octonode"
Address4 = require("ip-address").Address4
ApiConfig = require(Path.join(__dirname, "api_config")).ApiConfig
###########################################################################
VaultKey = "PI:KEY:<KEY>END_PI"
class ApiTokenVerifier
constructor: (token) ->
@token = token?.trim()
@config = new ApiConfig(@token, null)
@api = Octonode.client(@config.token, {hostname: @config.hostname})
valid: (cb) ->
@api.get "/user", (err, status, data, headers) ->
scopes = headers?['x-oauth-scopes']
if scopes?.indexOf('repo') >= 0
cb(true)
else
cb(false)
class GitHubWebHookIpVerifier
constructor: () ->
@subnets = [ new Address4("PI:IP_ADDRESS:172.16.58.3END_PI/22"), new Address4("PI:IP_ADDRESS:192.168.127.12END_PI/20") ]
ipIsValid: (ipAddress) ->
address = new Address4("#{ipAddress}/24")
for subnet in @subnets
return true if address.isInSubnet(subnet)
false
exports.VaultKey = VaultKey
exports.ApiTokenVerifier = ApiTokenVerifier
exports.GitHubWebHookIpVerifier = GitHubWebHookIpVerifier
|
[
{
"context": " $.get \"./session\", {username: username, password: password}, (data)=>\n console.log data\n if data =",
"end": 525,
"score": 0.9617757201194763,
"start": 517,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "(@ids[name]).fadeIn 'slow'\n\n @ids =\n username: \"#username_container\",\n password: \"#password_container\",\n regist",
"end": 851,
"score": 0.9996282458305359,
"start": 831,
"tag": "USERNAME",
"value": "\"#username_container"
},
{
"context": " username: \"#username_container\",\n password: \"#password_container\",\n registry: \"#registry_container\"\n",
"end": 888,
"score": 0.9991909265518188,
"start": 868,
"tag": "PASSWORD",
"value": "\"#password_container"
}
] | source/client/login/login.coffee | muit/duros | 0 | class Dollars.Login
@load: ->
self = this
$("#username").focus()
$("#username").keydown (e)=>
if e.keyCode == 13
self.next()
$("#login_next").click ()=>
self.next()
$("#username_container").submit ()=>
self.next()
@next: ->
@username = window.username.value
if @username != ""
@hide "username"
@show "password"
$("#password").focus()
@login: (username, password, success, error)->
$.get "./session", {username: username, password: password}, (data)=>
console.log data
if data == "logged in" || data == "logged again"
if success then success()
else
if error then error()
@hide: (name, callback)->
$(@ids[name]).hide 400,callback
@show: (name)->
$(@ids[name]).fadeIn 'slow'
@ids =
username: "#username_container",
password: "#password_container",
registry: "#registry_container"
| 13394 | class Dollars.Login
@load: ->
self = this
$("#username").focus()
$("#username").keydown (e)=>
if e.keyCode == 13
self.next()
$("#login_next").click ()=>
self.next()
$("#username_container").submit ()=>
self.next()
@next: ->
@username = window.username.value
if @username != ""
@hide "username"
@show "password"
$("#password").focus()
@login: (username, password, success, error)->
$.get "./session", {username: username, password: <PASSWORD>}, (data)=>
console.log data
if data == "logged in" || data == "logged again"
if success then success()
else
if error then error()
@hide: (name, callback)->
$(@ids[name]).hide 400,callback
@show: (name)->
$(@ids[name]).fadeIn 'slow'
@ids =
username: "#username_container",
password: <PASSWORD>",
registry: "#registry_container"
| true | class Dollars.Login
@load: ->
self = this
$("#username").focus()
$("#username").keydown (e)=>
if e.keyCode == 13
self.next()
$("#login_next").click ()=>
self.next()
$("#username_container").submit ()=>
self.next()
@next: ->
@username = window.username.value
if @username != ""
@hide "username"
@show "password"
$("#password").focus()
@login: (username, password, success, error)->
$.get "./session", {username: username, password: PI:PASSWORD:<PASSWORD>END_PI}, (data)=>
console.log data
if data == "logged in" || data == "logged again"
if success then success()
else
if error then error()
@hide: (name, callback)->
$(@ids[name]).hide 400,callback
@show: (name)->
$(@ids[name]).fadeIn 'slow'
@ids =
username: "#username_container",
password: PI:PASSWORD:<PASSWORD>END_PI",
registry: "#registry_container"
|
[
{
"context": "\"*** 開始 ***\"\n\nconfig = {\n\tuser: 'sa',\n\tpassword: 'scott_tiger',\n\tserver: 'host_mssql',\n\tdatabase: 'city',\n}\n\ndi",
"end": 1415,
"score": 0.9984744191169739,
"start": 1404,
"tag": "PASSWORD",
"value": "scott_tiger"
}
] | mssql/coffee/create/mssql_create.coffee | ekzemplaro/data_base_language | 3 | #! /usr/bin/coffee
# ---------------------------------------------------------------
# mssql_create.coffee
#
# Jul/03/2014
#
# ---------------------------------------------------------------
mssql = require('mssql')
text_manipulate= require('/var/www/data_base/common/node_common/text_manipulate')
sql_manipulate= require('/var/www/data_base/common/node_common/sql_manipulate')
# ---------------------------------------------------------------
data_prepare_proc = () ->
dict_aa = new Object
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1071','前橋',16473,'1954-7-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1072','高崎',45297,'1954-8-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1073','桐生',64871,'1954-11-19')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1074','沼田',86974,'1954-6-17')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1075','伊勢崎',31258,'1954-8-14')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1076','水上',65792,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1077','太田',37251,'1954-3-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1078','安中',59486,'1954-7-26')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1079','みどり',21957,'1954-10-2')
return dict_aa
# ---------------------------------------------------------------
console.log "*** 開始 ***"
config = {
user: 'sa',
password: 'scott_tiger',
server: 'host_mssql',
database: 'city',
}
dict_aa = data_prepare_proc()
connection = new mssql.Connection(config, (err) ->
request = new mssql.Request(connection)
request.query('drop table cities')
command = 'create table cities (id varchar(10), name varchar(20),'
command += ' population int, date_mod date)'
request.query(command, (err, rows) ->
for key of dict_aa
# console.log (key)
sql_str = "insert into cities "
sql_str += "(id,name,population,date_mod) values ("
str_data = "'" + key + "','" + dict_aa[key].name
str_data += "'," + dict_aa[key].population
str_data += ",'" + dict_aa[key].date_mod + "')"
sql_str += str_data
request.query (sql_str)
connection.close()
console.log "*** 終了 ***"
)
)
# ---------------------------------------------------------------
| 186451 | #! /usr/bin/coffee
# ---------------------------------------------------------------
# mssql_create.coffee
#
# Jul/03/2014
#
# ---------------------------------------------------------------
mssql = require('mssql')
text_manipulate= require('/var/www/data_base/common/node_common/text_manipulate')
sql_manipulate= require('/var/www/data_base/common/node_common/sql_manipulate')
# ---------------------------------------------------------------
data_prepare_proc = () ->
dict_aa = new Object
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1071','前橋',16473,'1954-7-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1072','高崎',45297,'1954-8-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1073','桐生',64871,'1954-11-19')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1074','沼田',86974,'1954-6-17')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1075','伊勢崎',31258,'1954-8-14')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1076','水上',65792,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1077','太田',37251,'1954-3-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1078','安中',59486,'1954-7-26')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1079','みどり',21957,'1954-10-2')
return dict_aa
# ---------------------------------------------------------------
console.log "*** 開始 ***"
config = {
user: 'sa',
password: '<PASSWORD>',
server: 'host_mssql',
database: 'city',
}
dict_aa = data_prepare_proc()
connection = new mssql.Connection(config, (err) ->
request = new mssql.Request(connection)
request.query('drop table cities')
command = 'create table cities (id varchar(10), name varchar(20),'
command += ' population int, date_mod date)'
request.query(command, (err, rows) ->
for key of dict_aa
# console.log (key)
sql_str = "insert into cities "
sql_str += "(id,name,population,date_mod) values ("
str_data = "'" + key + "','" + dict_aa[key].name
str_data += "'," + dict_aa[key].population
str_data += ",'" + dict_aa[key].date_mod + "')"
sql_str += str_data
request.query (sql_str)
connection.close()
console.log "*** 終了 ***"
)
)
# ---------------------------------------------------------------
| true | #! /usr/bin/coffee
# ---------------------------------------------------------------
# mssql_create.coffee
#
# Jul/03/2014
#
# ---------------------------------------------------------------
mssql = require('mssql')
text_manipulate= require('/var/www/data_base/common/node_common/text_manipulate')
sql_manipulate= require('/var/www/data_base/common/node_common/sql_manipulate')
# ---------------------------------------------------------------
data_prepare_proc = () ->
dict_aa = new Object
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1071','前橋',16473,'1954-7-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1072','高崎',45297,'1954-8-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1073','桐生',64871,'1954-11-19')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1074','沼田',86974,'1954-6-17')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1075','伊勢崎',31258,'1954-8-14')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1076','水上',65792,'1954-9-12')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1077','太田',37251,'1954-3-21')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1078','安中',59486,'1954-7-26')
dict_aa = text_manipulate.dict_append_proc(dict_aa,'t1079','みどり',21957,'1954-10-2')
return dict_aa
# ---------------------------------------------------------------
console.log "*** 開始 ***"
config = {
user: 'sa',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
server: 'host_mssql',
database: 'city',
}
dict_aa = data_prepare_proc()
connection = new mssql.Connection(config, (err) ->
request = new mssql.Request(connection)
request.query('drop table cities')
command = 'create table cities (id varchar(10), name varchar(20),'
command += ' population int, date_mod date)'
request.query(command, (err, rows) ->
for key of dict_aa
# console.log (key)
sql_str = "insert into cities "
sql_str += "(id,name,population,date_mod) values ("
str_data = "'" + key + "','" + dict_aa[key].name
str_data += "'," + dict_aa[key].population
str_data += ",'" + dict_aa[key].date_mod + "')"
sql_str += str_data
request.query (sql_str)
connection.close()
console.log "*** 終了 ***"
)
)
# ---------------------------------------------------------------
|
[
{
"context": "ctor: (@game, @game_state, @player) ->\n key = 'atlas'\n frame = 'ammo'\n x = if @player.is_player_",
"end": 97,
"score": 0.9783406257629395,
"start": 92,
"tag": "KEY",
"value": "atlas"
}
] | src/coffeescripts/game/sprites/Ammo.coffee | rongierlach/gunfight-game | 0 | class Ammo extends Phaser.Sprite
constructor: (@game, @game_state, @player) ->
key = 'atlas'
frame = 'ammo'
x = if @player.is_player_one then 94 else 816
y = @game.height - 45
super @game, x, y, key, frame
# clean up
@game.add.existing @
@hide()
# add crop rect
@cropRect = @game.add.graphics()
return @
show: -> @reset @x, @y
hide: -> @kill()
crop: ->
width = @width - @player.num_bullets / 6 * @width
@drawRect width
reload: -> @cropRect.clear()
drawRect: (width) ->
@cropRect.lineStyle 0
@cropRect.beginFill "0x000000"
@cropRect.drawRect @x, @y, width, @height
@cropRect.endFill()
module.exports = Ammo
| 14593 | class Ammo extends Phaser.Sprite
constructor: (@game, @game_state, @player) ->
key = '<KEY>'
frame = 'ammo'
x = if @player.is_player_one then 94 else 816
y = @game.height - 45
super @game, x, y, key, frame
# clean up
@game.add.existing @
@hide()
# add crop rect
@cropRect = @game.add.graphics()
return @
show: -> @reset @x, @y
hide: -> @kill()
crop: ->
width = @width - @player.num_bullets / 6 * @width
@drawRect width
reload: -> @cropRect.clear()
drawRect: (width) ->
@cropRect.lineStyle 0
@cropRect.beginFill "0x000000"
@cropRect.drawRect @x, @y, width, @height
@cropRect.endFill()
module.exports = Ammo
| true | class Ammo extends Phaser.Sprite
constructor: (@game, @game_state, @player) ->
key = 'PI:KEY:<KEY>END_PI'
frame = 'ammo'
x = if @player.is_player_one then 94 else 816
y = @game.height - 45
super @game, x, y, key, frame
# clean up
@game.add.existing @
@hide()
# add crop rect
@cropRect = @game.add.graphics()
return @
show: -> @reset @x, @y
hide: -> @kill()
crop: ->
width = @width - @player.num_bullets / 6 * @width
@drawRect width
reload: -> @cropRect.clear()
drawRect: (width) ->
@cropRect.lineStyle 0
@cropRect.beginFill "0x000000"
@cropRect.drawRect @x, @y, width, @height
@cropRect.endFill()
module.exports = Ammo
|
[
{
"context": " OK: 0\n KERBEROS_V5: 2\n CLEARTEXT_PASSWORD: 3\n CRYPT_PASSWORD: 4\n MD5_PASSWORD: 5\n SCM",
"end": 82,
"score": 0.9628182649612427,
"start": 81,
"tag": "PASSWORD",
"value": "3"
},
{
"context": ": 2\n CLEARTEXT_PASSWORD: 3\n CRYPT_PASSWORD: 4\n MD5_PASSWORD: 5\n SCM_CREDENTIAL: 6\n GSS",
"end": 104,
"score": 0.9616536498069763,
"start": 103,
"tag": "PASSWORD",
"value": "4"
},
{
"context": "SSWORD: 3\n CRYPT_PASSWORD: 4\n MD5_PASSWORD: 5\n SCM_CREDENTIAL: 6\n GSS: 7\n GSS_CONTINUE",
"end": 124,
"score": 0.9558742642402649,
"start": 123,
"tag": "PASSWORD",
"value": "5"
}
] | src/authentication.coffee | simplereach/node-vertica | 25 | Authentication =
methods:
OK: 0
KERBEROS_V5: 2
CLEARTEXT_PASSWORD: 3
CRYPT_PASSWORD: 4
MD5_PASSWORD: 5
SCM_CREDENTIAL: 6
GSS: 7
GSS_CONTINUE: 8
SSPI: 9
# Exports
module.exports = Authentication
| 213762 | Authentication =
methods:
OK: 0
KERBEROS_V5: 2
CLEARTEXT_PASSWORD: <PASSWORD>
CRYPT_PASSWORD: <PASSWORD>
MD5_PASSWORD: <PASSWORD>
SCM_CREDENTIAL: 6
GSS: 7
GSS_CONTINUE: 8
SSPI: 9
# Exports
module.exports = Authentication
| true | Authentication =
methods:
OK: 0
KERBEROS_V5: 2
CLEARTEXT_PASSWORD: PI:PASSWORD:<PASSWORD>END_PI
CRYPT_PASSWORD: PI:PASSWORD:<PASSWORD>END_PI
MD5_PASSWORD: PI:PASSWORD:<PASSWORD>END_PI
SCM_CREDENTIAL: 6
GSS: 7
GSS_CONTINUE: 8
SSPI: 9
# Exports
module.exports = Authentication
|
[
{
"context": "\n\n\t\t\tlayerA = new Layer frame:Screen.frame, name:\"Koen\", image:\"../static/test.png\"\n\t\t\tlayerB = new Laye",
"end": 1505,
"score": 0.9338879585266113,
"start": 1501,
"tag": "NAME",
"value": "Koen"
},
{
"context": "ponent.wrap(layerA)\n\n\t\t\tscroll.name.should.equal \"Koen\"\n\t\t\tscroll.image.should.equal \"../static/test.png",
"end": 1651,
"score": 0.9966124892234802,
"start": 1647,
"tag": "NAME",
"value": "Koen"
}
] | test/tests/ScrollComponentTest.coffee | HydAu/FramerJS | 0 |
describe "ScrollComponent", ->
it "should have the right size", ->
scroll = new ScrollComponent
size: 300
scroll.frame.should.eql {x:0, y:0, width:300, height:300}
scroll.content.frame.should.eql {x:0, y:0, width:300, height:300}
it "should have the right content frame with align", ->
scroll = new ScrollComponent
size: 300
layer = new Layer
parent: scroll.content
size: 100
point: Align.center
scroll.content.frame.should.eql {x:0, y:0, width:300, height:300}
layer.frame.should.eql {x:100, y:100, width:100, height:100}
it "should apply constructor options", ->
instance = new ScrollComponent(scrollHorizontal: false)
instance.scrollHorizontal.should.be.false
it "should keep scrollHorizontal value on copy", ->
instance = new ScrollComponent(scrollHorizontal: false)
instance.scrollHorizontal.should.be.false
copy = instance.copy()
copy.scrollHorizontal.should.be.false
describe "wrap", ->
it "should use the wrapped layer as content layer when there are children", ->
layerA = new Layer frame:Screen.frame
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.content.should.equal layerA
it "should use the wrapped layer as content if there are no children", ->
layerA = new Layer frame:Screen.frame
scroll = ScrollComponent.wrap(layerA)
scroll.content.children[0].should.equal layerA
it "should copy the name and image", ->
layerA = new Layer frame:Screen.frame, name:"Koen", image:"../static/test.png"
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.name.should.equal "Koen"
scroll.image.should.equal "../static/test.png"
scroll.content.should.equal layerA
scroll.content.name.should.equal "content"
scroll.content.image.should.equal ""
it "should correct the scroll frame", ->
frame = Screen.frame
frame.width += 100
frame.height += 100
layerA = new Layer frame:frame
scroll = ScrollComponent.wrap(layerA)
scroll.width.should.equal Screen.width
scroll.height.should.equal Screen.height
it "should correct the scroll frame with children", ->
frame = Screen.frame
frame.width += 100
frame.height += 100
layerA = new Layer frame:frame
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.width.should.equal Screen.width
scroll.height.should.equal Screen.height
it "should work with null backgroundColor", ->
layerA = new Layer
layerB = new Layer superLayer:layerA
delete layerA._properties.backgroundColor
scroll = ScrollComponent.wrap(layerA)
it "should throw a warning on no layer", ->
f = -> ScrollComponent.wrap()
f.should.throw()
it "should set content clip to true", ->
scroll = new ScrollComponent()
scroll.content.clip.should.equal true
| 68448 |
describe "ScrollComponent", ->
it "should have the right size", ->
scroll = new ScrollComponent
size: 300
scroll.frame.should.eql {x:0, y:0, width:300, height:300}
scroll.content.frame.should.eql {x:0, y:0, width:300, height:300}
it "should have the right content frame with align", ->
scroll = new ScrollComponent
size: 300
layer = new Layer
parent: scroll.content
size: 100
point: Align.center
scroll.content.frame.should.eql {x:0, y:0, width:300, height:300}
layer.frame.should.eql {x:100, y:100, width:100, height:100}
it "should apply constructor options", ->
instance = new ScrollComponent(scrollHorizontal: false)
instance.scrollHorizontal.should.be.false
it "should keep scrollHorizontal value on copy", ->
instance = new ScrollComponent(scrollHorizontal: false)
instance.scrollHorizontal.should.be.false
copy = instance.copy()
copy.scrollHorizontal.should.be.false
describe "wrap", ->
it "should use the wrapped layer as content layer when there are children", ->
layerA = new Layer frame:Screen.frame
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.content.should.equal layerA
it "should use the wrapped layer as content if there are no children", ->
layerA = new Layer frame:Screen.frame
scroll = ScrollComponent.wrap(layerA)
scroll.content.children[0].should.equal layerA
it "should copy the name and image", ->
layerA = new Layer frame:Screen.frame, name:"<NAME>", image:"../static/test.png"
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.name.should.equal "<NAME>"
scroll.image.should.equal "../static/test.png"
scroll.content.should.equal layerA
scroll.content.name.should.equal "content"
scroll.content.image.should.equal ""
it "should correct the scroll frame", ->
frame = Screen.frame
frame.width += 100
frame.height += 100
layerA = new Layer frame:frame
scroll = ScrollComponent.wrap(layerA)
scroll.width.should.equal Screen.width
scroll.height.should.equal Screen.height
it "should correct the scroll frame with children", ->
frame = Screen.frame
frame.width += 100
frame.height += 100
layerA = new Layer frame:frame
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.width.should.equal Screen.width
scroll.height.should.equal Screen.height
it "should work with null backgroundColor", ->
layerA = new Layer
layerB = new Layer superLayer:layerA
delete layerA._properties.backgroundColor
scroll = ScrollComponent.wrap(layerA)
it "should throw a warning on no layer", ->
f = -> ScrollComponent.wrap()
f.should.throw()
it "should set content clip to true", ->
scroll = new ScrollComponent()
scroll.content.clip.should.equal true
| true |
describe "ScrollComponent", ->
it "should have the right size", ->
scroll = new ScrollComponent
size: 300
scroll.frame.should.eql {x:0, y:0, width:300, height:300}
scroll.content.frame.should.eql {x:0, y:0, width:300, height:300}
it "should have the right content frame with align", ->
scroll = new ScrollComponent
size: 300
layer = new Layer
parent: scroll.content
size: 100
point: Align.center
scroll.content.frame.should.eql {x:0, y:0, width:300, height:300}
layer.frame.should.eql {x:100, y:100, width:100, height:100}
it "should apply constructor options", ->
instance = new ScrollComponent(scrollHorizontal: false)
instance.scrollHorizontal.should.be.false
it "should keep scrollHorizontal value on copy", ->
instance = new ScrollComponent(scrollHorizontal: false)
instance.scrollHorizontal.should.be.false
copy = instance.copy()
copy.scrollHorizontal.should.be.false
describe "wrap", ->
it "should use the wrapped layer as content layer when there are children", ->
layerA = new Layer frame:Screen.frame
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.content.should.equal layerA
it "should use the wrapped layer as content if there are no children", ->
layerA = new Layer frame:Screen.frame
scroll = ScrollComponent.wrap(layerA)
scroll.content.children[0].should.equal layerA
it "should copy the name and image", ->
layerA = new Layer frame:Screen.frame, name:"PI:NAME:<NAME>END_PI", image:"../static/test.png"
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.name.should.equal "PI:NAME:<NAME>END_PI"
scroll.image.should.equal "../static/test.png"
scroll.content.should.equal layerA
scroll.content.name.should.equal "content"
scroll.content.image.should.equal ""
it "should correct the scroll frame", ->
frame = Screen.frame
frame.width += 100
frame.height += 100
layerA = new Layer frame:frame
scroll = ScrollComponent.wrap(layerA)
scroll.width.should.equal Screen.width
scroll.height.should.equal Screen.height
it "should correct the scroll frame with children", ->
frame = Screen.frame
frame.width += 100
frame.height += 100
layerA = new Layer frame:frame
layerB = new Layer superLayer:layerA
scroll = ScrollComponent.wrap(layerA)
scroll.width.should.equal Screen.width
scroll.height.should.equal Screen.height
it "should work with null backgroundColor", ->
layerA = new Layer
layerB = new Layer superLayer:layerA
delete layerA._properties.backgroundColor
scroll = ScrollComponent.wrap(layerA)
it "should throw a warning on no layer", ->
f = -> ScrollComponent.wrap()
f.should.throw()
it "should set content clip to true", ->
scroll = new ScrollComponent()
scroll.content.clip.should.equal true
|
[
{
"context": "###\n @author (at)taikiken / http://inazumatv.com\n Copyright (c) 2",
"end": 17,
"score": 0.7751848697662354,
"start": 15,
"tag": "USERNAME",
"value": "at"
},
{
"context": "###\n @author (at)taikiken / http://inazumatv.com\n Copyright (c) 2011-2015 ",
"end": 26,
"score": 0.9272382259368896,
"start": 18,
"tag": "NAME",
"value": "taikiken"
}
] | _deprecated/setting.coffee | taikiken/moku.js | 1 | ###
@author (at)taikiken / http://inazumatv.com
Copyright (c) 2011-2015 inazumatv.com
Licensed under the Apache License, Version 2.0 (the "License");
https://www.apache.org/licenses/LICENSE-2.0
###
# ------------------------------------------------------
# package
# ------------------------------------------------------
pkg = require './package.json'
# ------------------------------------------------------
# Node / Gulp module
# ------------------------------------------------------
# Include Gulp & tools we'll use
gulp = require 'gulp'
$ = do require 'gulp-load-plugins'
# module
$$ = {}
# module del
$$.del = require 'del'
# module run-sequence
$$.runSequence = require 'run-sequence'
# module webpack
$$.webpack = require 'webpack'
# module browser-sync
browserSync = require 'browser-sync'
$$.browserSync = browserSync
$$.reload = browserSync.reload
# ------------------------------------------------------
# directory
# ------------------------------------------------------
dir = {}
root = '.'
dir.root = root
# dev root
app = root + '/app'
assets = app + '/assets'
dir.app =
root: app
assets: assets
img: assets + '/img'
css: assets + '/css'
js: assets + '/js'
libs: assets + '/js/libs'
bundle: assets + '/js/bundle'
# scss module
dir.scss = root + '/scss'
# sprite
sprite = root + '/sprite'
dir.sprite =
root: sprite
css: dir.app.css
img: dir.app.img + '/sprite'
# tmp, dev compiled css directory
dir.tmp = root + '/.tmp'
# scripts, project js library
scripts = root + '/scripts'
dir.scripts =
src: scripts + '/src'
dependencies: scripts + '/dependencies'
# bower
bower = root + '/bower'
dir.bower =
components: bower + '/bower_components'
exports: bower + '/bower_exports'
# dist root
dist = root
dir.dist =
root: dist
libs: root + '/libs'
# babels
babels = root + '/babels'
dir.babels =
src: babels + '/src'
dependencies: babels + '/dependencies'
compile: babels + '/compile'
# ------------------------------------------------------
# webpack config
# ------------------------------------------------------
wpk =
entry: __dirname
output:
path: dir.app.bundle
publicPath: 'assets/js/bundle'
filename: 'moku.js'
chunkFilename: '[chunkhash].bundle.js'
# ------------------------------------------------------
# Sass prefix (Browser vendor prefix)
# ------------------------------------------------------
AUTO_PREFIX_BROWSERS = [
'ie >= 11'
'ie_mob >= 10'
'ff >= 44'
'chrome >= 48'
'safari >= 9'
'opera >= 34'
'ios >= 8.4'
'android >= 4.2'
'bb >= 10'
]
# ------------------------------------------------------
# patterns for replace
# ------------------------------------------------------
patterns = [
{
match: 'buildTime'
replacement: new Date().toLocaleString()
}
{
match: 'year'
replacement: new Date().getFullYear()
}
{
match: 'version'
replacement: pkg.version
}
{
match: 'copyright'
replacement: 'inazumatv.com'
}
]
# ------------------------------------------------------
# compression
# ------------------------------------------------------
compress = {}
# image
###
optimizationLevel: default 3
圧縮効率は下記設定の方が大きい
progressive: true
interlaced: true
###
compress.img =
optimizationLevel: 5
progressive: false
interlaced: false
###
# html / css minify するかしないか
# true: minifyする
#
# default =
# html: false
# css: true
###
compress.html = false
compress.css = true
# ------------------------------------------------------
# server (browserSync)
# ------------------------------------------------------
server = {}
###
_port.coffee を port.coffee へ rename します
port.coffee の port 値を環境に合わせ変更します
port.coffee を .gitignore に加えます
###
try
port = require './port'
catch error
port = { port : 61000 }
server.port = port.port
###
indexes
directory indexes を設定します
* 【注意】directory index が無効になってしまうので default false にしてます
###
server.indexes = true
# ------------------------------------------------------
# exports
# ------------------------------------------------------
module.exports =
gulp: gulp
$: $
wpk: wpk
$$: $$
dir: dir
server: server
AUTO_PREFIX_BROWSERS: AUTO_PREFIX_BROWSERS
patterns: patterns
compress: compress
| 77366 | ###
@author (at)<NAME> / http://inazumatv.com
Copyright (c) 2011-2015 inazumatv.com
Licensed under the Apache License, Version 2.0 (the "License");
https://www.apache.org/licenses/LICENSE-2.0
###
# ------------------------------------------------------
# package
# ------------------------------------------------------
pkg = require './package.json'
# ------------------------------------------------------
# Node / Gulp module
# ------------------------------------------------------
# Include Gulp & tools we'll use
gulp = require 'gulp'
$ = do require 'gulp-load-plugins'
# module
$$ = {}
# module del
$$.del = require 'del'
# module run-sequence
$$.runSequence = require 'run-sequence'
# module webpack
$$.webpack = require 'webpack'
# module browser-sync
browserSync = require 'browser-sync'
$$.browserSync = browserSync
$$.reload = browserSync.reload
# ------------------------------------------------------
# directory
# ------------------------------------------------------
dir = {}
root = '.'
dir.root = root
# dev root
app = root + '/app'
assets = app + '/assets'
dir.app =
root: app
assets: assets
img: assets + '/img'
css: assets + '/css'
js: assets + '/js'
libs: assets + '/js/libs'
bundle: assets + '/js/bundle'
# scss module
dir.scss = root + '/scss'
# sprite
sprite = root + '/sprite'
dir.sprite =
root: sprite
css: dir.app.css
img: dir.app.img + '/sprite'
# tmp, dev compiled css directory
dir.tmp = root + '/.tmp'
# scripts, project js library
scripts = root + '/scripts'
dir.scripts =
src: scripts + '/src'
dependencies: scripts + '/dependencies'
# bower
bower = root + '/bower'
dir.bower =
components: bower + '/bower_components'
exports: bower + '/bower_exports'
# dist root
dist = root
dir.dist =
root: dist
libs: root + '/libs'
# babels
babels = root + '/babels'
dir.babels =
src: babels + '/src'
dependencies: babels + '/dependencies'
compile: babels + '/compile'
# ------------------------------------------------------
# webpack config
# ------------------------------------------------------
wpk =
entry: __dirname
output:
path: dir.app.bundle
publicPath: 'assets/js/bundle'
filename: 'moku.js'
chunkFilename: '[chunkhash].bundle.js'
# ------------------------------------------------------
# Sass prefix (Browser vendor prefix)
# ------------------------------------------------------
AUTO_PREFIX_BROWSERS = [
'ie >= 11'
'ie_mob >= 10'
'ff >= 44'
'chrome >= 48'
'safari >= 9'
'opera >= 34'
'ios >= 8.4'
'android >= 4.2'
'bb >= 10'
]
# ------------------------------------------------------
# patterns for replace
# ------------------------------------------------------
patterns = [
{
match: 'buildTime'
replacement: new Date().toLocaleString()
}
{
match: 'year'
replacement: new Date().getFullYear()
}
{
match: 'version'
replacement: pkg.version
}
{
match: 'copyright'
replacement: 'inazumatv.com'
}
]
# ------------------------------------------------------
# compression
# ------------------------------------------------------
compress = {}
# image
###
optimizationLevel: default 3
圧縮効率は下記設定の方が大きい
progressive: true
interlaced: true
###
compress.img =
optimizationLevel: 5
progressive: false
interlaced: false
###
# html / css minify するかしないか
# true: minifyする
#
# default =
# html: false
# css: true
###
compress.html = false
compress.css = true
# ------------------------------------------------------
# server (browserSync)
# ------------------------------------------------------
server = {}
###
_port.coffee を port.coffee へ rename します
port.coffee の port 値を環境に合わせ変更します
port.coffee を .gitignore に加えます
###
try
port = require './port'
catch error
port = { port : 61000 }
server.port = port.port
###
indexes
directory indexes を設定します
* 【注意】directory index が無効になってしまうので default false にしてます
###
server.indexes = true
# ------------------------------------------------------
# exports
# ------------------------------------------------------
module.exports =
gulp: gulp
$: $
wpk: wpk
$$: $$
dir: dir
server: server
AUTO_PREFIX_BROWSERS: AUTO_PREFIX_BROWSERS
patterns: patterns
compress: compress
| true | ###
@author (at)PI:NAME:<NAME>END_PI / http://inazumatv.com
Copyright (c) 2011-2015 inazumatv.com
Licensed under the Apache License, Version 2.0 (the "License");
https://www.apache.org/licenses/LICENSE-2.0
###
# ------------------------------------------------------
# package
# ------------------------------------------------------
pkg = require './package.json'
# ------------------------------------------------------
# Node / Gulp module
# ------------------------------------------------------
# Include Gulp & tools we'll use
gulp = require 'gulp'
$ = do require 'gulp-load-plugins'
# module
$$ = {}
# module del
$$.del = require 'del'
# module run-sequence
$$.runSequence = require 'run-sequence'
# module webpack
$$.webpack = require 'webpack'
# module browser-sync
browserSync = require 'browser-sync'
$$.browserSync = browserSync
$$.reload = browserSync.reload
# ------------------------------------------------------
# directory
# ------------------------------------------------------
dir = {}
root = '.'
dir.root = root
# dev root
app = root + '/app'
assets = app + '/assets'
dir.app =
root: app
assets: assets
img: assets + '/img'
css: assets + '/css'
js: assets + '/js'
libs: assets + '/js/libs'
bundle: assets + '/js/bundle'
# scss module
dir.scss = root + '/scss'
# sprite
sprite = root + '/sprite'
dir.sprite =
root: sprite
css: dir.app.css
img: dir.app.img + '/sprite'
# tmp, dev compiled css directory
dir.tmp = root + '/.tmp'
# scripts, project js library
scripts = root + '/scripts'
dir.scripts =
src: scripts + '/src'
dependencies: scripts + '/dependencies'
# bower
bower = root + '/bower'
dir.bower =
components: bower + '/bower_components'
exports: bower + '/bower_exports'
# dist root
dist = root
dir.dist =
root: dist
libs: root + '/libs'
# babels
babels = root + '/babels'
dir.babels =
src: babels + '/src'
dependencies: babels + '/dependencies'
compile: babels + '/compile'
# ------------------------------------------------------
# webpack config
# ------------------------------------------------------
wpk =
entry: __dirname
output:
path: dir.app.bundle
publicPath: 'assets/js/bundle'
filename: 'moku.js'
chunkFilename: '[chunkhash].bundle.js'
# ------------------------------------------------------
# Sass prefix (Browser vendor prefix)
# ------------------------------------------------------
AUTO_PREFIX_BROWSERS = [
'ie >= 11'
'ie_mob >= 10'
'ff >= 44'
'chrome >= 48'
'safari >= 9'
'opera >= 34'
'ios >= 8.4'
'android >= 4.2'
'bb >= 10'
]
# ------------------------------------------------------
# patterns for replace
# ------------------------------------------------------
patterns = [
{
match: 'buildTime'
replacement: new Date().toLocaleString()
}
{
match: 'year'
replacement: new Date().getFullYear()
}
{
match: 'version'
replacement: pkg.version
}
{
match: 'copyright'
replacement: 'inazumatv.com'
}
]
# ------------------------------------------------------
# compression
# ------------------------------------------------------
compress = {}
# image
###
optimizationLevel: default 3
圧縮効率は下記設定の方が大きい
progressive: true
interlaced: true
###
compress.img =
optimizationLevel: 5
progressive: false
interlaced: false
###
# html / css minify するかしないか
# true: minifyする
#
# default =
# html: false
# css: true
###
compress.html = false
compress.css = true
# ------------------------------------------------------
# server (browserSync)
# ------------------------------------------------------
server = {}
###
_port.coffee を port.coffee へ rename します
port.coffee の port 値を環境に合わせ変更します
port.coffee を .gitignore に加えます
###
try
port = require './port'
catch error
port = { port : 61000 }
server.port = port.port
###
indexes
directory indexes を設定します
* 【注意】directory index が無効になってしまうので default false にしてます
###
server.indexes = true
# ------------------------------------------------------
# exports
# ------------------------------------------------------
module.exports =
gulp: gulp
$: $
wpk: wpk
$$: $$
dir: dir
server: server
AUTO_PREFIX_BROWSERS: AUTO_PREFIX_BROWSERS
patterns: patterns
compress: compress
|
[
{
"context": "rap (done) ->\n user = new User({passwordHash: '1234', anonymous: true})\n user = yield user.save()\n",
"end": 1269,
"score": 0.9993522763252258,
"start": 1265,
"tag": "PASSWORD",
"value": "1234"
},
{
"context": "er({emailSubscriptions: ['announcement'], email: 'tester@gmail.com'})\n User.updateServiceSettings(user)\n\n desc",
"end": 2516,
"score": 0.9999160766601562,
"start": 2500,
"tag": "EMAIL",
"value": "tester@gmail.com"
}
] | spec/server/unit/user.spec.coffee | kbespalyi/codecombat | 0 | GLOBAL._ = require 'lodash'
User = require '../../../server/models/User'
utils = require '../utils'
mongoose = require 'mongoose'
describe 'User', ->
it 'uses the schema defaults to fill in email preferences', (done) ->
user = new User()
expect(user.isEmailSubscriptionEnabled('generalNews')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('anyNotes')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('recruitNotes')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('archmageNews')).toBeFalsy()
done()
it 'uses old subs if they\'re around', (done) ->
user = new User()
user.set 'emailSubscriptions', ['tester']
expect(user.isEmailSubscriptionEnabled('adventurerNews')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('generalNews')).toBeFalsy()
done()
it 'maintains the old subs list if it\'s around', (done) ->
user = new User()
user.set 'emailSubscriptions', ['tester']
user.setEmailSubscription('artisanNews', true)
expect(JSON.stringify(user.get('emailSubscriptions'))).toBe(JSON.stringify(['tester', 'level_creator']))
done()
it 'does not allow anonymous to be set to true if there is a login method', utils.wrap (done) ->
user = new User({passwordHash: '1234', anonymous: true})
user = yield user.save()
expect(user.get('anonymous')).toBe(false)
done()
it 'prevents duplicate oAuthIdentities', utils.wrap (done) ->
provider1 = new mongoose.Types.ObjectId()
provider2 = new mongoose.Types.ObjectId()
identity1 = { provider: provider1, id: 'abcd' }
identity2 = { provider: provider2, id: 'abcd' }
identity3 = { provider: provider1, id: '1234' }
# These three should live in harmony
users = []
users.push yield utils.initUser({ oAuthIdentities: [identity1] })
users.push yield utils.initUser({ oAuthIdentities: [identity2] })
users.push yield utils.initUser({ oAuthIdentities: [identity3] })
e = null
try
users.push yield utils.initUser({ oAuthIdentities: [identity1] })
catch e
expect(e).not.toBe(null)
done()
describe '.updateServiceSettings()', ->
makeMC = (callback) ->
it 'uses emails to determine what to send to MailChimp', (done) ->
spyOn(mc.lists, 'subscribe').and.callFake (params) ->
expect(JSON.stringify(params.merge_vars.groupings[0].groups)).toBe(JSON.stringify(['Announcements']))
done()
user = new User({emailSubscriptions: ['announcement'], email: 'tester@gmail.com'})
User.updateServiceSettings(user)
describe '.isAdmin()', ->
it 'returns true if user has "admin" permission', (done) ->
adminUser = new User()
adminUser.set('permissions', ['whatever', 'admin', 'user'])
expect(adminUser.isAdmin()).toBeTruthy()
done()
it 'returns false if user has no permissions', (done) ->
myUser = new User()
myUser.set('permissions', [])
expect(myUser.isAdmin()).toBeFalsy()
done()
it 'returns false if user has other permissions', (done) ->
classicUser = new User()
classicUser.set('permissions', ['user'])
expect(classicUser.isAdmin()).toBeFalsy()
done()
describe '.verificationCode(timestamp)', ->
it 'returns a timestamp and a hash', (done) ->
user = new User()
now = new Date()
code = user.verificationCode(now.getTime())
expect(code).toMatch(/[0-9]{13}:[0-9a-f]{64}/)
[timestamp, hash] = code.split(':')
expect(new Date(parseInt(timestamp))).toEqual(now)
done()
describe '.incrementStatAsync()', ->
it 'records nested stats', utils.wrap (done) ->
user = yield utils.initUser()
yield User.incrementStatAsync user.id, 'stats.testNumber'
yield User.incrementStatAsync user.id, 'stats.concepts.basic', {inc: 10}
user = yield User.findById(user.id)
expect(user.get('stats.testNumber')).toBe(1)
expect(user.get('stats.concepts.basic')).toBe(10)
done()
describe 'subscription virtual', ->
it 'has active and ends properties', ->
moment = require 'moment'
stripeEnd = moment().add(12, 'months').toISOString().substring(0,10)
user1 = new User({stripe: {free:stripeEnd}})
expectedEnd = "#{stripeEnd}T00:00:00.000Z"
expect(user1.get('subscription').active).toBe(true)
expect(user1.get('subscription').ends).toBe(expectedEnd)
expect(user1.toObject({virtuals: true}).subscription.ends).toBe(expectedEnd)
user2 = new User()
expect(user2.get('subscription').active).toBe(false)
user3 = new User({stripe: {free: true}})
expect(user3.get('subscription').active).toBe(true)
expect(user3.get('subscription').ends).toBeUndefined()
describe '.prepaidIncludesCourse(courseID)', ->
describe 'when the prepaid is a legacy full license', ->
it 'returns true', ->
user = new User({ coursePrepaidID: 'prepaid_1' })
expect(user.prepaidIncludesCourse('course_1')).toBe(true)
describe 'when the prepaid is a full license', ->
it 'returns true', ->
user = new User({ coursePrepaid: { _id: 'prepaid_1' } })
expect(user.prepaidIncludesCourse('course_1')).toBe(true)
describe 'when the prepaid is a starter license', ->
beforeEach ->
@user = new User({ coursePrepaid: { _id: 'prepaid_1', includedCourseIDs: ['course_1'] } })
describe 'that does include the course', ->
it 'returns true', ->
expect(@user.prepaidIncludesCourse('course_1')).toBe(true)
describe "that doesn't include the course", ->
it 'returns false', ->
expect(@user.prepaidIncludesCourse('course_2')).toBe(false)
describe 'when the user has no prepaid', ->
it 'returns false', ->
@user = new User({ coursePrepaid: undefined })
expect(@user.prepaidIncludesCourse('course_2')).toBe(false)
| 97800 | GLOBAL._ = require 'lodash'
User = require '../../../server/models/User'
utils = require '../utils'
mongoose = require 'mongoose'
describe 'User', ->
it 'uses the schema defaults to fill in email preferences', (done) ->
user = new User()
expect(user.isEmailSubscriptionEnabled('generalNews')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('anyNotes')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('recruitNotes')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('archmageNews')).toBeFalsy()
done()
it 'uses old subs if they\'re around', (done) ->
user = new User()
user.set 'emailSubscriptions', ['tester']
expect(user.isEmailSubscriptionEnabled('adventurerNews')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('generalNews')).toBeFalsy()
done()
it 'maintains the old subs list if it\'s around', (done) ->
user = new User()
user.set 'emailSubscriptions', ['tester']
user.setEmailSubscription('artisanNews', true)
expect(JSON.stringify(user.get('emailSubscriptions'))).toBe(JSON.stringify(['tester', 'level_creator']))
done()
it 'does not allow anonymous to be set to true if there is a login method', utils.wrap (done) ->
user = new User({passwordHash: '<PASSWORD>', anonymous: true})
user = yield user.save()
expect(user.get('anonymous')).toBe(false)
done()
it 'prevents duplicate oAuthIdentities', utils.wrap (done) ->
provider1 = new mongoose.Types.ObjectId()
provider2 = new mongoose.Types.ObjectId()
identity1 = { provider: provider1, id: 'abcd' }
identity2 = { provider: provider2, id: 'abcd' }
identity3 = { provider: provider1, id: '1234' }
# These three should live in harmony
users = []
users.push yield utils.initUser({ oAuthIdentities: [identity1] })
users.push yield utils.initUser({ oAuthIdentities: [identity2] })
users.push yield utils.initUser({ oAuthIdentities: [identity3] })
e = null
try
users.push yield utils.initUser({ oAuthIdentities: [identity1] })
catch e
expect(e).not.toBe(null)
done()
describe '.updateServiceSettings()', ->
makeMC = (callback) ->
it 'uses emails to determine what to send to MailChimp', (done) ->
spyOn(mc.lists, 'subscribe').and.callFake (params) ->
expect(JSON.stringify(params.merge_vars.groupings[0].groups)).toBe(JSON.stringify(['Announcements']))
done()
user = new User({emailSubscriptions: ['announcement'], email: '<EMAIL>'})
User.updateServiceSettings(user)
describe '.isAdmin()', ->
it 'returns true if user has "admin" permission', (done) ->
adminUser = new User()
adminUser.set('permissions', ['whatever', 'admin', 'user'])
expect(adminUser.isAdmin()).toBeTruthy()
done()
it 'returns false if user has no permissions', (done) ->
myUser = new User()
myUser.set('permissions', [])
expect(myUser.isAdmin()).toBeFalsy()
done()
it 'returns false if user has other permissions', (done) ->
classicUser = new User()
classicUser.set('permissions', ['user'])
expect(classicUser.isAdmin()).toBeFalsy()
done()
describe '.verificationCode(timestamp)', ->
it 'returns a timestamp and a hash', (done) ->
user = new User()
now = new Date()
code = user.verificationCode(now.getTime())
expect(code).toMatch(/[0-9]{13}:[0-9a-f]{64}/)
[timestamp, hash] = code.split(':')
expect(new Date(parseInt(timestamp))).toEqual(now)
done()
describe '.incrementStatAsync()', ->
it 'records nested stats', utils.wrap (done) ->
user = yield utils.initUser()
yield User.incrementStatAsync user.id, 'stats.testNumber'
yield User.incrementStatAsync user.id, 'stats.concepts.basic', {inc: 10}
user = yield User.findById(user.id)
expect(user.get('stats.testNumber')).toBe(1)
expect(user.get('stats.concepts.basic')).toBe(10)
done()
describe 'subscription virtual', ->
it 'has active and ends properties', ->
moment = require 'moment'
stripeEnd = moment().add(12, 'months').toISOString().substring(0,10)
user1 = new User({stripe: {free:stripeEnd}})
expectedEnd = "#{stripeEnd}T00:00:00.000Z"
expect(user1.get('subscription').active).toBe(true)
expect(user1.get('subscription').ends).toBe(expectedEnd)
expect(user1.toObject({virtuals: true}).subscription.ends).toBe(expectedEnd)
user2 = new User()
expect(user2.get('subscription').active).toBe(false)
user3 = new User({stripe: {free: true}})
expect(user3.get('subscription').active).toBe(true)
expect(user3.get('subscription').ends).toBeUndefined()
describe '.prepaidIncludesCourse(courseID)', ->
describe 'when the prepaid is a legacy full license', ->
it 'returns true', ->
user = new User({ coursePrepaidID: 'prepaid_1' })
expect(user.prepaidIncludesCourse('course_1')).toBe(true)
describe 'when the prepaid is a full license', ->
it 'returns true', ->
user = new User({ coursePrepaid: { _id: 'prepaid_1' } })
expect(user.prepaidIncludesCourse('course_1')).toBe(true)
describe 'when the prepaid is a starter license', ->
beforeEach ->
@user = new User({ coursePrepaid: { _id: 'prepaid_1', includedCourseIDs: ['course_1'] } })
describe 'that does include the course', ->
it 'returns true', ->
expect(@user.prepaidIncludesCourse('course_1')).toBe(true)
describe "that doesn't include the course", ->
it 'returns false', ->
expect(@user.prepaidIncludesCourse('course_2')).toBe(false)
describe 'when the user has no prepaid', ->
it 'returns false', ->
@user = new User({ coursePrepaid: undefined })
expect(@user.prepaidIncludesCourse('course_2')).toBe(false)
| true | GLOBAL._ = require 'lodash'
User = require '../../../server/models/User'
utils = require '../utils'
mongoose = require 'mongoose'
describe 'User', ->
it 'uses the schema defaults to fill in email preferences', (done) ->
user = new User()
expect(user.isEmailSubscriptionEnabled('generalNews')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('anyNotes')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('recruitNotes')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('archmageNews')).toBeFalsy()
done()
it 'uses old subs if they\'re around', (done) ->
user = new User()
user.set 'emailSubscriptions', ['tester']
expect(user.isEmailSubscriptionEnabled('adventurerNews')).toBeTruthy()
expect(user.isEmailSubscriptionEnabled('generalNews')).toBeFalsy()
done()
it 'maintains the old subs list if it\'s around', (done) ->
user = new User()
user.set 'emailSubscriptions', ['tester']
user.setEmailSubscription('artisanNews', true)
expect(JSON.stringify(user.get('emailSubscriptions'))).toBe(JSON.stringify(['tester', 'level_creator']))
done()
it 'does not allow anonymous to be set to true if there is a login method', utils.wrap (done) ->
user = new User({passwordHash: 'PI:PASSWORD:<PASSWORD>END_PI', anonymous: true})
user = yield user.save()
expect(user.get('anonymous')).toBe(false)
done()
it 'prevents duplicate oAuthIdentities', utils.wrap (done) ->
provider1 = new mongoose.Types.ObjectId()
provider2 = new mongoose.Types.ObjectId()
identity1 = { provider: provider1, id: 'abcd' }
identity2 = { provider: provider2, id: 'abcd' }
identity3 = { provider: provider1, id: '1234' }
# These three should live in harmony
users = []
users.push yield utils.initUser({ oAuthIdentities: [identity1] })
users.push yield utils.initUser({ oAuthIdentities: [identity2] })
users.push yield utils.initUser({ oAuthIdentities: [identity3] })
e = null
try
users.push yield utils.initUser({ oAuthIdentities: [identity1] })
catch e
expect(e).not.toBe(null)
done()
describe '.updateServiceSettings()', ->
makeMC = (callback) ->
it 'uses emails to determine what to send to MailChimp', (done) ->
spyOn(mc.lists, 'subscribe').and.callFake (params) ->
expect(JSON.stringify(params.merge_vars.groupings[0].groups)).toBe(JSON.stringify(['Announcements']))
done()
user = new User({emailSubscriptions: ['announcement'], email: 'PI:EMAIL:<EMAIL>END_PI'})
User.updateServiceSettings(user)
describe '.isAdmin()', ->
it 'returns true if user has "admin" permission', (done) ->
adminUser = new User()
adminUser.set('permissions', ['whatever', 'admin', 'user'])
expect(adminUser.isAdmin()).toBeTruthy()
done()
it 'returns false if user has no permissions', (done) ->
myUser = new User()
myUser.set('permissions', [])
expect(myUser.isAdmin()).toBeFalsy()
done()
it 'returns false if user has other permissions', (done) ->
classicUser = new User()
classicUser.set('permissions', ['user'])
expect(classicUser.isAdmin()).toBeFalsy()
done()
describe '.verificationCode(timestamp)', ->
it 'returns a timestamp and a hash', (done) ->
user = new User()
now = new Date()
code = user.verificationCode(now.getTime())
expect(code).toMatch(/[0-9]{13}:[0-9a-f]{64}/)
[timestamp, hash] = code.split(':')
expect(new Date(parseInt(timestamp))).toEqual(now)
done()
describe '.incrementStatAsync()', ->
it 'records nested stats', utils.wrap (done) ->
user = yield utils.initUser()
yield User.incrementStatAsync user.id, 'stats.testNumber'
yield User.incrementStatAsync user.id, 'stats.concepts.basic', {inc: 10}
user = yield User.findById(user.id)
expect(user.get('stats.testNumber')).toBe(1)
expect(user.get('stats.concepts.basic')).toBe(10)
done()
describe 'subscription virtual', ->
it 'has active and ends properties', ->
moment = require 'moment'
stripeEnd = moment().add(12, 'months').toISOString().substring(0,10)
user1 = new User({stripe: {free:stripeEnd}})
expectedEnd = "#{stripeEnd}T00:00:00.000Z"
expect(user1.get('subscription').active).toBe(true)
expect(user1.get('subscription').ends).toBe(expectedEnd)
expect(user1.toObject({virtuals: true}).subscription.ends).toBe(expectedEnd)
user2 = new User()
expect(user2.get('subscription').active).toBe(false)
user3 = new User({stripe: {free: true}})
expect(user3.get('subscription').active).toBe(true)
expect(user3.get('subscription').ends).toBeUndefined()
describe '.prepaidIncludesCourse(courseID)', ->
describe 'when the prepaid is a legacy full license', ->
it 'returns true', ->
user = new User({ coursePrepaidID: 'prepaid_1' })
expect(user.prepaidIncludesCourse('course_1')).toBe(true)
describe 'when the prepaid is a full license', ->
it 'returns true', ->
user = new User({ coursePrepaid: { _id: 'prepaid_1' } })
expect(user.prepaidIncludesCourse('course_1')).toBe(true)
describe 'when the prepaid is a starter license', ->
beforeEach ->
@user = new User({ coursePrepaid: { _id: 'prepaid_1', includedCourseIDs: ['course_1'] } })
describe 'that does include the course', ->
it 'returns true', ->
expect(@user.prepaidIncludesCourse('course_1')).toBe(true)
describe "that doesn't include the course", ->
it 'returns false', ->
expect(@user.prepaidIncludesCourse('course_2')).toBe(false)
describe 'when the user has no prepaid', ->
it 'returns false', ->
@user = new User({ coursePrepaid: undefined })
expect(@user.prepaidIncludesCourse('course_2')).toBe(false)
|
[
{
"context": "# Automatically creates project files\n#\n# Author: Anshul Kharbanda\n# Created: 10 - 20 - 2017\nTemplateSelector = requ",
"end": 89,
"score": 0.9998758435249329,
"start": 73,
"tag": "NAME",
"value": "Anshul Kharbanda"
}
] | lib/auto-create-files.coffee | UziTech/auto-create-files | 0 | # Auto Create Files
#
# Automatically creates project files
#
# Author: Anshul Kharbanda
# Created: 10 - 20 - 2017
TemplateSelector = require './template-selector'
{CompositeDisposable} = require 'atom'
# Export class file
module.exports = AutoCreateFiles =
# Configuration
config:
fullname:
type: 'string'
default: '[fullname]'
# Member variables
subscriptions: null
templateSelector: null
panel: null
# Activates the package
#
# @param state the current state of the package
activate: (state) ->
# Register commands list
@subscriptions = new CompositeDisposable
@subscriptions.add atom.commands.add 'atom-workspace',
'auto-create-files:gitignore': => @gitignore()
'auto-create-files:license': => @license()
# Returns empty serialization
serialize: -> {}
# Deactivates the package
deactivate: ->
@closeWindow()
@subscriptions.dispose()
# Show created template selector
showTemplateSelector: ->
console.log 'Show creator window.'
@panel = atom.workspace.addModalPanel
item: @templateSelector.selectorView.element
visible: true
@templateSelector.selectorView.focus()
# Creates a new .gitignore file
gitignore: ->
# Create template selector
@templateSelector = new TemplateSelector
filename: '.gitignore'
apiUrl: '/gitignore/templates'
responseMapper: (item) -> item
getSource: (data) -> data.source
closePanel: => @closeWindow()
# Show template selector
@showTemplateSelector()
# Creates a new LICENSE file
license: ->
# Create template selector
@templateSelector = new TemplateSelector
filename: 'LICENSE'
apiUrl: '/licenses'
responseMapper: (item) -> item.spdx_id
getSource: (data) -> data.body
closePanel: => @closeWindow()
# Show template selector
@showTemplateSelector()
# Closes the select list window
closeWindow: ->
console.log 'Closing modal panel...'
@panel.destroy()
@templateSelector.destroy()
| 152122 | # Auto Create Files
#
# Automatically creates project files
#
# Author: <NAME>
# Created: 10 - 20 - 2017
TemplateSelector = require './template-selector'
{CompositeDisposable} = require 'atom'
# Export class file
module.exports = AutoCreateFiles =
# Configuration
config:
fullname:
type: 'string'
default: '[fullname]'
# Member variables
subscriptions: null
templateSelector: null
panel: null
# Activates the package
#
# @param state the current state of the package
activate: (state) ->
# Register commands list
@subscriptions = new CompositeDisposable
@subscriptions.add atom.commands.add 'atom-workspace',
'auto-create-files:gitignore': => @gitignore()
'auto-create-files:license': => @license()
# Returns empty serialization
serialize: -> {}
# Deactivates the package
deactivate: ->
@closeWindow()
@subscriptions.dispose()
# Show created template selector
showTemplateSelector: ->
console.log 'Show creator window.'
@panel = atom.workspace.addModalPanel
item: @templateSelector.selectorView.element
visible: true
@templateSelector.selectorView.focus()
# Creates a new .gitignore file
gitignore: ->
# Create template selector
@templateSelector = new TemplateSelector
filename: '.gitignore'
apiUrl: '/gitignore/templates'
responseMapper: (item) -> item
getSource: (data) -> data.source
closePanel: => @closeWindow()
# Show template selector
@showTemplateSelector()
# Creates a new LICENSE file
license: ->
# Create template selector
@templateSelector = new TemplateSelector
filename: 'LICENSE'
apiUrl: '/licenses'
responseMapper: (item) -> item.spdx_id
getSource: (data) -> data.body
closePanel: => @closeWindow()
# Show template selector
@showTemplateSelector()
# Closes the select list window
closeWindow: ->
console.log 'Closing modal panel...'
@panel.destroy()
@templateSelector.destroy()
| true | # Auto Create Files
#
# Automatically creates project files
#
# Author: PI:NAME:<NAME>END_PI
# Created: 10 - 20 - 2017
TemplateSelector = require './template-selector'
{CompositeDisposable} = require 'atom'
# Export class file
module.exports = AutoCreateFiles =
# Configuration
config:
fullname:
type: 'string'
default: '[fullname]'
# Member variables
subscriptions: null
templateSelector: null
panel: null
# Activates the package
#
# @param state the current state of the package
activate: (state) ->
# Register commands list
@subscriptions = new CompositeDisposable
@subscriptions.add atom.commands.add 'atom-workspace',
'auto-create-files:gitignore': => @gitignore()
'auto-create-files:license': => @license()
# Returns empty serialization
serialize: -> {}
# Deactivates the package
deactivate: ->
@closeWindow()
@subscriptions.dispose()
# Show created template selector
showTemplateSelector: ->
console.log 'Show creator window.'
@panel = atom.workspace.addModalPanel
item: @templateSelector.selectorView.element
visible: true
@templateSelector.selectorView.focus()
# Creates a new .gitignore file
gitignore: ->
# Create template selector
@templateSelector = new TemplateSelector
filename: '.gitignore'
apiUrl: '/gitignore/templates'
responseMapper: (item) -> item
getSource: (data) -> data.source
closePanel: => @closeWindow()
# Show template selector
@showTemplateSelector()
# Creates a new LICENSE file
license: ->
# Create template selector
@templateSelector = new TemplateSelector
filename: 'LICENSE'
apiUrl: '/licenses'
responseMapper: (item) -> item.spdx_id
getSource: (data) -> data.body
closePanel: => @closeWindow()
# Show template selector
@showTemplateSelector()
# Closes the select list window
closeWindow: ->
console.log 'Closing modal panel...'
@panel.destroy()
@templateSelector.destroy()
|
[
{
"context": " change the url', ->\n historyManager.setToken 'hello-world'\n\n {href} = document.location\n expect(href.",
"end": 1140,
"score": 0.9738472104072571,
"start": 1129,
"tag": "PASSWORD",
"value": "hello-world"
},
{
"context": " = getGoogHistory()\n hashedHistory.setToken 'hello-world'\n {href} = document.location\n\n expect(h",
"end": 1483,
"score": 0.979506254196167,
"start": 1472,
"tag": "PASSWORD",
"value": "hello-world"
}
] | src/tests/core/test_historyManager.coffee | dashersw/spark | 1 | goog = goog or goog = require: ->
goog.history or= {}
goog.history.Html5History or= isSupported: ->
goog.require 'spark.core.HistoryManager'
describe 'spark.core.HistoryManager', ->
historyManager = null
useHtml5History = -> goog.history.Html5History.isSupported = -> return yes
useGoogHistory = -> goog.history.Html5History.isSupported = -> return no
getHtml5History = ->
useHtml5History()
return new spark.core.HistoryManager useHtml5History: yes
getGoogHistory = ->
useGoogHistory()
h = new spark.core.HistoryManager
useHtml5History()
return h
beforeEach ->
historyManager = getHtml5History()
it 'should extend spark.core.Object', ->
expect(historyManager instanceof spark.core.Object).toBeTruthy()
it 'should have correct history instance for hashbang or pushstate', ->
if goog.DEBUG
expect(historyManager.history instanceof goog.history.Html5History).toBeTruthy()
googHistoryManager = getGoogHistory()
expect(googHistoryManager.history instanceof goog.History).toBeTruthy()
it 'should change the url', ->
historyManager.setToken 'hello-world'
{href} = document.location
expect(href.indexOf('#')).toBe -1
expect(href.indexOf('hello-world')).toBeGreaterThan -1
historyManager.setToken '/'
{href} = document.location
expect(href.indexOf('hello-world')).toBe -1
if goog.DEBUG
hashedHistory = getGoogHistory()
hashedHistory.setToken 'hello-world'
{href} = document.location
expect(href.indexOf('#')).toBeGreaterThan -1
expect(href.indexOf('hello-world')).toBeGreaterThan -1
it 'should emit Navigated event with token as event.data', ->
flag = null
historyManager.on 'Navigated', (e) -> flag = e.data
historyManager.setToken '/hello'
expect(flag).toBe '/hello'
it 'should have path prefix if a pathPrefix option passed', ->
hm = new spark.core.HistoryManager
pathPrefix: 'prefix'
useHtml5History: yes
hm.setToken '/world'
expect(document.location.href.indexOf('/prefix/world')).toBeGreaterThan -1
| 128643 | goog = goog or goog = require: ->
goog.history or= {}
goog.history.Html5History or= isSupported: ->
goog.require 'spark.core.HistoryManager'
describe 'spark.core.HistoryManager', ->
historyManager = null
useHtml5History = -> goog.history.Html5History.isSupported = -> return yes
useGoogHistory = -> goog.history.Html5History.isSupported = -> return no
getHtml5History = ->
useHtml5History()
return new spark.core.HistoryManager useHtml5History: yes
getGoogHistory = ->
useGoogHistory()
h = new spark.core.HistoryManager
useHtml5History()
return h
beforeEach ->
historyManager = getHtml5History()
it 'should extend spark.core.Object', ->
expect(historyManager instanceof spark.core.Object).toBeTruthy()
it 'should have correct history instance for hashbang or pushstate', ->
if goog.DEBUG
expect(historyManager.history instanceof goog.history.Html5History).toBeTruthy()
googHistoryManager = getGoogHistory()
expect(googHistoryManager.history instanceof goog.History).toBeTruthy()
it 'should change the url', ->
historyManager.setToken '<PASSWORD>'
{href} = document.location
expect(href.indexOf('#')).toBe -1
expect(href.indexOf('hello-world')).toBeGreaterThan -1
historyManager.setToken '/'
{href} = document.location
expect(href.indexOf('hello-world')).toBe -1
if goog.DEBUG
hashedHistory = getGoogHistory()
hashedHistory.setToken '<PASSWORD>'
{href} = document.location
expect(href.indexOf('#')).toBeGreaterThan -1
expect(href.indexOf('hello-world')).toBeGreaterThan -1
it 'should emit Navigated event with token as event.data', ->
flag = null
historyManager.on 'Navigated', (e) -> flag = e.data
historyManager.setToken '/hello'
expect(flag).toBe '/hello'
it 'should have path prefix if a pathPrefix option passed', ->
hm = new spark.core.HistoryManager
pathPrefix: 'prefix'
useHtml5History: yes
hm.setToken '/world'
expect(document.location.href.indexOf('/prefix/world')).toBeGreaterThan -1
| true | goog = goog or goog = require: ->
goog.history or= {}
goog.history.Html5History or= isSupported: ->
goog.require 'spark.core.HistoryManager'
describe 'spark.core.HistoryManager', ->
historyManager = null
useHtml5History = -> goog.history.Html5History.isSupported = -> return yes
useGoogHistory = -> goog.history.Html5History.isSupported = -> return no
getHtml5History = ->
useHtml5History()
return new spark.core.HistoryManager useHtml5History: yes
getGoogHistory = ->
useGoogHistory()
h = new spark.core.HistoryManager
useHtml5History()
return h
beforeEach ->
historyManager = getHtml5History()
it 'should extend spark.core.Object', ->
expect(historyManager instanceof spark.core.Object).toBeTruthy()
it 'should have correct history instance for hashbang or pushstate', ->
if goog.DEBUG
expect(historyManager.history instanceof goog.history.Html5History).toBeTruthy()
googHistoryManager = getGoogHistory()
expect(googHistoryManager.history instanceof goog.History).toBeTruthy()
it 'should change the url', ->
historyManager.setToken 'PI:PASSWORD:<PASSWORD>END_PI'
{href} = document.location
expect(href.indexOf('#')).toBe -1
expect(href.indexOf('hello-world')).toBeGreaterThan -1
historyManager.setToken '/'
{href} = document.location
expect(href.indexOf('hello-world')).toBe -1
if goog.DEBUG
hashedHistory = getGoogHistory()
hashedHistory.setToken 'PI:PASSWORD:<PASSWORD>END_PI'
{href} = document.location
expect(href.indexOf('#')).toBeGreaterThan -1
expect(href.indexOf('hello-world')).toBeGreaterThan -1
it 'should emit Navigated event with token as event.data', ->
flag = null
historyManager.on 'Navigated', (e) -> flag = e.data
historyManager.setToken '/hello'
expect(flag).toBe '/hello'
it 'should have path prefix if a pathPrefix option passed', ->
hm = new spark.core.HistoryManager
pathPrefix: 'prefix'
useHtml5History: yes
hm.setToken '/world'
expect(document.location.href.indexOf('/prefix/world')).toBeGreaterThan -1
|
[
{
"context": "r GUI tool to run Grunt tasks\n Copyright (c) 2013 Mark Parolisi, contributors\n Licensed under the MIT license.\n#",
"end": 111,
"score": 0.9998319745063782,
"start": 98,
"tag": "NAME",
"value": "Mark Parolisi"
}
] | Gruntfile.coffee | gruntjs-updater/grunt-peon-gui | 0 | ###
grunt-peon-gui
Creates a local webserver GUI tool to run Grunt tasks
Copyright (c) 2013 Mark Parolisi, contributors
Licensed under the MIT license.
###
module.exports = (grunt) ->
grunt.initConfig(
compass:
app:
options:
sassDir: "app/sass"
cssDir: "app/assets/css"
coffee:
app:
files:
"app/assets/js/lib/peon-gui.js": "app/coffee/peon-gui.coffee"
jshint:
app: [
"Gruntfile.js"
"app/js/*.js"
"<%= nodeunit.tests %>"
]
coffeelint:
app: ["lib/*.coffee", "tasks/*.coffee"]
nodeunit:
tests: ["tests/*_test.js"]
uglify:
app:
files:
'app/assets/js/lib.min.js': [
'app/assets/js/lib/vendor/jquery.js'
'app/assets/js/lib/vendor/lodash.js'
'app/assets/js/lib/vendor/bootstrap.js'
'app/assets/js/lib/vendor/pretty-json.js'
'app/assets/js/lib/templates.js'
'app/assets/js/lib/peon-gui.js'
'app/assets/js/lib/ui.js'
]
watch:
app:
files: ["**/*.coffee", "**/*.scss"]
tasks: ["default"]
options:
nospawn: true
jst:
compile:
options:
namespace: "guiTmpls"
processName: (filename) ->
return filename.split('.')[0].split('/').pop().split('.').shift().replace(/-([a-z])/g, (g)->
return g[1].toUpperCase()
)
files:
"app/assets/js/lib/templates.js": ["app/assets/tmpl/**/*.ejs"]
)
grunt.loadTasks("tasks")
grunt.loadNpmTasks("grunt-contrib-jshint")
grunt.loadNpmTasks("grunt-contrib-compass")
grunt.loadNpmTasks("grunt-contrib-coffee")
grunt.loadNpmTasks("grunt-contrib-watch")
grunt.loadNpmTasks("grunt-coffeelint")
grunt.loadNpmTasks("grunt-contrib-nodeunit")
grunt.loadNpmTasks("grunt-contrib-jst")
grunt.loadNpmTasks("grunt-contrib-uglify")
grunt.registerTask("default", ["coffeelint", "coffee", "compass", "jshint", "nodeunit", "jst", "uglify"]) | 44396 | ###
grunt-peon-gui
Creates a local webserver GUI tool to run Grunt tasks
Copyright (c) 2013 <NAME>, contributors
Licensed under the MIT license.
###
module.exports = (grunt) ->
grunt.initConfig(
compass:
app:
options:
sassDir: "app/sass"
cssDir: "app/assets/css"
coffee:
app:
files:
"app/assets/js/lib/peon-gui.js": "app/coffee/peon-gui.coffee"
jshint:
app: [
"Gruntfile.js"
"app/js/*.js"
"<%= nodeunit.tests %>"
]
coffeelint:
app: ["lib/*.coffee", "tasks/*.coffee"]
nodeunit:
tests: ["tests/*_test.js"]
uglify:
app:
files:
'app/assets/js/lib.min.js': [
'app/assets/js/lib/vendor/jquery.js'
'app/assets/js/lib/vendor/lodash.js'
'app/assets/js/lib/vendor/bootstrap.js'
'app/assets/js/lib/vendor/pretty-json.js'
'app/assets/js/lib/templates.js'
'app/assets/js/lib/peon-gui.js'
'app/assets/js/lib/ui.js'
]
watch:
app:
files: ["**/*.coffee", "**/*.scss"]
tasks: ["default"]
options:
nospawn: true
jst:
compile:
options:
namespace: "guiTmpls"
processName: (filename) ->
return filename.split('.')[0].split('/').pop().split('.').shift().replace(/-([a-z])/g, (g)->
return g[1].toUpperCase()
)
files:
"app/assets/js/lib/templates.js": ["app/assets/tmpl/**/*.ejs"]
)
grunt.loadTasks("tasks")
grunt.loadNpmTasks("grunt-contrib-jshint")
grunt.loadNpmTasks("grunt-contrib-compass")
grunt.loadNpmTasks("grunt-contrib-coffee")
grunt.loadNpmTasks("grunt-contrib-watch")
grunt.loadNpmTasks("grunt-coffeelint")
grunt.loadNpmTasks("grunt-contrib-nodeunit")
grunt.loadNpmTasks("grunt-contrib-jst")
grunt.loadNpmTasks("grunt-contrib-uglify")
grunt.registerTask("default", ["coffeelint", "coffee", "compass", "jshint", "nodeunit", "jst", "uglify"]) | true | ###
grunt-peon-gui
Creates a local webserver GUI tool to run Grunt tasks
Copyright (c) 2013 PI:NAME:<NAME>END_PI, contributors
Licensed under the MIT license.
###
module.exports = (grunt) ->
grunt.initConfig(
compass:
app:
options:
sassDir: "app/sass"
cssDir: "app/assets/css"
coffee:
app:
files:
"app/assets/js/lib/peon-gui.js": "app/coffee/peon-gui.coffee"
jshint:
app: [
"Gruntfile.js"
"app/js/*.js"
"<%= nodeunit.tests %>"
]
coffeelint:
app: ["lib/*.coffee", "tasks/*.coffee"]
nodeunit:
tests: ["tests/*_test.js"]
uglify:
app:
files:
'app/assets/js/lib.min.js': [
'app/assets/js/lib/vendor/jquery.js'
'app/assets/js/lib/vendor/lodash.js'
'app/assets/js/lib/vendor/bootstrap.js'
'app/assets/js/lib/vendor/pretty-json.js'
'app/assets/js/lib/templates.js'
'app/assets/js/lib/peon-gui.js'
'app/assets/js/lib/ui.js'
]
watch:
app:
files: ["**/*.coffee", "**/*.scss"]
tasks: ["default"]
options:
nospawn: true
jst:
compile:
options:
namespace: "guiTmpls"
processName: (filename) ->
return filename.split('.')[0].split('/').pop().split('.').shift().replace(/-([a-z])/g, (g)->
return g[1].toUpperCase()
)
files:
"app/assets/js/lib/templates.js": ["app/assets/tmpl/**/*.ejs"]
)
grunt.loadTasks("tasks")
grunt.loadNpmTasks("grunt-contrib-jshint")
grunt.loadNpmTasks("grunt-contrib-compass")
grunt.loadNpmTasks("grunt-contrib-coffee")
grunt.loadNpmTasks("grunt-contrib-watch")
grunt.loadNpmTasks("grunt-coffeelint")
grunt.loadNpmTasks("grunt-contrib-nodeunit")
grunt.loadNpmTasks("grunt-contrib-jst")
grunt.loadNpmTasks("grunt-contrib-uglify")
grunt.registerTask("default", ["coffeelint", "coffee", "compass", "jshint", "nodeunit", "jst", "uglify"]) |
[
{
"context": "18n.t 'UserModel::username'\n password: -> i18n.t 'UserModel::password'\n rePassword: -> i18n",
"end": 3026,
"score": 0.5121631026268005,
"start": 3025,
"tag": "PASSWORD",
"value": "8"
},
{
"context": "'UserModel::username'\n password: -> i18n.t 'UserModel::password'\n rePassword: -> i18n.t 'UserModel::rePasswor",
"end": 3050,
"score": 0.7351813912391663,
"start": 3031,
"tag": "PASSWORD",
"value": "UserModel::password"
},
{
"context": "i18n.t 'UserModel::rePassword'\n oldPassword: -> i18n.t 'UserModel::oldPassword'\n email: ->",
"end": 3124,
"score": 0.6960666179656982,
"start": 3123,
"tag": "PASSWORD",
"value": "i"
},
{
"context": "serModel::rePassword'\n oldPassword: -> i18n.t 'UserModel::oldPassword'\n email: -> i18n.t 'UserModel::email'\n ",
"end": 3153,
"score": 0.8688703775405884,
"start": 3131,
"tag": "PASSWORD",
"value": "UserModel::oldPassword"
}
] | src/app/modules/user/entities/User.coffee | josepramon/tfm-adminApp | 0 | # Dependencies
# -------------------------
# Libs/generic stuff:
$ = require 'jquery'
_ = require 'underscore'
Backbone = require 'backbone'
i18n = require 'i18next-client'
# Base class (extends Backbone.Model)
Model = require 'msq-appbase/lib/appBaseComponents/entities/Model'
# Related models
ProfileModel = require './Profile'
# Utility to compare privileges
checkPrivileges = require './util/PrivilegesChecker'
###
User model
==============
@class
@augments Model
###
module.exports = class User extends Model
###
@property {String} API url
###
urlRoot: ->
base = '/api/auth/'
role = @get 'role'
if role
base + role.toLowerCase() + 's'
else
base + 'users'
###
@property {Object} Model default attributes
###
defaults:
id: null
username: ''
email: ''
role: ''
privileges: null
profile: null
###
@property {Array} Nested entities
###
relations: [
type: Backbone.One
key: 'profile'
relatedModel: ProfileModel
]
###
Timestamp fields
js represents timestamps in miliseconds but the API represents that in seconds
this fields will be automatically converted when fetching/saving
@property {String[]} the attributes to transform
###
timestampFields: ['created_at', 'updated_at']
###
Relations to expand where querying the server
This is also used by the ArticleCollection, but can
be overrided there by setting a expandedRelations on
the collection
@property {Array} the attributes to expand. It accepts just the attribute names
or objects with the options, for example:
{
attribute: 'foo',
page: 4,
limit: 200,
order: {id: 1, name: -1}
}
@static
###
@expandedRelations: ['profile', 'profile.image']
###
@property {Object} Model validation rules
###
validation:
username:
required: true
email:
required: true
pattern: 'email'
password:
required: (value, attr, computedState) ->
!!computedState.rePassword || !!computedState.oldPassword
oldPassword:
required: (value, attr, computedState) ->
!!computedState.password
rePassword:
equalTo: 'password'
required: (value, attr, computedState) ->
!!computedState.password
###
Privileges verification
###
hasAccess: (requiredPermissions) ->
return checkPrivileges(requiredPermissions, @get 'privileges')
###
@property {Object} Custom attribute labels
Used by the validator when building the error messages
@static
####
@labels:
username: -> i18n.t 'UserModel::username'
password: -> i18n.t 'UserModel::password'
rePassword: -> i18n.t 'UserModel::rePassword'
oldPassword: -> i18n.t 'UserModel::oldPassword'
email: -> i18n.t 'UserModel::email'
role: -> i18n.t 'UserModel::role'
privileges: -> i18n.t 'UserModel::privileges'
profile: -> i18n.t 'UserModel::profile'
| 177589 | # Dependencies
# -------------------------
# Libs/generic stuff:
$ = require 'jquery'
_ = require 'underscore'
Backbone = require 'backbone'
i18n = require 'i18next-client'
# Base class (extends Backbone.Model)
Model = require 'msq-appbase/lib/appBaseComponents/entities/Model'
# Related models
ProfileModel = require './Profile'
# Utility to compare privileges
checkPrivileges = require './util/PrivilegesChecker'
###
User model
==============
@class
@augments Model
###
module.exports = class User extends Model
###
@property {String} API url
###
urlRoot: ->
base = '/api/auth/'
role = @get 'role'
if role
base + role.toLowerCase() + 's'
else
base + 'users'
###
@property {Object} Model default attributes
###
defaults:
id: null
username: ''
email: ''
role: ''
privileges: null
profile: null
###
@property {Array} Nested entities
###
relations: [
type: Backbone.One
key: 'profile'
relatedModel: ProfileModel
]
###
Timestamp fields
js represents timestamps in miliseconds but the API represents that in seconds
this fields will be automatically converted when fetching/saving
@property {String[]} the attributes to transform
###
timestampFields: ['created_at', 'updated_at']
###
Relations to expand where querying the server
This is also used by the ArticleCollection, but can
be overrided there by setting a expandedRelations on
the collection
@property {Array} the attributes to expand. It accepts just the attribute names
or objects with the options, for example:
{
attribute: 'foo',
page: 4,
limit: 200,
order: {id: 1, name: -1}
}
@static
###
@expandedRelations: ['profile', 'profile.image']
###
@property {Object} Model validation rules
###
validation:
username:
required: true
email:
required: true
pattern: 'email'
password:
required: (value, attr, computedState) ->
!!computedState.rePassword || !!computedState.oldPassword
oldPassword:
required: (value, attr, computedState) ->
!!computedState.password
rePassword:
equalTo: 'password'
required: (value, attr, computedState) ->
!!computedState.password
###
Privileges verification
###
hasAccess: (requiredPermissions) ->
return checkPrivileges(requiredPermissions, @get 'privileges')
###
@property {Object} Custom attribute labels
Used by the validator when building the error messages
@static
####
@labels:
username: -> i18n.t 'UserModel::username'
password: -> i1<PASSWORD>n.t '<PASSWORD>'
rePassword: -> i18n.t 'UserModel::rePassword'
oldPassword: -> <PASSWORD>18n.t '<PASSWORD>'
email: -> i18n.t 'UserModel::email'
role: -> i18n.t 'UserModel::role'
privileges: -> i18n.t 'UserModel::privileges'
profile: -> i18n.t 'UserModel::profile'
| true | # Dependencies
# -------------------------
# Libs/generic stuff:
$ = require 'jquery'
_ = require 'underscore'
Backbone = require 'backbone'
i18n = require 'i18next-client'
# Base class (extends Backbone.Model)
Model = require 'msq-appbase/lib/appBaseComponents/entities/Model'
# Related models
ProfileModel = require './Profile'
# Utility to compare privileges
checkPrivileges = require './util/PrivilegesChecker'
###
User model
==============
@class
@augments Model
###
module.exports = class User extends Model
###
@property {String} API url
###
urlRoot: ->
base = '/api/auth/'
role = @get 'role'
if role
base + role.toLowerCase() + 's'
else
base + 'users'
###
@property {Object} Model default attributes
###
defaults:
id: null
username: ''
email: ''
role: ''
privileges: null
profile: null
###
@property {Array} Nested entities
###
relations: [
type: Backbone.One
key: 'profile'
relatedModel: ProfileModel
]
###
Timestamp fields
js represents timestamps in miliseconds but the API represents that in seconds
this fields will be automatically converted when fetching/saving
@property {String[]} the attributes to transform
###
timestampFields: ['created_at', 'updated_at']
###
Relations to expand where querying the server
This is also used by the ArticleCollection, but can
be overrided there by setting a expandedRelations on
the collection
@property {Array} the attributes to expand. It accepts just the attribute names
or objects with the options, for example:
{
attribute: 'foo',
page: 4,
limit: 200,
order: {id: 1, name: -1}
}
@static
###
@expandedRelations: ['profile', 'profile.image']
###
@property {Object} Model validation rules
###
validation:
username:
required: true
email:
required: true
pattern: 'email'
password:
required: (value, attr, computedState) ->
!!computedState.rePassword || !!computedState.oldPassword
oldPassword:
required: (value, attr, computedState) ->
!!computedState.password
rePassword:
equalTo: 'password'
required: (value, attr, computedState) ->
!!computedState.password
###
Privileges verification
###
hasAccess: (requiredPermissions) ->
return checkPrivileges(requiredPermissions, @get 'privileges')
###
@property {Object} Custom attribute labels
Used by the validator when building the error messages
@static
####
@labels:
username: -> i18n.t 'UserModel::username'
password: -> i1PI:PASSWORD:<PASSWORD>END_PIn.t 'PI:PASSWORD:<PASSWORD>END_PI'
rePassword: -> i18n.t 'UserModel::rePassword'
oldPassword: -> PI:PASSWORD:<PASSWORD>END_PI18n.t 'PI:PASSWORD:<PASSWORD>END_PI'
email: -> i18n.t 'UserModel::email'
role: -> i18n.t 'UserModel::role'
privileges: -> i18n.t 'UserModel::privileges'
profile: -> i18n.t 'UserModel::profile'
|
[
{
"context": "or displaying things in correct units.\n#\n# @author Torstein Thune\n# @copyright 2016 Microbrew.it\nangular.module('Mi",
"end": 80,
"score": 0.9998663663864136,
"start": 66,
"tag": "NAME",
"value": "Torstein Thune"
}
] | app/common/UnitDirective.coffee | Microbrewit/microbrewit-recipe-calculator | 0 | #
# Directive for displaying things in correct units.
#
# @author Torstein Thune
# @copyright 2016 Microbrew.it
angular.module('Microbrewit').directive('mbUnit', [
() ->
link = (scope, element, attrs, controller, transcludeFn) ->
updateLocalValue = ->
scope.localvalue = mbFormulas.convert.convert(scope.value, attrs.modelunit, scope.localunit)
updateModelValue = ->
scope.value = mbFormulas.convert.convert(scope.localvalue, scope.localunit, attrs.modelunit)
updateLocalValue()
updateModelValue()
# # modelunit will never change (it will always be the same as the server)
scope.$watch(->
return [scope.localunit, attrs.value]
, updateLocalValue, true)
scope.$watch(->
return [scope.localvalue]
, updateModelValue, true)
return {
scope:
'value': '='
'localunit': '@'
replace: true
template: '
<span class="no-wrap">
<input type="number" ng-model="localvalue"/> {{localunit}}
</span>
'
link: link
}
])
| 121328 | #
# Directive for displaying things in correct units.
#
# @author <NAME>
# @copyright 2016 Microbrew.it
angular.module('Microbrewit').directive('mbUnit', [
() ->
link = (scope, element, attrs, controller, transcludeFn) ->
updateLocalValue = ->
scope.localvalue = mbFormulas.convert.convert(scope.value, attrs.modelunit, scope.localunit)
updateModelValue = ->
scope.value = mbFormulas.convert.convert(scope.localvalue, scope.localunit, attrs.modelunit)
updateLocalValue()
updateModelValue()
# # modelunit will never change (it will always be the same as the server)
scope.$watch(->
return [scope.localunit, attrs.value]
, updateLocalValue, true)
scope.$watch(->
return [scope.localvalue]
, updateModelValue, true)
return {
scope:
'value': '='
'localunit': '@'
replace: true
template: '
<span class="no-wrap">
<input type="number" ng-model="localvalue"/> {{localunit}}
</span>
'
link: link
}
])
| true | #
# Directive for displaying things in correct units.
#
# @author PI:NAME:<NAME>END_PI
# @copyright 2016 Microbrew.it
angular.module('Microbrewit').directive('mbUnit', [
() ->
link = (scope, element, attrs, controller, transcludeFn) ->
updateLocalValue = ->
scope.localvalue = mbFormulas.convert.convert(scope.value, attrs.modelunit, scope.localunit)
updateModelValue = ->
scope.value = mbFormulas.convert.convert(scope.localvalue, scope.localunit, attrs.modelunit)
updateLocalValue()
updateModelValue()
# # modelunit will never change (it will always be the same as the server)
scope.$watch(->
return [scope.localunit, attrs.value]
, updateLocalValue, true)
scope.$watch(->
return [scope.localvalue]
, updateModelValue, true)
return {
scope:
'value': '='
'localunit': '@'
replace: true
template: '
<span class="no-wrap">
<input type="number" ng-model="localvalue"/> {{localunit}}
</span>
'
link: link
}
])
|
[
{
"context": "he latest English versions:\n## https://github.com/uploadcare/uploadcare-widget/blob/master/app/assets/javascri",
"end": 185,
"score": 0.9491005539894104,
"start": 175,
"tag": "USERNAME",
"value": "uploadcare"
},
{
"context": "eľký'\n upload: 'Nedá sa nahrať'\n user: 'Nahrávanie bolo zrušené'\n info: 'Informácie sa nedajú nahrať'\n",
"end": 620,
"score": 0.9641031622886658,
"start": 603,
"tag": "NAME",
"value": "Nahrávanie bolo z"
},
{
"context": "'Nedá sa nahrať'\n user: 'Nahrávanie bolo zrušené'\n info: 'Informácie sa nedajú nahrať'\n ",
"end": 626,
"score": 0.6933990716934204,
"start": 623,
"tag": "NAME",
"value": "ené"
},
{
"context": "meru.'\n preview:\n unknownName: 'neznámy'\n change: 'Zrušiť'\n back: 'Späť",
"end": 2989,
"score": 0.6147903800010681,
"start": 2984,
"tag": "NAME",
"value": "známy"
}
] | app/assets/javascripts/uploadcare/locale/sk.js.coffee | nd0ut/uploadcare-widget | 1 | ##
## Please, do not use this locale as a reference for new translations.
## It could be outdated or incomplete. Always use the latest English versions:
## https://github.com/uploadcare/uploadcare-widget/blob/master/app/assets/javascripts/uploadcare/locale/en.js.coffee
##
## Any fixes are welcome.
##
uploadcare.namespace 'locale.translations', (ns) ->
ns.sk =
uploading: 'Nahrávam... Prosím počkajte.'
loadingInfo: 'Nahrávam informácie...'
errors:
default: 'Chyba'
baddata: 'Nesprávna hodnota'
size: 'Súbor je príliš veľký'
upload: 'Nedá sa nahrať'
user: 'Nahrávanie bolo zrušené'
info: 'Informácie sa nedajú nahrať'
image: 'Povolené sú len obrázky'
createGroup: 'Nie je možné vytvoriť priečinok'
deleted: 'Súbor bol odstránený'
draghere: 'Sem presuňte súbor'
file:
one: '%1 súbor'
few: '%1 súbory'
other: '%1 súborov'
buttons:
cancel: 'Zrušiť'
remove: 'Odstrániť'
choose:
files:
one: 'Vyberte súbor'
other: 'Vyberte súbory'
images:
one: 'Vyberte obrázok'
other: 'Vyberte obrázky'
dialog:
close: 'Zavrieť'
openMenu: 'Otvoriť menu'
done: 'Hotovo'
showFiles: 'Ukázať súbory'
tabs:
names:
'empty-pubkey': 'Vitajte'
preview: 'Náhľad'
file: 'Z počítača'
url: 'Z internetu'
camera: 'Kamera'
facebook: 'Facebook'
dropbox: 'Dropbox'
gdrive: 'Disk Google'
gphotos: 'Google Obrázky'
instagram: 'Instagram'
vk: 'VK'
evernote: 'Evernote'
box: 'Box'
skydrive: 'OneDrive'
flickr: 'Flickr'
huddle: 'Huddle'
file:
drag: 'presuňte a vložte<br>akékoľvek súbory'
nodrop: 'Nahrajte súbory z vášho počítača'
cloudsTip: 'Cloud úložiská<br>a sociálne siete'
or: 'alebo'
button: 'Vyberte súbor z počítača'
also: 'alebo vyberte z'
url:
title: 'Súbory z internetu'
line1: 'Uložte akýkoľvek súbor z internetu.'
line2: 'Stačí pridať odkaz na neho.'
input: 'Vložte svoj odkaz sem...'
button: 'Nahrať'
camera:
title: 'Súbor z webkamery'
capture: 'Odfotiť'
mirror: 'Zrkadliť'
startRecord: 'Natočte video'
stopRecord: 'Prestať natáčať'
cancelRecord: 'Zrušiť'
retry: 'Znovu požiadať o prístup'
pleaseAllow:
title: 'Prosím povoľte prístup k vašej kamere'
text: 'Boli ste vyzvaní aby ste umožnili tejto stránke prístup ku kamere.<br>' +
'Prístup musíte povolit aby ste mohli fotiť s vašou kamerou.'
notFound:
title: 'Kamera nebola nájdená'
text: 'Zdá sa, že k tomuto zariadeniu nemáte pripojenú kameru.'
preview:
unknownName: 'neznámy'
change: 'Zrušiť'
back: 'Späť'
done: 'Pridať'
unknown:
title: 'Nahráva sa... Prosím počkajte na náhľad.'
done: 'Preskočiť náhľad a nahrať'
regular:
title: 'Pridať tento súbor?'
line1: 'Chystáte sa pridať vyššie uvedený súbor.'
line2: 'Prosím potvrďte váš výber.'
image:
title: 'Pridať tento obrázok?'
change: 'Zrušiť'
crop:
title: 'Orezať a pridať túto fotku'
done: 'Hotovo'
free: 'obnoviť'
video:
title: 'Pridať toto video?'
change: 'Zrušiť'
error:
default:
title: 'Ejha!'
text: 'Pri nahrávaní sa vyskytla chyba.'
back: 'Skúste to znovu'
image:
title: 'Je možné nahrávať len obrázky'
text: 'Skúste to znovu s iným súborom.'
back: 'Vybrať obrázok'
size:
title: 'Súbor, ktorý ste vybrali presahuje povolenú veľkosť.'
text: 'Skúste to znovu s iným súborom.'
loadImage:
title: 'Chyba'
text: 'Obrázok nie je možné vyhľadať'
multiple:
title: 'Vybrali ste %files%.'
question: 'Pridať %files%?'
tooManyFiles: 'Vybrali ste príliš veľa súborov. Maximum je %max%.'
tooFewFiles: 'Vybrali ste %files%. Potrebných je aspoň %min%.'
clear: 'Odstrániť všetky'
done: 'Pridať'
file:
preview: 'Nahliadnuť na %file%'
remove: 'Odstrániť %file%'
# Pluralization rules taken from:
# http://unicode.org/repos/cldr-tmp/trunk/diff/supplemental/language_plural_rules.html
uploadcare.namespace 'locale.pluralize', (ns) ->
ns.sk = (n) ->
if n == 1
'one'
else if (2 <= n <= 4)
'few'
else
'many'
| 124193 | ##
## Please, do not use this locale as a reference for new translations.
## It could be outdated or incomplete. Always use the latest English versions:
## https://github.com/uploadcare/uploadcare-widget/blob/master/app/assets/javascripts/uploadcare/locale/en.js.coffee
##
## Any fixes are welcome.
##
uploadcare.namespace 'locale.translations', (ns) ->
ns.sk =
uploading: 'Nahrávam... Prosím počkajte.'
loadingInfo: 'Nahrávam informácie...'
errors:
default: 'Chyba'
baddata: 'Nesprávna hodnota'
size: 'Súbor je príliš veľký'
upload: 'Nedá sa nahrať'
user: '<NAME>ruš<NAME>'
info: 'Informácie sa nedajú nahrať'
image: 'Povolené sú len obrázky'
createGroup: 'Nie je možné vytvoriť priečinok'
deleted: 'Súbor bol odstránený'
draghere: 'Sem presuňte súbor'
file:
one: '%1 súbor'
few: '%1 súbory'
other: '%1 súborov'
buttons:
cancel: 'Zrušiť'
remove: 'Odstrániť'
choose:
files:
one: 'Vyberte súbor'
other: 'Vyberte súbory'
images:
one: 'Vyberte obrázok'
other: 'Vyberte obrázky'
dialog:
close: 'Zavrieť'
openMenu: 'Otvoriť menu'
done: 'Hotovo'
showFiles: 'Ukázať súbory'
tabs:
names:
'empty-pubkey': 'Vitajte'
preview: 'Náhľad'
file: 'Z počítača'
url: 'Z internetu'
camera: 'Kamera'
facebook: 'Facebook'
dropbox: 'Dropbox'
gdrive: 'Disk Google'
gphotos: 'Google Obrázky'
instagram: 'Instagram'
vk: 'VK'
evernote: 'Evernote'
box: 'Box'
skydrive: 'OneDrive'
flickr: 'Flickr'
huddle: 'Huddle'
file:
drag: 'presuňte a vložte<br>akékoľvek súbory'
nodrop: 'Nahrajte súbory z vášho počítača'
cloudsTip: 'Cloud úložiská<br>a sociálne siete'
or: 'alebo'
button: 'Vyberte súbor z počítača'
also: 'alebo vyberte z'
url:
title: 'Súbory z internetu'
line1: 'Uložte akýkoľvek súbor z internetu.'
line2: 'Stačí pridať odkaz na neho.'
input: 'Vložte svoj odkaz sem...'
button: 'Nahrať'
camera:
title: 'Súbor z webkamery'
capture: 'Odfotiť'
mirror: 'Zrkadliť'
startRecord: 'Natočte video'
stopRecord: 'Prestať natáčať'
cancelRecord: 'Zrušiť'
retry: 'Znovu požiadať o prístup'
pleaseAllow:
title: 'Prosím povoľte prístup k vašej kamere'
text: 'Boli ste vyzvaní aby ste umožnili tejto stránke prístup ku kamere.<br>' +
'Prístup musíte povolit aby ste mohli fotiť s vašou kamerou.'
notFound:
title: 'Kamera nebola nájdená'
text: 'Zdá sa, že k tomuto zariadeniu nemáte pripojenú kameru.'
preview:
unknownName: 'ne<NAME>'
change: 'Zrušiť'
back: 'Späť'
done: 'Pridať'
unknown:
title: 'Nahráva sa... Prosím počkajte na náhľad.'
done: 'Preskočiť náhľad a nahrať'
regular:
title: 'Pridať tento súbor?'
line1: 'Chystáte sa pridať vyššie uvedený súbor.'
line2: 'Prosím potvrďte váš výber.'
image:
title: 'Pridať tento obrázok?'
change: 'Zrušiť'
crop:
title: 'Orezať a pridať túto fotku'
done: 'Hotovo'
free: 'obnoviť'
video:
title: 'Pridať toto video?'
change: 'Zrušiť'
error:
default:
title: 'Ejha!'
text: 'Pri nahrávaní sa vyskytla chyba.'
back: 'Skúste to znovu'
image:
title: 'Je možné nahrávať len obrázky'
text: 'Skúste to znovu s iným súborom.'
back: 'Vybrať obrázok'
size:
title: 'Súbor, ktorý ste vybrali presahuje povolenú veľkosť.'
text: 'Skúste to znovu s iným súborom.'
loadImage:
title: 'Chyba'
text: 'Obrázok nie je možné vyhľadať'
multiple:
title: 'Vybrali ste %files%.'
question: 'Pridať %files%?'
tooManyFiles: 'Vybrali ste príliš veľa súborov. Maximum je %max%.'
tooFewFiles: 'Vybrali ste %files%. Potrebných je aspoň %min%.'
clear: 'Odstrániť všetky'
done: 'Pridať'
file:
preview: 'Nahliadnuť na %file%'
remove: 'Odstrániť %file%'
# Pluralization rules taken from:
# http://unicode.org/repos/cldr-tmp/trunk/diff/supplemental/language_plural_rules.html
uploadcare.namespace 'locale.pluralize', (ns) ->
ns.sk = (n) ->
if n == 1
'one'
else if (2 <= n <= 4)
'few'
else
'many'
| true | ##
## Please, do not use this locale as a reference for new translations.
## It could be outdated or incomplete. Always use the latest English versions:
## https://github.com/uploadcare/uploadcare-widget/blob/master/app/assets/javascripts/uploadcare/locale/en.js.coffee
##
## Any fixes are welcome.
##
uploadcare.namespace 'locale.translations', (ns) ->
ns.sk =
uploading: 'Nahrávam... Prosím počkajte.'
loadingInfo: 'Nahrávam informácie...'
errors:
default: 'Chyba'
baddata: 'Nesprávna hodnota'
size: 'Súbor je príliš veľký'
upload: 'Nedá sa nahrať'
user: 'PI:NAME:<NAME>END_PIrušPI:NAME:<NAME>END_PI'
info: 'Informácie sa nedajú nahrať'
image: 'Povolené sú len obrázky'
createGroup: 'Nie je možné vytvoriť priečinok'
deleted: 'Súbor bol odstránený'
draghere: 'Sem presuňte súbor'
file:
one: '%1 súbor'
few: '%1 súbory'
other: '%1 súborov'
buttons:
cancel: 'Zrušiť'
remove: 'Odstrániť'
choose:
files:
one: 'Vyberte súbor'
other: 'Vyberte súbory'
images:
one: 'Vyberte obrázok'
other: 'Vyberte obrázky'
dialog:
close: 'Zavrieť'
openMenu: 'Otvoriť menu'
done: 'Hotovo'
showFiles: 'Ukázať súbory'
tabs:
names:
'empty-pubkey': 'Vitajte'
preview: 'Náhľad'
file: 'Z počítača'
url: 'Z internetu'
camera: 'Kamera'
facebook: 'Facebook'
dropbox: 'Dropbox'
gdrive: 'Disk Google'
gphotos: 'Google Obrázky'
instagram: 'Instagram'
vk: 'VK'
evernote: 'Evernote'
box: 'Box'
skydrive: 'OneDrive'
flickr: 'Flickr'
huddle: 'Huddle'
file:
drag: 'presuňte a vložte<br>akékoľvek súbory'
nodrop: 'Nahrajte súbory z vášho počítača'
cloudsTip: 'Cloud úložiská<br>a sociálne siete'
or: 'alebo'
button: 'Vyberte súbor z počítača'
also: 'alebo vyberte z'
url:
title: 'Súbory z internetu'
line1: 'Uložte akýkoľvek súbor z internetu.'
line2: 'Stačí pridať odkaz na neho.'
input: 'Vložte svoj odkaz sem...'
button: 'Nahrať'
camera:
title: 'Súbor z webkamery'
capture: 'Odfotiť'
mirror: 'Zrkadliť'
startRecord: 'Natočte video'
stopRecord: 'Prestať natáčať'
cancelRecord: 'Zrušiť'
retry: 'Znovu požiadať o prístup'
pleaseAllow:
title: 'Prosím povoľte prístup k vašej kamere'
text: 'Boli ste vyzvaní aby ste umožnili tejto stránke prístup ku kamere.<br>' +
'Prístup musíte povolit aby ste mohli fotiť s vašou kamerou.'
notFound:
title: 'Kamera nebola nájdená'
text: 'Zdá sa, že k tomuto zariadeniu nemáte pripojenú kameru.'
preview:
unknownName: 'nePI:NAME:<NAME>END_PI'
change: 'Zrušiť'
back: 'Späť'
done: 'Pridať'
unknown:
title: 'Nahráva sa... Prosím počkajte na náhľad.'
done: 'Preskočiť náhľad a nahrať'
regular:
title: 'Pridať tento súbor?'
line1: 'Chystáte sa pridať vyššie uvedený súbor.'
line2: 'Prosím potvrďte váš výber.'
image:
title: 'Pridať tento obrázok?'
change: 'Zrušiť'
crop:
title: 'Orezať a pridať túto fotku'
done: 'Hotovo'
free: 'obnoviť'
video:
title: 'Pridať toto video?'
change: 'Zrušiť'
error:
default:
title: 'Ejha!'
text: 'Pri nahrávaní sa vyskytla chyba.'
back: 'Skúste to znovu'
image:
title: 'Je možné nahrávať len obrázky'
text: 'Skúste to znovu s iným súborom.'
back: 'Vybrať obrázok'
size:
title: 'Súbor, ktorý ste vybrali presahuje povolenú veľkosť.'
text: 'Skúste to znovu s iným súborom.'
loadImage:
title: 'Chyba'
text: 'Obrázok nie je možné vyhľadať'
multiple:
title: 'Vybrali ste %files%.'
question: 'Pridať %files%?'
tooManyFiles: 'Vybrali ste príliš veľa súborov. Maximum je %max%.'
tooFewFiles: 'Vybrali ste %files%. Potrebných je aspoň %min%.'
clear: 'Odstrániť všetky'
done: 'Pridať'
file:
preview: 'Nahliadnuť na %file%'
remove: 'Odstrániť %file%'
# Pluralization rules taken from:
# http://unicode.org/repos/cldr-tmp/trunk/diff/supplemental/language_plural_rules.html
uploadcare.namespace 'locale.pluralize', (ns) ->
ns.sk = (n) ->
if n == 1
'one'
else if (2 <= n <= 4)
'few'
else
'many'
|
[
{
"context": "licationCtrl', {$scope: $scope});\n\t#------write by foreve_---------\n\t#$scope.able_to_comment = true\n\n\t#-----",
"end": 304,
"score": 0.9991785883903503,
"start": 297,
"tag": "USERNAME",
"value": "foreve_"
},
{
"context": "\t\t\t\tname: \"现代嵌入式系统\"\n\t\t\t\t}\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\tname: \"计算专硕\",\n\t\t\ttype: 2,\n\t\t\trequired: [\n\t\t\t\t{\n\t\t\t\t\tname: \"专",
"end": 1576,
"score": 0.5702239274978638,
"start": 1573,
"tag": "NAME",
"value": "计算专"
},
{
"context": "\tname: \"现代嵌入式系统\"\n\t\t\t\t}\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\tname: \"计算专硕\",\n\t\t\ttype: 2,\n\t\t\trequired: [\n\t\t\t\t{\n\t\t\t\t\tname: \"专业",
"end": 1577,
"score": 0.4895271360874176,
"start": 1576,
"tag": "NAME",
"value": "硕"
},
{
"context": "\t\t\t\tname: \"形式语言与自动机\"\n\t\t\t\t}\n\t\t\t]\n\t\t}\n\t\t{\n\t\t\tname: \"控制博士\",\n\t\t\ttype: 3,\n\t\t\trequired: [\n\t\t\t\t{\n\t\t\t\t\tname: \"控制",
"end": 2355,
"score": 0.9950376152992249,
"start": 2351,
"tag": "NAME",
"value": "控制博士"
},
{
"context": "\tname: \"移动机器人非线性控制\"\n\t\t\t\t}\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\tname: \"控制学硕\",\n\t\t\ttype: 4,\n\t\t\trequired: [\n\t\t\t\t{\n\t\t\t\t\tname: \"信息",
"end": 3091,
"score": 0.9242913722991943,
"start": 3087,
"tag": "NAME",
"value": "控制学硕"
},
{
"context": "\t\t\t\tname: \"机器人视觉控制\"\n\t\t\t\t}\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\tname: \"控制专硕\",\n\t\t\ttype: 5,\n\t\t\trequired: [\n\t\t\t\t{\n\t\t\t\t\tname: \"信息",
"end": 3793,
"score": 0.9471074342727661,
"start": 3789,
"tag": "NAME",
"value": "控制专硕"
}
] | public/front/content/js/coffee/scheme.coffee | yintengzhao/CCCourse | 0 | @ng_app.controller("SchemeCtrl", ['$controller', '$scope', '$interval', '$timeout', '$window', '$location', '$http', '$sce', 'NetManager',
($controller, $scope, $interval, $timeout, $window, $location, $http, $sce, NetManager)->
$controller('ApplicationCtrl', {$scope: $scope});
#------write by foreve_---------
#$scope.able_to_comment = true
#---------
schemes = [
{
name: "计算机博士",
type: 0,
required: [
{
name: "计算机科学专题研究报告"
},
{
name: "计算机科学前沿讨论班(1)"
},
{
name: "计算机科学前沿讨论班(2)"
}
],
elective: [
{
name: "分布式操作系统"
},
{
name: "网络存储系统"
},
{
name: "移动计算与无线网络"
},
{
name: "网络与系统安全"
},
{
name: "可重构系统"
},
{
name: "专业英语"
},
{
name: "媒体计算"
},
{
name: "认知计算与数据科学导论"
},
{
name: "WEB大数据挖掘"
}
]
},
{
name: "计算学硕",
type: 1,
required: [
{
name: "信息科学前沿"
},
{
name: "专业数学基础(A)"
},
{
name: "计算机算法设计与分析"
},
{
name: "计算机网络技术"
},
{
name: "高级计算机系统结构"
}
],
elective: [
{
name: "计算机图形与图像技术"
},
{
name: "自然语言处理"
},
{
name: "形式语言与自动机"
},
{
name: "模式识别"
},
{
name: "机器学习"
},
{
name: "并行计算技术"
},
{
name: "人工智能原理"
},
{
name: "计算机视觉"
},
{
name: "数据库系统实现"
},
{
name: "网络存储系统"
},
{
name: "分布式操作系统"
},
{
name: "软件测试技术"
},
{
name: "现代嵌入式系统"
}
]
},
{
name: "计算专硕",
type: 2,
required: [
{
name: "专业数学基础(A)"
},
{
name: "计算机网络技术"
},
{
name: "计算机算法设计与分"
},
{
name: "专业实践1"
},
{
name: "信息检索"
},
{
name: "知识产权"
},
{
name: "专业实践讲座"
},
{
name: "计算机技术前沿"
},
{
name: "数据科学前沿"
},
{
name: "计算机网络课程实践"
},
{
name: "计算机算法课程实践"
},
{
name: "自然辩证法"
},
{
name: "政治"
}
],
elective: [
{
name: "高级计算机系统结构"
},
{
name: "模式识别"
},
{
name: "机器学习"
},
{
name: "计算机视觉"
},
{
name: "现代嵌入式系统"
},
{
name: "计算机图形与图像技术"
},
{
name: "自然语言处理"
},
{
name: "形式语言与自动机"
}
]
}
{
name: "控制博士",
type: 3,
required: [
{
name: "控制科学专题研究报告(1)"
},
{
name: "控制科学专题研究报告(2)"
},
{
name: "运筹学与最优化"
}
],
elective: [
{
name: "鲁棒控制理论基础"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "模糊系统与控制"
},
{
name: "随机过程"
},
{
name: "智能预测控制"
},
{
name: "微操作与虚拟现实"
},
{
name: "供应链建模与物流分析讨论班"
},
{
name: "泛函分析基础"
},
{
name: "机器人高级技术"
},
{
name: "基于李雅普诺夫函数的非线性控制"
},
{
name: "机器人视觉控制"
},
{
name: "三维数据场可视化讨论班"
},
{
name: "生物启发计算"
},
{
name: "移动机器人非线性控制"
}
]
},
{
name: "控制学硕",
type: 4,
required: [
{
name: "信息科学前沿"
},
{
name: "建模与辨识"
},
{
name: "随机过程"
},
{
name: "线性系统理论"
},
{
name: "机器人学"
}
],
elective: [
{
name: "专业数学基础(A)"
},
{
name: "运筹学与最优化"
},
{
name: "计算机网络技术"
},
{
name: "计算机算法设计与分析"
},
{
name: "供应链建模与物流分析讨论班"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "智能预测控制"
},
{
name: "数字信号处理"
},
{
name: "模糊系统与控制"
},
{
name: "模式识别"
},
{
name: "泛函分析基础"
},
{
name: "机器人视觉控制"
}
]
},
{
name: "控制专硕",
type: 5,
required: [
{
name: "信息科学前沿"
},
{
name: "专业数学基础(A)"
},
{
name: "专业实践1"
},
{
name: "信息检索"
},
{
name: "知识产权"
},
{
name: "专业实践讲座"
},
{
name: "控制工程"
},
{
name: "控制理论前沿"
},
{
name: "智能机器人前沿"
},
{
name: "线性系统理论(专硕)"
},
{
name: "自适应控制"
},
{
name: "政治"
},
{
name: "自然辩证法"
}
],
elective: [
{
name: "建模与辨识"
},
{
name: "随机过程"
},
{
name: "运筹学与最优化"
},
{
name: "机器人学"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "自适应控制理论及应"
},
{
name: "智能预测控制"
},
{
name: "数字信号处理"
}
]
}
]
scheme_id = $location.search().id
$scope.scheme = schemes[scheme_id]
$scope.title = $scope.scheme.name
current_page = 1
$scope.switching = false
$scope.questions = [
{
options: [
"感兴趣,有需要"
"讲的好"
"给分高"
"容易过"
"随便选的"
"其他"
]
answer: []
other: null
}
]
$scope.go_advice = ->
$window.location = "./teac_advice.html#?id=#{scheme_id}"
NetManager.get("/Scheme/#{scheme_id}").then (data)->
console.log data
if data.status
$scope.scheme_score = data.info
NetManager.get("/Scheme/#{scheme_id}/advices").then (data)->
console.log data
if +data.status
$scope.advices = data.info
if $scope.advices.length < 10
$scope.no_more = true
else
$scope.no_more = true
$scope.comment_handler = ->
$('body').scrollTop(0)
$scope.page_state = 2
$scope.switching = true
$scope.back_handler = ->
if $scope.switching
$('body').scrollTop(0)
$scope.switching = false
else
if $window.history.length > 1
$window.history.back()
else
$window.location.href = "./welcome.html"
$scope.vote = (c, action) ->
#c : 一个advice
if action != "up" && action != "down" then console.log "wrong action"; return
unless c.action
net_action = NetManager.post
else
net_action = NetManager.delete
NetManager.post("/Advice/#{c.id}/#{action}").then (data)->
console.log data
if +data.status == 1
#获取对应advice的up和down的数量
NetManager.get("/Advice/#{c.id}").then (data) ->
return if +data.status != 1
info = data.info
c.up = info.up
c.down = info.down
c.vote = info.vote
$scope.load_more = ->
current_page += 1
request_param = {page: current_page}
NetManager.get("/Scheme/#{scheme_id}/advices", request_param).then (data)->
console.log data
if +data.status == 1
advices_arr = data.info
$scope.advices = $scope.advices.concat(advices_arr)
if advices_arr.length < 10
$scope.no_more = true
null
]); | 174466 | @ng_app.controller("SchemeCtrl", ['$controller', '$scope', '$interval', '$timeout', '$window', '$location', '$http', '$sce', 'NetManager',
($controller, $scope, $interval, $timeout, $window, $location, $http, $sce, NetManager)->
$controller('ApplicationCtrl', {$scope: $scope});
#------write by foreve_---------
#$scope.able_to_comment = true
#---------
schemes = [
{
name: "计算机博士",
type: 0,
required: [
{
name: "计算机科学专题研究报告"
},
{
name: "计算机科学前沿讨论班(1)"
},
{
name: "计算机科学前沿讨论班(2)"
}
],
elective: [
{
name: "分布式操作系统"
},
{
name: "网络存储系统"
},
{
name: "移动计算与无线网络"
},
{
name: "网络与系统安全"
},
{
name: "可重构系统"
},
{
name: "专业英语"
},
{
name: "媒体计算"
},
{
name: "认知计算与数据科学导论"
},
{
name: "WEB大数据挖掘"
}
]
},
{
name: "计算学硕",
type: 1,
required: [
{
name: "信息科学前沿"
},
{
name: "专业数学基础(A)"
},
{
name: "计算机算法设计与分析"
},
{
name: "计算机网络技术"
},
{
name: "高级计算机系统结构"
}
],
elective: [
{
name: "计算机图形与图像技术"
},
{
name: "自然语言处理"
},
{
name: "形式语言与自动机"
},
{
name: "模式识别"
},
{
name: "机器学习"
},
{
name: "并行计算技术"
},
{
name: "人工智能原理"
},
{
name: "计算机视觉"
},
{
name: "数据库系统实现"
},
{
name: "网络存储系统"
},
{
name: "分布式操作系统"
},
{
name: "软件测试技术"
},
{
name: "现代嵌入式系统"
}
]
},
{
name: "<NAME> <NAME>",
type: 2,
required: [
{
name: "专业数学基础(A)"
},
{
name: "计算机网络技术"
},
{
name: "计算机算法设计与分"
},
{
name: "专业实践1"
},
{
name: "信息检索"
},
{
name: "知识产权"
},
{
name: "专业实践讲座"
},
{
name: "计算机技术前沿"
},
{
name: "数据科学前沿"
},
{
name: "计算机网络课程实践"
},
{
name: "计算机算法课程实践"
},
{
name: "自然辩证法"
},
{
name: "政治"
}
],
elective: [
{
name: "高级计算机系统结构"
},
{
name: "模式识别"
},
{
name: "机器学习"
},
{
name: "计算机视觉"
},
{
name: "现代嵌入式系统"
},
{
name: "计算机图形与图像技术"
},
{
name: "自然语言处理"
},
{
name: "形式语言与自动机"
}
]
}
{
name: "<NAME>",
type: 3,
required: [
{
name: "控制科学专题研究报告(1)"
},
{
name: "控制科学专题研究报告(2)"
},
{
name: "运筹学与最优化"
}
],
elective: [
{
name: "鲁棒控制理论基础"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "模糊系统与控制"
},
{
name: "随机过程"
},
{
name: "智能预测控制"
},
{
name: "微操作与虚拟现实"
},
{
name: "供应链建模与物流分析讨论班"
},
{
name: "泛函分析基础"
},
{
name: "机器人高级技术"
},
{
name: "基于李雅普诺夫函数的非线性控制"
},
{
name: "机器人视觉控制"
},
{
name: "三维数据场可视化讨论班"
},
{
name: "生物启发计算"
},
{
name: "移动机器人非线性控制"
}
]
},
{
name: "<NAME>",
type: 4,
required: [
{
name: "信息科学前沿"
},
{
name: "建模与辨识"
},
{
name: "随机过程"
},
{
name: "线性系统理论"
},
{
name: "机器人学"
}
],
elective: [
{
name: "专业数学基础(A)"
},
{
name: "运筹学与最优化"
},
{
name: "计算机网络技术"
},
{
name: "计算机算法设计与分析"
},
{
name: "供应链建模与物流分析讨论班"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "智能预测控制"
},
{
name: "数字信号处理"
},
{
name: "模糊系统与控制"
},
{
name: "模式识别"
},
{
name: "泛函分析基础"
},
{
name: "机器人视觉控制"
}
]
},
{
name: "<NAME>",
type: 5,
required: [
{
name: "信息科学前沿"
},
{
name: "专业数学基础(A)"
},
{
name: "专业实践1"
},
{
name: "信息检索"
},
{
name: "知识产权"
},
{
name: "专业实践讲座"
},
{
name: "控制工程"
},
{
name: "控制理论前沿"
},
{
name: "智能机器人前沿"
},
{
name: "线性系统理论(专硕)"
},
{
name: "自适应控制"
},
{
name: "政治"
},
{
name: "自然辩证法"
}
],
elective: [
{
name: "建模与辨识"
},
{
name: "随机过程"
},
{
name: "运筹学与最优化"
},
{
name: "机器人学"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "自适应控制理论及应"
},
{
name: "智能预测控制"
},
{
name: "数字信号处理"
}
]
}
]
scheme_id = $location.search().id
$scope.scheme = schemes[scheme_id]
$scope.title = $scope.scheme.name
current_page = 1
$scope.switching = false
$scope.questions = [
{
options: [
"感兴趣,有需要"
"讲的好"
"给分高"
"容易过"
"随便选的"
"其他"
]
answer: []
other: null
}
]
$scope.go_advice = ->
$window.location = "./teac_advice.html#?id=#{scheme_id}"
NetManager.get("/Scheme/#{scheme_id}").then (data)->
console.log data
if data.status
$scope.scheme_score = data.info
NetManager.get("/Scheme/#{scheme_id}/advices").then (data)->
console.log data
if +data.status
$scope.advices = data.info
if $scope.advices.length < 10
$scope.no_more = true
else
$scope.no_more = true
$scope.comment_handler = ->
$('body').scrollTop(0)
$scope.page_state = 2
$scope.switching = true
$scope.back_handler = ->
if $scope.switching
$('body').scrollTop(0)
$scope.switching = false
else
if $window.history.length > 1
$window.history.back()
else
$window.location.href = "./welcome.html"
$scope.vote = (c, action) ->
#c : 一个advice
if action != "up" && action != "down" then console.log "wrong action"; return
unless c.action
net_action = NetManager.post
else
net_action = NetManager.delete
NetManager.post("/Advice/#{c.id}/#{action}").then (data)->
console.log data
if +data.status == 1
#获取对应advice的up和down的数量
NetManager.get("/Advice/#{c.id}").then (data) ->
return if +data.status != 1
info = data.info
c.up = info.up
c.down = info.down
c.vote = info.vote
$scope.load_more = ->
current_page += 1
request_param = {page: current_page}
NetManager.get("/Scheme/#{scheme_id}/advices", request_param).then (data)->
console.log data
if +data.status == 1
advices_arr = data.info
$scope.advices = $scope.advices.concat(advices_arr)
if advices_arr.length < 10
$scope.no_more = true
null
]); | true | @ng_app.controller("SchemeCtrl", ['$controller', '$scope', '$interval', '$timeout', '$window', '$location', '$http', '$sce', 'NetManager',
($controller, $scope, $interval, $timeout, $window, $location, $http, $sce, NetManager)->
$controller('ApplicationCtrl', {$scope: $scope});
#------write by foreve_---------
#$scope.able_to_comment = true
#---------
schemes = [
{
name: "计算机博士",
type: 0,
required: [
{
name: "计算机科学专题研究报告"
},
{
name: "计算机科学前沿讨论班(1)"
},
{
name: "计算机科学前沿讨论班(2)"
}
],
elective: [
{
name: "分布式操作系统"
},
{
name: "网络存储系统"
},
{
name: "移动计算与无线网络"
},
{
name: "网络与系统安全"
},
{
name: "可重构系统"
},
{
name: "专业英语"
},
{
name: "媒体计算"
},
{
name: "认知计算与数据科学导论"
},
{
name: "WEB大数据挖掘"
}
]
},
{
name: "计算学硕",
type: 1,
required: [
{
name: "信息科学前沿"
},
{
name: "专业数学基础(A)"
},
{
name: "计算机算法设计与分析"
},
{
name: "计算机网络技术"
},
{
name: "高级计算机系统结构"
}
],
elective: [
{
name: "计算机图形与图像技术"
},
{
name: "自然语言处理"
},
{
name: "形式语言与自动机"
},
{
name: "模式识别"
},
{
name: "机器学习"
},
{
name: "并行计算技术"
},
{
name: "人工智能原理"
},
{
name: "计算机视觉"
},
{
name: "数据库系统实现"
},
{
name: "网络存储系统"
},
{
name: "分布式操作系统"
},
{
name: "软件测试技术"
},
{
name: "现代嵌入式系统"
}
]
},
{
name: "PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI",
type: 2,
required: [
{
name: "专业数学基础(A)"
},
{
name: "计算机网络技术"
},
{
name: "计算机算法设计与分"
},
{
name: "专业实践1"
},
{
name: "信息检索"
},
{
name: "知识产权"
},
{
name: "专业实践讲座"
},
{
name: "计算机技术前沿"
},
{
name: "数据科学前沿"
},
{
name: "计算机网络课程实践"
},
{
name: "计算机算法课程实践"
},
{
name: "自然辩证法"
},
{
name: "政治"
}
],
elective: [
{
name: "高级计算机系统结构"
},
{
name: "模式识别"
},
{
name: "机器学习"
},
{
name: "计算机视觉"
},
{
name: "现代嵌入式系统"
},
{
name: "计算机图形与图像技术"
},
{
name: "自然语言处理"
},
{
name: "形式语言与自动机"
}
]
}
{
name: "PI:NAME:<NAME>END_PI",
type: 3,
required: [
{
name: "控制科学专题研究报告(1)"
},
{
name: "控制科学专题研究报告(2)"
},
{
name: "运筹学与最优化"
}
],
elective: [
{
name: "鲁棒控制理论基础"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "模糊系统与控制"
},
{
name: "随机过程"
},
{
name: "智能预测控制"
},
{
name: "微操作与虚拟现实"
},
{
name: "供应链建模与物流分析讨论班"
},
{
name: "泛函分析基础"
},
{
name: "机器人高级技术"
},
{
name: "基于李雅普诺夫函数的非线性控制"
},
{
name: "机器人视觉控制"
},
{
name: "三维数据场可视化讨论班"
},
{
name: "生物启发计算"
},
{
name: "移动机器人非线性控制"
}
]
},
{
name: "PI:NAME:<NAME>END_PI",
type: 4,
required: [
{
name: "信息科学前沿"
},
{
name: "建模与辨识"
},
{
name: "随机过程"
},
{
name: "线性系统理论"
},
{
name: "机器人学"
}
],
elective: [
{
name: "专业数学基础(A)"
},
{
name: "运筹学与最优化"
},
{
name: "计算机网络技术"
},
{
name: "计算机算法设计与分析"
},
{
name: "供应链建模与物流分析讨论班"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "智能预测控制"
},
{
name: "数字信号处理"
},
{
name: "模糊系统与控制"
},
{
name: "模式识别"
},
{
name: "泛函分析基础"
},
{
name: "机器人视觉控制"
}
]
},
{
name: "PI:NAME:<NAME>END_PI",
type: 5,
required: [
{
name: "信息科学前沿"
},
{
name: "专业数学基础(A)"
},
{
name: "专业实践1"
},
{
name: "信息检索"
},
{
name: "知识产权"
},
{
name: "专业实践讲座"
},
{
name: "控制工程"
},
{
name: "控制理论前沿"
},
{
name: "智能机器人前沿"
},
{
name: "线性系统理论(专硕)"
},
{
name: "自适应控制"
},
{
name: "政治"
},
{
name: "自然辩证法"
}
],
elective: [
{
name: "建模与辨识"
},
{
name: "随机过程"
},
{
name: "运筹学与最优化"
},
{
name: "机器人学"
},
{
name: "自适应控制理论及应用讨论班(1)"
},
{
name: "自适应控制理论及应用讨论班(2)"
},
{
name: "自适应控制理论及应"
},
{
name: "智能预测控制"
},
{
name: "数字信号处理"
}
]
}
]
scheme_id = $location.search().id
$scope.scheme = schemes[scheme_id]
$scope.title = $scope.scheme.name
current_page = 1
$scope.switching = false
$scope.questions = [
{
options: [
"感兴趣,有需要"
"讲的好"
"给分高"
"容易过"
"随便选的"
"其他"
]
answer: []
other: null
}
]
$scope.go_advice = ->
$window.location = "./teac_advice.html#?id=#{scheme_id}"
NetManager.get("/Scheme/#{scheme_id}").then (data)->
console.log data
if data.status
$scope.scheme_score = data.info
NetManager.get("/Scheme/#{scheme_id}/advices").then (data)->
console.log data
if +data.status
$scope.advices = data.info
if $scope.advices.length < 10
$scope.no_more = true
else
$scope.no_more = true
$scope.comment_handler = ->
$('body').scrollTop(0)
$scope.page_state = 2
$scope.switching = true
$scope.back_handler = ->
if $scope.switching
$('body').scrollTop(0)
$scope.switching = false
else
if $window.history.length > 1
$window.history.back()
else
$window.location.href = "./welcome.html"
$scope.vote = (c, action) ->
#c : 一个advice
if action != "up" && action != "down" then console.log "wrong action"; return
unless c.action
net_action = NetManager.post
else
net_action = NetManager.delete
NetManager.post("/Advice/#{c.id}/#{action}").then (data)->
console.log data
if +data.status == 1
#获取对应advice的up和down的数量
NetManager.get("/Advice/#{c.id}").then (data) ->
return if +data.status != 1
info = data.info
c.up = info.up
c.down = info.down
c.vote = info.vote
$scope.load_more = ->
current_page += 1
request_param = {page: current_page}
NetManager.get("/Scheme/#{scheme_id}/advices", request_param).then (data)->
console.log data
if +data.status == 1
advices_arr = data.info
$scope.advices = $scope.advices.concat(advices_arr)
if advices_arr.length < 10
$scope.no_more = true
null
]); |
[
{
"context": "t Version #2\n# @return {Integer|Boolean}\n# @author Alexey Bass (albass)\n# @since 2011-07-14\npudu.versionCompare ",
"end": 1296,
"score": 0.999900221824646,
"start": 1285,
"tag": "NAME",
"value": "Alexey Bass"
},
{
"context": " @return {Integer|Boolean}\n# @author Alexey Bass (albass)\n# @since 2011-07-14\npudu.versionCompare = (left,",
"end": 1304,
"score": 0.999499499797821,
"start": 1298,
"tag": "USERNAME",
"value": "albass"
}
] | js/utils.coffee | chokchai/Pudumee | 0 | pudu = {}
pudu.defaultSetting =
version: '1.0'
#========== STORAGE ==========#
pudu.setLocalStorage = (data, func = ->)->
chrome.storage.local.set data, func
pudu.getLocalStorage = (func)->
chrome.storage.local.get func
pudu.clearLocalStorage = (func = ->)->
chrome.storage.local.clear(func)
pudu.removeLocalStorage = (key, func = ->)->
chrome.storage.local.remove(key, func)
pudu.getExtensionUrl = (uri)->
chrome.extension.getURL uri
pudu.isOptionEnable = (optionName, onEnable, onDisable = ->)->
pudu.getLocalStorage (items)->
if items[optionName] == undefined or items[optionName] == true
onEnable()
else
onDisable()
pudu.getOptionBoolean = (optionName, callback)->
pudu.isOptionEnable optionName,
(-> callback(true)), (-> callback(false))
# Simply compares two string version values.
#
# Example:
# versionCompare('1.1', '1.2') => -1
# versionCompare('1.1', '1.1') => 0
# versionCompare('1.2', '1.1') => 1
# versionCompare('2.23.3', '2.22.3') => 1
#
# Returns:
# -1 = left is LOWER than right
# 0 = they are equal
# 1 = left is GREATER = right is LOWER
# And FALSE if one of input versions are not valid
#
# @function
# @param {String} left Version #1
# @param {String} right Version #2
# @return {Integer|Boolean}
# @author Alexey Bass (albass)
# @since 2011-07-14
pudu.versionCompare = (left, right)->
if typeof left + typeof right != 'stringstring'
return false;
a = left.split('.')
b = right.split('.')
i = 0
len = Math.max(a.length, b.length);
while i < len
if ((a[i] && !b[i] && parseInt(a[i]) > 0) || (parseInt(a[i]) > parseInt(b[i])))
return 1;
else if ((b[i] && !a[i] && parseInt(b[i]) > 0) || (parseInt(a[i]) < parseInt(b[i])))
return -1;
i++
return 0;
#========== WATCHER ==========#
pudu.watcher = {}
pudu.watcherLoop = ()->
for name, func of pudu.watcher
func() # exec each watcher
setTimeout pudu.watcherLoop, 1000 # repeat
pudu.watcherElementIterator = ($elements, iterator)->
$elements.each ->
if not $(@).data('__done__')
notDone = iterator($(@), @)
$(@).data('__done__', notDone != true) # change result to boolean
#========== INIT ==========#
# init default setting if is first time, then refresh
pudu.getLocalStorage (items)->
# if not have setting data yet, just added default
if items.version is undefined
pudu.setLocalStorage pudu.defaultSetting, ->
window.location.reload()
# if current version is newer, just added only new default setting
else if pudu.versionCompare(items.version, pudu.defaultSetting.version) == -1
# assign new version
items.version = pudu.defaultSetting.version
# add only not exist
pudu.setLocalStorage $.extend(pudu.defaultSetting, items), ->
window.location.reload()
window.pudu = pudu | 107043 | pudu = {}
pudu.defaultSetting =
version: '1.0'
#========== STORAGE ==========#
pudu.setLocalStorage = (data, func = ->)->
chrome.storage.local.set data, func
pudu.getLocalStorage = (func)->
chrome.storage.local.get func
pudu.clearLocalStorage = (func = ->)->
chrome.storage.local.clear(func)
pudu.removeLocalStorage = (key, func = ->)->
chrome.storage.local.remove(key, func)
pudu.getExtensionUrl = (uri)->
chrome.extension.getURL uri
pudu.isOptionEnable = (optionName, onEnable, onDisable = ->)->
pudu.getLocalStorage (items)->
if items[optionName] == undefined or items[optionName] == true
onEnable()
else
onDisable()
pudu.getOptionBoolean = (optionName, callback)->
pudu.isOptionEnable optionName,
(-> callback(true)), (-> callback(false))
# Simply compares two string version values.
#
# Example:
# versionCompare('1.1', '1.2') => -1
# versionCompare('1.1', '1.1') => 0
# versionCompare('1.2', '1.1') => 1
# versionCompare('2.23.3', '2.22.3') => 1
#
# Returns:
# -1 = left is LOWER than right
# 0 = they are equal
# 1 = left is GREATER = right is LOWER
# And FALSE if one of input versions are not valid
#
# @function
# @param {String} left Version #1
# @param {String} right Version #2
# @return {Integer|Boolean}
# @author <NAME> (albass)
# @since 2011-07-14
pudu.versionCompare = (left, right)->
if typeof left + typeof right != 'stringstring'
return false;
a = left.split('.')
b = right.split('.')
i = 0
len = Math.max(a.length, b.length);
while i < len
if ((a[i] && !b[i] && parseInt(a[i]) > 0) || (parseInt(a[i]) > parseInt(b[i])))
return 1;
else if ((b[i] && !a[i] && parseInt(b[i]) > 0) || (parseInt(a[i]) < parseInt(b[i])))
return -1;
i++
return 0;
#========== WATCHER ==========#
pudu.watcher = {}
pudu.watcherLoop = ()->
for name, func of pudu.watcher
func() # exec each watcher
setTimeout pudu.watcherLoop, 1000 # repeat
pudu.watcherElementIterator = ($elements, iterator)->
$elements.each ->
if not $(@).data('__done__')
notDone = iterator($(@), @)
$(@).data('__done__', notDone != true) # change result to boolean
#========== INIT ==========#
# init default setting if is first time, then refresh
pudu.getLocalStorage (items)->
# if not have setting data yet, just added default
if items.version is undefined
pudu.setLocalStorage pudu.defaultSetting, ->
window.location.reload()
# if current version is newer, just added only new default setting
else if pudu.versionCompare(items.version, pudu.defaultSetting.version) == -1
# assign new version
items.version = pudu.defaultSetting.version
# add only not exist
pudu.setLocalStorage $.extend(pudu.defaultSetting, items), ->
window.location.reload()
window.pudu = pudu | true | pudu = {}
pudu.defaultSetting =
version: '1.0'
#========== STORAGE ==========#
pudu.setLocalStorage = (data, func = ->)->
chrome.storage.local.set data, func
pudu.getLocalStorage = (func)->
chrome.storage.local.get func
pudu.clearLocalStorage = (func = ->)->
chrome.storage.local.clear(func)
pudu.removeLocalStorage = (key, func = ->)->
chrome.storage.local.remove(key, func)
pudu.getExtensionUrl = (uri)->
chrome.extension.getURL uri
pudu.isOptionEnable = (optionName, onEnable, onDisable = ->)->
pudu.getLocalStorage (items)->
if items[optionName] == undefined or items[optionName] == true
onEnable()
else
onDisable()
pudu.getOptionBoolean = (optionName, callback)->
pudu.isOptionEnable optionName,
(-> callback(true)), (-> callback(false))
# Simply compares two string version values.
#
# Example:
# versionCompare('1.1', '1.2') => -1
# versionCompare('1.1', '1.1') => 0
# versionCompare('1.2', '1.1') => 1
# versionCompare('2.23.3', '2.22.3') => 1
#
# Returns:
# -1 = left is LOWER than right
# 0 = they are equal
# 1 = left is GREATER = right is LOWER
# And FALSE if one of input versions are not valid
#
# @function
# @param {String} left Version #1
# @param {String} right Version #2
# @return {Integer|Boolean}
# @author PI:NAME:<NAME>END_PI (albass)
# @since 2011-07-14
pudu.versionCompare = (left, right)->
if typeof left + typeof right != 'stringstring'
return false;
a = left.split('.')
b = right.split('.')
i = 0
len = Math.max(a.length, b.length);
while i < len
if ((a[i] && !b[i] && parseInt(a[i]) > 0) || (parseInt(a[i]) > parseInt(b[i])))
return 1;
else if ((b[i] && !a[i] && parseInt(b[i]) > 0) || (parseInt(a[i]) < parseInt(b[i])))
return -1;
i++
return 0;
#========== WATCHER ==========#
pudu.watcher = {}
pudu.watcherLoop = ()->
for name, func of pudu.watcher
func() # exec each watcher
setTimeout pudu.watcherLoop, 1000 # repeat
pudu.watcherElementIterator = ($elements, iterator)->
$elements.each ->
if not $(@).data('__done__')
notDone = iterator($(@), @)
$(@).data('__done__', notDone != true) # change result to boolean
#========== INIT ==========#
# init default setting if is first time, then refresh
pudu.getLocalStorage (items)->
# if not have setting data yet, just added default
if items.version is undefined
pudu.setLocalStorage pudu.defaultSetting, ->
window.location.reload()
# if current version is newer, just added only new default setting
else if pudu.versionCompare(items.version, pudu.defaultSetting.version) == -1
# assign new version
items.version = pudu.defaultSetting.version
# add only not exist
pudu.setLocalStorage $.extend(pudu.defaultSetting, items), ->
window.location.reload()
window.pudu = pudu |
[
{
"context": "###\n# @author Argi Karunia <arugikaru@yahoo.co.jp>\n# @author Rendy Halim <",
"end": 28,
"score": 0.9998807907104492,
"start": 16,
"tag": "NAME",
"value": "Argi Karunia"
},
{
"context": "###\n# @author Argi Karunia <arugikaru@yahoo.co.jp>\n# @author Rendy Halim <https://github.com/Rend",
"end": 51,
"score": 0.9999293088912964,
"start": 30,
"tag": "EMAIL",
"value": "arugikaru@yahoo.co.jp"
},
{
"context": " Argi Karunia <arugikaru@yahoo.co.jp>\n# @author Rendy Halim <https://github.com/RendyHalim>\n# @link https",
"end": 76,
"score": 0.9999015927314758,
"start": 65,
"tag": "NAME",
"value": "Rendy Halim"
},
{
"context": "o.jp>\n# @author Rendy Halim <https://github.com/RendyHalim>\n# @link https://gihtub.com/tokopedia/Nodame\n",
"end": 107,
"score": 0.9996627569198608,
"start": 97,
"tag": "USERNAME",
"value": "RendyHalim"
},
{
"context": "ub.com/RendyHalim>\n# @link https://gihtub.com/tokopedia/Nodame\n# @license http://opensource.org/licenses",
"end": 149,
"score": 0.8124844431877136,
"start": 140,
"tag": "USERNAME",
"value": "tokopedia"
}
] | src/request.coffee | tokopedia/nodame | 2 | ###
# @author Argi Karunia <arugikaru@yahoo.co.jp>
# @author Rendy Halim <https://github.com/RendyHalim>
# @link https://gihtub.com/tokopedia/Nodame
# @license http://opensource.org/licenses/maintenance
#
# @version 1.2.2
###
measure = require('measure')
querystring = require('query-string')
`GET = 'GET'`
`POST = 'POST'`
`PUT = 'PUT'`
`DELETE = 'DELETE'`
`UPDATE = 'UPDATE'`
`PATCH = 'PATCH'`
UA = 'curl/7.43.0'
class Request
###
# @constructor
# @param string url
# @param object optional custom options
# @throw On missing url args
###
constructor: (url, opts) ->
# Validate args
throw new Error 'Missing url args' unless url?
# Assign default options
@__default_options(url)
# Assign custom options if exists and is object
@__custom_options(opts) if opts? and typeof opts is 'object'
# Set client
@__set_client()
@__timeout = 5
return
###
# @method Parse URL and assign the results to options
# @param string URL
# @private
# @throw On unallowed protocol
###
__parse_url: (url) ->
# URL regex
re = /^(?:((http[s]{0,1}):\/\/))?([a-z0-9-_\.]+)(?:(:[0-9]+))?(.*)$/
found = url.match(re)
# Set default as http when protocol is not found
protocol = found[2] || 'http'
# Validate protocol
allowed_protocol = ['http', 'https']
if allowed_protocol.indexOf(protocol) is -1
throw new Error 'Unallowed protocol'
# Set port
if found?[4]?
port = found[4]
else
port = if protocol is 'http' then '80' else '443'
# Assign parsed_url object
@__options =
protocol: "#{protocol}:"
host: found[3]
port: port.replace(':', '')
path: found[5]
headers: {}
return
###
# @method Set default options
# @param string URL
# @private
###
__default_options: (url) ->
@__parse_url(url)
@__options.headers['User-Agent'] = UA
return
###
# @method Set default options
# @private
###
__custom_options: (options) ->
@set(option, options.option) for option of options
return
###
# @method Set request client
# @private
###
__set_client: ->
@__client = require(@__options.protocol.replace(':', ''))
return
###
# @method Set option
# @param string key
# @param object value
# @public
###
set: (key, val) ->
# Validate
throw new Error 'Missing args' if not key? or not val?
# Assign value
@__options[key] = val
return @
###
# @method Set header
# @param string key
# @param string value
# @public
# @throw on missing args
###
header: (key, args..., arg) ->
# Validate
throw new Error 'Missing header args' if not key? or not arg?
# Normal header
if args.length is 0
val = arg
# Anonymous function header
else
# Assign anonymous function
fn = arg
# Validate type
throw new TypeError 'Invalid args type' unless typeof fn is 'function'
# Get value from anonymous function
val = fn(args...)
# Assign value to header
@__options.headers[key] = val
return @
###
# @method Assign metric name
# @public
# @param string name
# @throw on undefined name
###
metric: (name) ->
# Validate existence of anonymous function
throw new Error 'Missing name args' unless name?
# Validate args type
throw new TypeError 'Invalid type of name args' unless typeof name is 'string'
# Assign metric
@__metric = name
return @
###
# @method Set timeout
# @public
# @param int second
# @throw on args type is empty or not number
###
timeout: (second) ->
# Validate the existence of second args
throw new Error 'Missing args' unless second?
# Validate args type
throw new TypeError 'Invalid args type' unless typeof second is 'number'
# Assign timeout
@__timeout = second
return @
###
# @method Assign content-type
# @public
# @param string type
# @throw on empty type and invalid type
###
type: (type) ->
# Validate empty type
throw new Error 'Missing args' unless type?
# Validate type
throw new TypeError 'Invalid args type' unless typeof type is 'string'
# Assign type
@__content_type = type
return @
###
# @method GET method
# @public
# @param callback
###
get: (callback) ->
@__request(GET, callback)
return
###
# @method POST method
# @public
# @param callback
###
post: (callback) ->
@__request(POST, callback)
return
###
# @method PUT method
# @public
# @param callback
###
put: (callback) ->
@__request(PUT, callback)
return
###
# @method UPDATE method
# @public
# @param callback
###
update: (callback) ->
@__request(UPDATE, callback)
return
###
# @method PATCH method
# @public
# @param callback
###
patch: (callback) ->
@__request(PATCH, callback)
return
###
# @method DELETE method
# @public
# @param callback
###
delete: (callback) ->
@__request(DELETE, callback)
return
###
# @method Set data
# @private
# @param object data
# @param string type
###
data: (data, type) ->
# Validate data
throw new Error 'Missing data args' unless data?
# Assign default type
type = 'json' if not @__content_type? and not type?
# Set Content-Type
switch type
when 'form'
@header('Content-Type', 'application/x-www-form-urlencoded')
data = querystring.stringify(data)
when 'json'
@header('Content-Type', 'application/vnd.api+json')
data = JSON.stringify(data)
when 'xml'
@header('Content-Type', 'application/xml')
data = data
else
@header('Content-Type', type)
data = data
# Set data
@set('body', data)
# Set content length
@header('Content-Length', data.length)
return @
###
# @method Execute request
# @private
# @param string method
# @param callback
###
__request: (method, callback) ->
@set('method', method)
# Response handler
response_handler = (res) =>
# Measure httpRequest response time
done = measure.measure('httpRequest') if @__metric?
# Initialize data
data = ''
# Append chunked data
res.on 'data', (chunk) =>
data += String(chunk)
return
# Parse data
res.on 'end', () =>
# Log request stat
# TODO: log is undefined
# if done?
# log.stat.histogram(@__metric, done(), ['env:' + nodame.env()])
# return callback
result = @__parse(res.headers['content-type'], data)
return callback(null, result)
return
# Execute request
req = @__client.request(@__options, response_handler)
# Error handler
req.on 'error', (err) =>
error =
id: '110102'
title: 'Request timeout'
detail: "Can't reach server at #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path}"
unless req.socket.destroyed
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.alert(error.id, "#{error.title}. #{error.detail}")
return
# Write data
write_methods = [POST, PUT, UPDATE, DELETE, 'PATCH']
req.write(@__options.body) if write_methods.indexOf(@__options.method) isnt -1
# Timeout handler
req.setTimeout @__timeout * 1000, () =>
error =
id: '110102'
title: 'Request timeout'
detail: "Can't reach server at #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path} with data: #{@__options.body}"
result =
errors: [error]
# Send alert
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.alert(error.id, sprintf('%s. %s', error.title, error.detail))
# Destroy socket
req.socket.destroy()
# Abort socket
req.abort()
return callback(true, result)
# Close request
req.end()
return
###
# @method Parse response data
# @private
# @param string content-type
# @param object data
###
__parse: (content_type, data) ->
# Validate content-type
return data unless content_type?
# Validate xml or html response
if content_type?.match(/xml|html/)? and !(data[0] is '{' or data[0] is '[')
return data
# Parse JSON
try
result = JSON.parse(data)
catch err
error =
id: '110101'
title: 'Invalid response data'
detail: "Failed in fetching data from #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path}.\n\nResponse Data:\n#{data}"
result =
errors: [error]
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.critical(error.id, "#{error.title}. #{error.detail}")
return result
module.exports = Request
| 200824 | ###
# @author <NAME> <<EMAIL>>
# @author <NAME> <https://github.com/RendyHalim>
# @link https://gihtub.com/tokopedia/Nodame
# @license http://opensource.org/licenses/maintenance
#
# @version 1.2.2
###
measure = require('measure')
querystring = require('query-string')
`GET = 'GET'`
`POST = 'POST'`
`PUT = 'PUT'`
`DELETE = 'DELETE'`
`UPDATE = 'UPDATE'`
`PATCH = 'PATCH'`
UA = 'curl/7.43.0'
class Request
###
# @constructor
# @param string url
# @param object optional custom options
# @throw On missing url args
###
constructor: (url, opts) ->
# Validate args
throw new Error 'Missing url args' unless url?
# Assign default options
@__default_options(url)
# Assign custom options if exists and is object
@__custom_options(opts) if opts? and typeof opts is 'object'
# Set client
@__set_client()
@__timeout = 5
return
###
# @method Parse URL and assign the results to options
# @param string URL
# @private
# @throw On unallowed protocol
###
__parse_url: (url) ->
# URL regex
re = /^(?:((http[s]{0,1}):\/\/))?([a-z0-9-_\.]+)(?:(:[0-9]+))?(.*)$/
found = url.match(re)
# Set default as http when protocol is not found
protocol = found[2] || 'http'
# Validate protocol
allowed_protocol = ['http', 'https']
if allowed_protocol.indexOf(protocol) is -1
throw new Error 'Unallowed protocol'
# Set port
if found?[4]?
port = found[4]
else
port = if protocol is 'http' then '80' else '443'
# Assign parsed_url object
@__options =
protocol: "#{protocol}:"
host: found[3]
port: port.replace(':', '')
path: found[5]
headers: {}
return
###
# @method Set default options
# @param string URL
# @private
###
__default_options: (url) ->
@__parse_url(url)
@__options.headers['User-Agent'] = UA
return
###
# @method Set default options
# @private
###
__custom_options: (options) ->
@set(option, options.option) for option of options
return
###
# @method Set request client
# @private
###
__set_client: ->
@__client = require(@__options.protocol.replace(':', ''))
return
###
# @method Set option
# @param string key
# @param object value
# @public
###
set: (key, val) ->
# Validate
throw new Error 'Missing args' if not key? or not val?
# Assign value
@__options[key] = val
return @
###
# @method Set header
# @param string key
# @param string value
# @public
# @throw on missing args
###
header: (key, args..., arg) ->
# Validate
throw new Error 'Missing header args' if not key? or not arg?
# Normal header
if args.length is 0
val = arg
# Anonymous function header
else
# Assign anonymous function
fn = arg
# Validate type
throw new TypeError 'Invalid args type' unless typeof fn is 'function'
# Get value from anonymous function
val = fn(args...)
# Assign value to header
@__options.headers[key] = val
return @
###
# @method Assign metric name
# @public
# @param string name
# @throw on undefined name
###
metric: (name) ->
# Validate existence of anonymous function
throw new Error 'Missing name args' unless name?
# Validate args type
throw new TypeError 'Invalid type of name args' unless typeof name is 'string'
# Assign metric
@__metric = name
return @
###
# @method Set timeout
# @public
# @param int second
# @throw on args type is empty or not number
###
timeout: (second) ->
# Validate the existence of second args
throw new Error 'Missing args' unless second?
# Validate args type
throw new TypeError 'Invalid args type' unless typeof second is 'number'
# Assign timeout
@__timeout = second
return @
###
# @method Assign content-type
# @public
# @param string type
# @throw on empty type and invalid type
###
type: (type) ->
# Validate empty type
throw new Error 'Missing args' unless type?
# Validate type
throw new TypeError 'Invalid args type' unless typeof type is 'string'
# Assign type
@__content_type = type
return @
###
# @method GET method
# @public
# @param callback
###
get: (callback) ->
@__request(GET, callback)
return
###
# @method POST method
# @public
# @param callback
###
post: (callback) ->
@__request(POST, callback)
return
###
# @method PUT method
# @public
# @param callback
###
put: (callback) ->
@__request(PUT, callback)
return
###
# @method UPDATE method
# @public
# @param callback
###
update: (callback) ->
@__request(UPDATE, callback)
return
###
# @method PATCH method
# @public
# @param callback
###
patch: (callback) ->
@__request(PATCH, callback)
return
###
# @method DELETE method
# @public
# @param callback
###
delete: (callback) ->
@__request(DELETE, callback)
return
###
# @method Set data
# @private
# @param object data
# @param string type
###
data: (data, type) ->
# Validate data
throw new Error 'Missing data args' unless data?
# Assign default type
type = 'json' if not @__content_type? and not type?
# Set Content-Type
switch type
when 'form'
@header('Content-Type', 'application/x-www-form-urlencoded')
data = querystring.stringify(data)
when 'json'
@header('Content-Type', 'application/vnd.api+json')
data = JSON.stringify(data)
when 'xml'
@header('Content-Type', 'application/xml')
data = data
else
@header('Content-Type', type)
data = data
# Set data
@set('body', data)
# Set content length
@header('Content-Length', data.length)
return @
###
# @method Execute request
# @private
# @param string method
# @param callback
###
__request: (method, callback) ->
@set('method', method)
# Response handler
response_handler = (res) =>
# Measure httpRequest response time
done = measure.measure('httpRequest') if @__metric?
# Initialize data
data = ''
# Append chunked data
res.on 'data', (chunk) =>
data += String(chunk)
return
# Parse data
res.on 'end', () =>
# Log request stat
# TODO: log is undefined
# if done?
# log.stat.histogram(@__metric, done(), ['env:' + nodame.env()])
# return callback
result = @__parse(res.headers['content-type'], data)
return callback(null, result)
return
# Execute request
req = @__client.request(@__options, response_handler)
# Error handler
req.on 'error', (err) =>
error =
id: '110102'
title: 'Request timeout'
detail: "Can't reach server at #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path}"
unless req.socket.destroyed
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.alert(error.id, "#{error.title}. #{error.detail}")
return
# Write data
write_methods = [POST, PUT, UPDATE, DELETE, 'PATCH']
req.write(@__options.body) if write_methods.indexOf(@__options.method) isnt -1
# Timeout handler
req.setTimeout @__timeout * 1000, () =>
error =
id: '110102'
title: 'Request timeout'
detail: "Can't reach server at #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path} with data: #{@__options.body}"
result =
errors: [error]
# Send alert
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.alert(error.id, sprintf('%s. %s', error.title, error.detail))
# Destroy socket
req.socket.destroy()
# Abort socket
req.abort()
return callback(true, result)
# Close request
req.end()
return
###
# @method Parse response data
# @private
# @param string content-type
# @param object data
###
__parse: (content_type, data) ->
# Validate content-type
return data unless content_type?
# Validate xml or html response
if content_type?.match(/xml|html/)? and !(data[0] is '{' or data[0] is '[')
return data
# Parse JSON
try
result = JSON.parse(data)
catch err
error =
id: '110101'
title: 'Invalid response data'
detail: "Failed in fetching data from #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path}.\n\nResponse Data:\n#{data}"
result =
errors: [error]
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.critical(error.id, "#{error.title}. #{error.detail}")
return result
module.exports = Request
| true | ###
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @author PI:NAME:<NAME>END_PI <https://github.com/RendyHalim>
# @link https://gihtub.com/tokopedia/Nodame
# @license http://opensource.org/licenses/maintenance
#
# @version 1.2.2
###
measure = require('measure')
querystring = require('query-string')
`GET = 'GET'`
`POST = 'POST'`
`PUT = 'PUT'`
`DELETE = 'DELETE'`
`UPDATE = 'UPDATE'`
`PATCH = 'PATCH'`
UA = 'curl/7.43.0'
class Request
###
# @constructor
# @param string url
# @param object optional custom options
# @throw On missing url args
###
constructor: (url, opts) ->
# Validate args
throw new Error 'Missing url args' unless url?
# Assign default options
@__default_options(url)
# Assign custom options if exists and is object
@__custom_options(opts) if opts? and typeof opts is 'object'
# Set client
@__set_client()
@__timeout = 5
return
###
# @method Parse URL and assign the results to options
# @param string URL
# @private
# @throw On unallowed protocol
###
__parse_url: (url) ->
# URL regex
re = /^(?:((http[s]{0,1}):\/\/))?([a-z0-9-_\.]+)(?:(:[0-9]+))?(.*)$/
found = url.match(re)
# Set default as http when protocol is not found
protocol = found[2] || 'http'
# Validate protocol
allowed_protocol = ['http', 'https']
if allowed_protocol.indexOf(protocol) is -1
throw new Error 'Unallowed protocol'
# Set port
if found?[4]?
port = found[4]
else
port = if protocol is 'http' then '80' else '443'
# Assign parsed_url object
@__options =
protocol: "#{protocol}:"
host: found[3]
port: port.replace(':', '')
path: found[5]
headers: {}
return
###
# @method Set default options
# @param string URL
# @private
###
__default_options: (url) ->
@__parse_url(url)
@__options.headers['User-Agent'] = UA
return
###
# @method Set default options
# @private
###
__custom_options: (options) ->
@set(option, options.option) for option of options
return
###
# @method Set request client
# @private
###
__set_client: ->
@__client = require(@__options.protocol.replace(':', ''))
return
###
# @method Set option
# @param string key
# @param object value
# @public
###
set: (key, val) ->
# Validate
throw new Error 'Missing args' if not key? or not val?
# Assign value
@__options[key] = val
return @
###
# @method Set header
# @param string key
# @param string value
# @public
# @throw on missing args
###
header: (key, args..., arg) ->
# Validate
throw new Error 'Missing header args' if not key? or not arg?
# Normal header
if args.length is 0
val = arg
# Anonymous function header
else
# Assign anonymous function
fn = arg
# Validate type
throw new TypeError 'Invalid args type' unless typeof fn is 'function'
# Get value from anonymous function
val = fn(args...)
# Assign value to header
@__options.headers[key] = val
return @
###
# @method Assign metric name
# @public
# @param string name
# @throw on undefined name
###
metric: (name) ->
# Validate existence of anonymous function
throw new Error 'Missing name args' unless name?
# Validate args type
throw new TypeError 'Invalid type of name args' unless typeof name is 'string'
# Assign metric
@__metric = name
return @
###
# @method Set timeout
# @public
# @param int second
# @throw on args type is empty or not number
###
timeout: (second) ->
# Validate the existence of second args
throw new Error 'Missing args' unless second?
# Validate args type
throw new TypeError 'Invalid args type' unless typeof second is 'number'
# Assign timeout
@__timeout = second
return @
###
# @method Assign content-type
# @public
# @param string type
# @throw on empty type and invalid type
###
type: (type) ->
# Validate empty type
throw new Error 'Missing args' unless type?
# Validate type
throw new TypeError 'Invalid args type' unless typeof type is 'string'
# Assign type
@__content_type = type
return @
###
# @method GET method
# @public
# @param callback
###
get: (callback) ->
@__request(GET, callback)
return
###
# @method POST method
# @public
# @param callback
###
post: (callback) ->
@__request(POST, callback)
return
###
# @method PUT method
# @public
# @param callback
###
put: (callback) ->
@__request(PUT, callback)
return
###
# @method UPDATE method
# @public
# @param callback
###
update: (callback) ->
@__request(UPDATE, callback)
return
###
# @method PATCH method
# @public
# @param callback
###
patch: (callback) ->
@__request(PATCH, callback)
return
###
# @method DELETE method
# @public
# @param callback
###
delete: (callback) ->
@__request(DELETE, callback)
return
###
# @method Set data
# @private
# @param object data
# @param string type
###
data: (data, type) ->
# Validate data
throw new Error 'Missing data args' unless data?
# Assign default type
type = 'json' if not @__content_type? and not type?
# Set Content-Type
switch type
when 'form'
@header('Content-Type', 'application/x-www-form-urlencoded')
data = querystring.stringify(data)
when 'json'
@header('Content-Type', 'application/vnd.api+json')
data = JSON.stringify(data)
when 'xml'
@header('Content-Type', 'application/xml')
data = data
else
@header('Content-Type', type)
data = data
# Set data
@set('body', data)
# Set content length
@header('Content-Length', data.length)
return @
###
# @method Execute request
# @private
# @param string method
# @param callback
###
__request: (method, callback) ->
@set('method', method)
# Response handler
response_handler = (res) =>
# Measure httpRequest response time
done = measure.measure('httpRequest') if @__metric?
# Initialize data
data = ''
# Append chunked data
res.on 'data', (chunk) =>
data += String(chunk)
return
# Parse data
res.on 'end', () =>
# Log request stat
# TODO: log is undefined
# if done?
# log.stat.histogram(@__metric, done(), ['env:' + nodame.env()])
# return callback
result = @__parse(res.headers['content-type'], data)
return callback(null, result)
return
# Execute request
req = @__client.request(@__options, response_handler)
# Error handler
req.on 'error', (err) =>
error =
id: '110102'
title: 'Request timeout'
detail: "Can't reach server at #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path}"
unless req.socket.destroyed
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.alert(error.id, "#{error.title}. #{error.detail}")
return
# Write data
write_methods = [POST, PUT, UPDATE, DELETE, 'PATCH']
req.write(@__options.body) if write_methods.indexOf(@__options.method) isnt -1
# Timeout handler
req.setTimeout @__timeout * 1000, () =>
error =
id: '110102'
title: 'Request timeout'
detail: "Can't reach server at #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path} with data: #{@__options.body}"
result =
errors: [error]
# Send alert
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.alert(error.id, sprintf('%s. %s', error.title, error.detail))
# Destroy socket
req.socket.destroy()
# Abort socket
req.abort()
return callback(true, result)
# Close request
req.end()
return
###
# @method Parse response data
# @private
# @param string content-type
# @param object data
###
__parse: (content_type, data) ->
# Validate content-type
return data unless content_type?
# Validate xml or html response
if content_type?.match(/xml|html/)? and !(data[0] is '{' or data[0] is '[')
return data
# Parse JSON
try
result = JSON.parse(data)
catch err
error =
id: '110101'
title: 'Invalid response data'
detail: "Failed in fetching data from #{@__options.protocol}//#{@__options.host}:#{@__options.port}#{@__options.path}.\n\nResponse Data:\n#{data}"
result =
errors: [error]
console.log { id: error.id, title: error.title, detail: error.detail }
# TODO: log is undefined
# log.critical(error.id, "#{error.title}. #{error.detail}")
return result
module.exports = Request
|
[
{
"context": "ver\n\t\t\t\tMeteor.defer ->\n\t\t\t\t\tEmail.send\n\t\t\t\t\t\tto:\"you@email.com\"\n\t\t\t\t\t\tfrom:\"me@email.com\"\n\t\t\t\t\t\tsubject:\"New Cus",
"end": 507,
"score": 0.9999191164970398,
"start": 494,
"tag": "EMAIL",
"value": "you@email.com"
},
{
"context": "\t\tEmail.send\n\t\t\t\t\t\tto:\"you@email.com\"\n\t\t\t\t\t\tfrom:\"me@email.com\"\n\t\t\t\t\t\tsubject:\"New Customer!\"\n\t\t\t\t\t\ttext:\"Someon",
"end": 533,
"score": 0.9999176263809204,
"start": 521,
"tag": "EMAIL",
"value": "me@email.com"
}
] | _/Module 3/orders/cart/cart_methods.coffee | paullewallencom/meteor-978-1-7872-8775-4 | 9 | # /orders/cart/cart_methods.coffee
Meteor.methods
"cart.add-to-cart": (ops={}) ->
# Validate data
check ops,
order:Match.Optional(Match.OneOf(String,null))
product:String
quantity:Number
order = Orders.findOne ops.order
product = Products.findOne ops.product
# Insert Order if it doesn't exist
unless order
order_id = Orders.insert
status:"new"
total_products:0
subtotal:0
total:0
if Meteor.isServer
Meteor.defer ->
Email.send
to:"you@email.com"
from:"me@email.com"
subject:"New Customer!"
text:"Someone has created a new order"
else
# Validate order status
if order.status isnt "new"
throw new Meteor.Error 405, "Not Allowed"
order_id = order._id
# Set the session variable for future reference
if Meteor.isClient
Session.setPersistent "global.order",order_id
# Find the order
order = Orders.findOne order_id
# Check for details on this product
detail = OrderDetails.findOne
product:product._id
order:order._id
if detail
# Increase by one if the details exist
OrderDetails.update detail._id,
$inc:
quantity:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
Orders.update order._id,
$inc:
total_products:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
else
# Insert if details do not exist
OrderDetails.insert
quantity:ops.quantity
product:product._id
order:order._id
price:product.price
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
Orders.update order._id,
$inc:
total_products:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
| 116089 | # /orders/cart/cart_methods.coffee
Meteor.methods
"cart.add-to-cart": (ops={}) ->
# Validate data
check ops,
order:Match.Optional(Match.OneOf(String,null))
product:String
quantity:Number
order = Orders.findOne ops.order
product = Products.findOne ops.product
# Insert Order if it doesn't exist
unless order
order_id = Orders.insert
status:"new"
total_products:0
subtotal:0
total:0
if Meteor.isServer
Meteor.defer ->
Email.send
to:"<EMAIL>"
from:"<EMAIL>"
subject:"New Customer!"
text:"Someone has created a new order"
else
# Validate order status
if order.status isnt "new"
throw new Meteor.Error 405, "Not Allowed"
order_id = order._id
# Set the session variable for future reference
if Meteor.isClient
Session.setPersistent "global.order",order_id
# Find the order
order = Orders.findOne order_id
# Check for details on this product
detail = OrderDetails.findOne
product:product._id
order:order._id
if detail
# Increase by one if the details exist
OrderDetails.update detail._id,
$inc:
quantity:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
Orders.update order._id,
$inc:
total_products:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
else
# Insert if details do not exist
OrderDetails.insert
quantity:ops.quantity
product:product._id
order:order._id
price:product.price
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
Orders.update order._id,
$inc:
total_products:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
| true | # /orders/cart/cart_methods.coffee
Meteor.methods
"cart.add-to-cart": (ops={}) ->
# Validate data
check ops,
order:Match.Optional(Match.OneOf(String,null))
product:String
quantity:Number
order = Orders.findOne ops.order
product = Products.findOne ops.product
# Insert Order if it doesn't exist
unless order
order_id = Orders.insert
status:"new"
total_products:0
subtotal:0
total:0
if Meteor.isServer
Meteor.defer ->
Email.send
to:"PI:EMAIL:<EMAIL>END_PI"
from:"PI:EMAIL:<EMAIL>END_PI"
subject:"New Customer!"
text:"Someone has created a new order"
else
# Validate order status
if order.status isnt "new"
throw new Meteor.Error 405, "Not Allowed"
order_id = order._id
# Set the session variable for future reference
if Meteor.isClient
Session.setPersistent "global.order",order_id
# Find the order
order = Orders.findOne order_id
# Check for details on this product
detail = OrderDetails.findOne
product:product._id
order:order._id
if detail
# Increase by one if the details exist
OrderDetails.update detail._id,
$inc:
quantity:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
Orders.update order._id,
$inc:
total_products:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
else
# Insert if details do not exist
OrderDetails.insert
quantity:ops.quantity
product:product._id
order:order._id
price:product.price
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
Orders.update order._id,
$inc:
total_products:ops.quantity
subtotal:product.price * ops.quantity
total:product.price * ops.quantity
|
[
{
"context": "beforeEach ->\n myBook = new Book(id: 1, name: \"test book\", features: null)\n\n describe \"IdentityMap\", ->\n ",
"end": 529,
"score": 0.7634492516517639,
"start": 520,
"tag": "NAME",
"value": "test book"
},
{
"context": "okList(id: 1, parent_book: {id: myBook.id, name: 'Name updated'}, items: [])\n IdentityMap.map(coll)\n ",
"end": 3393,
"score": 0.7654873132705688,
"start": 3381,
"tag": "NAME",
"value": "Name updated"
}
] | spec/IdentityMap_spec.coffee | pluff/angular-identity-map | 4 | Book = (plainValue) ->
@class_name = "Book"
@id = plainValue.id
@name = plainValue.name
@features = plainValue.features
return
BookList = (plainValue) ->
@class_name = "BookList"
@id = plainValue.id
@items = []
@parent_book = new Book(plainValue.parent_book) if plainValue.parent_book?
angular.forEach plainValue.items, (book) =>
@items.push(new Book(book))
return
describe "identity-map", ->
IdentityMap = undefined
myBook = undefined
beforeEach ->
myBook = new Book(id: 1, name: "test book", features: null)
describe "IdentityMap", ->
beforeEach ->
module "identity-map"
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
describe ".get", ->
it "returns mapped entity object", ->
expect(IdentityMap.map(myBook)).toBe myBook
expect(IdentityMap.get(myBook)).toBe myBook
it "allows to get by an entity object", ->
IdentityMap.map myBook
expect(IdentityMap.get(new Book(myBook))).toBe myBook
it "allows to get by direct id and type scalar", ->
IdentityMap.map myBook
expect(IdentityMap.get(myBook.id, myBook.class_name)).toBe myBook
it "returns undefined when attempting to get an object not mapped before", ->
expect(IdentityMap.get(myBook)).toBe undefined
describe ".detach", ->
it "removes object from identity map", ->
IdentityMap.map myBook
expect(IdentityMap.get(myBook)).toBe myBook
IdentityMap.detach myBook
expect(IdentityMap.get(myBook)).toBe undefined
expect(myBook.constructor).toBe Book
describe ".isMappable", ->
it 'returns true if entity has valid entityType and entityId', ->
expect(IdentityMap.isMappable(myBook)).toBeTruthy()
it 'returns false if entity has no valid entityType or entityId', ->
myBook.class_name = undefined
expect(IdentityMap.isMappable(myBook)).toBeFalsy()
myBook.class_name = 'Book'
myBook.id = undefined
expect(IdentityMap.isMappable(myBook)).toBeFalsy()
describe ".map", ->
describe 'when passed param is not mappable', ->
describe 'for arrays & objects', ->
it 'returns same array\object with all elements mapped', ->
arr = [1, 2]
IdentityMap.map(arr)
expect(arr).toEqual([1, 2])
updatedBook = new Book(myBook)
updatedBook.name = "Updated name"
IdentityMap.map(myBook)
arr = [myBook, updatedBook]
IdentityMap.map arr
expect(arr).toEqual([myBook, myBook])
updatedAgainBook = new Book(myBook)
test_object = {book: updatedAgainBook}
IdentityMap.map test_object
expect(test_object.book).toBe myBook
describe 'for scalars', ->
it 'simply returns value back', ->
expect(IdentityMap.map(1)).toEqual 1
expect(IdentityMap.map('')).toEqual ''
describe "when object was not mapped before", ->
it "adds object to identity map and returns the object", ->
expect(IdentityMap.map(myBook)).toBe myBook
expect(IdentityMap.get(myBook)).toBe myBook
it 'maps all child objects recursively', ->
IdentityMap.map(myBook)
coll = new BookList(id: 1, parent_book: {id: myBook.id, name: 'Name updated'}, items: [])
IdentityMap.map(coll)
expect(coll.parent_book).toBe(myBook)
describe "when the same object is already present in the map", ->
updatedBook = undefined
beforeEach ->
IdentityMap.map myBook
updatedBook = new Book(myBook)
updatedBook.name = "Updated name"
it "merges old object attributes with new object attributes", ->
mappedBook = IdentityMap.map(updatedBook)
expect(mappedBook.name).toBe updatedBook.name
expect(mappedBook.features).toBe myBook.features
it "returns old object reference", ->
expect(IdentityMap.map(updatedBook)).toBe myBook
it 'maps all childrens objects too', ->
book1 = new Book id: 10, name: 'Name'
book2 = new Book id: 20, name: 'Name'
IdentityMap.map(book1)
IdentityMap.map(book2)
parent_book = new Book id: 30, name: 'Name'
IdentityMap.map(parent_book)
collection1 = new BookList id: 1, items: []
collection1.parent_book = parent_book
IdentityMap.map(collection1)
updated_collection = new BookList({id: 1, items: [
{id: 10, name: 'Name 1'},
{id: 20, name: 'Name 2'}
], parent_book: {id: 30, name: 'Name 3'}})
IdentityMap.map(updated_collection)
expect(book1.name).toEqual 'Name 1'
expect(book2.name).toEqual 'Name 2'
expect(parent_book.name).toEqual 'Name 3'
expect(collection1.items).toEqual [book1, book2]
describe "when passed object is not mappable", ->
response = undefined
beforeEach ->
response = {status: 200, body: updatedBook}
it 'still maps all object properties', ->
mapped_response = IdentityMap.map(response)
expect(mapped_response).toBe response
expect(mapped_response.body).toBe myBook
describe ".clear", ->
it "removes all object from identity map, but keeps object references untouched", ->
IdentityMap.map myBook
anotherBook = new Book(id: myBook + 1, name: "another name")
IdentityMap.map anotherBook
IdentityMap.clear()
expect(myBook.constructor).toBe Book
expect(anotherBook.constructor).toBe Book
expect(IdentityMap.get(myBook)).toBe undefined
expect(IdentityMap.get(anotherBook)).toBe undefined
describe "IdentityMapProvider", ->
it "allows to configure entityId function", ->
module "identity-map", (IdentityMapProvider) ->
IdentityMapProvider.setEntityIdFn (e) ->
e.name
return
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
anotherBook = new Book(id: myBook.id, name: "name!", features: myBook.features)
IdentityMap.map myBook
IdentityMap.map anotherBook
expect(IdentityMap.get(myBook)).toBe myBook
expect(IdentityMap.get(anotherBook)).toBe anotherBook
it "allows to configure entityType function", ->
module "identity-map", (IdentityMapProvider) ->
IdentityMapProvider.setEntityTypeFn (e) ->
e.constructor.toString()
return
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
SuperBook = (plainValue) ->
@prototype = Book::
@id = plainValue.id
@name = plainValue.name
@features = plainValue.features
return
IdentityMap.map myBook
anotherBook = new SuperBook(myBook)
IdentityMap.map anotherBook
expect(IdentityMap.get(myBook)).toBe myBook
expect(IdentityMap.get(anotherBook)).toBe anotherBook
| 132608 | Book = (plainValue) ->
@class_name = "Book"
@id = plainValue.id
@name = plainValue.name
@features = plainValue.features
return
BookList = (plainValue) ->
@class_name = "BookList"
@id = plainValue.id
@items = []
@parent_book = new Book(plainValue.parent_book) if plainValue.parent_book?
angular.forEach plainValue.items, (book) =>
@items.push(new Book(book))
return
describe "identity-map", ->
IdentityMap = undefined
myBook = undefined
beforeEach ->
myBook = new Book(id: 1, name: "<NAME>", features: null)
describe "IdentityMap", ->
beforeEach ->
module "identity-map"
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
describe ".get", ->
it "returns mapped entity object", ->
expect(IdentityMap.map(myBook)).toBe myBook
expect(IdentityMap.get(myBook)).toBe myBook
it "allows to get by an entity object", ->
IdentityMap.map myBook
expect(IdentityMap.get(new Book(myBook))).toBe myBook
it "allows to get by direct id and type scalar", ->
IdentityMap.map myBook
expect(IdentityMap.get(myBook.id, myBook.class_name)).toBe myBook
it "returns undefined when attempting to get an object not mapped before", ->
expect(IdentityMap.get(myBook)).toBe undefined
describe ".detach", ->
it "removes object from identity map", ->
IdentityMap.map myBook
expect(IdentityMap.get(myBook)).toBe myBook
IdentityMap.detach myBook
expect(IdentityMap.get(myBook)).toBe undefined
expect(myBook.constructor).toBe Book
describe ".isMappable", ->
it 'returns true if entity has valid entityType and entityId', ->
expect(IdentityMap.isMappable(myBook)).toBeTruthy()
it 'returns false if entity has no valid entityType or entityId', ->
myBook.class_name = undefined
expect(IdentityMap.isMappable(myBook)).toBeFalsy()
myBook.class_name = 'Book'
myBook.id = undefined
expect(IdentityMap.isMappable(myBook)).toBeFalsy()
describe ".map", ->
describe 'when passed param is not mappable', ->
describe 'for arrays & objects', ->
it 'returns same array\object with all elements mapped', ->
arr = [1, 2]
IdentityMap.map(arr)
expect(arr).toEqual([1, 2])
updatedBook = new Book(myBook)
updatedBook.name = "Updated name"
IdentityMap.map(myBook)
arr = [myBook, updatedBook]
IdentityMap.map arr
expect(arr).toEqual([myBook, myBook])
updatedAgainBook = new Book(myBook)
test_object = {book: updatedAgainBook}
IdentityMap.map test_object
expect(test_object.book).toBe myBook
describe 'for scalars', ->
it 'simply returns value back', ->
expect(IdentityMap.map(1)).toEqual 1
expect(IdentityMap.map('')).toEqual ''
describe "when object was not mapped before", ->
it "adds object to identity map and returns the object", ->
expect(IdentityMap.map(myBook)).toBe myBook
expect(IdentityMap.get(myBook)).toBe myBook
it 'maps all child objects recursively', ->
IdentityMap.map(myBook)
coll = new BookList(id: 1, parent_book: {id: myBook.id, name: '<NAME>'}, items: [])
IdentityMap.map(coll)
expect(coll.parent_book).toBe(myBook)
describe "when the same object is already present in the map", ->
updatedBook = undefined
beforeEach ->
IdentityMap.map myBook
updatedBook = new Book(myBook)
updatedBook.name = "Updated name"
it "merges old object attributes with new object attributes", ->
mappedBook = IdentityMap.map(updatedBook)
expect(mappedBook.name).toBe updatedBook.name
expect(mappedBook.features).toBe myBook.features
it "returns old object reference", ->
expect(IdentityMap.map(updatedBook)).toBe myBook
it 'maps all childrens objects too', ->
book1 = new Book id: 10, name: 'Name'
book2 = new Book id: 20, name: 'Name'
IdentityMap.map(book1)
IdentityMap.map(book2)
parent_book = new Book id: 30, name: 'Name'
IdentityMap.map(parent_book)
collection1 = new BookList id: 1, items: []
collection1.parent_book = parent_book
IdentityMap.map(collection1)
updated_collection = new BookList({id: 1, items: [
{id: 10, name: 'Name 1'},
{id: 20, name: 'Name 2'}
], parent_book: {id: 30, name: 'Name 3'}})
IdentityMap.map(updated_collection)
expect(book1.name).toEqual 'Name 1'
expect(book2.name).toEqual 'Name 2'
expect(parent_book.name).toEqual 'Name 3'
expect(collection1.items).toEqual [book1, book2]
describe "when passed object is not mappable", ->
response = undefined
beforeEach ->
response = {status: 200, body: updatedBook}
it 'still maps all object properties', ->
mapped_response = IdentityMap.map(response)
expect(mapped_response).toBe response
expect(mapped_response.body).toBe myBook
describe ".clear", ->
it "removes all object from identity map, but keeps object references untouched", ->
IdentityMap.map myBook
anotherBook = new Book(id: myBook + 1, name: "another name")
IdentityMap.map anotherBook
IdentityMap.clear()
expect(myBook.constructor).toBe Book
expect(anotherBook.constructor).toBe Book
expect(IdentityMap.get(myBook)).toBe undefined
expect(IdentityMap.get(anotherBook)).toBe undefined
describe "IdentityMapProvider", ->
it "allows to configure entityId function", ->
module "identity-map", (IdentityMapProvider) ->
IdentityMapProvider.setEntityIdFn (e) ->
e.name
return
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
anotherBook = new Book(id: myBook.id, name: "name!", features: myBook.features)
IdentityMap.map myBook
IdentityMap.map anotherBook
expect(IdentityMap.get(myBook)).toBe myBook
expect(IdentityMap.get(anotherBook)).toBe anotherBook
it "allows to configure entityType function", ->
module "identity-map", (IdentityMapProvider) ->
IdentityMapProvider.setEntityTypeFn (e) ->
e.constructor.toString()
return
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
SuperBook = (plainValue) ->
@prototype = Book::
@id = plainValue.id
@name = plainValue.name
@features = plainValue.features
return
IdentityMap.map myBook
anotherBook = new SuperBook(myBook)
IdentityMap.map anotherBook
expect(IdentityMap.get(myBook)).toBe myBook
expect(IdentityMap.get(anotherBook)).toBe anotherBook
| true | Book = (plainValue) ->
@class_name = "Book"
@id = plainValue.id
@name = plainValue.name
@features = plainValue.features
return
BookList = (plainValue) ->
@class_name = "BookList"
@id = plainValue.id
@items = []
@parent_book = new Book(plainValue.parent_book) if plainValue.parent_book?
angular.forEach plainValue.items, (book) =>
@items.push(new Book(book))
return
describe "identity-map", ->
IdentityMap = undefined
myBook = undefined
beforeEach ->
myBook = new Book(id: 1, name: "PI:NAME:<NAME>END_PI", features: null)
describe "IdentityMap", ->
beforeEach ->
module "identity-map"
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
describe ".get", ->
it "returns mapped entity object", ->
expect(IdentityMap.map(myBook)).toBe myBook
expect(IdentityMap.get(myBook)).toBe myBook
it "allows to get by an entity object", ->
IdentityMap.map myBook
expect(IdentityMap.get(new Book(myBook))).toBe myBook
it "allows to get by direct id and type scalar", ->
IdentityMap.map myBook
expect(IdentityMap.get(myBook.id, myBook.class_name)).toBe myBook
it "returns undefined when attempting to get an object not mapped before", ->
expect(IdentityMap.get(myBook)).toBe undefined
describe ".detach", ->
it "removes object from identity map", ->
IdentityMap.map myBook
expect(IdentityMap.get(myBook)).toBe myBook
IdentityMap.detach myBook
expect(IdentityMap.get(myBook)).toBe undefined
expect(myBook.constructor).toBe Book
describe ".isMappable", ->
it 'returns true if entity has valid entityType and entityId', ->
expect(IdentityMap.isMappable(myBook)).toBeTruthy()
it 'returns false if entity has no valid entityType or entityId', ->
myBook.class_name = undefined
expect(IdentityMap.isMappable(myBook)).toBeFalsy()
myBook.class_name = 'Book'
myBook.id = undefined
expect(IdentityMap.isMappable(myBook)).toBeFalsy()
describe ".map", ->
describe 'when passed param is not mappable', ->
describe 'for arrays & objects', ->
it 'returns same array\object with all elements mapped', ->
arr = [1, 2]
IdentityMap.map(arr)
expect(arr).toEqual([1, 2])
updatedBook = new Book(myBook)
updatedBook.name = "Updated name"
IdentityMap.map(myBook)
arr = [myBook, updatedBook]
IdentityMap.map arr
expect(arr).toEqual([myBook, myBook])
updatedAgainBook = new Book(myBook)
test_object = {book: updatedAgainBook}
IdentityMap.map test_object
expect(test_object.book).toBe myBook
describe 'for scalars', ->
it 'simply returns value back', ->
expect(IdentityMap.map(1)).toEqual 1
expect(IdentityMap.map('')).toEqual ''
describe "when object was not mapped before", ->
it "adds object to identity map and returns the object", ->
expect(IdentityMap.map(myBook)).toBe myBook
expect(IdentityMap.get(myBook)).toBe myBook
it 'maps all child objects recursively', ->
IdentityMap.map(myBook)
coll = new BookList(id: 1, parent_book: {id: myBook.id, name: 'PI:NAME:<NAME>END_PI'}, items: [])
IdentityMap.map(coll)
expect(coll.parent_book).toBe(myBook)
describe "when the same object is already present in the map", ->
updatedBook = undefined
beforeEach ->
IdentityMap.map myBook
updatedBook = new Book(myBook)
updatedBook.name = "Updated name"
it "merges old object attributes with new object attributes", ->
mappedBook = IdentityMap.map(updatedBook)
expect(mappedBook.name).toBe updatedBook.name
expect(mappedBook.features).toBe myBook.features
it "returns old object reference", ->
expect(IdentityMap.map(updatedBook)).toBe myBook
it 'maps all childrens objects too', ->
book1 = new Book id: 10, name: 'Name'
book2 = new Book id: 20, name: 'Name'
IdentityMap.map(book1)
IdentityMap.map(book2)
parent_book = new Book id: 30, name: 'Name'
IdentityMap.map(parent_book)
collection1 = new BookList id: 1, items: []
collection1.parent_book = parent_book
IdentityMap.map(collection1)
updated_collection = new BookList({id: 1, items: [
{id: 10, name: 'Name 1'},
{id: 20, name: 'Name 2'}
], parent_book: {id: 30, name: 'Name 3'}})
IdentityMap.map(updated_collection)
expect(book1.name).toEqual 'Name 1'
expect(book2.name).toEqual 'Name 2'
expect(parent_book.name).toEqual 'Name 3'
expect(collection1.items).toEqual [book1, book2]
describe "when passed object is not mappable", ->
response = undefined
beforeEach ->
response = {status: 200, body: updatedBook}
it 'still maps all object properties', ->
mapped_response = IdentityMap.map(response)
expect(mapped_response).toBe response
expect(mapped_response.body).toBe myBook
describe ".clear", ->
it "removes all object from identity map, but keeps object references untouched", ->
IdentityMap.map myBook
anotherBook = new Book(id: myBook + 1, name: "another name")
IdentityMap.map anotherBook
IdentityMap.clear()
expect(myBook.constructor).toBe Book
expect(anotherBook.constructor).toBe Book
expect(IdentityMap.get(myBook)).toBe undefined
expect(IdentityMap.get(anotherBook)).toBe undefined
describe "IdentityMapProvider", ->
it "allows to configure entityId function", ->
module "identity-map", (IdentityMapProvider) ->
IdentityMapProvider.setEntityIdFn (e) ->
e.name
return
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
anotherBook = new Book(id: myBook.id, name: "name!", features: myBook.features)
IdentityMap.map myBook
IdentityMap.map anotherBook
expect(IdentityMap.get(myBook)).toBe myBook
expect(IdentityMap.get(anotherBook)).toBe anotherBook
it "allows to configure entityType function", ->
module "identity-map", (IdentityMapProvider) ->
IdentityMapProvider.setEntityTypeFn (e) ->
e.constructor.toString()
return
inject ($injector) ->
IdentityMap = $injector.get("IdentityMap")
SuperBook = (plainValue) ->
@prototype = Book::
@id = plainValue.id
@name = plainValue.name
@features = plainValue.features
return
IdentityMap.map myBook
anotherBook = new SuperBook(myBook)
IdentityMap.map anotherBook
expect(IdentityMap.get(myBook)).toBe myBook
expect(IdentityMap.get(anotherBook)).toBe anotherBook
|
[
{
"context": "---------------------------------------------\n# @: The Anh\n# d: 150329\n# f: survey controller\n# ------------",
"end": 86,
"score": 0.9996222853660583,
"start": 79,
"tag": "NAME",
"value": "The Anh"
}
] | app/assets/javascripts/controllers/survey.coffee | theanh/96b0e903c0b3889b355a821204fb7dcf | 31 | 'use strict'
# ----------------------------------------------------------
# @: The Anh
# d: 150329
# f: survey controller
# ----------------------------------------------------------
angular.module('AppSurvey')
.controller 'SurveyCtrl', [
'$scope',
'$rails',
'Survey',
($scope, $rails, Survey) ->
# --------------------------------------------------------
# private variable
$survey = null
# --------------------------------------------------------
# public variable
# $scope.submitted = false # variable set state submitted
$scope.load_error = null
$scope.load_result = null
$scope.master = {}
$scope.attempts = {}
$scope.cus_validate = {}
# --------------------------------------------------------
# public process
# --- init survey
$scope.initSurvey = (survey_id, arr_chk_valid_questions)->
$scope.survey_id = survey_id
# init custom validate value
$scope.cus_validate.question = {}
angular.forEach arr_chk_valid_questions, (value, index)->
$scope.cus_validate.question[value] = true
return
# --- validate
$scope.checkValidate = ()->
flg_validate = true
if !$scope.attempts.question
angular.forEach $scope.cus_validate.question, (value, index)->
if flg_validate
$scope.cus_validate.question[index] = false
flg_validate = false
else
angular.forEach $scope.cus_validate.question, (value, index)->
if flg_validate
if !$scope.attempts.question[index]
$scope.cus_validate.question[index] = false
flg_validate = false
else
if $scope.attempts.question[index].length == 0
$scope.cus_validate.question[index] = false
flg_validate = false
else
# scan value false
flg_validate_f = false
angular.forEach $scope.attempts.question[index], (v, i)->
flg_validate_f = true if v
unless flg_validate_f
$scope.cus_validate.question[index] = false
flg_validate = false
unless flg_validate
$scope.form_survey.$valid = false
# timeout is needed for Chrome (is a bug in Chrome)
setTimeout(()->
# auto focus on the first invalid element!
frt_invalid = $('form[name=form_survey] .cus-validate.ng-invalid')[0]
if frt_invalid
frt_invalid.focus()
, 1)
return false
return true
# --- submit
$scope.submitSurvey = () ->
# check custom validate
if $scope.checkValidate()
$scope.auth_error = null
# clone scope
$scope.attempts['survey_id'] = $scope.survey_id
$scope.master = angular.copy $scope.attempts
# add new instances
$survey = new Survey()
$survey.attemptSurvey($scope.master).then (res)->
if res.status == 1
location.href = $rails.root_url + 'survey/result'
return
# --- show survey result
$scope.showResult = (survey_id)->
# common chart config
$scope.chart_config =
options:
chart:
plotBackgroundColor: null
plotBorderWidth: null
plotShadow: false
tooltip:
pointFormat: '{series.name}: <b>{point.percentage:.1f}%</b>'
# legend:
# align: 'right'
# verticalAlign: 'top'
# layout: 'vertical'
# x: 0
# y: 100
# floating: true
plotOptions:
pie:
allowPointSelect: true
cursor: 'pointer'
dataLabels:
enabled: false
format: '<b>{point.name}</b>: {point.percentage:.1f} %'
style:
color: Highcharts.theme && Highcharts.theme.contrastTextColor || 'black'
showInLegend: true
title: text: ''
series: [{
type: 'pie'
name: ''
data: []
}]
loading: false
func: (chart) ->
return
# add new instances
data_statistic = []
$survey = new Survey()
param =
survey_id: survey_id
$survey.showSurveyResult(param).then (res)->
if res.status == 1 && res.data[0] && res.data[0]['question_id']
# analyze returned data, group by question
data_statistic = []
cur_q = res.data[0]['question_id']
vi_length = res.data.length - 1
angular.forEach res.data, (v, i)->
if (cur_q != v['question_id']) || ( i == vi_length )
$scope['chart_config'+cur_q] = angular.copy $scope.chart_config
# if the last element => push it to final array!
if i == vi_length
data_statistic.push([v['option_text'], parseInt(v['count'])])
$scope['chart_config'+cur_q]['series'][0]['data'] = data_statistic
# reset flag
cur_q = v['question_id']
data_statistic = []
# push option result to array
data_statistic.push([v['option_text'], parseInt(v['count'])])
return
return
return
]
.factory 'Survey', [
'$http',
'$q',
'$rails',
'Validate',
'Common',
($http, $q, $rails, Validate, Common) ->
# -------------------------
# static variables
$common = new Common
$validate = new Validate
_url =
attempt_survey: $rails.root_url + 'api/v1/api_survey1'
show_survey_result: $rails.root_url + 'api/v1/api_survey2'
Survey = ()->
# private
# public
survey =
attemptSurvey: (survey)->
deferred = $q.defer()
# $common.showLoading()
url = _url.attempt_survey
$http.post(
url,
survey: survey
).then (response) ->
data = $validate.parseResult response
# $common.hideLoading()
deferred.resolve data
return
return deferred.promise
showSurveyResult: (survey)->
deferred = $q.defer()
# $common.showLoading()
url = _url.show_survey_result
$http.post(
url,
survey: survey
).then (response) ->
data = $validate.parseResult response
# $common.hideLoading()
deferred.resolve data
return
return deferred.promise
]
| 93845 | 'use strict'
# ----------------------------------------------------------
# @: <NAME>
# d: 150329
# f: survey controller
# ----------------------------------------------------------
angular.module('AppSurvey')
.controller 'SurveyCtrl', [
'$scope',
'$rails',
'Survey',
($scope, $rails, Survey) ->
# --------------------------------------------------------
# private variable
$survey = null
# --------------------------------------------------------
# public variable
# $scope.submitted = false # variable set state submitted
$scope.load_error = null
$scope.load_result = null
$scope.master = {}
$scope.attempts = {}
$scope.cus_validate = {}
# --------------------------------------------------------
# public process
# --- init survey
$scope.initSurvey = (survey_id, arr_chk_valid_questions)->
$scope.survey_id = survey_id
# init custom validate value
$scope.cus_validate.question = {}
angular.forEach arr_chk_valid_questions, (value, index)->
$scope.cus_validate.question[value] = true
return
# --- validate
$scope.checkValidate = ()->
flg_validate = true
if !$scope.attempts.question
angular.forEach $scope.cus_validate.question, (value, index)->
if flg_validate
$scope.cus_validate.question[index] = false
flg_validate = false
else
angular.forEach $scope.cus_validate.question, (value, index)->
if flg_validate
if !$scope.attempts.question[index]
$scope.cus_validate.question[index] = false
flg_validate = false
else
if $scope.attempts.question[index].length == 0
$scope.cus_validate.question[index] = false
flg_validate = false
else
# scan value false
flg_validate_f = false
angular.forEach $scope.attempts.question[index], (v, i)->
flg_validate_f = true if v
unless flg_validate_f
$scope.cus_validate.question[index] = false
flg_validate = false
unless flg_validate
$scope.form_survey.$valid = false
# timeout is needed for Chrome (is a bug in Chrome)
setTimeout(()->
# auto focus on the first invalid element!
frt_invalid = $('form[name=form_survey] .cus-validate.ng-invalid')[0]
if frt_invalid
frt_invalid.focus()
, 1)
return false
return true
# --- submit
$scope.submitSurvey = () ->
# check custom validate
if $scope.checkValidate()
$scope.auth_error = null
# clone scope
$scope.attempts['survey_id'] = $scope.survey_id
$scope.master = angular.copy $scope.attempts
# add new instances
$survey = new Survey()
$survey.attemptSurvey($scope.master).then (res)->
if res.status == 1
location.href = $rails.root_url + 'survey/result'
return
# --- show survey result
$scope.showResult = (survey_id)->
# common chart config
$scope.chart_config =
options:
chart:
plotBackgroundColor: null
plotBorderWidth: null
plotShadow: false
tooltip:
pointFormat: '{series.name}: <b>{point.percentage:.1f}%</b>'
# legend:
# align: 'right'
# verticalAlign: 'top'
# layout: 'vertical'
# x: 0
# y: 100
# floating: true
plotOptions:
pie:
allowPointSelect: true
cursor: 'pointer'
dataLabels:
enabled: false
format: '<b>{point.name}</b>: {point.percentage:.1f} %'
style:
color: Highcharts.theme && Highcharts.theme.contrastTextColor || 'black'
showInLegend: true
title: text: ''
series: [{
type: 'pie'
name: ''
data: []
}]
loading: false
func: (chart) ->
return
# add new instances
data_statistic = []
$survey = new Survey()
param =
survey_id: survey_id
$survey.showSurveyResult(param).then (res)->
if res.status == 1 && res.data[0] && res.data[0]['question_id']
# analyze returned data, group by question
data_statistic = []
cur_q = res.data[0]['question_id']
vi_length = res.data.length - 1
angular.forEach res.data, (v, i)->
if (cur_q != v['question_id']) || ( i == vi_length )
$scope['chart_config'+cur_q] = angular.copy $scope.chart_config
# if the last element => push it to final array!
if i == vi_length
data_statistic.push([v['option_text'], parseInt(v['count'])])
$scope['chart_config'+cur_q]['series'][0]['data'] = data_statistic
# reset flag
cur_q = v['question_id']
data_statistic = []
# push option result to array
data_statistic.push([v['option_text'], parseInt(v['count'])])
return
return
return
]
.factory 'Survey', [
'$http',
'$q',
'$rails',
'Validate',
'Common',
($http, $q, $rails, Validate, Common) ->
# -------------------------
# static variables
$common = new Common
$validate = new Validate
_url =
attempt_survey: $rails.root_url + 'api/v1/api_survey1'
show_survey_result: $rails.root_url + 'api/v1/api_survey2'
Survey = ()->
# private
# public
survey =
attemptSurvey: (survey)->
deferred = $q.defer()
# $common.showLoading()
url = _url.attempt_survey
$http.post(
url,
survey: survey
).then (response) ->
data = $validate.parseResult response
# $common.hideLoading()
deferred.resolve data
return
return deferred.promise
showSurveyResult: (survey)->
deferred = $q.defer()
# $common.showLoading()
url = _url.show_survey_result
$http.post(
url,
survey: survey
).then (response) ->
data = $validate.parseResult response
# $common.hideLoading()
deferred.resolve data
return
return deferred.promise
]
| true | 'use strict'
# ----------------------------------------------------------
# @: PI:NAME:<NAME>END_PI
# d: 150329
# f: survey controller
# ----------------------------------------------------------
angular.module('AppSurvey')
.controller 'SurveyCtrl', [
'$scope',
'$rails',
'Survey',
($scope, $rails, Survey) ->
# --------------------------------------------------------
# private variable
$survey = null
# --------------------------------------------------------
# public variable
# $scope.submitted = false # variable set state submitted
$scope.load_error = null
$scope.load_result = null
$scope.master = {}
$scope.attempts = {}
$scope.cus_validate = {}
# --------------------------------------------------------
# public process
# --- init survey
$scope.initSurvey = (survey_id, arr_chk_valid_questions)->
$scope.survey_id = survey_id
# init custom validate value
$scope.cus_validate.question = {}
angular.forEach arr_chk_valid_questions, (value, index)->
$scope.cus_validate.question[value] = true
return
# --- validate
$scope.checkValidate = ()->
flg_validate = true
if !$scope.attempts.question
angular.forEach $scope.cus_validate.question, (value, index)->
if flg_validate
$scope.cus_validate.question[index] = false
flg_validate = false
else
angular.forEach $scope.cus_validate.question, (value, index)->
if flg_validate
if !$scope.attempts.question[index]
$scope.cus_validate.question[index] = false
flg_validate = false
else
if $scope.attempts.question[index].length == 0
$scope.cus_validate.question[index] = false
flg_validate = false
else
# scan value false
flg_validate_f = false
angular.forEach $scope.attempts.question[index], (v, i)->
flg_validate_f = true if v
unless flg_validate_f
$scope.cus_validate.question[index] = false
flg_validate = false
unless flg_validate
$scope.form_survey.$valid = false
# timeout is needed for Chrome (is a bug in Chrome)
setTimeout(()->
# auto focus on the first invalid element!
frt_invalid = $('form[name=form_survey] .cus-validate.ng-invalid')[0]
if frt_invalid
frt_invalid.focus()
, 1)
return false
return true
# --- submit
$scope.submitSurvey = () ->
# check custom validate
if $scope.checkValidate()
$scope.auth_error = null
# clone scope
$scope.attempts['survey_id'] = $scope.survey_id
$scope.master = angular.copy $scope.attempts
# add new instances
$survey = new Survey()
$survey.attemptSurvey($scope.master).then (res)->
if res.status == 1
location.href = $rails.root_url + 'survey/result'
return
# --- show survey result
$scope.showResult = (survey_id)->
# common chart config
$scope.chart_config =
options:
chart:
plotBackgroundColor: null
plotBorderWidth: null
plotShadow: false
tooltip:
pointFormat: '{series.name}: <b>{point.percentage:.1f}%</b>'
# legend:
# align: 'right'
# verticalAlign: 'top'
# layout: 'vertical'
# x: 0
# y: 100
# floating: true
plotOptions:
pie:
allowPointSelect: true
cursor: 'pointer'
dataLabels:
enabled: false
format: '<b>{point.name}</b>: {point.percentage:.1f} %'
style:
color: Highcharts.theme && Highcharts.theme.contrastTextColor || 'black'
showInLegend: true
title: text: ''
series: [{
type: 'pie'
name: ''
data: []
}]
loading: false
func: (chart) ->
return
# add new instances
data_statistic = []
$survey = new Survey()
param =
survey_id: survey_id
$survey.showSurveyResult(param).then (res)->
if res.status == 1 && res.data[0] && res.data[0]['question_id']
# analyze returned data, group by question
data_statistic = []
cur_q = res.data[0]['question_id']
vi_length = res.data.length - 1
angular.forEach res.data, (v, i)->
if (cur_q != v['question_id']) || ( i == vi_length )
$scope['chart_config'+cur_q] = angular.copy $scope.chart_config
# if the last element => push it to final array!
if i == vi_length
data_statistic.push([v['option_text'], parseInt(v['count'])])
$scope['chart_config'+cur_q]['series'][0]['data'] = data_statistic
# reset flag
cur_q = v['question_id']
data_statistic = []
# push option result to array
data_statistic.push([v['option_text'], parseInt(v['count'])])
return
return
return
]
.factory 'Survey', [
'$http',
'$q',
'$rails',
'Validate',
'Common',
($http, $q, $rails, Validate, Common) ->
# -------------------------
# static variables
$common = new Common
$validate = new Validate
_url =
attempt_survey: $rails.root_url + 'api/v1/api_survey1'
show_survey_result: $rails.root_url + 'api/v1/api_survey2'
Survey = ()->
# private
# public
survey =
attemptSurvey: (survey)->
deferred = $q.defer()
# $common.showLoading()
url = _url.attempt_survey
$http.post(
url,
survey: survey
).then (response) ->
data = $validate.parseResult response
# $common.hideLoading()
deferred.resolve data
return
return deferred.promise
showSurveyResult: (survey)->
deferred = $q.defer()
# $common.showLoading()
url = _url.show_survey_result
$http.post(
url,
survey: survey
).then (response) ->
data = $validate.parseResult response
# $common.hideLoading()
deferred.resolve data
return
return deferred.promise
]
|
[
{
"context": "# 'constrainToCircle' module v1.0\n# by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn\n",
"end": 49,
"score": 0.9999063611030579,
"start": 39,
"tag": "NAME",
"value": "Marc Krenn"
},
{
"context": "oCircle' module v1.0\n# by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn\n# probs to Tisho Georgiev and Brian",
"end": 84,
"score": 0.9999284744262695,
"start": 64,
"tag": "EMAIL",
"value": "marc.krenn@gmail.com"
},
{
"context": "by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn\n# probs to Tisho Georgiev and Brian M. Scott\n#\n# ",
"end": 98,
"score": 0.9992769956588745,
"start": 87,
"tag": "USERNAME",
"value": "@marc_krenn"
},
{
"context": "15 | marc.krenn@gmail.com | @marc_krenn\n# probs to Tisho Georgiev and Brian M. Scott\n#\n# Add the following line to ",
"end": 124,
"score": 0.9998680353164673,
"start": 110,
"tag": "NAME",
"value": "Tisho Georgiev"
},
{
"context": "il.com | @marc_krenn\n# probs to Tisho Georgiev and Brian M. Scott\n#\n# Add the following line to your project in Fra",
"end": 143,
"score": 0.9999041557312012,
"start": 129,
"tag": "NAME",
"value": "Brian M. Scott"
}
] | module/constrainToCircle.coffee | marckrenn/framer-constrainToCircle | 15 | # 'constrainToCircle' module v1.0
# by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn
# probs to Tisho Georgiev and Brian M. Scott
#
# Add the following line to your project in Framer Studio.
# constrainToCircle = require "constrainToCircle"
d = 0.0
vX = 0.0
vY = 0.0
magV = 0.0
aX = 0.0
layerAnimates = false
# Enables dragging and constrains dragging-distance to a defined circle
exports.enable = (layer,circleCenterX,circleCenterY,radius) ->
helper = layer.copy()
helper.name = "circleConstraintHelper"
helper.opacity = 0
helper.index = 1000
helper.draggable.enabled = true
helper.draggable.momentum = false
exports.helper = helper
exports.isDragging = false
exports.velocityX = 0
exports.velocityY = 0
helper.on Events.DragStart, ->
layerAnimates = false
layer.animateStop()
layer.emit Events.DragStart
exports.isDragging = true
helper.on Events.DragMove, ->
layer.emit Events.DragMove
exports.velocityX = helper.draggable.velocity.x
exports.velocityY = helper.draggable.velocity.y
exports.isDragging = true
d = (helper.x - circleCenterX) ** 2 + (helper.y - circleCenterY) ** 2
if d > radius ** 2
vX = helper.x - circleCenterX
vY = helper.y - circleCenterY
magV = Math.sqrt(vX* vX + vY * vY)
aX = circleCenterX + vX / magV * radius
aY = circleCenterY + vY / magV * radius
layer.x = aX
layer.y = aY
else
layer.x = helper.x
layer.y = helper.y
helper.on Events.DragEnd, ->
layer.emit Events.DragEnd
exports.isDragging = false
exports.velocityX = 0
exports.velocityY = 0
helper.x = layer.x
helper.y = layer.y
layer.on Events.AnimationStart, ->
layerAnimates = true
layer.on Events.AnimationEnd, ->
layerAnimates = false
layer.on "change:frame", ->
if layerAnimates == true
helper.x = layer.x
helper.y = layer.y
helper.width = layer.width
helper.height = layer.height
helper.rotation = layer.rotation
helper.index = 1000
# Updates
exports.update = (layer,circleCenterX,circleCenterY,radius) ->
helper.destroy()
exports.enable(layer,circleCenterX,circleCenterY,radius)
# Disables dragging
exports.disable = (layerA) ->
helper.destroy() | 190730 | # 'constrainToCircle' module v1.0
# by <NAME>, Sept. 2015 | <EMAIL> | @marc_krenn
# probs to <NAME> and <NAME>
#
# Add the following line to your project in Framer Studio.
# constrainToCircle = require "constrainToCircle"
d = 0.0
vX = 0.0
vY = 0.0
magV = 0.0
aX = 0.0
layerAnimates = false
# Enables dragging and constrains dragging-distance to a defined circle
exports.enable = (layer,circleCenterX,circleCenterY,radius) ->
helper = layer.copy()
helper.name = "circleConstraintHelper"
helper.opacity = 0
helper.index = 1000
helper.draggable.enabled = true
helper.draggable.momentum = false
exports.helper = helper
exports.isDragging = false
exports.velocityX = 0
exports.velocityY = 0
helper.on Events.DragStart, ->
layerAnimates = false
layer.animateStop()
layer.emit Events.DragStart
exports.isDragging = true
helper.on Events.DragMove, ->
layer.emit Events.DragMove
exports.velocityX = helper.draggable.velocity.x
exports.velocityY = helper.draggable.velocity.y
exports.isDragging = true
d = (helper.x - circleCenterX) ** 2 + (helper.y - circleCenterY) ** 2
if d > radius ** 2
vX = helper.x - circleCenterX
vY = helper.y - circleCenterY
magV = Math.sqrt(vX* vX + vY * vY)
aX = circleCenterX + vX / magV * radius
aY = circleCenterY + vY / magV * radius
layer.x = aX
layer.y = aY
else
layer.x = helper.x
layer.y = helper.y
helper.on Events.DragEnd, ->
layer.emit Events.DragEnd
exports.isDragging = false
exports.velocityX = 0
exports.velocityY = 0
helper.x = layer.x
helper.y = layer.y
layer.on Events.AnimationStart, ->
layerAnimates = true
layer.on Events.AnimationEnd, ->
layerAnimates = false
layer.on "change:frame", ->
if layerAnimates == true
helper.x = layer.x
helper.y = layer.y
helper.width = layer.width
helper.height = layer.height
helper.rotation = layer.rotation
helper.index = 1000
# Updates
exports.update = (layer,circleCenterX,circleCenterY,radius) ->
helper.destroy()
exports.enable(layer,circleCenterX,circleCenterY,radius)
# Disables dragging
exports.disable = (layerA) ->
helper.destroy() | true | # 'constrainToCircle' module v1.0
# by PI:NAME:<NAME>END_PI, Sept. 2015 | PI:EMAIL:<EMAIL>END_PI | @marc_krenn
# probs to PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI
#
# Add the following line to your project in Framer Studio.
# constrainToCircle = require "constrainToCircle"
d = 0.0
vX = 0.0
vY = 0.0
magV = 0.0
aX = 0.0
layerAnimates = false
# Enables dragging and constrains dragging-distance to a defined circle
exports.enable = (layer,circleCenterX,circleCenterY,radius) ->
helper = layer.copy()
helper.name = "circleConstraintHelper"
helper.opacity = 0
helper.index = 1000
helper.draggable.enabled = true
helper.draggable.momentum = false
exports.helper = helper
exports.isDragging = false
exports.velocityX = 0
exports.velocityY = 0
helper.on Events.DragStart, ->
layerAnimates = false
layer.animateStop()
layer.emit Events.DragStart
exports.isDragging = true
helper.on Events.DragMove, ->
layer.emit Events.DragMove
exports.velocityX = helper.draggable.velocity.x
exports.velocityY = helper.draggable.velocity.y
exports.isDragging = true
d = (helper.x - circleCenterX) ** 2 + (helper.y - circleCenterY) ** 2
if d > radius ** 2
vX = helper.x - circleCenterX
vY = helper.y - circleCenterY
magV = Math.sqrt(vX* vX + vY * vY)
aX = circleCenterX + vX / magV * radius
aY = circleCenterY + vY / magV * radius
layer.x = aX
layer.y = aY
else
layer.x = helper.x
layer.y = helper.y
helper.on Events.DragEnd, ->
layer.emit Events.DragEnd
exports.isDragging = false
exports.velocityX = 0
exports.velocityY = 0
helper.x = layer.x
helper.y = layer.y
layer.on Events.AnimationStart, ->
layerAnimates = true
layer.on Events.AnimationEnd, ->
layerAnimates = false
layer.on "change:frame", ->
if layerAnimates == true
helper.x = layer.x
helper.y = layer.y
helper.width = layer.width
helper.height = layer.height
helper.rotation = layer.rotation
helper.index = 1000
# Updates
exports.update = (layer,circleCenterX,circleCenterY,radius) ->
helper.destroy()
exports.enable(layer,circleCenterX,circleCenterY,radius)
# Disables dragging
exports.disable = (layerA) ->
helper.destroy() |
[
{
"context": "ew Rule to flag non-matching identifiers\n# @author Matthieu Larcher\n###\n\n'use strict'\n\n{isDeclarationAssignment} = re",
"end": 85,
"score": 0.9998502731323242,
"start": 69,
"tag": "NAME",
"value": "Matthieu Larcher"
}
] | src/rules/id-match.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Rule to flag non-matching identifiers
# @author Matthieu Larcher
###
'use strict'
{isDeclarationAssignment} = require '../util/ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'require identifiers to match a specified regular expression'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/id-match'
schema: [
type: 'string'
,
type: 'object'
properties:
properties:
type: 'boolean'
]
create: (context) ->
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
pattern = context.options[0] or '^.+$'
regexp = new RegExp pattern
options = context.options[1] or {}
properties = !!options.properties
onlyDeclarations = !!options.onlyDeclarations
###*
# Checks if a string matches the provided pattern
# @param {string} name The string to check.
# @returns {boolean} if the string is a match
# @private
###
isInvalid = (name) -> not regexp.test name
###*
# Verifies if we should report an error or not based on the effective
# parent node and the identifier name.
# @param {ASTNode} effectiveParent The effective parent node of the node to be reported
# @param {string} name The identifier name of the identifier node
# @returns {boolean} whether an error should be reported or not
###
shouldReport = (effectiveParent, name) ->
effectiveParent.type isnt 'CallExpression' and
effectiveParent.type isnt 'NewExpression' and
isInvalid name
###*
# Reports an AST node as a rule violation.
# @param {ASTNode} node The node to report.
# @returns {void}
# @private
###
report = (node) ->
context.report {
node
message:
"Identifier '{{name}}' does not match the pattern '{{pattern}}'."
data: {
name: node.name
pattern
}
}
Identifier: (node) ->
{name, parent} = node
effectiveParent =
if parent.type is 'MemberExpression'
parent.parent
else
parent
switch parent.type
when 'MemberExpression'
return unless properties
# Always check object names
if parent.object.type is 'Identifier' and parent.object.name is name
if isInvalid name then report node
# Report AssignmentExpressions only if they are the left side of the assignment
else if (
effectiveParent.type is 'AssignmentExpression' and
(effectiveParent.right.type isnt 'MemberExpression' or
(effectiveParent.left.type is 'MemberExpression' and
effectiveParent.left.property.name is name))
)
if isInvalid name then report node
when 'Property'
return unless properties and parent.key.name is name
if shouldReport effectiveParent, name then report node
when 'ClassDeclaration'
return unless parent.id is node
if shouldReport effectiveParent, name then report node
else
isDeclaration =
effectiveParent.type in [
'FunctionDeclaration'
'VariableDeclarator'
] or isDeclarationAssignment effectiveParent
return if onlyDeclarations and not isDeclaration
if shouldReport effectiveParent, name then report node
| 203478 | ###*
# @fileoverview Rule to flag non-matching identifiers
# @author <NAME>
###
'use strict'
{isDeclarationAssignment} = require '../util/ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'require identifiers to match a specified regular expression'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/id-match'
schema: [
type: 'string'
,
type: 'object'
properties:
properties:
type: 'boolean'
]
create: (context) ->
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
pattern = context.options[0] or '^.+$'
regexp = new RegExp pattern
options = context.options[1] or {}
properties = !!options.properties
onlyDeclarations = !!options.onlyDeclarations
###*
# Checks if a string matches the provided pattern
# @param {string} name The string to check.
# @returns {boolean} if the string is a match
# @private
###
isInvalid = (name) -> not regexp.test name
###*
# Verifies if we should report an error or not based on the effective
# parent node and the identifier name.
# @param {ASTNode} effectiveParent The effective parent node of the node to be reported
# @param {string} name The identifier name of the identifier node
# @returns {boolean} whether an error should be reported or not
###
shouldReport = (effectiveParent, name) ->
effectiveParent.type isnt 'CallExpression' and
effectiveParent.type isnt 'NewExpression' and
isInvalid name
###*
# Reports an AST node as a rule violation.
# @param {ASTNode} node The node to report.
# @returns {void}
# @private
###
report = (node) ->
context.report {
node
message:
"Identifier '{{name}}' does not match the pattern '{{pattern}}'."
data: {
name: node.name
pattern
}
}
Identifier: (node) ->
{name, parent} = node
effectiveParent =
if parent.type is 'MemberExpression'
parent.parent
else
parent
switch parent.type
when 'MemberExpression'
return unless properties
# Always check object names
if parent.object.type is 'Identifier' and parent.object.name is name
if isInvalid name then report node
# Report AssignmentExpressions only if they are the left side of the assignment
else if (
effectiveParent.type is 'AssignmentExpression' and
(effectiveParent.right.type isnt 'MemberExpression' or
(effectiveParent.left.type is 'MemberExpression' and
effectiveParent.left.property.name is name))
)
if isInvalid name then report node
when 'Property'
return unless properties and parent.key.name is name
if shouldReport effectiveParent, name then report node
when 'ClassDeclaration'
return unless parent.id is node
if shouldReport effectiveParent, name then report node
else
isDeclaration =
effectiveParent.type in [
'FunctionDeclaration'
'VariableDeclarator'
] or isDeclarationAssignment effectiveParent
return if onlyDeclarations and not isDeclaration
if shouldReport effectiveParent, name then report node
| true | ###*
# @fileoverview Rule to flag non-matching identifiers
# @author PI:NAME:<NAME>END_PI
###
'use strict'
{isDeclarationAssignment} = require '../util/ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'require identifiers to match a specified regular expression'
category: 'Stylistic Issues'
recommended: no
url: 'https://eslint.org/docs/rules/id-match'
schema: [
type: 'string'
,
type: 'object'
properties:
properties:
type: 'boolean'
]
create: (context) ->
#--------------------------------------------------------------------------
# Helpers
#--------------------------------------------------------------------------
pattern = context.options[0] or '^.+$'
regexp = new RegExp pattern
options = context.options[1] or {}
properties = !!options.properties
onlyDeclarations = !!options.onlyDeclarations
###*
# Checks if a string matches the provided pattern
# @param {string} name The string to check.
# @returns {boolean} if the string is a match
# @private
###
isInvalid = (name) -> not regexp.test name
###*
# Verifies if we should report an error or not based on the effective
# parent node and the identifier name.
# @param {ASTNode} effectiveParent The effective parent node of the node to be reported
# @param {string} name The identifier name of the identifier node
# @returns {boolean} whether an error should be reported or not
###
shouldReport = (effectiveParent, name) ->
effectiveParent.type isnt 'CallExpression' and
effectiveParent.type isnt 'NewExpression' and
isInvalid name
###*
# Reports an AST node as a rule violation.
# @param {ASTNode} node The node to report.
# @returns {void}
# @private
###
report = (node) ->
context.report {
node
message:
"Identifier '{{name}}' does not match the pattern '{{pattern}}'."
data: {
name: node.name
pattern
}
}
Identifier: (node) ->
{name, parent} = node
effectiveParent =
if parent.type is 'MemberExpression'
parent.parent
else
parent
switch parent.type
when 'MemberExpression'
return unless properties
# Always check object names
if parent.object.type is 'Identifier' and parent.object.name is name
if isInvalid name then report node
# Report AssignmentExpressions only if they are the left side of the assignment
else if (
effectiveParent.type is 'AssignmentExpression' and
(effectiveParent.right.type isnt 'MemberExpression' or
(effectiveParent.left.type is 'MemberExpression' and
effectiveParent.left.property.name is name))
)
if isInvalid name then report node
when 'Property'
return unless properties and parent.key.name is name
if shouldReport effectiveParent, name then report node
when 'ClassDeclaration'
return unless parent.id is node
if shouldReport effectiveParent, name then report node
else
isDeclaration =
effectiveParent.type in [
'FunctionDeclaration'
'VariableDeclarator'
] or isDeclarationAssignment effectiveParent
return if onlyDeclarations and not isDeclaration
if shouldReport effectiveParent, name then report node
|
[
{
"context": " rich text editing jQuery UI widget\n# (c) 2011 Henri Bergius, IKS Consortium\n# Hallo may be freely distrib",
"end": 79,
"score": 0.9998520016670227,
"start": 66,
"tag": "NAME",
"value": "Henri Bergius"
}
] | src/widgets/dropdownbutton.coffee | git-j/hallo | 0 | # Hallo - a rich text editing jQuery UI widget
# (c) 2011 Henri Bergius, IKS Consortium
# Hallo may be freely distributed under the MIT license
((jQuery) ->
jQuery.widget 'IKS.hallodropdownbutton',
button: null
options:
uuid: ''
label: null
icon: null
editable: null
command: null
target: '' # dropdown
setup: null
cssClass: null
_create: ->
@options.icon ?= "icon-#{@options.command.toLowerCase()}"
_init: ->
target = jQuery @options.target
target.css 'position', 'absolute'
target.addClass 'dropdown-menu'
target.hide()
@button = @_prepareButton() unless @button
@button.bind 'click', =>
jQuery('.misspelled').remove()
if target.hasClass 'open'
@_hideTarget()
return
@_showTarget()
target.bind 'click', =>
@_hideTarget()
@element.append @button
bindShowHandler: (event) ->
@_showTarget(event.target)
bindShow: (selector) ->
event_name = 'click'
jQuery(selector).live event_name, =>
@bindShowHandler()
_showTarget: (select_target) ->
jQuery(".dropdown-form:visible, .dropdown-menu:visible").each (index,item) ->
jQuery(item).trigger('hide')
target = jQuery @options.target
@options.editable.storeContentPosition()
@options.setup(select_target) if @options.setup
@_updateTargetPosition()
target.addClass 'open'
target.show()
_hideTarget: ->
target = jQuery @options.target
if ( target.hasClass('open'))
target.removeClass 'open'
target.hide()
@options.editable.restoreContentPosition()
_updateTargetPosition: ->
target = jQuery @options.target
{top, left} = @button.position()
top += @button.outerHeight()
target.css 'top', top
last_button = target.closest('.hallotoolbar').find('button:last')
last_button_pos =last_button.position().left
last_button_pos+=last_button.width()
if ( left+target.width() > last_button_pos )
target.css 'left', left - target.width()+last_button.width()
else
target.css 'left', left
_prepareButton: ->
id = "#{@options.uuid}-#{@options.command}"
button_str = "<button id=\"#{id}\" data-toggle=\"dropdown\""
button_str+= " class=\"#{@options.command}_button ui-button ui-widget ui-state-default ui-corner-all\""
button_str+= " data-target=\"##{@options.target.attr('id')}\""
button_str+= " title=\"#{@options.label}\" rel=\"#{@options.command}\""
button_str+= "></button>"
buttonEl = jQuery button_str;
buttonEl.addClass @options.cssClass if @options.cssClass
buttonEl.addClass 'btn-large' if @options.editable.options.touchScreen
button = buttonEl.button { "icons": { "primary": "ui-icon-#{@options.command}-p" }, "text": false }
button.addClass @options.cssClass if @options.cssClass
button
)(jQuery)
| 146387 | # Hallo - a rich text editing jQuery UI widget
# (c) 2011 <NAME>, IKS Consortium
# Hallo may be freely distributed under the MIT license
((jQuery) ->
jQuery.widget 'IKS.hallodropdownbutton',
button: null
options:
uuid: ''
label: null
icon: null
editable: null
command: null
target: '' # dropdown
setup: null
cssClass: null
_create: ->
@options.icon ?= "icon-#{@options.command.toLowerCase()}"
_init: ->
target = jQuery @options.target
target.css 'position', 'absolute'
target.addClass 'dropdown-menu'
target.hide()
@button = @_prepareButton() unless @button
@button.bind 'click', =>
jQuery('.misspelled').remove()
if target.hasClass 'open'
@_hideTarget()
return
@_showTarget()
target.bind 'click', =>
@_hideTarget()
@element.append @button
bindShowHandler: (event) ->
@_showTarget(event.target)
bindShow: (selector) ->
event_name = 'click'
jQuery(selector).live event_name, =>
@bindShowHandler()
_showTarget: (select_target) ->
jQuery(".dropdown-form:visible, .dropdown-menu:visible").each (index,item) ->
jQuery(item).trigger('hide')
target = jQuery @options.target
@options.editable.storeContentPosition()
@options.setup(select_target) if @options.setup
@_updateTargetPosition()
target.addClass 'open'
target.show()
_hideTarget: ->
target = jQuery @options.target
if ( target.hasClass('open'))
target.removeClass 'open'
target.hide()
@options.editable.restoreContentPosition()
_updateTargetPosition: ->
target = jQuery @options.target
{top, left} = @button.position()
top += @button.outerHeight()
target.css 'top', top
last_button = target.closest('.hallotoolbar').find('button:last')
last_button_pos =last_button.position().left
last_button_pos+=last_button.width()
if ( left+target.width() > last_button_pos )
target.css 'left', left - target.width()+last_button.width()
else
target.css 'left', left
_prepareButton: ->
id = "#{@options.uuid}-#{@options.command}"
button_str = "<button id=\"#{id}\" data-toggle=\"dropdown\""
button_str+= " class=\"#{@options.command}_button ui-button ui-widget ui-state-default ui-corner-all\""
button_str+= " data-target=\"##{@options.target.attr('id')}\""
button_str+= " title=\"#{@options.label}\" rel=\"#{@options.command}\""
button_str+= "></button>"
buttonEl = jQuery button_str;
buttonEl.addClass @options.cssClass if @options.cssClass
buttonEl.addClass 'btn-large' if @options.editable.options.touchScreen
button = buttonEl.button { "icons": { "primary": "ui-icon-#{@options.command}-p" }, "text": false }
button.addClass @options.cssClass if @options.cssClass
button
)(jQuery)
| true | # Hallo - a rich text editing jQuery UI widget
# (c) 2011 PI:NAME:<NAME>END_PI, IKS Consortium
# Hallo may be freely distributed under the MIT license
((jQuery) ->
jQuery.widget 'IKS.hallodropdownbutton',
button: null
options:
uuid: ''
label: null
icon: null
editable: null
command: null
target: '' # dropdown
setup: null
cssClass: null
_create: ->
@options.icon ?= "icon-#{@options.command.toLowerCase()}"
_init: ->
target = jQuery @options.target
target.css 'position', 'absolute'
target.addClass 'dropdown-menu'
target.hide()
@button = @_prepareButton() unless @button
@button.bind 'click', =>
jQuery('.misspelled').remove()
if target.hasClass 'open'
@_hideTarget()
return
@_showTarget()
target.bind 'click', =>
@_hideTarget()
@element.append @button
bindShowHandler: (event) ->
@_showTarget(event.target)
bindShow: (selector) ->
event_name = 'click'
jQuery(selector).live event_name, =>
@bindShowHandler()
_showTarget: (select_target) ->
jQuery(".dropdown-form:visible, .dropdown-menu:visible").each (index,item) ->
jQuery(item).trigger('hide')
target = jQuery @options.target
@options.editable.storeContentPosition()
@options.setup(select_target) if @options.setup
@_updateTargetPosition()
target.addClass 'open'
target.show()
_hideTarget: ->
target = jQuery @options.target
if ( target.hasClass('open'))
target.removeClass 'open'
target.hide()
@options.editable.restoreContentPosition()
_updateTargetPosition: ->
target = jQuery @options.target
{top, left} = @button.position()
top += @button.outerHeight()
target.css 'top', top
last_button = target.closest('.hallotoolbar').find('button:last')
last_button_pos =last_button.position().left
last_button_pos+=last_button.width()
if ( left+target.width() > last_button_pos )
target.css 'left', left - target.width()+last_button.width()
else
target.css 'left', left
_prepareButton: ->
id = "#{@options.uuid}-#{@options.command}"
button_str = "<button id=\"#{id}\" data-toggle=\"dropdown\""
button_str+= " class=\"#{@options.command}_button ui-button ui-widget ui-state-default ui-corner-all\""
button_str+= " data-target=\"##{@options.target.attr('id')}\""
button_str+= " title=\"#{@options.label}\" rel=\"#{@options.command}\""
button_str+= "></button>"
buttonEl = jQuery button_str;
buttonEl.addClass @options.cssClass if @options.cssClass
buttonEl.addClass 'btn-large' if @options.editable.options.touchScreen
button = buttonEl.button { "icons": { "primary": "ui-icon-#{@options.command}-p" }, "text": false }
button.addClass @options.cssClass if @options.cssClass
button
)(jQuery)
|
[
{
"context": "() * notebooks.length)]\nuser = {id: 1, username: \"Livoras\", email: \"me@livoras.com\"}\n\nmodule.exports = {not",
"end": 440,
"score": 0.9994909763336182,
"start": 433,
"tag": "USERNAME",
"value": "Livoras"
},
{
"context": "gth)]\nuser = {id: 1, username: \"Livoras\", email: \"me@livoras.com\"}\n\nmodule.exports = {notebooks, activeNotebook, u",
"end": 465,
"score": 0.9999251365661621,
"start": 451,
"tag": "EMAIL",
"value": "me@livoras.com"
}
] | src/components/feifanote/mocks.coffee | livoras/feifanote | 1 | makeNote = (name)->
{
name: name
activePageIndex: Math.floor(Math.random() * 30)
pages: (makePage() for i in [1..30])
}
makePage = ->
{content: "#{Math.random()}"}
names = ['Math', 'English', 'Database', 'Python', 'Jerry', 'JavaScript', 'NodeJS', 'PHP', 'Livoras']
notebooks = (makeNote(name) for name in names)
activeNotebook = notebooks[Math.floor(Math.random() * notebooks.length)]
user = {id: 1, username: "Livoras", email: "me@livoras.com"}
module.exports = {notebooks, activeNotebook, user}
| 148136 | makeNote = (name)->
{
name: name
activePageIndex: Math.floor(Math.random() * 30)
pages: (makePage() for i in [1..30])
}
makePage = ->
{content: "#{Math.random()}"}
names = ['Math', 'English', 'Database', 'Python', 'Jerry', 'JavaScript', 'NodeJS', 'PHP', 'Livoras']
notebooks = (makeNote(name) for name in names)
activeNotebook = notebooks[Math.floor(Math.random() * notebooks.length)]
user = {id: 1, username: "Livoras", email: "<EMAIL>"}
module.exports = {notebooks, activeNotebook, user}
| true | makeNote = (name)->
{
name: name
activePageIndex: Math.floor(Math.random() * 30)
pages: (makePage() for i in [1..30])
}
makePage = ->
{content: "#{Math.random()}"}
names = ['Math', 'English', 'Database', 'Python', 'Jerry', 'JavaScript', 'NodeJS', 'PHP', 'Livoras']
notebooks = (makeNote(name) for name in names)
activeNotebook = notebooks[Math.floor(Math.random() * notebooks.length)]
user = {id: 1, username: "Livoras", email: "PI:EMAIL:<EMAIL>END_PI"}
module.exports = {notebooks, activeNotebook, user}
|
[
{
"context": "ersion 1.1.0\n@file I18n.js\n@author Welington Sampaio (http://welington.zaez.net/)\n@contact http://",
"end": 137,
"score": 0.9998999834060669,
"start": 120,
"tag": "NAME",
"value": "Welington Sampaio"
}
] | vendor/assets/javascripts/joker/I18n.coffee | zaeznet/joker-rails | 0 | ###
@summary Joker
@description Framework of RIAs applications
@version 1.1.0
@file I18n.js
@author Welington Sampaio (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.I18n
###
Guarda todas as variaveis e objetos de
traducao, vindos do rails
@type Object
###
@translations: new Object
###
Configura o objeto
###
@getInstance: ->
Joker.I18n.t = Joker.I18n.translate
Joker.t = Joker.I18n.translate
###
Metodo que dispara as traducoes e
preenche as variaveis.
@example
# objeto de traducao
{
controller: { name: "I'm {variable}" }
}
Joker.I18n.translate('controller.name', { variable: "Joker" }) # I'm Joker
@throws Caso nao exista a chave de traducao solicitada
@param key String
chave a ser traduzida, encadear chaves com ponto '.'
@param assigns Object
objetos a serem assinados na string de traducao
@return String
###
@translate: (key, assigns={})->
_t9n = Joker.I18n.translations
key.split('.').each (value)->
_t9n = _t9n[value] if Object.has(_t9n, value)
throw "Key not found in translations: #{key}" unless Object.isString(_t9n) or Object.isArray(_t9n)
_t9n = _t9n.assign assigns if Object.isString(_t9n)
_t9n
| 166574 | ###
@summary Joker
@description Framework of RIAs applications
@version 1.1.0
@file I18n.js
@author <NAME> (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.I18n
###
Guarda todas as variaveis e objetos de
traducao, vindos do rails
@type Object
###
@translations: new Object
###
Configura o objeto
###
@getInstance: ->
Joker.I18n.t = Joker.I18n.translate
Joker.t = Joker.I18n.translate
###
Metodo que dispara as traducoes e
preenche as variaveis.
@example
# objeto de traducao
{
controller: { name: "I'm {variable}" }
}
Joker.I18n.translate('controller.name', { variable: "Joker" }) # I'm Joker
@throws Caso nao exista a chave de traducao solicitada
@param key String
chave a ser traduzida, encadear chaves com ponto '.'
@param assigns Object
objetos a serem assinados na string de traducao
@return String
###
@translate: (key, assigns={})->
_t9n = Joker.I18n.translations
key.split('.').each (value)->
_t9n = _t9n[value] if Object.has(_t9n, value)
throw "Key not found in translations: #{key}" unless Object.isString(_t9n) or Object.isArray(_t9n)
_t9n = _t9n.assign assigns if Object.isString(_t9n)
_t9n
| true | ###
@summary Joker
@description Framework of RIAs applications
@version 1.1.0
@file I18n.js
@author PI:NAME:<NAME>END_PI (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
###
Client-side internationalization helper. Translation objects exported
from Rails are stored on Joker.I18n.translations and resolved by
dot-separated keys.
###
class Joker.I18n

  ###
  Holds every translation variable and object
  exported from Rails.
  @type Object
  ###
  @translations: new Object

  ###
  Configures the object: installs the shorthand aliases
  Joker.I18n.t and Joker.t for translate.
  ###
  @getInstance: ->
    Joker.I18n.t = Joker.I18n.translate
    Joker.t = Joker.I18n.translate

  ###
  Resolves a translation key and interpolates
  the given variables into the result.
  @example
    # translation object
    {
      controller: { name: "I'm {variable}" }
    }
    Joker.I18n.translate('controller.name', { variable: "Joker" }) # I'm Joker
  @throws When the requested translation key does not exist
  @param key String
    key to translate; chain nested keys with a dot '.'
  @param assigns Object
    values interpolated into the translation string
  @return String
  ###
  @translate: (key, assigns={})->
    _t9n = Joker.I18n.translations
    # Walk the translation tree one dot-separated segment at a time
    # (Array#each and Object.has are Sugar.js extensions)
    key.split('.').each (value)->
      _t9n = _t9n[value] if Object.has(_t9n, value)
    # Only strings and arrays are valid translation leaves
    throw "Key not found in translations: #{key}" unless Object.isString(_t9n) or Object.isArray(_t9n)
    # Sugar.js String#assign replaces {placeholder} tokens with assigns values
    _t9n = _t9n.assign assigns if Object.isString(_t9n)
    _t9n
|
[
{
"context": "d clone array of objects\", ->\n test = [{name: 'Anzahl', anzahl: '1734'}]\n result = clone test\n ",
"end": 1489,
"score": 0.7110142707824707,
"start": 1487,
"tag": "NAME",
"value": "An"
}
] | test/mocha/clone.coffee | alinex/node-util | 0 | chai = require 'chai'
expect = chai.expect
### eslint-env node, mocha ###
{clone} = require '../../src/index'
# Unit tests for the deep-clone helper in src/index.
# Conventions used in the assertion messages:
#   "deep check" - structural equality with the source value
#   "reference"  - whether the clone shares (or must not share) identity
describe "Clone", ->

  # null has no structure to copy, so the same reference is returned
  it "should copy null", ->
    test = null
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.equal test

  it "should clone object", ->
    test = {eins: 1}
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test

  # primitives: value equality is all that can be checked
  it "should clone string", ->
    test = "eins"
    result = clone test
    expect(result, "deep check").to.deep.equal test

  it "should clone array", ->
    test = [1, 2, 3]
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test

  # nested containers must be cloned recursively, not shared
  it "should clone array of arrays", ->
    test = [[1, [2], 3]]
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test
    expect(result[0], "reference.list").to.not.equal test[0]
    expect(result[0][1], "reference.list").to.not.equal test[0][1]

  it "should clone object of arrays", ->
    test = {a: [1, [2], 3]}
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test
    expect(result.a, "reference.object").to.not.equal test.a
    expect(result.a[1], "reference.list").to.not.equal test.a[1]

  # mutating the source after cloning must not affect the clone
  it "should clone array of objects", ->
    test = [{name: 'Anzahl', anzahl: '1734'}]
    result = clone test
    expect(result, "deep check").to.deep.equal test
    test[0].anzahl = 9999
    expect(result, "reference").to.not.equal test
    expect(result[0], "reference.object").to.not.equal test[0]
    expect(result[0].anzahl, "reference.list").to.not.equal test[0].anzahl

  it "should clone date", ->
    test = new Date()
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test

  it "should clone regexp", ->
    test = /ab/gim
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test

  # arbitrary class instances (e.g. Error) are copied by reference
  it "should copy instance reference", ->
    test = new Error "Test error"
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.equal test

  it "should clone complex structure", ->
    test =
      eins: 1
      zwei: [2]
      drei: 'drei'
      vier:
        array: [1, 2, 3]
        error: new Error 'Test error'
        regexp: /ab/
        date: new Date()
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test

  # two properties pointing at the same array must still share identity
  # in the clone (the cycle/alias structure is preserved)
  it "should clone circular references", ->
    test =
      eins: 1
      zwei: [2]
    test.drei = test.zwei
    result = clone test
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test
    result.drei[0] = 3
    expect(result.drei, "reference kept").to.equal result.zwei

  # with a depth limit, levels beyond the limit are shared, not cloned
  it "should clone defined depth", ->
    test =
      one:
        value: 1
        two:
          value: 1
          three:
            value : 1
    result = clone test, 3
    expect(result, "deep check").to.deep.equal test
    expect(result, "reference").to.not.equal test
    test.one.value = 1
    test.one.two.value = 2
    test.one.two.three.value = 3
    expect(result.one.two, "reference").to.not.equal test.one.two
    expect(result.one.two.three, "reference kept").to.equal test.one.two.three
| 209911 | chai = require 'chai'
expect = chai.expect
### eslint-env node, mocha ###
{clone} = require '../../src/index'
describe "Clone", ->
it "should copy null", ->
test = null
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.equal test
it "should clone object", ->
test = {eins: 1}
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone string", ->
test = "eins"
result = clone test
expect(result, "deep check").to.deep.equal test
it "should clone array", ->
test = [1, 2, 3]
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone array of arrays", ->
test = [[1, [2], 3]]
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
expect(result[0], "reference.list").to.not.equal test[0]
expect(result[0][1], "reference.list").to.not.equal test[0][1]
it "should clone object of arrays", ->
test = {a: [1, [2], 3]}
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
expect(result.a, "reference.object").to.not.equal test.a
expect(result.a[1], "reference.list").to.not.equal test.a[1]
it "should clone array of objects", ->
test = [{name: '<NAME>zahl', anzahl: '1734'}]
result = clone test
expect(result, "deep check").to.deep.equal test
test[0].anzahl = 9999
expect(result, "reference").to.not.equal test
expect(result[0], "reference.object").to.not.equal test[0]
expect(result[0].anzahl, "reference.list").to.not.equal test[0].anzahl
it "should clone date", ->
test = new Date()
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone regexp", ->
test = /ab/gim
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should copy instance reference", ->
test = new Error "Test error"
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.equal test
it "should clone complex structure", ->
test =
eins: 1
zwei: [2]
drei: 'drei'
vier:
array: [1, 2, 3]
error: new Error 'Test error'
regexp: /ab/
date: new Date()
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone circular references", ->
test =
eins: 1
zwei: [2]
test.drei = test.zwei
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
result.drei[0] = 3
expect(result.drei, "reference kept").to.equal result.zwei
it "should clone defined depth", ->
test =
one:
value: 1
two:
value: 1
three:
value : 1
result = clone test, 3
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
test.one.value = 1
test.one.two.value = 2
test.one.two.three.value = 3
expect(result.one.two, "reference").to.not.equal test.one.two
expect(result.one.two.three, "reference kept").to.equal test.one.two.three
| true | chai = require 'chai'
expect = chai.expect
### eslint-env node, mocha ###
{clone} = require '../../src/index'
describe "Clone", ->
it "should copy null", ->
test = null
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.equal test
it "should clone object", ->
test = {eins: 1}
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone string", ->
test = "eins"
result = clone test
expect(result, "deep check").to.deep.equal test
it "should clone array", ->
test = [1, 2, 3]
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone array of arrays", ->
test = [[1, [2], 3]]
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
expect(result[0], "reference.list").to.not.equal test[0]
expect(result[0][1], "reference.list").to.not.equal test[0][1]
it "should clone object of arrays", ->
test = {a: [1, [2], 3]}
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
expect(result.a, "reference.object").to.not.equal test.a
expect(result.a[1], "reference.list").to.not.equal test.a[1]
it "should clone array of objects", ->
test = [{name: 'PI:NAME:<NAME>END_PIzahl', anzahl: '1734'}]
result = clone test
expect(result, "deep check").to.deep.equal test
test[0].anzahl = 9999
expect(result, "reference").to.not.equal test
expect(result[0], "reference.object").to.not.equal test[0]
expect(result[0].anzahl, "reference.list").to.not.equal test[0].anzahl
it "should clone date", ->
test = new Date()
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone regexp", ->
test = /ab/gim
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should copy instance reference", ->
test = new Error "Test error"
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.equal test
it "should clone complex structure", ->
test =
eins: 1
zwei: [2]
drei: 'drei'
vier:
array: [1, 2, 3]
error: new Error 'Test error'
regexp: /ab/
date: new Date()
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
it "should clone circular references", ->
test =
eins: 1
zwei: [2]
test.drei = test.zwei
result = clone test
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
result.drei[0] = 3
expect(result.drei, "reference kept").to.equal result.zwei
it "should clone defined depth", ->
test =
one:
value: 1
two:
value: 1
three:
value : 1
result = clone test, 3
expect(result, "deep check").to.deep.equal test
expect(result, "reference").to.not.equal test
test.one.value = 1
test.one.two.value = 2
test.one.two.three.value = 3
expect(result.one.two, "reference").to.not.equal test.one.two
expect(result.one.two.three, "reference kept").to.equal test.one.two.three
|
[
{
"context": " data:\n email: email\n password: password\n success: (data) ->\n # Set cookie.\n ",
"end": 322,
"score": 0.9989063143730164,
"start": 314,
"tag": "PASSWORD",
"value": "password"
}
] | app/assets/javascripts/lib/session.js.coffee | domenicosolazzo/hummingbird | 0 | Hummingbird.Session =
getAuthToken: ->
parts = document.cookie.split("auth_token=")
if parts.length == 2
parts.pop().split(';').shift()
signInWithOptionalRedirect: (email, password, redirect) ->
$.ajax "/api/v2/sign-in",
type: "POST"
data:
email: email
password: password
success: (data) ->
# Set cookie.
cookieString = "auth_token=" + data["auth_token"]
unless window.location.host == "localhost:3000"
cookieString += ";domain=." + window.location.host
cookieString += ";max-age=" + 60*60*60*24*365
cookieString += ";path=/"
document.cookie = cookieString
# Redirect to previous URL.
if redirect
window.location.href = window.lastVisitedURL
error: (jqXHR, textStatus, errorThrown) ->
# TODO handle error.
alert(errorThrown)
signIn: (email, password) -> Hummingbird.Session.signInWithOptionalRedirect(email, password, true)
signInWithoutRedirect: (email, password) -> Hummingbird.Session.signInWithOptionalRedirect(email, password, false)
signOut: ->
ic.ajax
url: "/sign-out"
type: "POST"
.then ->
window.location.href = window.location.href
| 23306 | Hummingbird.Session =
getAuthToken: ->
parts = document.cookie.split("auth_token=")
if parts.length == 2
parts.pop().split(';').shift()
signInWithOptionalRedirect: (email, password, redirect) ->
$.ajax "/api/v2/sign-in",
type: "POST"
data:
email: email
password: <PASSWORD>
success: (data) ->
# Set cookie.
cookieString = "auth_token=" + data["auth_token"]
unless window.location.host == "localhost:3000"
cookieString += ";domain=." + window.location.host
cookieString += ";max-age=" + 60*60*60*24*365
cookieString += ";path=/"
document.cookie = cookieString
# Redirect to previous URL.
if redirect
window.location.href = window.lastVisitedURL
error: (jqXHR, textStatus, errorThrown) ->
# TODO handle error.
alert(errorThrown)
signIn: (email, password) -> Hummingbird.Session.signInWithOptionalRedirect(email, password, true)
signInWithoutRedirect: (email, password) -> Hummingbird.Session.signInWithOptionalRedirect(email, password, false)
signOut: ->
ic.ajax
url: "/sign-out"
type: "POST"
.then ->
window.location.href = window.location.href
| true | Hummingbird.Session =
getAuthToken: ->
parts = document.cookie.split("auth_token=")
if parts.length == 2
parts.pop().split(';').shift()
signInWithOptionalRedirect: (email, password, redirect) ->
$.ajax "/api/v2/sign-in",
type: "POST"
data:
email: email
password: PI:PASSWORD:<PASSWORD>END_PI
success: (data) ->
# Set cookie.
cookieString = "auth_token=" + data["auth_token"]
unless window.location.host == "localhost:3000"
cookieString += ";domain=." + window.location.host
cookieString += ";max-age=" + 60*60*60*24*365
cookieString += ";path=/"
document.cookie = cookieString
# Redirect to previous URL.
if redirect
window.location.href = window.lastVisitedURL
error: (jqXHR, textStatus, errorThrown) ->
# TODO handle error.
alert(errorThrown)
signIn: (email, password) -> Hummingbird.Session.signInWithOptionalRedirect(email, password, true)
signInWithoutRedirect: (email, password) -> Hummingbird.Session.signInWithOptionalRedirect(email, password, false)
signOut: ->
ic.ajax
url: "/sign-out"
type: "POST"
.then ->
window.location.href = window.location.href
|
[
{
"context": "###\njQuery Open Carousel\n\nCopyright (c) 2013 Justin McCandless (justinmccandless.com)\n\nPermission is hereby gran",
"end": 62,
"score": 0.9998595118522644,
"start": 45,
"tag": "NAME",
"value": "Justin McCandless"
},
{
"context": "n Carousel\n\nCopyright (c) 2013 Justin McCandless (justinmccandless.com)\n\nPermission is hereby granted, free of charge, t",
"end": 84,
"score": 0.9500972032546997,
"start": 64,
"tag": "EMAIL",
"value": "justinmccandless.com"
},
{
"context": "rl/javascript-fy.html and https://gist.github.com/ddgromit/859699\n arrayShuffle = (arr) ->\n i = ar",
"end": 19831,
"score": 0.9996282458305359,
"start": 19823,
"tag": "USERNAME",
"value": "ddgromit"
}
] | src/jquery.openCarousel.coffee | Captive-Studio/jQuery-Open-Carousel | 0 | ###
jQuery Open Carousel
Copyright (c) 2013 Justin McCandless (justinmccandless.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
###
This is the main coffeescript file
Include jquery.openCarousel.js and jquery.openCarousel.css in your projects
###
# Create the Ocarousel class in the global namespace
class window.Ocarousel
### Initialize ###
ocarousel: null
ocarousel_window: null
ocarousel_container: null
indicators_container: null
frames: null
framesPre: null
framesPost: null
indicators: null
timer: null
active: 0
### Public callback ###
onSlideChanged: null
### Default Settings ###
@settings:
speed: .5 * 1000 # how long each transition is, milliseconds
period: 4 * 1000 # time between frame changes, milliseconds (Infinity will prevent autoscrolling)
transition: "scroll" # type of transition animation
perscroll: 1 # number of slides to pass over for each scroll
wrapearly: 0 # scroll to the beginning when reaching this many slides before the end
shuffle: false # setting to true will randomize the order of slides, false will keep the order given in html
indicator_fill: "#ffffff" # inactive fill color of indicator circles
indicator_r: 6 # radius of indicator circles
indicator_spacing: 6 # spacing between indicators
indicator_cy: 20 # y position of indicator circles
indicator_stroke: "#afafaf" # stroke color of indicator cirlces
indicator_strokewidth: "2" # stroke width of indicator circles
fullscreen: false # dynamically sets width of slides to width of screen
vertical: false # positions and scrolls slides vertically instead of horizontally
cycle: false # scrolled slides are appended to the end of the container to create a continuous carousel
constructor: (ocarousel) ->
me = @
# Get ocarousel divs
@ocarousel = $(ocarousel)
@ocarousel_window = $(@ocarousel).find(".ocarousel_window")
@frames = $(@ocarousel_window).children().clone()
@indicators_container = $(@ocarousel).find(".ocarousel_indicators")
@pagination_current = $(@ocarousel).find(".ocarousel_pagination_current")
@pagination_total = $(@ocarousel).find(".ocarousel_pagination_total")
# If there are 0 or 1 frames, then the carousel should not do anything!
if @frames.length > 1
# Get dynamic settings from data attributes
@settings = {}
@settings.speed = $(@ocarousel).data('ocarousel-speed') ? Ocarousel.settings.speed
@settings.period = $(@ocarousel).data('ocarousel-period') ? Ocarousel.settings.period
@settings.transition = $(@ocarousel).data('ocarousel-transition') ? Ocarousel.settings.transition
@settings.perscroll = $(@ocarousel).data('ocarousel-perscroll') ? Ocarousel.settings.perscroll
@settings.wrapearly = $(@ocarousel).data('ocarousel-wrapearly') ? Ocarousel.settings.wrapearly
@settings.shuffle = $(@ocarousel).data('ocarousel-shuffle') ? Ocarousel.settings.shuffle
@settings.indicator_fill = $(@ocarousel).data('ocarousel-indicator-fill') ? Ocarousel.settings.indicator_fill
@settings.indicator_r = $(@ocarousel).data('ocarousel-indicator-r') ? Ocarousel.settings.indicator_r
@settings.indicator_spacing = $(@ocarousel).data('ocarousel-indicator-spacing') ? Ocarousel.settings.indicator_spacing
@settings.indicator_cy = $(@ocarousel).data('ocarousel-indicator-cy') ? Ocarousel.settings.indicator_cy
@settings.indicator_stroke = $(@ocarousel).data('ocarousel-indicator-stroke') ? Ocarousel.settings.indicator_stroke
@settings.indicator_strokewidth = $(@ocarousel).data('ocarousel-indicator-strokewidth') ? Ocarousel.settings.indicator_strokewidth
@settings.fullscreen = $(@ocarousel).data('ocarousel-fullscreen') ? Ocarousel.settings.fullscreen
@settings.vertical = $(@ocarousel).data('ocarousel-vertical') ? Ocarousel.settings.vertical
@settings.cycle = $(@ocarousel).data('ocarousel-cycle') ? Ocarousel.settings.cycle
# Set up for cycle if needed
if @settings.cycle
# We need 3 copies of the children
children = $(@ocarousel_window).children().clone()
children2 = $(@ocarousel_window).children().clone()
$(@ocarousel_window).append(children)
$(@ocarousel_window).append(children2)
@frames = $(@ocarousel_window).children()
# Start at the middle copy of children
@active = @frames.length / 3
# Add the container for the slides
@ocarousel_container = document.createElement("div")
@ocarousel_container.className = "ocarousel_window_slides"
# Let everything be visible
$(@ocarousel).show()
# Stop the scroll timer
@timerStop()
# Remove the old frames from their original location outside of the container
@ocarousel_window.html("")
# Insert our container with all of the frames into the DOM
$(@ocarousel_window).get(0).appendChild(@ocarousel_container)
# Render the frames and supporting elements from data into the DOM
@render()
# Make sure the container is scrolled to the correct position
@setContainerPos()
# Start the scroll timer
@timerStart()
### Remove and reset everything in the DOM ###
render: () ->
# Shuffle the frames if shuffle is configured
if @settings.shuffle and @settings.shuffle != "false"
@frames = arrayShuffle(@frames)
# Clear the frames in the DOM and then inserts all frame from data into the DOM
$(@ocarousel_container).html("")
me = @
$(@frames).each (i) ->
# Dynamically set the width of the frames if fullscreen enabled
if me.settings.fullscreen and me.settings.fullscreen != "false"
$(this).css("width", $(window).width())
# Set slides to be vertical if vertical enabled
if me.settings.vertical
$(this).addClass("ocarousel_window_slides_vertical")
# Insert the frame
$(me.ocarousel_container).append(this)
# Render indicators if the user provided a div
if @indicators_container.length
# Clear the container
$(@indicators_container).html("")
# Setup the svg itself if supported
if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
indicators_parent = document.createElementNS("http://www.w3.org/2000/svg", "svg")
indicators_parent.setAttribute("version", "1.1")
$(@indicators_container).get(0).appendChild(indicators_parent)
# If SVG isn't supported, we'll insert images directly into the container div
else
indicators_parent = $(@indicators_container).get(0)
# Reset and setup the circle indicators
@indicators = []
length = @frames.length
start = 0
end = @frames.length - 1
if @settings.cycle
start = @frames.length / 3
end = 2 * @frames.length / 3 - 1
length = @frames.length / 3
cx = $(@indicators_container).width() / 2 - @settings.indicator_r * length - @settings.indicator_spacing * length / 2
for i in [start..end]
# Create an indicator as SVG if supported
link = if !@settings.cycle then i else i % (@frames.length / 3)
if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
indicator = document.createElementNS("http://www.w3.org/2000/svg", "circle")
indicator.className = "ocarousel_link"
indicator.setAttribute("data-ocarousel-link", link)
indicator.setAttribute("cx", cx)
indicator.setAttribute("cy", @settings.indicator_cy)
indicator.setAttribute("r", @settings.indicator_r)
indicator.setAttribute("stroke", @settings.indicator_stroke)
indicator.setAttribute("stroke-width", @settings.indicator_strokewidth)
indicator.setAttribute("fill", if i is @active then @settings.indicator_stroke else @settings.indicator_fill)
# Otherwise create an indicator image
else
indicator = document.createElement("div")
indicator.className = "ocarousel_link ocarousel_indicator ocarousel_indicator_" + if i is @active then "active" else "inactive"
indicator.setAttribute("data-ocarousel-link", link)
# Append it to the DOM and our array
indicators_parent.appendChild(indicator)
@indicators.push(indicator)
# Set its index as a data setAttribute
$(indicator).data("ocarousel_index", i)
# Setup the next cx
cx = cx + @settings.indicator_r * 2 + @settings.indicator_spacing
# Setup the pagination current page
if @pagination_current.length
$(@pagination_current).html(@active + 1)
# Setup the pagination total pages
if @pagination_total.length
$(@pagination_total).html(@frames.length)
# Reset and set the click event for scroll links
$(@ocarousel).find("[data-ocarousel-link]").unbind("click")
$(@ocarousel).find("[data-ocarousel-link]").bind "click", (event) ->
event.preventDefault()
goHere = $(this).data("ocarousel-link")
if goHere?
if goHere == "left" || goHere == "Left" || goHere == "l" || goHere == "L"
goHere = me.getPrev()
else if goHere == "right" || goHere == "Right" || goHere == "r" || goHere == "R"
goHere = me.getNext()
else if goHere == "first" || goHere == "First" || goHere == "beginning" || goHere == "Beginning"
goHere = me.getFirst()
else if goHere == "last" || goHere == "Last" || goHere == "end" || goHere == "End"
goHere = me.getLast()
else if me.settings.cycle
goHere = goHere + me.frames.length / 3
me.scrollTo goHere
# Set the screen resize event if fullscreen
if @settings.fullscreen
$(window).one "resize", () ->
me.render()
### Animate a transition to the given position ###
scrollTo: (index, instant = false) ->
me = @
if index?
@timerStop()
# Wrap to beginning/end if necessary
if index >= (@frames.length - @settings.wrapearly)
index = 0
else if index >= (@frames.length - @settings.perscroll)
index = @frames.length - @settings.perscroll
else if index < 0
perEnd = @frames.length - @settings.perscroll
wrapEnd = @frames.length - 1 - @settings.wrapearly
index = Math.min(perEnd, wrapEnd)
# If we're in cycle mode
if @settings.cycle
# If we're less than the middle set, move the end set to the beginning
if index < @frames.length / 3
for i in [@frames.length - 1..2 * (@frames.length / 3)]
$(@frames[i]).remove()
$(@ocarousel_container).prepend(@frames[i])
@active = @active + @frames.length / 3
index = index + @frames.length / 3
@frames = $(@ocarousel_container).children()
@setContainerPos()
# If we're greater than the end set, move the beginning set to the end
if index >= (@frames.length / 3) * 2
for i in [0..@frames.length / 3 - 1]
$(@frames[i]).remove()
$(@ocarousel_container).append(@frames[i])
@active = @active - @frames.length / 3
index = index - @frames.length / 3
@frames = $(@ocarousel_container).children()
@setContainerPos()
# Move the slides
$(@ocarousel_container).stop()
nextPos = @getPos index
# No animation
if instant
@setContainerPos(nextPos)
# Fade animation
else if @settings.transition == "fade"
if @settings.vertical
$(@ocarousel_container).fadeOut(@settings.speed, null)
.animate({top: nextPos + "px"}, 0)
.fadeIn(me.settings.speed)
else
$(@ocarousel_container).fadeOut(@settings.speed, null)
.animate({right: nextPos + "px"}, 0)
.fadeIn(me.settings.speed)
# Slide animation
else
if @settings.vertical
$(@ocarousel_container).animate {bottom: nextPos + "px"}, @settings.speed
else
$(@ocarousel_container).animate {right: nextPos + "px"}, @settings.speed
# Update the indicators if they exist
if @indicators?
indOld = @active
indNew = index
if @settings.cycle
indOld = indOld % (@frames.length / 3)
indNew = indNew % (@frames.length / 3)
# If SVG
if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
$(@indicators[indOld]).attr "fill", @settings.indicator_fill
$(@indicators[indNew]).attr "fill", @settings.indicator_stroke
# Otherwise images
else
$(@indicators[indOld]).removeClass("ocarousel_indicator_active")
$(@indicators[indOld]).addClass("ocarousel_indicator_inactive")
$(@indicators[indNew]).addClass("ocarousel_indicator_active")
$(@indicators[indNew]).removeClass("ocarousel_indicator_inactive")
# Update the active variable
indexOld = @active
@active = index
# Update the current pagination number if it exists
if @pagination_current.length
$(@pagination_current).html(@active + 1)
# Resume the scroll timer
@timerStart()
# Call the provided callback if it exists
if typeof @onSlideChanged is 'function'
@onSlideChanged(index, indexOld)
### Returns the distance of a frame from the beginning edge of its container ###
getPos: (index) ->
if @settings.vertical
return $(@frames[index]).position().top
else
return $(@frames[index]).position().left
### Returns the index of the next slide that should be shown ###
getNext: () ->
next = @active + @settings.perscroll
if next > (@frames.length - @settings.perscroll) && next < @frames.length
next = @frames.length - @settings.perscroll
# If the choosen frame is hidden, choose the next visible one
count = @frames.length
while count && !$(@frames[next]).is(":visible")
next++
if next > @frames.length - 1
next = 0
count--
return next
### Returns the index of the next slide that should be shown before the current position ###
getPrev: () ->
prev = @active - @settings.perscroll
if prev < 0 && @active != 0
prev = 0
# If the chosen frame is hidden, choose the previous visible one
count = @frames.length
while count && !$(@frames[prev]).is(":visible")
prev--
if prev < 0
prev = @frames.length - 1
count--
return prev
### Returns the index of the last slide ###
getLast: () ->
if @settings.cycle
return 2 * @frames.length / 3 - 1
else
return @frames.length - 1
### Returns the index of the last slide ###
getFirst: () ->
if @settings.cycle
return @frames.length
else
return 0
### Starts or resumes the scroll timer ###
timerStart: () ->
me = @
if @settings.period != Infinity
@timer = setInterval (() -> me.scrollTo (me.getNext())), @settings.period
### Stops the scroll timer ###
timerStop: () ->
if @timer?
clearInterval @timer
@timer = null
### Starts the timer if it is stopped, stops the timer if it is running ###
timerToggle: () ->
if @timer?
@timerStop()
else
@timerStart()
# Move ocarousel_container to the current active div instantly
setContainerPos: (pos = @getPos(@active)) ->
if @settings.vertical
$(@ocarousel_container).animate({bottom: pos + "px"}, 0)
else
$(@ocarousel_container).animate({right: pos + "px"}, 0)
### Removes a frame, keeping the carousel in an intuitive position afterwards ###
remove: (index) ->
if index > 0 and index < (@frames.length - 1)
# Remove from data and rerender
@frames.splice(index,1)
@render()
# If the carousel is ahead of the frame being removed, prevent it from jumping forward
if @active > index
@scrollTo(@active - 1, true)
### Adds a frame, keeping the carousel in an intuitive position afterwards ###
add: (elt, index) ->
if index > 0 and index < (@frames.length - 1)
@frames.splice(index, 0, elt)
@render()
# If the carousel is ahead of or at the frame being added, prevent it from jumping backward
if @active >= index
@scrollTo(@active + 1, true)
# Randomizes the order of elements in the passed in array in place.
# Adapted from http://sedition.com/perl/javascript-fy.html and https://gist.github.com/ddgromit/859699
arrayShuffle = (arr) ->
i = arr.length
if i == 0 then return false
while --i
j = Math.floor(Math.random() * (i+1))
tempi = arr[i]
tempj = arr[j]
arr[i] = tempj
arr[j] = tempi
return arr
$(document).ready ->
$(".ocarousel").each ->
new Ocarousel(this)
| 31924 | ###
jQuery Open Carousel
Copyright (c) 2013 <NAME> (<EMAIL>)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
###
This is the main coffeescript file
Include jquery.openCarousel.js and jquery.openCarousel.css in your projects
###
# Create the Ocarousel class in the global namespace
# jQuery Open Carousel widget: wraps a ".ocarousel" root element, lifts the
# children of its ".ocarousel_window" into a sliding strip, and wires up
# transitions, indicator dots, pagination counters, navigation links and an
# auto-scroll timer, all configurable via data attributes on the root.
class window.Ocarousel
    ### Initialize ###
    ocarousel: null             # root .ocarousel element (jQuery)
    ocarousel_window: null      # visible viewport the slides move within
    ocarousel_container: null   # dynamically created strip that holds all frames
    indicators_container: null  # optional .ocarousel_indicators element
    frames: null                # jQuery collection of slide elements
    framesPre: null             # NOTE(review): never assigned or read in this file
    framesPost: null            # NOTE(review): never assigned or read in this file
    indicators: null            # array of indicator circle/image elements
    timer: null                 # setInterval handle for auto-scrolling
    active: 0                   # index of the currently shown frame
    ### Public callback ###
    onSlideChanged: null        # invoked as onSlideChanged(newIndex, oldIndex) after each scroll
    ### Default Settings ###
    @settings:
        speed: .5 * 1000 # how long each transition is, milliseconds
        period: 4 * 1000 # time between frame changes, milliseconds (Infinity will prevent autoscrolling)
        transition: "scroll" # type of transition animation
        perscroll: 1 # number of slides to pass over for each scroll
        wrapearly: 0 # scroll to the beginning when reaching this many slides before the end
        shuffle: false # setting to true will randomize the order of slides, false will keep the order given in html
        indicator_fill: "#ffffff" # inactive fill color of indicator circles
        indicator_r: 6 # radius of indicator circles
        indicator_spacing: 6 # spacing between indicators
        indicator_cy: 20 # y position of indicator circles
        indicator_stroke: "#afafaf" # stroke color of indicator circles
        indicator_strokewidth: "2" # stroke width of indicator circles
        fullscreen: false # dynamically sets width of slides to width of screen
        vertical: false # positions and scrolls slides vertically instead of horizontally
        cycle: false # scrolled slides are appended to the end of the container to create a continuous carousel
    # Builds a carousel from the given DOM element. Does nothing visible
    # unless the window has at least two child frames.
    constructor: (ocarousel) ->
        me = @
        # Get ocarousel divs
        @ocarousel = $(ocarousel)
        @ocarousel_window = $(@ocarousel).find(".ocarousel_window")
        @frames = $(@ocarousel_window).children().clone()
        @indicators_container = $(@ocarousel).find(".ocarousel_indicators")
        @pagination_current = $(@ocarousel).find(".ocarousel_pagination_current")
        @pagination_total = $(@ocarousel).find(".ocarousel_pagination_total")
        # If there are 0 or 1 frames, then the carousel should not do anything!
        if @frames.length > 1
            # Get dynamic settings from data attributes, falling back to the
            # class-level defaults via the existential operator.
            @settings = {}
            @settings.speed = $(@ocarousel).data('ocarousel-speed') ? Ocarousel.settings.speed
            @settings.period = $(@ocarousel).data('ocarousel-period') ? Ocarousel.settings.period
            @settings.transition = $(@ocarousel).data('ocarousel-transition') ? Ocarousel.settings.transition
            @settings.perscroll = $(@ocarousel).data('ocarousel-perscroll') ? Ocarousel.settings.perscroll
            @settings.wrapearly = $(@ocarousel).data('ocarousel-wrapearly') ? Ocarousel.settings.wrapearly
            @settings.shuffle = $(@ocarousel).data('ocarousel-shuffle') ? Ocarousel.settings.shuffle
            @settings.indicator_fill = $(@ocarousel).data('ocarousel-indicator-fill') ? Ocarousel.settings.indicator_fill
            @settings.indicator_r = $(@ocarousel).data('ocarousel-indicator-r') ? Ocarousel.settings.indicator_r
            @settings.indicator_spacing = $(@ocarousel).data('ocarousel-indicator-spacing') ? Ocarousel.settings.indicator_spacing
            @settings.indicator_cy = $(@ocarousel).data('ocarousel-indicator-cy') ? Ocarousel.settings.indicator_cy
            @settings.indicator_stroke = $(@ocarousel).data('ocarousel-indicator-stroke') ? Ocarousel.settings.indicator_stroke
            @settings.indicator_strokewidth = $(@ocarousel).data('ocarousel-indicator-strokewidth') ? Ocarousel.settings.indicator_strokewidth
            @settings.fullscreen = $(@ocarousel).data('ocarousel-fullscreen') ? Ocarousel.settings.fullscreen
            @settings.vertical = $(@ocarousel).data('ocarousel-vertical') ? Ocarousel.settings.vertical
            @settings.cycle = $(@ocarousel).data('ocarousel-cycle') ? Ocarousel.settings.cycle
            # Set up for cycle if needed
            if @settings.cycle
                # We need 3 copies of the children: cycle mode keeps identical
                # "before", "middle" and "after" sets so slides can be moved
                # between ends to simulate an endless loop.
                children = $(@ocarousel_window).children().clone()
                children2 = $(@ocarousel_window).children().clone()
                $(@ocarousel_window).append(children)
                $(@ocarousel_window).append(children2)
                @frames = $(@ocarousel_window).children()
                # Start at the middle copy of children
                @active = @frames.length / 3
            # Add the container for the slides
            @ocarousel_container = document.createElement("div")
            @ocarousel_container.className = "ocarousel_window_slides"
            # Let everything be visible
            $(@ocarousel).show()
            # Stop the scroll timer
            @timerStop()
            # Remove the old frames from their original location outside of the container
            @ocarousel_window.html("")
            # Insert our container with all of the frames into the DOM
            $(@ocarousel_window).get(0).appendChild(@ocarousel_container)
            # Render the frames and supporting elements from data into the DOM
            @render()
            # Make sure the container is scrolled to the correct position
            @setContainerPos()
            # Start the scroll timer
            @timerStart()
    ### Remove and reset everything in the DOM ###
    render: () ->
        # Shuffle the frames if shuffle is configured (data attributes may
        # deliver the string "false", hence the extra string comparison)
        if @settings.shuffle and @settings.shuffle != "false"
            @frames = arrayShuffle(@frames)
        # Clear the frames in the DOM, then insert every frame from data into the DOM
        $(@ocarousel_container).html("")
        me = @
        $(@frames).each (i) ->
            # Dynamically set the width of the frames if fullscreen enabled
            if me.settings.fullscreen and me.settings.fullscreen != "false"
                $(this).css("width", $(window).width())
            # Set slides to be vertical if vertical enabled
            if me.settings.vertical
                $(this).addClass("ocarousel_window_slides_vertical")
            # Insert the frame
            $(me.ocarousel_container).append(this)
        # Render indicators if the user provided a div
        if @indicators_container.length
            # Clear the container
            $(@indicators_container).html("")
            # Setup the svg itself if supported
            if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
                indicators_parent = document.createElementNS("http://www.w3.org/2000/svg", "svg")
                indicators_parent.setAttribute("version", "1.1")
                $(@indicators_container).get(0).appendChild(indicators_parent)
            # If SVG isn't supported, we'll insert images directly into the container div
            else
                indicators_parent = $(@indicators_container).get(0)
            # Reset and setup the circle indicators; in cycle mode only the
            # middle third of frames gets an indicator.
            @indicators = []
            length = @frames.length
            start = 0
            end = @frames.length - 1
            if @settings.cycle
                start = @frames.length / 3
                end = 2 * @frames.length / 3 - 1
                length = @frames.length / 3
            cx = $(@indicators_container).width() / 2 - @settings.indicator_r * length - @settings.indicator_spacing * length / 2
            for i in [start..end]
                # Create an indicator as SVG if supported
                link = if !@settings.cycle then i else i % (@frames.length / 3)
                if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
                    indicator = document.createElementNS("http://www.w3.org/2000/svg", "circle")
                    indicator.className = "ocarousel_link"
                    indicator.setAttribute("data-ocarousel-link", link)
                    indicator.setAttribute("cx", cx)
                    indicator.setAttribute("cy", @settings.indicator_cy)
                    indicator.setAttribute("r", @settings.indicator_r)
                    indicator.setAttribute("stroke", @settings.indicator_stroke)
                    indicator.setAttribute("stroke-width", @settings.indicator_strokewidth)
                    indicator.setAttribute("fill", if i is @active then @settings.indicator_stroke else @settings.indicator_fill)
                # Otherwise create an indicator image
                else
                    indicator = document.createElement("div")
                    indicator.className = "ocarousel_link ocarousel_indicator ocarousel_indicator_" + if i is @active then "active" else "inactive"
                    indicator.setAttribute("data-ocarousel-link", link)
                # Append it to the DOM and our array
                indicators_parent.appendChild(indicator)
                @indicators.push(indicator)
                # Set its index as a data setAttribute
                $(indicator).data("ocarousel_index", i)
                # Setup the next cx
                cx = cx + @settings.indicator_r * 2 + @settings.indicator_spacing
        # Setup the pagination current page
        if @pagination_current.length
            $(@pagination_current).html(@active + 1)
        # Setup the pagination total pages
        # NOTE(review): in cycle mode @frames.length is triple the real slide
        # count, so this would display 3x the expected total — verify.
        if @pagination_total.length
            $(@pagination_total).html(@frames.length)
        # Reset and set the click event for scroll links
        $(@ocarousel).find("[data-ocarousel-link]").unbind("click")
        $(@ocarousel).find("[data-ocarousel-link]").bind "click", (event) ->
            event.preventDefault()
            goHere = $(this).data("ocarousel-link")
            if goHere?
                # Accept symbolic targets (left/right/first/last) or a numeric index
                if goHere == "left" || goHere == "Left" || goHere == "l" || goHere == "L"
                    goHere = me.getPrev()
                else if goHere == "right" || goHere == "Right" || goHere == "r" || goHere == "R"
                    goHere = me.getNext()
                else if goHere == "first" || goHere == "First" || goHere == "beginning" || goHere == "Beginning"
                    goHere = me.getFirst()
                else if goHere == "last" || goHere == "Last" || goHere == "end" || goHere == "End"
                    goHere = me.getLast()
                else if me.settings.cycle
                    # Numeric links address the middle copy of the frames
                    goHere = goHere + me.frames.length / 3
                me.scrollTo goHere
        # Set the screen resize event if fullscreen
        if @settings.fullscreen
            $(window).one "resize", () ->
                me.render()
    ### Animate a transition to the given position ###
    # index: target frame index; instant: skip the animation when true.
    scrollTo: (index, instant = false) ->
        me = @
        if index?
            @timerStop()
            # Wrap to beginning/end if necessary
            if index >= (@frames.length - @settings.wrapearly)
                index = 0
            else if index >= (@frames.length - @settings.perscroll)
                index = @frames.length - @settings.perscroll
            else if index < 0
                perEnd = @frames.length - @settings.perscroll
                wrapEnd = @frames.length - 1 - @settings.wrapearly
                index = Math.min(perEnd, wrapEnd)
            # If we're in cycle mode, rotate whole thirds of the frame list so
            # the target index always lands inside the middle copy.
            if @settings.cycle
                # If we're less than the middle set, move the end set to the beginning
                if index < @frames.length / 3
                    for i in [@frames.length - 1..2 * (@frames.length / 3)]
                        $(@frames[i]).remove()
                        $(@ocarousel_container).prepend(@frames[i])
                    @active = @active + @frames.length / 3
                    index = index + @frames.length / 3
                    @frames = $(@ocarousel_container).children()
                    @setContainerPos()
                # If we're greater than the end set, move the beginning set to the end
                if index >= (@frames.length / 3) * 2
                    for i in [0..@frames.length / 3 - 1]
                        $(@frames[i]).remove()
                        $(@ocarousel_container).append(@frames[i])
                    @active = @active - @frames.length / 3
                    index = index - @frames.length / 3
                    @frames = $(@ocarousel_container).children()
                    @setContainerPos()
            # Move the slides
            $(@ocarousel_container).stop()
            nextPos = @getPos index
            # No animation
            if instant
                @setContainerPos(nextPos)
            # Fade animation
            else if @settings.transition == "fade"
                if @settings.vertical
                    # NOTE(review): fade animates `top` here while the slide
                    # branch and setContainerPos animate `bottom` — verify this
                    # inconsistency is intentional.
                    $(@ocarousel_container).fadeOut(@settings.speed, null)
                        .animate({top: nextPos + "px"}, 0)
                        .fadeIn(me.settings.speed)
                else
                    $(@ocarousel_container).fadeOut(@settings.speed, null)
                        .animate({right: nextPos + "px"}, 0)
                        .fadeIn(me.settings.speed)
            # Slide animation
            else
                if @settings.vertical
                    $(@ocarousel_container).animate {bottom: nextPos + "px"}, @settings.speed
                else
                    $(@ocarousel_container).animate {right: nextPos + "px"}, @settings.speed
            # Update the indicators if they exist
            if @indicators?
                indOld = @active
                indNew = index
                if @settings.cycle
                    indOld = indOld % (@frames.length / 3)
                    indNew = indNew % (@frames.length / 3)
                # If SVG
                if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
                    $(@indicators[indOld]).attr "fill", @settings.indicator_fill
                    $(@indicators[indNew]).attr "fill", @settings.indicator_stroke
                # Otherwise images
                else
                    $(@indicators[indOld]).removeClass("ocarousel_indicator_active")
                    $(@indicators[indOld]).addClass("ocarousel_indicator_inactive")
                    $(@indicators[indNew]).addClass("ocarousel_indicator_active")
                    $(@indicators[indNew]).removeClass("ocarousel_indicator_inactive")
            # Update the active variable
            indexOld = @active
            @active = index
            # Update the current pagination number if it exists
            if @pagination_current.length
                $(@pagination_current).html(@active + 1)
            # Resume the scroll timer
            @timerStart()
            # Call the provided callback if it exists
            if typeof @onSlideChanged is 'function'
                @onSlideChanged(index, indexOld)
    ### Returns the distance of a frame from the beginning edge of its container ###
    getPos: (index) ->
        if @settings.vertical
            return $(@frames[index]).position().top
        else
            return $(@frames[index]).position().left
    ### Returns the index of the next slide that should be shown ###
    getNext: () ->
        next = @active + @settings.perscroll
        if next > (@frames.length - @settings.perscroll) && next < @frames.length
            next = @frames.length - @settings.perscroll
        # If the chosen frame is hidden, choose the next visible one
        # (count bounds the loop so an all-hidden carousel cannot spin forever)
        count = @frames.length
        while count && !$(@frames[next]).is(":visible")
            next++
            if next > @frames.length - 1
                next = 0
            count--
        return next
    ### Returns the index of the next slide that should be shown before the current position ###
    getPrev: () ->
        prev = @active - @settings.perscroll
        if prev < 0 && @active != 0
            prev = 0
        # If the chosen frame is hidden, choose the previous visible one
        count = @frames.length
        while count && !$(@frames[prev]).is(":visible")
            prev--
            if prev < 0
                prev = @frames.length - 1
            count--
        return prev
    ### Returns the index of the last slide ###
    getLast: () ->
        if @settings.cycle
            return 2 * @frames.length / 3 - 1
        else
            return @frames.length - 1
    ### Returns the index of the first slide ###
    getFirst: () ->
        if @settings.cycle
            # NOTE(review): @frames.length is one past the valid range; the
            # first slide of the middle copy is @frames.length / 3 — verify
            # whether this should be @frames.length / 3.
            return @frames.length
        else
            return 0
    ### Starts or resumes the scroll timer ###
    timerStart: () ->
        me = @
        if @settings.period != Infinity
            @timer = setInterval (() -> me.scrollTo (me.getNext())), @settings.period
    ### Stops the scroll timer ###
    timerStop: () ->
        if @timer?
            clearInterval @timer
            @timer = null
    ### Starts the timer if it is stopped, stops the timer if it is running ###
    timerToggle: () ->
        if @timer?
            @timerStop()
        else
            @timerStart()
    # Move ocarousel_container to the current active div instantly
    # (zero-duration animate so any queued animation state stays consistent)
    setContainerPos: (pos = @getPos(@active)) ->
        if @settings.vertical
            $(@ocarousel_container).animate({bottom: pos + "px"}, 0)
        else
            $(@ocarousel_container).animate({right: pos + "px"}, 0)
    ### Removes a frame, keeping the carousel in an intuitive position afterwards ###
    # NOTE(review): the guard excludes index 0 and the last frame, so the
    # endpoints can never be removed — confirm that is intentional.
    remove: (index) ->
        if index > 0 and index < (@frames.length - 1)
            # Remove from data and rerender
            @frames.splice(index,1)
            @render()
            # If the carousel is ahead of the frame being removed, prevent it from jumping forward
            if @active > index
                @scrollTo(@active - 1, true)
    ### Adds a frame, keeping the carousel in an intuitive position afterwards ###
    # NOTE(review): the guard excludes insertion at the endpoints — confirm
    # that is intentional.
    add: (elt, index) ->
        if index > 0 and index < (@frames.length - 1)
            @frames.splice(index, 0, elt)
            @render()
            # If the carousel is ahead of or at the frame being added, prevent it from jumping backward
            if @active >= index
                @scrollTo(@active + 1, true)
# Fisher–Yates shuffle: randomizes the order of elements in `arr` in place.
# Works on any array-like with `length` and numeric indices (this file calls
# it with a jQuery collection).
#
# Always returns `arr` itself. Previously an empty input returned `false`,
# which would clobber the caller's reference in
# `@frames = arrayShuffle(@frames)` — fixed to return the array unchanged.
# Adapted from http://sedition.com/perl/javascript-fy.html and
# https://gist.github.com/ddgromit/859699
arrayShuffle = (arr) ->
    i = arr.length
    # Nothing to shuffle for 0 or 1 elements; still hand the array back.
    return arr if i <= 1
    while --i
        # Pick a random index j in [0, i] and swap arr[i] <-> arr[j].
        j = Math.floor(Math.random() * (i + 1))
        [arr[i], arr[j]] = [arr[j], arr[i]]
    return arr
# Auto-initialize: build one Ocarousel instance for every ".ocarousel"
# element once the DOM is ready.
$(document).ready ->
    $(".ocarousel").each ->
        new Ocarousel(this)
| true | ###
jQuery Open Carousel
Copyright (c) 2013 PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
###
This is the main coffeescript file
Include jquery.openCarousel.js and jquery.openCarousel.css in your projects
###
# Create the Ocarousel class in the global namespace
class window.Ocarousel
### Initialize ###
ocarousel: null
ocarousel_window: null
ocarousel_container: null
indicators_container: null
frames: null
framesPre: null
framesPost: null
indicators: null
timer: null
active: 0
### Public callback ###
onSlideChanged: null
### Default Settings ###
@settings:
speed: .5 * 1000 # how long each transition is, milliseconds
period: 4 * 1000 # time between frame changes, milliseconds (Infinity will prevent autoscrolling)
transition: "scroll" # type of transition animation
perscroll: 1 # number of slides to pass over for each scroll
wrapearly: 0 # scroll to the beginning when reaching this many slides before the end
shuffle: false # setting to true will randomize the order of slides, false will keep the order given in html
indicator_fill: "#ffffff" # inactive fill color of indicator circles
indicator_r: 6 # radius of indicator circles
indicator_spacing: 6 # spacing between indicators
indicator_cy: 20 # y position of indicator circles
indicator_stroke: "#afafaf" # stroke color of indicator cirlces
indicator_strokewidth: "2" # stroke width of indicator circles
fullscreen: false # dynamically sets width of slides to width of screen
vertical: false # positions and scrolls slides vertically instead of horizontally
cycle: false # scrolled slides are appended to the end of the container to create a continuous carousel
constructor: (ocarousel) ->
me = @
# Get ocarousel divs
@ocarousel = $(ocarousel)
@ocarousel_window = $(@ocarousel).find(".ocarousel_window")
@frames = $(@ocarousel_window).children().clone()
@indicators_container = $(@ocarousel).find(".ocarousel_indicators")
@pagination_current = $(@ocarousel).find(".ocarousel_pagination_current")
@pagination_total = $(@ocarousel).find(".ocarousel_pagination_total")
# If there are 0 or 1 frames, then the carousel should not do anything!
if @frames.length > 1
# Get dynamic settings from data attributes
@settings = {}
@settings.speed = $(@ocarousel).data('ocarousel-speed') ? Ocarousel.settings.speed
@settings.period = $(@ocarousel).data('ocarousel-period') ? Ocarousel.settings.period
@settings.transition = $(@ocarousel).data('ocarousel-transition') ? Ocarousel.settings.transition
@settings.perscroll = $(@ocarousel).data('ocarousel-perscroll') ? Ocarousel.settings.perscroll
@settings.wrapearly = $(@ocarousel).data('ocarousel-wrapearly') ? Ocarousel.settings.wrapearly
@settings.shuffle = $(@ocarousel).data('ocarousel-shuffle') ? Ocarousel.settings.shuffle
@settings.indicator_fill = $(@ocarousel).data('ocarousel-indicator-fill') ? Ocarousel.settings.indicator_fill
@settings.indicator_r = $(@ocarousel).data('ocarousel-indicator-r') ? Ocarousel.settings.indicator_r
@settings.indicator_spacing = $(@ocarousel).data('ocarousel-indicator-spacing') ? Ocarousel.settings.indicator_spacing
@settings.indicator_cy = $(@ocarousel).data('ocarousel-indicator-cy') ? Ocarousel.settings.indicator_cy
@settings.indicator_stroke = $(@ocarousel).data('ocarousel-indicator-stroke') ? Ocarousel.settings.indicator_stroke
@settings.indicator_strokewidth = $(@ocarousel).data('ocarousel-indicator-strokewidth') ? Ocarousel.settings.indicator_strokewidth
@settings.fullscreen = $(@ocarousel).data('ocarousel-fullscreen') ? Ocarousel.settings.fullscreen
@settings.vertical = $(@ocarousel).data('ocarousel-vertical') ? Ocarousel.settings.vertical
@settings.cycle = $(@ocarousel).data('ocarousel-cycle') ? Ocarousel.settings.cycle
# Set up for cycle if needed
if @settings.cycle
# We need 3 copies of the children
children = $(@ocarousel_window).children().clone()
children2 = $(@ocarousel_window).children().clone()
$(@ocarousel_window).append(children)
$(@ocarousel_window).append(children2)
@frames = $(@ocarousel_window).children()
# Start at the middle copy of children
@active = @frames.length / 3
# Add the container for the slides
@ocarousel_container = document.createElement("div")
@ocarousel_container.className = "ocarousel_window_slides"
# Let everything be visible
$(@ocarousel).show()
# Stop the scroll timer
@timerStop()
# Remove the old frames from their original location outside of the container
@ocarousel_window.html("")
# Insert our container with all of the frames into the DOM
$(@ocarousel_window).get(0).appendChild(@ocarousel_container)
# Render the frames and supporting elements from data into the DOM
@render()
# Make sure the container is scrolled to the correct position
@setContainerPos()
# Start the scroll timer
@timerStart()
### Remove and reset everything in the DOM ###
render: () ->
# Shuffle the frames if shuffle is configured
if @settings.shuffle and @settings.shuffle != "false"
@frames = arrayShuffle(@frames)
# Clear the frames in the DOM and then inserts all frame from data into the DOM
$(@ocarousel_container).html("")
me = @
$(@frames).each (i) ->
# Dynamically set the width of the frames if fullscreen enabled
if me.settings.fullscreen and me.settings.fullscreen != "false"
$(this).css("width", $(window).width())
# Set slides to be vertical if vertical enabled
if me.settings.vertical
$(this).addClass("ocarousel_window_slides_vertical")
# Insert the frame
$(me.ocarousel_container).append(this)
# Render indicators if the user provided a div
if @indicators_container.length
# Clear the container
$(@indicators_container).html("")
# Setup the svg itself if supported
if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
indicators_parent = document.createElementNS("http://www.w3.org/2000/svg", "svg")
indicators_parent.setAttribute("version", "1.1")
$(@indicators_container).get(0).appendChild(indicators_parent)
# If SVG isn't supported, we'll insert images directly into the container div
else
indicators_parent = $(@indicators_container).get(0)
# Reset and setup the circle indicators
@indicators = []
length = @frames.length
start = 0
end = @frames.length - 1
if @settings.cycle
start = @frames.length / 3
end = 2 * @frames.length / 3 - 1
length = @frames.length / 3
cx = $(@indicators_container).width() / 2 - @settings.indicator_r * length - @settings.indicator_spacing * length / 2
for i in [start..end]
# Create an indicator as SVG if supported
link = if !@settings.cycle then i else i % (@frames.length / 3)
if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
indicator = document.createElementNS("http://www.w3.org/2000/svg", "circle")
indicator.className = "ocarousel_link"
indicator.setAttribute("data-ocarousel-link", link)
indicator.setAttribute("cx", cx)
indicator.setAttribute("cy", @settings.indicator_cy)
indicator.setAttribute("r", @settings.indicator_r)
indicator.setAttribute("stroke", @settings.indicator_stroke)
indicator.setAttribute("stroke-width", @settings.indicator_strokewidth)
indicator.setAttribute("fill", if i is @active then @settings.indicator_stroke else @settings.indicator_fill)
# Otherwise create an indicator image
else
indicator = document.createElement("div")
indicator.className = "ocarousel_link ocarousel_indicator ocarousel_indicator_" + if i is @active then "active" else "inactive"
indicator.setAttribute("data-ocarousel-link", link)
# Append it to the DOM and our array
indicators_parent.appendChild(indicator)
@indicators.push(indicator)
# Set its index as a data setAttribute
$(indicator).data("ocarousel_index", i)
# Setup the next cx
cx = cx + @settings.indicator_r * 2 + @settings.indicator_spacing
# Setup the pagination current page
if @pagination_current.length
$(@pagination_current).html(@active + 1)
# Setup the pagination total pages
if @pagination_total.length
$(@pagination_total).html(@frames.length)
# Reset and set the click event for scroll links
$(@ocarousel).find("[data-ocarousel-link]").unbind("click")
$(@ocarousel).find("[data-ocarousel-link]").bind "click", (event) ->
event.preventDefault()
goHere = $(this).data("ocarousel-link")
if goHere?
if goHere == "left" || goHere == "Left" || goHere == "l" || goHere == "L"
goHere = me.getPrev()
else if goHere == "right" || goHere == "Right" || goHere == "r" || goHere == "R"
goHere = me.getNext()
else if goHere == "first" || goHere == "First" || goHere == "beginning" || goHere == "Beginning"
goHere = me.getFirst()
else if goHere == "last" || goHere == "Last" || goHere == "end" || goHere == "End"
goHere = me.getLast()
else if me.settings.cycle
goHere = goHere + me.frames.length / 3
me.scrollTo goHere
# Set the screen resize event if fullscreen
if @settings.fullscreen
$(window).one "resize", () ->
me.render()
### Animate a transition to the given position ###
scrollTo: (index, instant = false) ->
me = @
if index?
@timerStop()
# Wrap to beginning/end if necessary
if index >= (@frames.length - @settings.wrapearly)
index = 0
else if index >= (@frames.length - @settings.perscroll)
index = @frames.length - @settings.perscroll
else if index < 0
perEnd = @frames.length - @settings.perscroll
wrapEnd = @frames.length - 1 - @settings.wrapearly
index = Math.min(perEnd, wrapEnd)
# If we're in cycle mode
if @settings.cycle
# If we're less than the middle set, move the end set to the beginning
if index < @frames.length / 3
for i in [@frames.length - 1..2 * (@frames.length / 3)]
$(@frames[i]).remove()
$(@ocarousel_container).prepend(@frames[i])
@active = @active + @frames.length / 3
index = index + @frames.length / 3
@frames = $(@ocarousel_container).children()
@setContainerPos()
# If we're greater than the end set, move the beginning set to the end
if index >= (@frames.length / 3) * 2
for i in [0..@frames.length / 3 - 1]
$(@frames[i]).remove()
$(@ocarousel_container).append(@frames[i])
@active = @active - @frames.length / 3
index = index - @frames.length / 3
@frames = $(@ocarousel_container).children()
@setContainerPos()
# Move the slides
$(@ocarousel_container).stop()
nextPos = @getPos index
# No animation
if instant
@setContainerPos(nextPos)
# Fade animation
else if @settings.transition == "fade"
if @settings.vertical
$(@ocarousel_container).fadeOut(@settings.speed, null)
.animate({top: nextPos + "px"}, 0)
.fadeIn(me.settings.speed)
else
$(@ocarousel_container).fadeOut(@settings.speed, null)
.animate({right: nextPos + "px"}, 0)
.fadeIn(me.settings.speed)
# Slide animation
else
if @settings.vertical
$(@ocarousel_container).animate {bottom: nextPos + "px"}, @settings.speed
else
$(@ocarousel_container).animate {right: nextPos + "px"}, @settings.speed
# Update the indicators if they exist
if @indicators?
indOld = @active
indNew = index
if @settings.cycle
indOld = indOld % (@frames.length / 3)
indNew = indNew % (@frames.length / 3)
# If SVG
if document.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1")
$(@indicators[indOld]).attr "fill", @settings.indicator_fill
$(@indicators[indNew]).attr "fill", @settings.indicator_stroke
# Otherwise images
else
$(@indicators[indOld]).removeClass("ocarousel_indicator_active")
$(@indicators[indOld]).addClass("ocarousel_indicator_inactive")
$(@indicators[indNew]).addClass("ocarousel_indicator_active")
$(@indicators[indNew]).removeClass("ocarousel_indicator_inactive")
# Update the active variable
indexOld = @active
@active = index
# Update the current pagination number if it exists
if @pagination_current.length
$(@pagination_current).html(@active + 1)
# Resume the scroll timer
@timerStart()
# Call the provided callback if it exists
if typeof @onSlideChanged is 'function'
@onSlideChanged(index, indexOld)
### Returns the distance of a frame from the beginning edge of its container ###
getPos: (index) ->
if @settings.vertical
return $(@frames[index]).position().top
else
return $(@frames[index]).position().left
### Returns the index of the next slide that should be shown ###
getNext: () ->
next = @active + @settings.perscroll
if next > (@frames.length - @settings.perscroll) && next < @frames.length
next = @frames.length - @settings.perscroll
# If the choosen frame is hidden, choose the next visible one
count = @frames.length
while count && !$(@frames[next]).is(":visible")
next++
if next > @frames.length - 1
next = 0
count--
return next
### Returns the index of the next slide that should be shown before the current position ###
getPrev: () ->
prev = @active - @settings.perscroll
if prev < 0 && @active != 0
prev = 0
# If the chosen frame is hidden, choose the previous visible one
count = @frames.length
while count && !$(@frames[prev]).is(":visible")
prev--
if prev < 0
prev = @frames.length - 1
count--
return prev
### Returns the index of the last slide ###
getLast: () ->
if @settings.cycle
return 2 * @frames.length / 3 - 1
else
return @frames.length - 1
### Returns the index of the last slide ###
getFirst: () ->
if @settings.cycle
return @frames.length
else
return 0
### Starts or resumes the scroll timer ###
timerStart: () ->
me = @
if @settings.period != Infinity
@timer = setInterval (() -> me.scrollTo (me.getNext())), @settings.period
### Stops the scroll timer ###
timerStop: () ->
if @timer?
clearInterval @timer
@timer = null
### Starts the timer if it is stopped, stops the timer if it is running ###
timerToggle: () ->
if @timer?
@timerStop()
else
@timerStart()
# Move ocarousel_container to the current active div instantly
setContainerPos: (pos = @getPos(@active)) ->
if @settings.vertical
$(@ocarousel_container).animate({bottom: pos + "px"}, 0)
else
$(@ocarousel_container).animate({right: pos + "px"}, 0)
### Removes a frame, keeping the carousel in an intuitive position afterwards ###
remove: (index) ->
if index > 0 and index < (@frames.length - 1)
# Remove from data and rerender
@frames.splice(index,1)
@render()
# If the carousel is ahead of the frame being removed, prevent it from jumping forward
if @active > index
@scrollTo(@active - 1, true)
### Adds a frame, keeping the carousel in an intuitive position afterwards ###
add: (elt, index) ->
if index > 0 and index < (@frames.length - 1)
@frames.splice(index, 0, elt)
@render()
# If the carousel is ahead of or at the frame being added, prevent it from jumping backward
if @active >= index
@scrollTo(@active + 1, true)
# Randomizes the order of elements in the passed in array in place.
# Adapted from http://sedition.com/perl/javascript-fy.html and https://gist.github.com/ddgromit/859699
arrayShuffle = (arr) ->
i = arr.length
if i == 0 then return false
while --i
j = Math.floor(Math.random() * (i+1))
tempi = arr[i]
tempj = arr[j]
arr[i] = tempj
arr[j] = tempi
return arr
$(document).ready ->
$(".ocarousel").each ->
new Ocarousel(this)
|
[
{
"context": " name: \"人のいちらん\"\n path: \"top\"\n ,\n name: \"ひとことめっせーじ\"\n path: \"message\"\n ]\n\n $scope.toggleList = -",
"end": 193,
"score": 0.9530409574508667,
"start": 184,
"tag": "NAME",
"value": "ひとことめっせーじ"
}
] | assets/js/controller/RootController.coffee | ky0615/atc_tram | 0 | angular.module "application"
.controller "RootController", ($rootScope, $scope, $mdSidenav, $timeout, $state)->
$scope.pageList = [
name: "人のいちらん"
path: "top"
,
name: "ひとことめっせーじ"
path: "message"
]
$scope.toggleList = ->
$mdSidenav("left").toggle()
$scope.clickItem = (item)->
$state.go item.path
| 678 | angular.module "application"
.controller "RootController", ($rootScope, $scope, $mdSidenav, $timeout, $state)->
$scope.pageList = [
name: "人のいちらん"
path: "top"
,
name: "<NAME>"
path: "message"
]
$scope.toggleList = ->
$mdSidenav("left").toggle()
$scope.clickItem = (item)->
$state.go item.path
| true | angular.module "application"
.controller "RootController", ($rootScope, $scope, $mdSidenav, $timeout, $state)->
$scope.pageList = [
name: "人のいちらん"
path: "top"
,
name: "PI:NAME:<NAME>END_PI"
path: "message"
]
$scope.toggleList = ->
$mdSidenav("left").toggle()
$scope.clickItem = (item)->
$state.go item.path
|
[
{
"context": "preventDefault()\n params =\n username: $(\"#username\").val()\n password: $(\"#password\").val()\n ",
"end": 710,
"score": 0.710910975933075,
"start": 702,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username: $(\"#username\").val()\n password: $(\"#password\").val()\n admin: $(\"#admin\").prop('checked')\n\n @us",
"end": 753,
"score": 0.9839153289794922,
"start": 735,
"tag": "PASSWORD",
"value": "$(\"#password\").val"
}
] | app/assets/javascripts/views/settings/modal.coffee | okapusta/skirace | 0 | class Skirace.Views.Settings.Modal extends Backbone.View
template: JST['settings/modal']
el: $ '.application-container'
userForm: '#user-form'
events:
'click .modal-close' : 'settingsClose'
'click #user-add' : 'addUser'
'click #save-settings' : 'saveSettings'
initialize: ->
@collection = new Skirace.Collections.Users()
@collection.fetch({
success: (data) ->
Modal.prototype.render(data.models)
})
render: (users) ->
$(@el).append @template({users: users, contests: contests()})
@populateForm()
settingsClose: ->
$('.settings-modal').remove()
addUser: (event) ->
event.preventDefault()
params =
username: $("#username").val()
password: $("#password").val()
admin: $("#admin").prop('checked')
@user = new Skirace.Models.User(params)
@user.save( {} ,
success: ->
role = if params.admin then "<i class='fa fa-check-circle' />" else ""
$("#user-table tbody").append("<tr><td>"+params.username+"</td><td>"+role+"</td></tr>")
error: (model, response, opts) ->
)
return
saveSettings: (event) ->
$.ajax(
type: 'POST',
url: '/settings',
data: $("#contest-form").serialize(),
success: ->
Modal.prototype.settingsClose();
)
event.preventDefault()
populateForm: ->
$.ajax(
type: 'GET',
url: '/settings',
success: (data) ->
$.each JSON.parse(data), (k, v) ->
element = $('[name='+k+']', $("#contest-form"))
switch element.attr('type')
when 'text'
element.val(v);
when 'checkbox'
if v
element.prop('checked', true)
if k == 'public_contest'
select = $('#public-contest-dropdown-select')
select.show()
select[0].selectedIndex = v;
else if k == 'multi_contest'
$('p#multi-contest').show()
else
element.prop('checked', false)
) | 151015 | class Skirace.Views.Settings.Modal extends Backbone.View
template: JST['settings/modal']
el: $ '.application-container'
userForm: '#user-form'
events:
'click .modal-close' : 'settingsClose'
'click #user-add' : 'addUser'
'click #save-settings' : 'saveSettings'
initialize: ->
@collection = new Skirace.Collections.Users()
@collection.fetch({
success: (data) ->
Modal.prototype.render(data.models)
})
render: (users) ->
$(@el).append @template({users: users, contests: contests()})
@populateForm()
settingsClose: ->
$('.settings-modal').remove()
addUser: (event) ->
event.preventDefault()
params =
username: $("#username").val()
password: <PASSWORD>()
admin: $("#admin").prop('checked')
@user = new Skirace.Models.User(params)
@user.save( {} ,
success: ->
role = if params.admin then "<i class='fa fa-check-circle' />" else ""
$("#user-table tbody").append("<tr><td>"+params.username+"</td><td>"+role+"</td></tr>")
error: (model, response, opts) ->
)
return
saveSettings: (event) ->
$.ajax(
type: 'POST',
url: '/settings',
data: $("#contest-form").serialize(),
success: ->
Modal.prototype.settingsClose();
)
event.preventDefault()
populateForm: ->
$.ajax(
type: 'GET',
url: '/settings',
success: (data) ->
$.each JSON.parse(data), (k, v) ->
element = $('[name='+k+']', $("#contest-form"))
switch element.attr('type')
when 'text'
element.val(v);
when 'checkbox'
if v
element.prop('checked', true)
if k == 'public_contest'
select = $('#public-contest-dropdown-select')
select.show()
select[0].selectedIndex = v;
else if k == 'multi_contest'
$('p#multi-contest').show()
else
element.prop('checked', false)
) | true | class Skirace.Views.Settings.Modal extends Backbone.View
template: JST['settings/modal']
el: $ '.application-container'
userForm: '#user-form'
events:
'click .modal-close' : 'settingsClose'
'click #user-add' : 'addUser'
'click #save-settings' : 'saveSettings'
initialize: ->
@collection = new Skirace.Collections.Users()
@collection.fetch({
success: (data) ->
Modal.prototype.render(data.models)
})
render: (users) ->
$(@el).append @template({users: users, contests: contests()})
@populateForm()
settingsClose: ->
$('.settings-modal').remove()
addUser: (event) ->
event.preventDefault()
params =
username: $("#username").val()
password: PI:PASSWORD:<PASSWORD>END_PI()
admin: $("#admin").prop('checked')
@user = new Skirace.Models.User(params)
@user.save( {} ,
success: ->
role = if params.admin then "<i class='fa fa-check-circle' />" else ""
$("#user-table tbody").append("<tr><td>"+params.username+"</td><td>"+role+"</td></tr>")
error: (model, response, opts) ->
)
return
saveSettings: (event) ->
$.ajax(
type: 'POST',
url: '/settings',
data: $("#contest-form").serialize(),
success: ->
Modal.prototype.settingsClose();
)
event.preventDefault()
populateForm: ->
$.ajax(
type: 'GET',
url: '/settings',
success: (data) ->
$.each JSON.parse(data), (k, v) ->
element = $('[name='+k+']', $("#contest-form"))
switch element.attr('type')
when 'text'
element.val(v);
when 'checkbox'
if v
element.prop('checked', true)
if k == 'public_contest'
select = $('#public-contest-dropdown-select')
select.show()
select[0].selectedIndex = v;
else if k == 'multi_contest'
$('p#multi-contest').show()
else
element.prop('checked', false)
) |
[
{
"context": " 'checkin'\n focus: 'username'\n refill: true\n clear_f",
"end": 143,
"score": 0.9995492100715637,
"start": 135,
"tag": "USERNAME",
"value": "username"
},
{
"context": "ll: true\n clear_fields:\t\t['username', 'password1', 'password2', 'email']\n handlers",
"end": 211,
"score": 0.9994572997093201,
"start": 203,
"tag": "USERNAME",
"value": "username"
},
{
"context": " true\n clear_fields:\t\t['username', 'password1', 'password2', 'email']\n handlers:\n h200:",
"end": 224,
"score": 0.9969028234481812,
"start": 215,
"tag": "PASSWORD",
"value": "password1"
},
{
"context": "rue\n clear_fields:\t\t['username', 'password1', 'password2', 'email']\n handlers:\n h200: (respons",
"end": 237,
"score": 0.9970661997795105,
"start": 228,
"tag": "PASSWORD",
"value": "password2"
}
] | static/script/pages/registration.coffee | happz/settlers | 1 | $(window).bind 'page_startup', () ->
new window.hlib.Form
fid: 'checkin'
focus: 'username'
refill: true
clear_fields: ['username', 'password1', 'password2', 'email']
handlers:
h200: (response, form) ->
form.info.success 'New account created, you may log in', true
redirect = () ->
window.hlib.redirect '/login/'
$('body').everyTime '15s', redirect
| 129475 | $(window).bind 'page_startup', () ->
new window.hlib.Form
fid: 'checkin'
focus: 'username'
refill: true
clear_fields: ['username', '<PASSWORD>', '<PASSWORD>', 'email']
handlers:
h200: (response, form) ->
form.info.success 'New account created, you may log in', true
redirect = () ->
window.hlib.redirect '/login/'
$('body').everyTime '15s', redirect
| true | $(window).bind 'page_startup', () ->
new window.hlib.Form
fid: 'checkin'
focus: 'username'
refill: true
clear_fields: ['username', 'PI:PASSWORD:<PASSWORD>END_PI', 'PI:PASSWORD:<PASSWORD>END_PI', 'email']
handlers:
h200: (response, form) ->
form.info.success 'New account created, you may log in', true
redirect = () ->
window.hlib.redirect '/login/'
$('body').everyTime '15s', redirect
|
[
{
"context": "t.use new localPassport {\n usernameField: 'inputEmail'\n passwordField: 'inputPassword'\n }, (e",
"end": 1577,
"score": 0.989698588848114,
"start": 1567,
"tag": "USERNAME",
"value": "inputEmail"
},
{
"context": "ernameField: 'inputEmail'\n passwordField: 'inputPassword'\n }, (email, password, done) ->\n db.findUse",
"end": 1616,
"score": 0.996303379535675,
"start": 1603,
"tag": "PASSWORD",
"value": "inputPassword"
},
{
"context": " return\n else\n user =\n name: name\n email: email\n password: bc",
"end": 3445,
"score": 0.6699864268302917,
"start": 3441,
"tag": "NAME",
"value": "name"
},
{
"context": "ame\n email: email\n password: bcrypt.hashSync password, 8\n\n db.saveUser user, (",
"end": 3499,
"score": 0.927577793598175,
"start": 3493,
"tag": "PASSWORD",
"value": "bcrypt"
},
{
"context": "email: email\n password: bcrypt.hashSync password, 8\n\n db.saveUser user, (err, success, id) ",
"end": 3517,
"score": 0.5011659264564514,
"start": 3509,
"tag": "PASSWORD",
"value": "password"
}
] | src/private/server.coffee | rpalmaotero/inventorium | 0 | express = require 'express'
path = require 'path'
morgan = require 'morgan'
cookieParser = require 'cookie-parser'
session = require 'express-session'
bodyParser = require 'body-parser'
multer = require 'multer'
connectFlash = require 'connect-flash'
passport = require 'passport'
localPassport = require 'passport-local'
bcrypt = require 'bcryptjs'
db = require './lib/db'
rInterface = require './lib/r-interface'
app = do express
port = process.env.PORT || 2000
env = process.env.NODE_ENV || "dev"
# Static content
app.use '/static/css', express.static 'public/css'
app.use '/static/fonts', express.static 'public/fonts'
app.use '/static/i', express.static 'public/i'
app.use '/static/js', express.static 'public/js'
# Jade
app.set 'views', path.join(__dirname, "../src/public/jade")
app.set 'view engine', 'jade'
# Logger and proxy handling
if env == "dev"
app.use morgan('dev')
else if env == "production"
app.set 'trust proxy'
app.use morgan 'combined'
# Sessions and flash data
app.use cookieParser()
app.use session(
cookie:
maxAge: 1000 * 60 * 60 * 24 * 31
secret: "easy money"
saveUninitialized: false
resave: false
store: db.getSessionStore session
maxAge: new Date(Date.now() + 1000 * 60 * 60 * 24 * 31)
rolling: true
)
app.use connectFlash()
# Setup db
db.setup()
# Passport authentication
app.use passport.initialize()
app.use passport.session()
passport.use new localPassport {
usernameField: 'inputEmail'
passwordField: 'inputPassword'
}, (email, password, done) ->
db.findUserByEmail email, (err, user) ->
# Database error
if err
return done err
# Unknown user
if !user
return done err, false
# Match!
if bcrypt.compareSync password, user.password
return done null, user
# Incorrect password
else
return done null, false
passport.serializeUser (user, done) ->
done null, user.id
passport.deserializeUser (id, done) ->
db.findUserById id, (err, user) ->
done err, user
# Middleware to add render data
app.use (req, res, next) ->
res.locals.user = req.user
#res.locals.baseUrl = 'https://' + req.get('host')
res.locals.baseUrl = 'http://' + req.get('host')
do next
# Middleware to ensure https:// is being used
#app.use (req, res, next) ->
# if req.secure
# do next
# return
# res.redirect 'https://' + req.headers.host + req.url
app.get '/', (req, res) ->
res.render 'index'
app.get '/signup', (req, res) ->
if typeof req.user != 'undefined'
res.redirect '/dashboard'
return
res.render 'signup'
app.post '/signup', bodyParser.urlencoded(extended: false), (req, res) ->
name = req.body.inputName
email = req.body.inputEmail
password = req.body.inputPassword
confirmPassword = req.body.inputConfirmPassword
invalidFields = []
if name == ""
invalidFields.push 'inputName'
if !validEmail email
invalidFields.push 'inputEmail'
if !(password == confirmPassword) || password.length == 0
invalidFields.push 'inputPassword'
if invalidFields.length > 0
res.render 'signup', invalidFields: invalidFields
return
else
user =
name: name
email: email
password: bcrypt.hashSync password, 8
db.saveUser user, (err, success, id) ->
if success
user["id"] = id
req.login user, (err) ->
req.flash 'firstName', name.split(" ")[0]
res.redirect '/dashboard'
return
else
res.render 'signup', serverError: true
return
app.get '/login', (req, res) ->
if typeof req.user != 'undefined'
res.redirect '/dashboard'
return
res.render 'login'
app.post '/login', bodyParser.urlencoded(extended: false), passport.authenticate 'local', {
successRedirect: '/dashboard'
failureRedirect: '/login',
failureFlash: true
}
app.get '/thanks', (req, res) ->
res.render 'thanks', name: req.flash("firstName")
app.get '/dashboard', (req, res) ->
if typeof req.user == 'undefined'
res.redirect '/login'
return
db.getProductsByUserId req.user.id, (err, products) ->
res.render 'dashboard', products: products
app.get '/producto/crear', (req, res) ->
res.render 'producto/crear'
app.post '/producto/crear', multer(dest: path.join(__dirname, '../', 'uploads')), (req, res) ->
product =
name: req.body.inputName
ownerId: req.user.id
status: 'pending'
filePath: req.files.inputFile.path
initialStock: req.body.inputStock
db.saveProduct product, (err, success, id) ->
if success
rInterface.getHoltWintersPrediction(id, product.filePath, db.savePrediction)
res.redirect '/dashboard'
app.get '/producto/:id', (req, res) ->
db.getProductById req.params.id, (err, product) ->
if product.ownerId != req.user.id
res.redirect '/dashboard'
return
res.render 'producto/ver', {product: product}
app.get '/prediccion/:id', (req, res) ->
db.getPredictionByProductId req.params.id, (err, prediction) ->
if prediction.ownerId != req.user.id
res.redirect '/dashboard'
return
res.json prediction
app.get '/prediccion/:id/descargar', (req, res) ->
db.getPredictionByProductId req.params.id, (err, prediction) ->
if prediction.ownerId != req.user.id || prediction.status == "pending"
res.redirect '/dashboard'
return
res.download prediction.results.resultsPath
app.get '/logout', (req, res) ->
do req.logout
res.redirect '/'
app.get '/about-us', (req, res) ->
res.render 'about-us'
validEmail = (email) ->
re = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
return re.test email
app.listen port
console.log '[BIGSALES-WEB] Listening on port ' + port
| 142475 | express = require 'express'
path = require 'path'
morgan = require 'morgan'
cookieParser = require 'cookie-parser'
session = require 'express-session'
bodyParser = require 'body-parser'
multer = require 'multer'
connectFlash = require 'connect-flash'
passport = require 'passport'
localPassport = require 'passport-local'
bcrypt = require 'bcryptjs'
db = require './lib/db'
rInterface = require './lib/r-interface'
app = do express
port = process.env.PORT || 2000
env = process.env.NODE_ENV || "dev"
# Static content
app.use '/static/css', express.static 'public/css'
app.use '/static/fonts', express.static 'public/fonts'
app.use '/static/i', express.static 'public/i'
app.use '/static/js', express.static 'public/js'
# Jade
app.set 'views', path.join(__dirname, "../src/public/jade")
app.set 'view engine', 'jade'
# Logger and proxy handling
if env == "dev"
app.use morgan('dev')
else if env == "production"
app.set 'trust proxy'
app.use morgan 'combined'
# Sessions and flash data
app.use cookieParser()
app.use session(
cookie:
maxAge: 1000 * 60 * 60 * 24 * 31
secret: "easy money"
saveUninitialized: false
resave: false
store: db.getSessionStore session
maxAge: new Date(Date.now() + 1000 * 60 * 60 * 24 * 31)
rolling: true
)
app.use connectFlash()
# Setup db
db.setup()
# Passport authentication
app.use passport.initialize()
app.use passport.session()
passport.use new localPassport {
usernameField: 'inputEmail'
passwordField: '<PASSWORD>'
}, (email, password, done) ->
db.findUserByEmail email, (err, user) ->
# Database error
if err
return done err
# Unknown user
if !user
return done err, false
# Match!
if bcrypt.compareSync password, user.password
return done null, user
# Incorrect password
else
return done null, false
passport.serializeUser (user, done) ->
done null, user.id
passport.deserializeUser (id, done) ->
db.findUserById id, (err, user) ->
done err, user
# Middleware to add render data
app.use (req, res, next) ->
res.locals.user = req.user
#res.locals.baseUrl = 'https://' + req.get('host')
res.locals.baseUrl = 'http://' + req.get('host')
do next
# Middleware to ensure https:// is being used
#app.use (req, res, next) ->
# if req.secure
# do next
# return
# res.redirect 'https://' + req.headers.host + req.url
app.get '/', (req, res) ->
res.render 'index'
app.get '/signup', (req, res) ->
if typeof req.user != 'undefined'
res.redirect '/dashboard'
return
res.render 'signup'
app.post '/signup', bodyParser.urlencoded(extended: false), (req, res) ->
name = req.body.inputName
email = req.body.inputEmail
password = req.body.inputPassword
confirmPassword = req.body.inputConfirmPassword
invalidFields = []
if name == ""
invalidFields.push 'inputName'
if !validEmail email
invalidFields.push 'inputEmail'
if !(password == confirmPassword) || password.length == 0
invalidFields.push 'inputPassword'
if invalidFields.length > 0
res.render 'signup', invalidFields: invalidFields
return
else
user =
name: <NAME>
email: email
password: <PASSWORD>.hashSync <PASSWORD>, 8
db.saveUser user, (err, success, id) ->
if success
user["id"] = id
req.login user, (err) ->
req.flash 'firstName', name.split(" ")[0]
res.redirect '/dashboard'
return
else
res.render 'signup', serverError: true
return
app.get '/login', (req, res) ->
if typeof req.user != 'undefined'
res.redirect '/dashboard'
return
res.render 'login'
app.post '/login', bodyParser.urlencoded(extended: false), passport.authenticate 'local', {
successRedirect: '/dashboard'
failureRedirect: '/login',
failureFlash: true
}
app.get '/thanks', (req, res) ->
res.render 'thanks', name: req.flash("firstName")
app.get '/dashboard', (req, res) ->
if typeof req.user == 'undefined'
res.redirect '/login'
return
db.getProductsByUserId req.user.id, (err, products) ->
res.render 'dashboard', products: products
app.get '/producto/crear', (req, res) ->
res.render 'producto/crear'
app.post '/producto/crear', multer(dest: path.join(__dirname, '../', 'uploads')), (req, res) ->
product =
name: req.body.inputName
ownerId: req.user.id
status: 'pending'
filePath: req.files.inputFile.path
initialStock: req.body.inputStock
db.saveProduct product, (err, success, id) ->
if success
rInterface.getHoltWintersPrediction(id, product.filePath, db.savePrediction)
res.redirect '/dashboard'
app.get '/producto/:id', (req, res) ->
db.getProductById req.params.id, (err, product) ->
if product.ownerId != req.user.id
res.redirect '/dashboard'
return
res.render 'producto/ver', {product: product}
app.get '/prediccion/:id', (req, res) ->
db.getPredictionByProductId req.params.id, (err, prediction) ->
if prediction.ownerId != req.user.id
res.redirect '/dashboard'
return
res.json prediction
app.get '/prediccion/:id/descargar', (req, res) ->
db.getPredictionByProductId req.params.id, (err, prediction) ->
if prediction.ownerId != req.user.id || prediction.status == "pending"
res.redirect '/dashboard'
return
res.download prediction.results.resultsPath
app.get '/logout', (req, res) ->
do req.logout
res.redirect '/'
app.get '/about-us', (req, res) ->
res.render 'about-us'
validEmail = (email) ->
re = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
return re.test email
app.listen port
console.log '[BIGSALES-WEB] Listening on port ' + port
| true | express = require 'express'
path = require 'path'
morgan = require 'morgan'
cookieParser = require 'cookie-parser'
session = require 'express-session'
bodyParser = require 'body-parser'
multer = require 'multer'
connectFlash = require 'connect-flash'
passport = require 'passport'
localPassport = require 'passport-local'
bcrypt = require 'bcryptjs'
db = require './lib/db'
rInterface = require './lib/r-interface'
app = do express
port = process.env.PORT || 2000
env = process.env.NODE_ENV || "dev"
# Static content
app.use '/static/css', express.static 'public/css'
app.use '/static/fonts', express.static 'public/fonts'
app.use '/static/i', express.static 'public/i'
app.use '/static/js', express.static 'public/js'
# Jade
app.set 'views', path.join(__dirname, "../src/public/jade")
app.set 'view engine', 'jade'
# Logger and proxy handling
if env == "dev"
app.use morgan('dev')
else if env == "production"
app.set 'trust proxy'
app.use morgan 'combined'
# Sessions and flash data
app.use cookieParser()
app.use session(
cookie:
maxAge: 1000 * 60 * 60 * 24 * 31
secret: "easy money"
saveUninitialized: false
resave: false
store: db.getSessionStore session
maxAge: new Date(Date.now() + 1000 * 60 * 60 * 24 * 31)
rolling: true
)
app.use connectFlash()
# Setup db
db.setup()
# Passport authentication
app.use passport.initialize()
app.use passport.session()
passport.use new localPassport {
usernameField: 'inputEmail'
passwordField: 'PI:PASSWORD:<PASSWORD>END_PI'
}, (email, password, done) ->
db.findUserByEmail email, (err, user) ->
# Database error
if err
return done err
# Unknown user
if !user
return done err, false
# Match!
if bcrypt.compareSync password, user.password
return done null, user
# Incorrect password
else
return done null, false
passport.serializeUser (user, done) ->
done null, user.id
passport.deserializeUser (id, done) ->
db.findUserById id, (err, user) ->
done err, user
# Middleware to add render data
app.use (req, res, next) ->
res.locals.user = req.user
#res.locals.baseUrl = 'https://' + req.get('host')
res.locals.baseUrl = 'http://' + req.get('host')
do next
# Middleware to ensure https:// is being used
#app.use (req, res, next) ->
# if req.secure
# do next
# return
# res.redirect 'https://' + req.headers.host + req.url
app.get '/', (req, res) ->
res.render 'index'
app.get '/signup', (req, res) ->
if typeof req.user != 'undefined'
res.redirect '/dashboard'
return
res.render 'signup'
app.post '/signup', bodyParser.urlencoded(extended: false), (req, res) ->
name = req.body.inputName
email = req.body.inputEmail
password = req.body.inputPassword
confirmPassword = req.body.inputConfirmPassword
invalidFields = []
if name == ""
invalidFields.push 'inputName'
if !validEmail email
invalidFields.push 'inputEmail'
if !(password == confirmPassword) || password.length == 0
invalidFields.push 'inputPassword'
if invalidFields.length > 0
res.render 'signup', invalidFields: invalidFields
return
else
user =
name: PI:NAME:<NAME>END_PI
email: email
password: PI:PASSWORD:<PASSWORD>END_PI.hashSync PI:PASSWORD:<PASSWORD>END_PI, 8
db.saveUser user, (err, success, id) ->
if success
user["id"] = id
req.login user, (err) ->
req.flash 'firstName', name.split(" ")[0]
res.redirect '/dashboard'
return
else
res.render 'signup', serverError: true
return
app.get '/login', (req, res) ->
if typeof req.user != 'undefined'
res.redirect '/dashboard'
return
res.render 'login'
app.post '/login', bodyParser.urlencoded(extended: false), passport.authenticate 'local', {
successRedirect: '/dashboard'
failureRedirect: '/login',
failureFlash: true
}
app.get '/thanks', (req, res) ->
res.render 'thanks', name: req.flash("firstName")
app.get '/dashboard', (req, res) ->
if typeof req.user == 'undefined'
res.redirect '/login'
return
db.getProductsByUserId req.user.id, (err, products) ->
res.render 'dashboard', products: products
app.get '/producto/crear', (req, res) ->
res.render 'producto/crear'
app.post '/producto/crear', multer(dest: path.join(__dirname, '../', 'uploads')), (req, res) ->
product =
name: req.body.inputName
ownerId: req.user.id
status: 'pending'
filePath: req.files.inputFile.path
initialStock: req.body.inputStock
db.saveProduct product, (err, success, id) ->
if success
rInterface.getHoltWintersPrediction(id, product.filePath, db.savePrediction)
res.redirect '/dashboard'
app.get '/producto/:id', (req, res) ->
db.getProductById req.params.id, (err, product) ->
if product.ownerId != req.user.id
res.redirect '/dashboard'
return
res.render 'producto/ver', {product: product}
app.get '/prediccion/:id', (req, res) ->
db.getPredictionByProductId req.params.id, (err, prediction) ->
if prediction.ownerId != req.user.id
res.redirect '/dashboard'
return
res.json prediction
app.get '/prediccion/:id/descargar', (req, res) ->
db.getPredictionByProductId req.params.id, (err, prediction) ->
if prediction.ownerId != req.user.id || prediction.status == "pending"
res.redirect '/dashboard'
return
res.download prediction.results.resultsPath
app.get '/logout', (req, res) ->
do req.logout
res.redirect '/'
app.get '/about-us', (req, res) ->
res.render 'about-us'
validEmail = (email) ->
re = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
return re.test email
app.listen port
console.log '[BIGSALES-WEB] Listening on port ' + port
|
[
{
"context": "js\n\n PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.",
"end": 215,
"score": 0.999845027923584,
"start": 198,
"tag": "NAME",
"value": "Benjamin Blundell"
},
{
"context": " PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.com\n\n This so",
"end": 231,
"score": 0.9999309182167053,
"start": 218,
"tag": "EMAIL",
"value": "ben@pxljs.com"
}
] | src/pxl.coffee | OniDaito/pxljs | 1 | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
Benjamin Blundell - ben@pxljs.com
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
- Resources
* http://www.yuiblog.com/blog/2007/06/12/module-pattern/
* http://www.plexical.com/blog/2012/01/25/writing-coffeescript-for-browser-and-nod/
* https://github.com/field/FieldKit.js
- TODO
* Need a shorthand here for sure!
###
# ## PXL entry point
# Setting up a namespace this way instead of with a function as we are using npm and
# Browserify. We effectively go with a mixin strategy, treating the namespace as an
# object (which it always was). We override the effects of rquires with util.extend
# Without util.extend we dont quite get the namespace effect when we browserify
PXL = {}
GL = {} # Global GL - Is overwritten often - Shorthand for PXL.Context.gl
util = require './util/util'
# extend - adds objects to our master coffeegl object, namespace style
extend = ->
switch arguments.length
when 1
util.extend PXL, arguments[0]
when 2
pkg = arguments[0]
PXL[pkg] = {} if not PXL[pkg]?
util.extend PXL[pkg], arguments[1]
# If we are in a browser add to the window object
window.PXL = PXL if window?
window.GL = GL if window?
# Add extra classes to our coffeegl namespace
extend require './core/app'
extend require './core/node'
extend "Math", require './math/math'
extend "Math", require './math/curve'
extend "Math", require './math//math_functions'
extend "Colour", require './colour/colour'
extend "Geometry", require './geometry/primitive'
extend "Geometry", require './geometry/shape'
extend "Geometry", require './geometry/plane'
extend "Import", require './import/obj'
extend "Import", require './import/md5'
extend "GL", require './gl/shader'
extend "GL", require './gl/uber_shader_paths'
extend "GL", require './gl/uber_shader'
extend "GL", require './gl/fbo'
extend "GL", require './gl/texture'
extend "GL", require './gl/webgl'
extend "Util", require './util/request'
extend "Util", require './util/promise'
extend "Util", require './util/util'
extend "Util", require './util/signal'
extend "Util", require './util/log'
extend "Util", require './util/voronoi'
extend "Util", require './util/medial_axis'
extend "Util", require './util/webcam'
extend "Util", require './util/noise'
extend "Util", require './util/cache_var'
extend "Interact", require './interact/key'
extend "Interact", require './interact/mouse'
extend "Interact", require './interact/touch'
extend "Camera", require './camera/camera'
extend "Light", require './light/light'
extend "Material", require './material/material'
extend "Material", require './material/basic'
extend "Material", require './material/phong'
extend "Material", require './material/depth'
extend "Material", require './material/normal'
extend "Animation", require './animation/animation'
# _setupFrame - the accepted method for setting up onEachFrame in various browsers
# TODO - We should record the apps listening on this so we can pause individual ones?
PXL.applications = []
_setupFrame = (root) ->
if root.requestAnimationFrame
onEachFrame = (context, run) ->
# Check to make sure we arent duplicating contexts and run funcs (due to restarting)
for [c,r] in PXL.applications
if c == context and r == run
return
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
requestAnimationFrame _cb
_cb()
else if root.webkitRequestAnimationFrame
onEachFrame = (context, run) ->
# Check to make sure we arent duplicating contexts and run funcs (due to restarting)
for [c,r] in PXL.applications
if c == context and r == run
return
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
webkitRequestAnimationFrame _cb
_cb()
else if root.mozRequestAnimationFrame
onEachFrame = (context, run) ->
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
mozRequestAnimationFrame _cb
_cb()
else
onEachFrame = (context, run) ->
PXL.applications.push [context,run]
_go = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
setInterval _go, 1000 / 60
root.onEachFrame = onEachFrame
if window?
_setupFrame(window)
module.exports =
PXL : PXL
GL : GL
| 203324 | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
<NAME> - <EMAIL>
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
- Resources
* http://www.yuiblog.com/blog/2007/06/12/module-pattern/
* http://www.plexical.com/blog/2012/01/25/writing-coffeescript-for-browser-and-nod/
* https://github.com/field/FieldKit.js
- TODO
* Need a shorthand here for sure!
###
# ## PXL entry point
# Setting up a namespace this way instead of with a function as we are using npm and
# Browserify. We effectively go with a mixin strategy, treating the namespace as an
# object (which it always was). We override the effects of rquires with util.extend
# Without util.extend we dont quite get the namespace effect when we browserify
PXL = {}
GL = {} # Global GL - Is overwritten often - Shorthand for PXL.Context.gl
util = require './util/util'
# extend - adds objects to our master coffeegl object, namespace style
extend = ->
switch arguments.length
when 1
util.extend PXL, arguments[0]
when 2
pkg = arguments[0]
PXL[pkg] = {} if not PXL[pkg]?
util.extend PXL[pkg], arguments[1]
# If we are in a browser add to the window object
window.PXL = PXL if window?
window.GL = GL if window?
# Add extra classes to our coffeegl namespace
extend require './core/app'
extend require './core/node'
extend "Math", require './math/math'
extend "Math", require './math/curve'
extend "Math", require './math//math_functions'
extend "Colour", require './colour/colour'
extend "Geometry", require './geometry/primitive'
extend "Geometry", require './geometry/shape'
extend "Geometry", require './geometry/plane'
extend "Import", require './import/obj'
extend "Import", require './import/md5'
extend "GL", require './gl/shader'
extend "GL", require './gl/uber_shader_paths'
extend "GL", require './gl/uber_shader'
extend "GL", require './gl/fbo'
extend "GL", require './gl/texture'
extend "GL", require './gl/webgl'
extend "Util", require './util/request'
extend "Util", require './util/promise'
extend "Util", require './util/util'
extend "Util", require './util/signal'
extend "Util", require './util/log'
extend "Util", require './util/voronoi'
extend "Util", require './util/medial_axis'
extend "Util", require './util/webcam'
extend "Util", require './util/noise'
extend "Util", require './util/cache_var'
extend "Interact", require './interact/key'
extend "Interact", require './interact/mouse'
extend "Interact", require './interact/touch'
extend "Camera", require './camera/camera'
extend "Light", require './light/light'
extend "Material", require './material/material'
extend "Material", require './material/basic'
extend "Material", require './material/phong'
extend "Material", require './material/depth'
extend "Material", require './material/normal'
extend "Animation", require './animation/animation'
# _setupFrame - the accepted method for setting up onEachFrame in various browsers
# TODO - We should record the apps listening on this so we can pause individual ones?
PXL.applications = []
_setupFrame = (root) ->
if root.requestAnimationFrame
onEachFrame = (context, run) ->
# Check to make sure we arent duplicating contexts and run funcs (due to restarting)
for [c,r] in PXL.applications
if c == context and r == run
return
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
requestAnimationFrame _cb
_cb()
else if root.webkitRequestAnimationFrame
onEachFrame = (context, run) ->
# Check to make sure we arent duplicating contexts and run funcs (due to restarting)
for [c,r] in PXL.applications
if c == context and r == run
return
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
webkitRequestAnimationFrame _cb
_cb()
else if root.mozRequestAnimationFrame
onEachFrame = (context, run) ->
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
mozRequestAnimationFrame _cb
_cb()
else
onEachFrame = (context, run) ->
PXL.applications.push [context,run]
_go = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
setInterval _go, 1000 / 60
root.onEachFrame = onEachFrame
if window?
_setupFrame(window)
module.exports =
PXL : PXL
GL : GL
| true | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
PI:NAME:<NAME>END_PI - PI:EMAIL:<EMAIL>END_PI
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
- Resources
* http://www.yuiblog.com/blog/2007/06/12/module-pattern/
* http://www.plexical.com/blog/2012/01/25/writing-coffeescript-for-browser-and-nod/
* https://github.com/field/FieldKit.js
- TODO
* Need a shorthand here for sure!
###
# ## PXL entry point
# Setting up a namespace this way instead of with a function as we are using npm and
# Browserify. We effectively go with a mixin strategy, treating the namespace as an
# object (which it always was). We override the effects of rquires with util.extend
# Without util.extend we dont quite get the namespace effect when we browserify
PXL = {}
GL = {} # Global GL - Is overwritten often - Shorthand for PXL.Context.gl
util = require './util/util'
# extend - adds objects to our master coffeegl object, namespace style
extend = ->
switch arguments.length
when 1
util.extend PXL, arguments[0]
when 2
pkg = arguments[0]
PXL[pkg] = {} if not PXL[pkg]?
util.extend PXL[pkg], arguments[1]
# If we are in a browser add to the window object
window.PXL = PXL if window?
window.GL = GL if window?
# Add extra classes to our coffeegl namespace
extend require './core/app'
extend require './core/node'
extend "Math", require './math/math'
extend "Math", require './math/curve'
extend "Math", require './math//math_functions'
extend "Colour", require './colour/colour'
extend "Geometry", require './geometry/primitive'
extend "Geometry", require './geometry/shape'
extend "Geometry", require './geometry/plane'
extend "Import", require './import/obj'
extend "Import", require './import/md5'
extend "GL", require './gl/shader'
extend "GL", require './gl/uber_shader_paths'
extend "GL", require './gl/uber_shader'
extend "GL", require './gl/fbo'
extend "GL", require './gl/texture'
extend "GL", require './gl/webgl'
extend "Util", require './util/request'
extend "Util", require './util/promise'
extend "Util", require './util/util'
extend "Util", require './util/signal'
extend "Util", require './util/log'
extend "Util", require './util/voronoi'
extend "Util", require './util/medial_axis'
extend "Util", require './util/webcam'
extend "Util", require './util/noise'
extend "Util", require './util/cache_var'
extend "Interact", require './interact/key'
extend "Interact", require './interact/mouse'
extend "Interact", require './interact/touch'
extend "Camera", require './camera/camera'
extend "Light", require './light/light'
extend "Material", require './material/material'
extend "Material", require './material/basic'
extend "Material", require './material/phong'
extend "Material", require './material/depth'
extend "Material", require './material/normal'
extend "Animation", require './animation/animation'
# _setupFrame - the accepted method for setting up onEachFrame in various browsers
# TODO - We should record the apps listening on this so we can pause individual ones?
PXL.applications = []
_setupFrame = (root) ->
if root.requestAnimationFrame
onEachFrame = (context, run) ->
# Check to make sure we arent duplicating contexts and run funcs (due to restarting)
for [c,r] in PXL.applications
if c == context and r == run
return
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
requestAnimationFrame _cb
_cb()
else if root.webkitRequestAnimationFrame
onEachFrame = (context, run) ->
# Check to make sure we arent duplicating contexts and run funcs (due to restarting)
for [c,r] in PXL.applications
if c == context and r == run
return
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
webkitRequestAnimationFrame _cb
_cb()
else if root.mozRequestAnimationFrame
onEachFrame = (context, run) ->
PXL.applications.push [context,run]
_cb = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
mozRequestAnimationFrame _cb
_cb()
else
onEachFrame = (context, run) ->
PXL.applications.push [context,run]
_go = () ->
for app in PXL.applications
PXL.Context.switchContext app[0]
app[1].call app[0]
setInterval _go, 1000 / 60
root.onEachFrame = onEachFrame
if window?
_setupFrame(window)
module.exports =
PXL : PXL
GL : GL
|
[
{
"context": "nd details are available at:\n# https://github.com/rcbops/opencenter or upon written request.\n#\n# You may o",
"end": 656,
"score": 0.7827126979827881,
"start": 650,
"tag": "USERNAME",
"value": "rcbops"
},
{
"context": "user, pass) ->\n dashboard.authUser user\n token = \"#{user}:#{pass}\"\n dashboard.authHeader = Authorization: \"Basic #{",
"end": 3593,
"score": 0.9110277891159058,
"start": 3576,
"tag": "KEY",
"value": "\"#{user}:#{pass}\""
}
] | source/coffee/dashboard.coffee | rcbops/opencenter-dashboard | 0 | # OpenCenter™ is Copyright 2013 by Rackspace US, Inc.
# ###############################################################################
#
# OpenCenter is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. This version
# of OpenCenter includes Rackspace trademarks and logos, and in accordance with
# Section 6 of the License, the provision of commercial support services in
# conjunction with a version of OpenCenter which includes Rackspace trademarks
# and logos is prohibited. OpenCenter source code and details are available at:
# https://github.com/rcbops/opencenter or upon written request.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this notice,
# is available in the LICENSE file accompanying this software.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# ###############################################################################
# Define Array::filter if not using ECMA5
unless Array::filter
Array::filter = (cb) ->
el for el in @ when cb el
# Create and store namespace
dashboard = exports?.dashboard ? @dashboard = {}
dashboard.selector = (cb, def) ->
selected = ko.observable def ? {} unless selected?
cb def if cb? and def?
ko.computed
read: ->
selected()
write: (data) ->
selected data
cb data if cb?
# Object -> Array mapper
dashboard.toArray = (obj) ->
array = []
for prop of obj
if obj.hasOwnProperty(prop)
array.push
key: prop
value: obj[prop]
array # Return mapped array
dashboard.getPopoverPlacement = (tip, element) ->
isWithinBounds = (elementPosition) ->
boundTop < elementPosition.top and boundLeft < elementPosition.left and boundRight > (elementPosition.left + actualWidth) and boundBottom > (elementPosition.top + actualHeight)
$element = $ element
pos = $.extend {}, $element.offset(),
width: element.offsetWidth
height: element.offsetHeight
actualWidth = 283
actualHeight = 117
boundTop = $(document).scrollTop()
boundLeft = $(document).scrollLeft()
boundRight = boundLeft + $(window).width()
boundBottom = boundTop + $(window).height()
elementAbove =
top: pos.top - actualHeight
left: pos.left + pos.width / 2 - actualWidth / 2
elementBelow =
top: pos.top + pos.height
left: pos.left + pos.width / 2 - actualWidth / 2
elementLeft =
top: pos.top + pos.height / 2 - actualHeight / 2
left: pos.left - actualWidth
elementRight =
top: pos.top + pos.height / 2 - actualHeight / 2
left: pos.left + pos.width
above = isWithinBounds elementAbove
below = isWithinBounds elementBelow
left = isWithinBounds elementLeft
right = isWithinBounds elementRight
(if above then "top" else (if below then "bottom" else (if left then "left" else (if right then "right" else "right"))))
# Keep track of AJAX success/failure
dashboard.siteEnabled = ko.observable true
dashboard.siteDisable = -> dashboard.siteEnabled false
dashboard.siteEnable = -> dashboard.siteEnabled true
# Toggle task/log pane
dashboard.displayTaskLogPane = ko.observable false
# Fill in auth header with user/pass
dashboard.makeBasicAuth = (user, pass) ->
dashboard.authUser user
token = "#{user}:#{pass}"
dashboard.authHeader = Authorization: "Basic #{btoa token}"
# Auth bits
dashboard.authHeader = {}
dashboard.authUser = ko.observable ""
dashboard.authCheck = ko.computed ->
if dashboard.authUser() isnt "" then true else false
dashboard.authLogout = ->
# Clear out all the things
model = dashboard.indexModel
dashboard.authHeader = {}
dashboard.authUser ""
model.keyItems = {}
model.tmpItems []
# Try grabbing new nodes; will trigger login form if needed
dashboard.getNodes "/octr/nodes/", model.tmpItems, model.keyItems
# Guard to spin requests while logging in
dashboard.loggingIn = false
dashboard.drawStepProgress = ->
$form = $("form#inputForm")
$multiStepForm = $form.find(".carousel")
$formBody = $form.find(".modal-body")
$formControls = $form.find(".modal-footer")
if $multiStepForm.length and $formControls.length
$back = $formControls.find(".back")
$next = $formControls.find(".next")
$submit = $formControls.find(".submit")
slideCount = $multiStepForm.find('.carousel-inner .item').length
if slideCount is 1
$back.hide()
$next.hide()
$submit.show()
else
str = ""
count = 0
percentWidth = 100 / slideCount
while count < slideCount
str += "<div id=\"progress-bar-" + (count + 1) + "\" class=\"progress-bar\" style=\"width:" + percentWidth + "%;\"></div>"
count++
$progressMeter = $("#progress-meter")
$progressMeter.remove() if $progressMeter.length
$progressMeter = $('<div id="progress-meter">' + str + '</div>').prependTo($formBody)
$back.attr "disabled", true
$submit.hide()
$multiStepForm.on "slid", "", ->
$this = $(this)
$progressMeter.find(".progress-bar").removeClass "filled"
$activeProgressBars = $progressMeter.find('.progress-bar').slice 0, parseInt $(".carousel-inner .item.active").index() + 1, 10
$activeProgressBars.addClass "filled"
$formControls.find("button").show().removeAttr "disabled"
if $this.find(".carousel-inner .item:first").hasClass("active")
$back.attr "disabled", true
$submit.hide()
else if $this.find(".carousel-inner .item:last").hasClass("active")
$next.hide()
$submit.show()
else
$submit.hide()
# Modal helpers
dashboard.showModal = (id) ->
$(".modal").not(id).modal "hide"
dashboard.drawStepProgress() if id is '#indexInputModal'
$(id).modal("show").on "shown", ->
$(id).find("input").first().focus()
dashboard.hideModal = (id) ->
$(id).modal "hide"
# Track AJAX requests keyed by URL
dashboard.pendingRequests = {}
# Kill requests by regex matching url
dashboard.killRequests = (match) ->
for k,v of dashboard.pendingRequests
if match.test k
v.abort()
# AJAX wrapper which auto-retries on error
dashboard.ajax = (type, url, data, success, error, timeout, statusCode) ->
req = ->
if dashboard.loggingIn # If logging in
setTimeout req, 1000 # Spin request
else
dashboard.pendingRequests[url] = $.ajax # Call and store request
type: type
url: url
data: data
headers: dashboard.authHeader # Add basic auth
success: (data) ->
dashboard.siteEnable() # Enable site
dashboard.hideModal "#indexNoConnectionModal" # Hide immediately
req.backoff = 250 # Reset on success
success data if success?
error: (jqXHR, textStatus, errorThrown) ->
retry = error jqXHR, textStatus, errorThrown if error?
if jqXHR.status is 401 # Unauthorized!
dashboard.loggingIn = true # Block other requests
dashboard.showModal "#indexLoginModal" # Gimmeh logins
setTimeout req, 1000 # Requeue this one
else if retry is true and type is "GET" # Opted in and not a POST
setTimeout req, req.backoff # Retry with incremental backoff
unless jqXHR.status is 0 # Didn't timeout
dashboard.siteDisable() # Don't disable on repolls and such
req.backoff *= 2 if req.backoff < 32000 # Do eet
complete: -> delete dashboard.pendingRequests[url] # Clean up our request
statusCode: statusCode
dataType: "json"
contentType: "application/json; charset=utf-8"
timeout: timeout
req.backoff = 250 # Start at 0.25 sec
req()
# Request wrappers
dashboard.get = (url, success, error, timeout, statusCode) ->
dashboard.ajax "GET", url, null, success, error, timeout, statusCode
dashboard.post = (url, data, success, error, timeout, statusCode) ->
dashboard.ajax "POST", url, data, success, error, timeout, statusCode
# Basic JS/JSON grabber
dashboard.getData = (url, cb, err) ->
dashboard.get url, (data) ->
cb data if cb?
, err ? -> true # Retry
# Use the mapping plugin on a JS object, optional mapping mapping (yo dawg), wrap for array
dashboard.mapData = (data, pin, map={}, wrap=true) ->
data = [data] if wrap
ko.mapping.fromJS data, map, pin
# Get and map data, f'reals
dashboard.getMappedData = (url, pin, map={}, wrap=true) ->
dashboard.get url, (data) ->
dashboard.mapData data, pin, map, wrap
, -> true # Retry
# Parse node array into a flat, keyed boject, injecting children for traversal
dashboard.parseNodes = (data, keyed={}) ->
root = {} # We might not find a root; make sure it's empty each call
# Index node list by ID, merging/updating if keyed was provided
for node in data?.nodes ? []
# Stub if missing
node.dash ?= {}
node.dash.actions ?= []
node.dash.statusClass ?= ko.observable "disabled_state"
node.dash.statusText ?= ko.observable "Unknown"
node.dash.locked ?= ko.observable false
node.dash.children ?= {}
node.dash.hovered ?= keyed[nid]?.dash.hovered ? false
node.facts ?= {}
node.facts.backends ?= []
nid = node.id
if keyed[nid]? # Updating existing node?
pid = keyed[nid].facts?.parent_id # Grab current parent
if pid? and pid isnt node.facts?.parent_id # If new parent is different
dashboard.killPopovers() # We're moving so kill popovers
keyed[nid].dash.hovered = false # And cancel hovers
delete keyed[pid].dash.children[nid] # Remove node from old parent's children
keyed[nid] = node # Add/update node
# Build child arrays
for id of keyed
node = keyed[id]
pid = node.facts?.parent_id
if pid? # Has parent ID?
pnode = keyed?[pid]
if pnode? # Parent exists?
pnode.dash.children[id] = node # Add to parent's children
else # We're an orphan (broken data or from previous merge)
delete keyed[id] # No mercy for orphans!
else if id is "1" # Mebbe root node?
root = node # Point at it
else # Invalid root node!
delete keyed[id] # Pew Pew!
# Node staleness checker
stale = (node) ->
if node?.attrs?.last_checkin? # Have we checked in at all?
if Math.abs(+node.attrs.last_checkin - +dashboard.txID) > 90 then true # Hasn't checked in for 3 cycles
else false
else false
# Fill other properties
for id of keyed
node = keyed[id]
if node?.attrs?.last_task is "failed"
dashboard.setError node
else if stale(node) or node?.attrs?.last_task is "rollback"
dashboard.setWarning node
else if node.task_id?
dashboard.setBusy node
else if node.facts.maintenance_mode
dashboard.setDisabled node
else
dashboard.setGood node
if node.dash.hovered
dashboard.updatePopover $("[data-bind~='popper'],[data-id='#{id}']"), node, true # Update matching popover
# If we have a non-empty display name, set the name to it
if node?.attrs?.display_name? and !!node.attrs.display_name
node.name = node.attrs.display_name
node.dash.agents = (v for k,v of node.dash.children when "agent" in v.facts.backends)
node.dash.containers = (v for k,v of node.dash.children when "container" in v.facts.backends)
if node?.attrs?.locked # Node is locked
node.dash.locked true
root # Return root for mapping
dashboard.setError = (node) ->
node.dash.statusClass "error_state"
node.dash.statusText "Error"
node.dash.locked false
dashboard.setWarning = (node) ->
node.dash.statusClass "processing_state"
node.dash.statusText "Warning"
node.dash.locked false
dashboard.setBusy = (node) ->
node.dash.statusClass "warning_state"
node.dash.statusText "Busy"
node.dash.locked true
dashboard.setGood = (node) ->
node.dash.statusClass "ok_state"
node.dash.statusText "Good"
node.dash.locked false
dashboard.setDisabled = (node) ->
node.dash.statusClass "disabled_state"
node.dash.statusText "Disabled"
# Process nodes and map to pin
dashboard.updateNodes = (data, pin, keys) ->
dashboard.mapData dashboard.parseNodes(data, keys), pin
# Get and process nodes from url
dashboard.getNodes = (url, pin, keys) ->
dashboard.get url, (data) ->
dashboard.updateNodes data, pin, keys
, -> true # Retry
# Long-poll for node changes and do the right things on changes
dashboard.pollNodes = (cb, timeout) ->
repoll = (trans) ->
if trans? # Have transaction data?
dashboard.sKey = trans.session_key
dashboard.txID = trans.txid
poll "/octr/nodes/updates/#{dashboard.sKey}/#{dashboard.txID}?poll" # Build URL
else # Get you some
dashboard.getData "/octr/updates", (pass) ->
repoll pass?.transaction # Push it back through
poll = (url) ->
dashboard.get url
, (data) -> # Success
cb data?.nodes if cb?
repoll data?.transaction
, (jqXHR, textStatus, errorThrown) -> # Error; can retry after this cb
switch jqXHR.status
when 410 # Gone
repoll() # Cycle transaction
dashboard.getNodes "/octr/nodes/", dashboard.indexModel.tmpItems, dashboard.indexModel.keyItems
else
true # Retry otherwise
, timeout
repoll() # DO EET
# Just map the tasks
dashboard.updateTasks = (data, pin, keys) ->
dashboard.mapData dashboard.parseTasks(data, keys), pin, {}, false # Don't wrap
# Get and process tasks from url
dashboard.getTasks = (url, pin, keys) ->
dashboard.get url, (data) ->
dashboard.updateTasks data, pin, keys
#, -> true # Retry
# Dumb polling for now
dashboard.pollTasks = (cb, timeout) ->
poll = (url) ->
dashboard.get url
, (data) -> # Success
cb data if cb?
setTimeout poll, timeout, url
, (jqXHR, textStatus, errorThrown) ->
true # Retry on failure
, timeout
poll "/octr/tasks/" # Do it
dashboard.parseTasks = (data, keyed) ->
ids = [] # List of new IDs
# Parse new tasks
tasks = for task in data.tasks
id = task.id # Grab
ids.push id # Push
unless task.action in ["logfile.tail", "logfile.watch"] # Don't show log tasks
task.dash = {} # Stub our config storage
switch task.state
when "pending","delivered","running"
task.dash.statusClass = "warning_state" # Busy
when "timeout"
task.dash.statusClass = "processing_state" # Warning
when "cancelled"
task.dash.statusClass = "error_state" # Error
when "done"
task.dash.statusClass = "ok_state" # Good
if task?.result?.result_code # Non-zero result is bad
task.dash.statusClass = "error_state" # Error
if keyed[id]? # Updating existing task?
task.dash.active = keyed[id].dash.active # Track selected status
else task.dash.active = false
task.dash.label = "#{task.id}: #{task.action} [#{task.state}]"
if task.dash.active # If a task is selected
dashboard.indexModel.wsTaskTitle task.dash.label # Update the title
keyed[id] = task # Set and return it
else continue # Skip it
# So we can update logpane when the active task is reaped/none are selected
activeCount = 0
# Prune
for k of keyed
unless +k in ids # Coerce to int, lulz
delete keyed[k] # Toss reaped tasks
else
activeCount++ # Got an active (selected) one!
if !activeCount # If none were selected
# Reset log pane bits
dashboard.indexModel.wsTaskTitle "Select a task to view its log"
dashboard.indexModel.wsTaskLog "..."
tasks # Return list
dashboard.popoverOptions =
html: true
delay: 0
trigger: "manual"
animation: false
placement: dashboard.getPopoverPlacement
container: 'body'
dashboard.killPopovers = ->
$("[data-bind~='popper']").popover "hide"
$(".popover").remove()
dashboard.updatePopover = (el, obj, show=false) ->
opts = dashboard.popoverOptions
doIt = (task) ->
opts["title"] =
#TODO: Figure out why this fires twice: console.log "title"
"""
#{obj.name ? "Details"}
<ul class="backend-list tags">
#{('<li><div class="item">' + backend + '</div></li>' for backend in obj.facts.backends).join('')}
</ul>
"""
opts["content"] =
"""
<dl class="node-data">
<dt>ID</dt>
<dd>#{obj.id}</dd>
<dt>Status</dt>
<dd>#{obj.dash.statusText()}</dd>
<dt>Task</dt>
<dd>#{task ? 'idle'}</dd>
<dt>Last Task</dt>
<dd>#{obj?.attrs?.last_task ? 'unknown'}</dd>
</dl>
"""
$(el).popover opts
if show
dashboard.killPopovers()
$(el).popover "show"
if obj?.task_id?
dashboard.get "/octr/tasks/#{obj.task_id}"
, (data) -> doIt data?.task?.action
, -> doIt()
else
doIt()
dashboard.convertValueType = (type) ->
switch type
when "password" then "password"
else "text"
| 2376 | # OpenCenter™ is Copyright 2013 by Rackspace US, Inc.
# ###############################################################################
#
# OpenCenter is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. This version
# of OpenCenter includes Rackspace trademarks and logos, and in accordance with
# Section 6 of the License, the provision of commercial support services in
# conjunction with a version of OpenCenter which includes Rackspace trademarks
# and logos is prohibited. OpenCenter source code and details are available at:
# https://github.com/rcbops/opencenter or upon written request.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this notice,
# is available in the LICENSE file accompanying this software.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# ###############################################################################
# Define Array::filter if not using ECMA5
unless Array::filter
Array::filter = (cb) ->
el for el in @ when cb el
# Create and store namespace
dashboard = exports?.dashboard ? @dashboard = {}
dashboard.selector = (cb, def) ->
selected = ko.observable def ? {} unless selected?
cb def if cb? and def?
ko.computed
read: ->
selected()
write: (data) ->
selected data
cb data if cb?
# Object -> Array mapper
dashboard.toArray = (obj) ->
array = []
for prop of obj
if obj.hasOwnProperty(prop)
array.push
key: prop
value: obj[prop]
array # Return mapped array
dashboard.getPopoverPlacement = (tip, element) ->
isWithinBounds = (elementPosition) ->
boundTop < elementPosition.top and boundLeft < elementPosition.left and boundRight > (elementPosition.left + actualWidth) and boundBottom > (elementPosition.top + actualHeight)
$element = $ element
pos = $.extend {}, $element.offset(),
width: element.offsetWidth
height: element.offsetHeight
actualWidth = 283
actualHeight = 117
boundTop = $(document).scrollTop()
boundLeft = $(document).scrollLeft()
boundRight = boundLeft + $(window).width()
boundBottom = boundTop + $(window).height()
elementAbove =
top: pos.top - actualHeight
left: pos.left + pos.width / 2 - actualWidth / 2
elementBelow =
top: pos.top + pos.height
left: pos.left + pos.width / 2 - actualWidth / 2
elementLeft =
top: pos.top + pos.height / 2 - actualHeight / 2
left: pos.left - actualWidth
elementRight =
top: pos.top + pos.height / 2 - actualHeight / 2
left: pos.left + pos.width
above = isWithinBounds elementAbove
below = isWithinBounds elementBelow
left = isWithinBounds elementLeft
right = isWithinBounds elementRight
(if above then "top" else (if below then "bottom" else (if left then "left" else (if right then "right" else "right"))))
# Keep track of AJAX success/failure
dashboard.siteEnabled = ko.observable true
dashboard.siteDisable = -> dashboard.siteEnabled false
dashboard.siteEnable = -> dashboard.siteEnabled true
# Toggle task/log pane
dashboard.displayTaskLogPane = ko.observable false
# Fill in auth header with user/pass
dashboard.makeBasicAuth = (user, pass) ->
dashboard.authUser user
token = <KEY>
dashboard.authHeader = Authorization: "Basic #{btoa token}"
# Auth bits
dashboard.authHeader = {}
dashboard.authUser = ko.observable ""
dashboard.authCheck = ko.computed ->
if dashboard.authUser() isnt "" then true else false
dashboard.authLogout = ->
# Clear out all the things
model = dashboard.indexModel
dashboard.authHeader = {}
dashboard.authUser ""
model.keyItems = {}
model.tmpItems []
# Try grabbing new nodes; will trigger login form if needed
dashboard.getNodes "/octr/nodes/", model.tmpItems, model.keyItems
# Guard to spin requests while logging in
dashboard.loggingIn = false
dashboard.drawStepProgress = ->
$form = $("form#inputForm")
$multiStepForm = $form.find(".carousel")
$formBody = $form.find(".modal-body")
$formControls = $form.find(".modal-footer")
if $multiStepForm.length and $formControls.length
$back = $formControls.find(".back")
$next = $formControls.find(".next")
$submit = $formControls.find(".submit")
slideCount = $multiStepForm.find('.carousel-inner .item').length
if slideCount is 1
$back.hide()
$next.hide()
$submit.show()
else
str = ""
count = 0
percentWidth = 100 / slideCount
while count < slideCount
str += "<div id=\"progress-bar-" + (count + 1) + "\" class=\"progress-bar\" style=\"width:" + percentWidth + "%;\"></div>"
count++
$progressMeter = $("#progress-meter")
$progressMeter.remove() if $progressMeter.length
$progressMeter = $('<div id="progress-meter">' + str + '</div>').prependTo($formBody)
$back.attr "disabled", true
$submit.hide()
$multiStepForm.on "slid", "", ->
$this = $(this)
$progressMeter.find(".progress-bar").removeClass "filled"
$activeProgressBars = $progressMeter.find('.progress-bar').slice 0, parseInt $(".carousel-inner .item.active").index() + 1, 10
$activeProgressBars.addClass "filled"
$formControls.find("button").show().removeAttr "disabled"
if $this.find(".carousel-inner .item:first").hasClass("active")
$back.attr "disabled", true
$submit.hide()
else if $this.find(".carousel-inner .item:last").hasClass("active")
$next.hide()
$submit.show()
else
$submit.hide()
# Modal helpers
dashboard.showModal = (id) ->
$(".modal").not(id).modal "hide"
dashboard.drawStepProgress() if id is '#indexInputModal'
$(id).modal("show").on "shown", ->
$(id).find("input").first().focus()
dashboard.hideModal = (id) ->
$(id).modal "hide"
# Track AJAX requests keyed by URL
dashboard.pendingRequests = {}
# Kill requests by regex matching url
dashboard.killRequests = (match) ->
for k,v of dashboard.pendingRequests
if match.test k
v.abort()
# AJAX wrapper which auto-retries on error
dashboard.ajax = (type, url, data, success, error, timeout, statusCode) ->
req = ->
if dashboard.loggingIn # If logging in
setTimeout req, 1000 # Spin request
else
dashboard.pendingRequests[url] = $.ajax # Call and store request
type: type
url: url
data: data
headers: dashboard.authHeader # Add basic auth
success: (data) ->
dashboard.siteEnable() # Enable site
dashboard.hideModal "#indexNoConnectionModal" # Hide immediately
req.backoff = 250 # Reset on success
success data if success?
error: (jqXHR, textStatus, errorThrown) ->
retry = error jqXHR, textStatus, errorThrown if error?
if jqXHR.status is 401 # Unauthorized!
dashboard.loggingIn = true # Block other requests
dashboard.showModal "#indexLoginModal" # Gimmeh logins
setTimeout req, 1000 # Requeue this one
else if retry is true and type is "GET" # Opted in and not a POST
setTimeout req, req.backoff # Retry with incremental backoff
unless jqXHR.status is 0 # Didn't timeout
dashboard.siteDisable() # Don't disable on repolls and such
req.backoff *= 2 if req.backoff < 32000 # Do eet
complete: -> delete dashboard.pendingRequests[url] # Clean up our request
statusCode: statusCode
dataType: "json"
contentType: "application/json; charset=utf-8"
timeout: timeout
req.backoff = 250 # Start at 0.25 sec
req()
# Request wrappers
dashboard.get = (url, success, error, timeout, statusCode) ->
dashboard.ajax "GET", url, null, success, error, timeout, statusCode
dashboard.post = (url, data, success, error, timeout, statusCode) ->
dashboard.ajax "POST", url, data, success, error, timeout, statusCode
# Basic JS/JSON grabber
dashboard.getData = (url, cb, err) ->
dashboard.get url, (data) ->
cb data if cb?
, err ? -> true # Retry
# Use the mapping plugin on a JS object, optional mapping mapping (yo dawg), wrap for array
dashboard.mapData = (data, pin, map={}, wrap=true) ->
data = [data] if wrap
ko.mapping.fromJS data, map, pin
# Get and map data, f'reals
dashboard.getMappedData = (url, pin, map={}, wrap=true) ->
dashboard.get url, (data) ->
dashboard.mapData data, pin, map, wrap
, -> true # Retry
# Parse node array into a flat, keyed boject, injecting children for traversal
dashboard.parseNodes = (data, keyed={}) ->
root = {} # We might not find a root; make sure it's empty each call
# Index node list by ID, merging/updating if keyed was provided
for node in data?.nodes ? []
# Stub if missing
node.dash ?= {}
node.dash.actions ?= []
node.dash.statusClass ?= ko.observable "disabled_state"
node.dash.statusText ?= ko.observable "Unknown"
node.dash.locked ?= ko.observable false
node.dash.children ?= {}
node.dash.hovered ?= keyed[nid]?.dash.hovered ? false
node.facts ?= {}
node.facts.backends ?= []
nid = node.id
if keyed[nid]? # Updating existing node?
pid = keyed[nid].facts?.parent_id # Grab current parent
if pid? and pid isnt node.facts?.parent_id # If new parent is different
dashboard.killPopovers() # We're moving so kill popovers
keyed[nid].dash.hovered = false # And cancel hovers
delete keyed[pid].dash.children[nid] # Remove node from old parent's children
keyed[nid] = node # Add/update node
# Build child arrays
for id of keyed
node = keyed[id]
pid = node.facts?.parent_id
if pid? # Has parent ID?
pnode = keyed?[pid]
if pnode? # Parent exists?
pnode.dash.children[id] = node # Add to parent's children
else # We're an orphan (broken data or from previous merge)
delete keyed[id] # No mercy for orphans!
else if id is "1" # Mebbe root node?
root = node # Point at it
else # Invalid root node!
delete keyed[id] # Pew Pew!
# Node staleness checker
stale = (node) ->
if node?.attrs?.last_checkin? # Have we checked in at all?
if Math.abs(+node.attrs.last_checkin - +dashboard.txID) > 90 then true # Hasn't checked in for 3 cycles
else false
else false
# Fill other properties
for id of keyed
node = keyed[id]
if node?.attrs?.last_task is "failed"
dashboard.setError node
else if stale(node) or node?.attrs?.last_task is "rollback"
dashboard.setWarning node
else if node.task_id?
dashboard.setBusy node
else if node.facts.maintenance_mode
dashboard.setDisabled node
else
dashboard.setGood node
if node.dash.hovered
dashboard.updatePopover $("[data-bind~='popper'],[data-id='#{id}']"), node, true # Update matching popover
# If we have a non-empty display name, set the name to it
if node?.attrs?.display_name? and !!node.attrs.display_name
node.name = node.attrs.display_name
node.dash.agents = (v for k,v of node.dash.children when "agent" in v.facts.backends)
node.dash.containers = (v for k,v of node.dash.children when "container" in v.facts.backends)
if node?.attrs?.locked # Node is locked
node.dash.locked true
root # Return root for mapping
dashboard.setError = (node) ->
node.dash.statusClass "error_state"
node.dash.statusText "Error"
node.dash.locked false
dashboard.setWarning = (node) ->
node.dash.statusClass "processing_state"
node.dash.statusText "Warning"
node.dash.locked false
dashboard.setBusy = (node) ->
node.dash.statusClass "warning_state"
node.dash.statusText "Busy"
node.dash.locked true
dashboard.setGood = (node) ->
node.dash.statusClass "ok_state"
node.dash.statusText "Good"
node.dash.locked false
dashboard.setDisabled = (node) ->
node.dash.statusClass "disabled_state"
node.dash.statusText "Disabled"
# Process nodes and map to pin
dashboard.updateNodes = (data, pin, keys) ->
dashboard.mapData dashboard.parseNodes(data, keys), pin
# Get and process nodes from url
dashboard.getNodes = (url, pin, keys) ->
dashboard.get url, (data) ->
dashboard.updateNodes data, pin, keys
, -> true # Retry
# Long-poll for node changes and do the right things on changes
dashboard.pollNodes = (cb, timeout) ->
repoll = (trans) ->
if trans? # Have transaction data?
dashboard.sKey = trans.session_key
dashboard.txID = trans.txid
poll "/octr/nodes/updates/#{dashboard.sKey}/#{dashboard.txID}?poll" # Build URL
else # Get you some
dashboard.getData "/octr/updates", (pass) ->
repoll pass?.transaction # Push it back through
poll = (url) ->
dashboard.get url
, (data) -> # Success
cb data?.nodes if cb?
repoll data?.transaction
, (jqXHR, textStatus, errorThrown) -> # Error; can retry after this cb
switch jqXHR.status
when 410 # Gone
repoll() # Cycle transaction
dashboard.getNodes "/octr/nodes/", dashboard.indexModel.tmpItems, dashboard.indexModel.keyItems
else
true # Retry otherwise
, timeout
repoll() # DO EET
# Just map the tasks
dashboard.updateTasks = (data, pin, keys) ->
dashboard.mapData dashboard.parseTasks(data, keys), pin, {}, false # Don't wrap
# Get and process tasks from url
dashboard.getTasks = (url, pin, keys) ->
dashboard.get url, (data) ->
dashboard.updateTasks data, pin, keys
#, -> true # Retry
# Dumb polling for now
dashboard.pollTasks = (cb, timeout) ->
poll = (url) ->
dashboard.get url
, (data) -> # Success
cb data if cb?
setTimeout poll, timeout, url
, (jqXHR, textStatus, errorThrown) ->
true # Retry on failure
, timeout
poll "/octr/tasks/" # Do it
dashboard.parseTasks = (data, keyed) ->
ids = [] # List of new IDs
# Parse new tasks
tasks = for task in data.tasks
id = task.id # Grab
ids.push id # Push
unless task.action in ["logfile.tail", "logfile.watch"] # Don't show log tasks
task.dash = {} # Stub our config storage
switch task.state
when "pending","delivered","running"
task.dash.statusClass = "warning_state" # Busy
when "timeout"
task.dash.statusClass = "processing_state" # Warning
when "cancelled"
task.dash.statusClass = "error_state" # Error
when "done"
task.dash.statusClass = "ok_state" # Good
if task?.result?.result_code # Non-zero result is bad
task.dash.statusClass = "error_state" # Error
if keyed[id]? # Updating existing task?
task.dash.active = keyed[id].dash.active # Track selected status
else task.dash.active = false
task.dash.label = "#{task.id}: #{task.action} [#{task.state}]"
if task.dash.active # If a task is selected
dashboard.indexModel.wsTaskTitle task.dash.label # Update the title
keyed[id] = task # Set and return it
else continue # Skip it
# So we can update logpane when the active task is reaped/none are selected
activeCount = 0
# Prune
for k of keyed
unless +k in ids # Coerce to int, lulz
delete keyed[k] # Toss reaped tasks
else
activeCount++ # Got an active (selected) one!
if !activeCount # If none were selected
# Reset log pane bits
dashboard.indexModel.wsTaskTitle "Select a task to view its log"
dashboard.indexModel.wsTaskLog "..."
tasks # Return list
dashboard.popoverOptions =
html: true
delay: 0
trigger: "manual"
animation: false
placement: dashboard.getPopoverPlacement
container: 'body'
dashboard.killPopovers = ->
$("[data-bind~='popper']").popover "hide"
$(".popover").remove()
dashboard.updatePopover = (el, obj, show=false) ->
opts = dashboard.popoverOptions
doIt = (task) ->
opts["title"] =
#TODO: Figure out why this fires twice: console.log "title"
"""
#{obj.name ? "Details"}
<ul class="backend-list tags">
#{('<li><div class="item">' + backend + '</div></li>' for backend in obj.facts.backends).join('')}
</ul>
"""
opts["content"] =
"""
<dl class="node-data">
<dt>ID</dt>
<dd>#{obj.id}</dd>
<dt>Status</dt>
<dd>#{obj.dash.statusText()}</dd>
<dt>Task</dt>
<dd>#{task ? 'idle'}</dd>
<dt>Last Task</dt>
<dd>#{obj?.attrs?.last_task ? 'unknown'}</dd>
</dl>
"""
$(el).popover opts
if show
dashboard.killPopovers()
$(el).popover "show"
if obj?.task_id?
dashboard.get "/octr/tasks/#{obj.task_id}"
, (data) -> doIt data?.task?.action
, -> doIt()
else
doIt()
dashboard.convertValueType = (type) ->
switch type
when "password" then "password"
else "text"
| true | # OpenCenter™ is Copyright 2013 by Rackspace US, Inc.
# ###############################################################################
#
# OpenCenter is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. This version
# of OpenCenter includes Rackspace trademarks and logos, and in accordance with
# Section 6 of the License, the provision of commercial support services in
# conjunction with a version of OpenCenter which includes Rackspace trademarks
# and logos is prohibited. OpenCenter source code and details are available at:
# https://github.com/rcbops/opencenter or upon written request.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this notice,
# is available in the LICENSE file accompanying this software.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# ###############################################################################
# Define Array::filter if not using ECMA5
unless Array::filter
Array::filter = (cb) ->
el for el in @ when cb el
# Create and store namespace
dashboard = exports?.dashboard ? @dashboard = {}
dashboard.selector = (cb, def) ->
selected = ko.observable def ? {} unless selected?
cb def if cb? and def?
ko.computed
read: ->
selected()
write: (data) ->
selected data
cb data if cb?
# Object -> Array mapper
dashboard.toArray = (obj) ->
array = []
for prop of obj
if obj.hasOwnProperty(prop)
array.push
key: prop
value: obj[prop]
array # Return mapped array
dashboard.getPopoverPlacement = (tip, element) ->
isWithinBounds = (elementPosition) ->
boundTop < elementPosition.top and boundLeft < elementPosition.left and boundRight > (elementPosition.left + actualWidth) and boundBottom > (elementPosition.top + actualHeight)
$element = $ element
pos = $.extend {}, $element.offset(),
width: element.offsetWidth
height: element.offsetHeight
actualWidth = 283
actualHeight = 117
boundTop = $(document).scrollTop()
boundLeft = $(document).scrollLeft()
boundRight = boundLeft + $(window).width()
boundBottom = boundTop + $(window).height()
elementAbove =
top: pos.top - actualHeight
left: pos.left + pos.width / 2 - actualWidth / 2
elementBelow =
top: pos.top + pos.height
left: pos.left + pos.width / 2 - actualWidth / 2
elementLeft =
top: pos.top + pos.height / 2 - actualHeight / 2
left: pos.left - actualWidth
elementRight =
top: pos.top + pos.height / 2 - actualHeight / 2
left: pos.left + pos.width
above = isWithinBounds elementAbove
below = isWithinBounds elementBelow
left = isWithinBounds elementLeft
right = isWithinBounds elementRight
(if above then "top" else (if below then "bottom" else (if left then "left" else (if right then "right" else "right"))))
# Keep track of AJAX success/failure
dashboard.siteEnabled = ko.observable true
dashboard.siteDisable = -> dashboard.siteEnabled false
dashboard.siteEnable = -> dashboard.siteEnabled true
# Toggle task/log pane
dashboard.displayTaskLogPane = ko.observable false
# Fill in auth header with user/pass
dashboard.makeBasicAuth = (user, pass) ->
dashboard.authUser user
token = PI:KEY:<KEY>END_PI
dashboard.authHeader = Authorization: "Basic #{btoa token}"
# Auth bits
dashboard.authHeader = {}
dashboard.authUser = ko.observable ""
dashboard.authCheck = ko.computed ->
if dashboard.authUser() isnt "" then true else false
dashboard.authLogout = ->
# Clear out all the things
model = dashboard.indexModel
dashboard.authHeader = {}
dashboard.authUser ""
model.keyItems = {}
model.tmpItems []
# Try grabbing new nodes; will trigger login form if needed
dashboard.getNodes "/octr/nodes/", model.tmpItems, model.keyItems
# Guard to spin requests while logging in
dashboard.loggingIn = false
dashboard.drawStepProgress = ->
$form = $("form#inputForm")
$multiStepForm = $form.find(".carousel")
$formBody = $form.find(".modal-body")
$formControls = $form.find(".modal-footer")
if $multiStepForm.length and $formControls.length
$back = $formControls.find(".back")
$next = $formControls.find(".next")
$submit = $formControls.find(".submit")
slideCount = $multiStepForm.find('.carousel-inner .item').length
if slideCount is 1
$back.hide()
$next.hide()
$submit.show()
else
str = ""
count = 0
percentWidth = 100 / slideCount
while count < slideCount
str += "<div id=\"progress-bar-" + (count + 1) + "\" class=\"progress-bar\" style=\"width:" + percentWidth + "%;\"></div>"
count++
$progressMeter = $("#progress-meter")
$progressMeter.remove() if $progressMeter.length
$progressMeter = $('<div id="progress-meter">' + str + '</div>').prependTo($formBody)
$back.attr "disabled", true
$submit.hide()
$multiStepForm.on "slid", "", ->
$this = $(this)
$progressMeter.find(".progress-bar").removeClass "filled"
$activeProgressBars = $progressMeter.find('.progress-bar').slice 0, parseInt $(".carousel-inner .item.active").index() + 1, 10
$activeProgressBars.addClass "filled"
$formControls.find("button").show().removeAttr "disabled"
if $this.find(".carousel-inner .item:first").hasClass("active")
$back.attr "disabled", true
$submit.hide()
else if $this.find(".carousel-inner .item:last").hasClass("active")
$next.hide()
$submit.show()
else
$submit.hide()
# Modal helpers
dashboard.showModal = (id) ->
$(".modal").not(id).modal "hide"
dashboard.drawStepProgress() if id is '#indexInputModal'
$(id).modal("show").on "shown", ->
$(id).find("input").first().focus()
dashboard.hideModal = (id) ->
$(id).modal "hide"
# Track AJAX requests keyed by URL
dashboard.pendingRequests = {}
# Kill requests by regex matching url
dashboard.killRequests = (match) ->
for k,v of dashboard.pendingRequests
if match.test k
v.abort()
# AJAX wrapper which auto-retries on error
dashboard.ajax = (type, url, data, success, error, timeout, statusCode) ->
req = ->
if dashboard.loggingIn # If logging in
setTimeout req, 1000 # Spin request
else
dashboard.pendingRequests[url] = $.ajax # Call and store request
type: type
url: url
data: data
headers: dashboard.authHeader # Add basic auth
success: (data) ->
dashboard.siteEnable() # Enable site
dashboard.hideModal "#indexNoConnectionModal" # Hide immediately
req.backoff = 250 # Reset on success
success data if success?
error: (jqXHR, textStatus, errorThrown) ->
retry = error jqXHR, textStatus, errorThrown if error?
if jqXHR.status is 401 # Unauthorized!
dashboard.loggingIn = true # Block other requests
dashboard.showModal "#indexLoginModal" # Gimmeh logins
setTimeout req, 1000 # Requeue this one
else if retry is true and type is "GET" # Opted in and not a POST
setTimeout req, req.backoff # Retry with incremental backoff
unless jqXHR.status is 0 # Didn't timeout
dashboard.siteDisable() # Don't disable on repolls and such
req.backoff *= 2 if req.backoff < 32000 # Do eet
complete: -> delete dashboard.pendingRequests[url] # Clean up our request
statusCode: statusCode
dataType: "json"
contentType: "application/json; charset=utf-8"
timeout: timeout
req.backoff = 250 # Start at 0.25 sec
req()
# Request wrappers
dashboard.get = (url, success, error, timeout, statusCode) ->
dashboard.ajax "GET", url, null, success, error, timeout, statusCode
dashboard.post = (url, data, success, error, timeout, statusCode) ->
dashboard.ajax "POST", url, data, success, error, timeout, statusCode
# Basic JS/JSON grabber
dashboard.getData = (url, cb, err) ->
dashboard.get url, (data) ->
cb data if cb?
, err ? -> true # Retry
# Use the mapping plugin on a JS object, optional mapping mapping (yo dawg), wrap for array
dashboard.mapData = (data, pin, map={}, wrap=true) ->
data = [data] if wrap
ko.mapping.fromJS data, map, pin
# Get and map data, f'reals
dashboard.getMappedData = (url, pin, map={}, wrap=true) ->
dashboard.get url, (data) ->
dashboard.mapData data, pin, map, wrap
, -> true # Retry
# Parse node array into a flat, keyed boject, injecting children for traversal
dashboard.parseNodes = (data, keyed={}) ->
root = {} # We might not find a root; make sure it's empty each call
# Index node list by ID, merging/updating if keyed was provided
for node in data?.nodes ? []
# Stub if missing
node.dash ?= {}
node.dash.actions ?= []
node.dash.statusClass ?= ko.observable "disabled_state"
node.dash.statusText ?= ko.observable "Unknown"
node.dash.locked ?= ko.observable false
node.dash.children ?= {}
node.dash.hovered ?= keyed[nid]?.dash.hovered ? false
node.facts ?= {}
node.facts.backends ?= []
nid = node.id
if keyed[nid]? # Updating existing node?
pid = keyed[nid].facts?.parent_id # Grab current parent
if pid? and pid isnt node.facts?.parent_id # If new parent is different
dashboard.killPopovers() # We're moving so kill popovers
keyed[nid].dash.hovered = false # And cancel hovers
delete keyed[pid].dash.children[nid] # Remove node from old parent's children
keyed[nid] = node # Add/update node
# Build child arrays
for id of keyed
node = keyed[id]
pid = node.facts?.parent_id
if pid? # Has parent ID?
pnode = keyed?[pid]
if pnode? # Parent exists?
pnode.dash.children[id] = node # Add to parent's children
else # We're an orphan (broken data or from previous merge)
delete keyed[id] # No mercy for orphans!
else if id is "1" # Mebbe root node?
root = node # Point at it
else # Invalid root node!
delete keyed[id] # Pew Pew!
# Node staleness checker
stale = (node) ->
if node?.attrs?.last_checkin? # Have we checked in at all?
if Math.abs(+node.attrs.last_checkin - +dashboard.txID) > 90 then true # Hasn't checked in for 3 cycles
else false
else false
# Fill other properties
for id of keyed
node = keyed[id]
if node?.attrs?.last_task is "failed"
dashboard.setError node
else if stale(node) or node?.attrs?.last_task is "rollback"
dashboard.setWarning node
else if node.task_id?
dashboard.setBusy node
else if node.facts.maintenance_mode
dashboard.setDisabled node
else
dashboard.setGood node
if node.dash.hovered
dashboard.updatePopover $("[data-bind~='popper'],[data-id='#{id}']"), node, true # Update matching popover
# If we have a non-empty display name, set the name to it
if node?.attrs?.display_name? and !!node.attrs.display_name
node.name = node.attrs.display_name
node.dash.agents = (v for k,v of node.dash.children when "agent" in v.facts.backends)
node.dash.containers = (v for k,v of node.dash.children when "container" in v.facts.backends)
if node?.attrs?.locked # Node is locked
node.dash.locked true
root # Return root for mapping
dashboard.setError = (node) ->
node.dash.statusClass "error_state"
node.dash.statusText "Error"
node.dash.locked false
dashboard.setWarning = (node) ->
node.dash.statusClass "processing_state"
node.dash.statusText "Warning"
node.dash.locked false
dashboard.setBusy = (node) ->
node.dash.statusClass "warning_state"
node.dash.statusText "Busy"
node.dash.locked true
dashboard.setGood = (node) ->
node.dash.statusClass "ok_state"
node.dash.statusText "Good"
node.dash.locked false
dashboard.setDisabled = (node) ->
node.dash.statusClass "disabled_state"
node.dash.statusText "Disabled"
# Process nodes and map to pin
dashboard.updateNodes = (data, pin, keys) ->
dashboard.mapData dashboard.parseNodes(data, keys), pin
# Get and process nodes from url
dashboard.getNodes = (url, pin, keys) ->
dashboard.get url, (data) ->
dashboard.updateNodes data, pin, keys
, -> true # Retry
# Long-poll for node changes and do the right things on changes
dashboard.pollNodes = (cb, timeout) ->
repoll = (trans) ->
if trans? # Have transaction data?
dashboard.sKey = trans.session_key
dashboard.txID = trans.txid
poll "/octr/nodes/updates/#{dashboard.sKey}/#{dashboard.txID}?poll" # Build URL
else # Get you some
dashboard.getData "/octr/updates", (pass) ->
repoll pass?.transaction # Push it back through
poll = (url) ->
dashboard.get url
, (data) -> # Success
cb data?.nodes if cb?
repoll data?.transaction
, (jqXHR, textStatus, errorThrown) -> # Error; can retry after this cb
switch jqXHR.status
when 410 # Gone
repoll() # Cycle transaction
dashboard.getNodes "/octr/nodes/", dashboard.indexModel.tmpItems, dashboard.indexModel.keyItems
else
true # Retry otherwise
, timeout
repoll() # DO EET
# Just map the tasks
dashboard.updateTasks = (data, pin, keys) ->
dashboard.mapData dashboard.parseTasks(data, keys), pin, {}, false # Don't wrap
# Get and process tasks from url
dashboard.getTasks = (url, pin, keys) ->
dashboard.get url, (data) ->
dashboard.updateTasks data, pin, keys
#, -> true # Retry
# Dumb polling for now
dashboard.pollTasks = (cb, timeout) ->
poll = (url) ->
dashboard.get url
, (data) -> # Success
cb data if cb?
setTimeout poll, timeout, url
, (jqXHR, textStatus, errorThrown) ->
true # Retry on failure
, timeout
poll "/octr/tasks/" # Do it
dashboard.parseTasks = (data, keyed) ->
ids = [] # List of new IDs
# Parse new tasks
tasks = for task in data.tasks
id = task.id # Grab
ids.push id # Push
unless task.action in ["logfile.tail", "logfile.watch"] # Don't show log tasks
task.dash = {} # Stub our config storage
switch task.state
when "pending","delivered","running"
task.dash.statusClass = "warning_state" # Busy
when "timeout"
task.dash.statusClass = "processing_state" # Warning
when "cancelled"
task.dash.statusClass = "error_state" # Error
when "done"
task.dash.statusClass = "ok_state" # Good
if task?.result?.result_code # Non-zero result is bad
task.dash.statusClass = "error_state" # Error
if keyed[id]? # Updating existing task?
task.dash.active = keyed[id].dash.active # Track selected status
else task.dash.active = false
task.dash.label = "#{task.id}: #{task.action} [#{task.state}]"
if task.dash.active # If a task is selected
dashboard.indexModel.wsTaskTitle task.dash.label # Update the title
keyed[id] = task # Set and return it
else continue # Skip it
# So we can update logpane when the active task is reaped/none are selected
activeCount = 0
# Prune
for k of keyed
unless +k in ids # Coerce to int, lulz
delete keyed[k] # Toss reaped tasks
else
activeCount++ # Got an active (selected) one!
if !activeCount # If none were selected
# Reset log pane bits
dashboard.indexModel.wsTaskTitle "Select a task to view its log"
dashboard.indexModel.wsTaskLog "..."
tasks # Return list
dashboard.popoverOptions =
html: true
delay: 0
trigger: "manual"
animation: false
placement: dashboard.getPopoverPlacement
container: 'body'
dashboard.killPopovers = ->
$("[data-bind~='popper']").popover "hide"
$(".popover").remove()
dashboard.updatePopover = (el, obj, show=false) ->
opts = dashboard.popoverOptions
doIt = (task) ->
opts["title"] =
#TODO: Figure out why this fires twice: console.log "title"
"""
#{obj.name ? "Details"}
<ul class="backend-list tags">
#{('<li><div class="item">' + backend + '</div></li>' for backend in obj.facts.backends).join('')}
</ul>
"""
opts["content"] =
"""
<dl class="node-data">
<dt>ID</dt>
<dd>#{obj.id}</dd>
<dt>Status</dt>
<dd>#{obj.dash.statusText()}</dd>
<dt>Task</dt>
<dd>#{task ? 'idle'}</dd>
<dt>Last Task</dt>
<dd>#{obj?.attrs?.last_task ? 'unknown'}</dd>
</dl>
"""
$(el).popover opts
if show
dashboard.killPopovers()
$(el).popover "show"
if obj?.task_id?
dashboard.get "/octr/tasks/#{obj.task_id}"
, (data) -> doIt data?.task?.action
, -> doIt()
else
doIt()
dashboard.convertValueType = (type) ->
switch type
when "password" then "password"
else "text"
|
[
{
"context": " \"identity_url\": null,\n \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n ",
"end": 865,
"score": 0.9992544054985046,
"start": 858,
"tag": "NAME",
"value": "Amanuma"
},
{
"context": " \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n",
"end": 902,
"score": 0.99737149477005,
"start": 894,
"tag": "NAME",
"value": "Takehito"
},
{
"context": " \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n ",
"end": 949,
"score": 0.999927818775177,
"start": 926,
"tag": "EMAIL",
"value": "takehito.0213@gmail.com"
},
{
"context": " \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n \"id\": 3\n },\n \"aut",
"end": 984,
"score": 0.9996579885482788,
"start": 974,
"tag": "USERNAME",
"value": "tenten0213"
},
{
"context": " \"identity_url\": null,\n \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n ",
"end": 1174,
"score": 0.9993627071380615,
"start": 1167,
"tag": "NAME",
"value": "Amanuma"
},
{
"context": " \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n",
"end": 1211,
"score": 0.9980969429016113,
"start": 1203,
"tag": "NAME",
"value": "Takehito"
},
{
"context": " \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n ",
"end": 1258,
"score": 0.9999268651008606,
"start": 1235,
"tag": "EMAIL",
"value": "takehito.0213@gmail.com"
},
{
"context": " \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n \"id\": 3\n },\n \"pri",
"end": 1293,
"score": 0.9996805191040039,
"start": 1283,
"tag": "USERNAME",
"value": "tenten0213"
},
{
"context": " \"identity_url\": null,\n \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n ",
"end": 2812,
"score": 0.9997603297233582,
"start": 2805,
"tag": "NAME",
"value": "Amanuma"
},
{
"context": " \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n",
"end": 2849,
"score": 0.999436616897583,
"start": 2841,
"tag": "NAME",
"value": "Takehito"
},
{
"context": " \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n ",
"end": 2896,
"score": 0.9999318718910217,
"start": 2873,
"tag": "EMAIL",
"value": "takehito.0213@gmail.com"
},
{
"context": " \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n \"id\": 3\n },\n \"pri",
"end": 2931,
"score": 0.999660313129425,
"start": 2921,
"tag": "USERNAME",
"value": "tenten0213"
},
{
"context": " \"identity_url\": null,\n \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n ",
"end": 4455,
"score": 0.999659538269043,
"start": 4448,
"tag": "NAME",
"value": "Amanuma"
},
{
"context": " \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n",
"end": 4492,
"score": 0.9995059370994568,
"start": 4484,
"tag": "NAME",
"value": "Takehito"
},
{
"context": " \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n ",
"end": 4539,
"score": 0.9999289512634277,
"start": 4516,
"tag": "EMAIL",
"value": "takehito.0213@gmail.com"
},
{
"context": " \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n \"id\": 3\n },\n \"pri",
"end": 4574,
"score": 0.9996135830879211,
"start": 4564,
"tag": "USERNAME",
"value": "tenten0213"
},
{
"context": " \"identity_url\": null,\n \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n ",
"end": 4904,
"score": 0.9993646740913391,
"start": 4897,
"tag": "NAME",
"value": "Amanuma"
},
{
"context": " \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n",
"end": 4941,
"score": 0.9987761378288269,
"start": 4933,
"tag": "NAME",
"value": "Takehito"
},
{
"context": " \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n ",
"end": 4988,
"score": 0.9999277591705322,
"start": 4965,
"tag": "EMAIL",
"value": "takehito.0213@gmail.com"
},
{
"context": " \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n \"id\": 3\n },\n \"",
"end": 5020,
"score": 0.9995485544204712,
"start": 5013,
"tag": "USERNAME",
"value": "tenten0"
},
{
"context": " \"identity_url\": null,\n \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n ",
"end": 5213,
"score": 0.9998319745063782,
"start": 5206,
"tag": "NAME",
"value": "Amanuma"
},
{
"context": " \"lastname\": \"Amanuma\",\n \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n",
"end": 5250,
"score": 0.999760091304779,
"start": 5242,
"tag": "NAME",
"value": "Takehito"
},
{
"context": " \"firstname\": \"Takehito\",\n \"mail\": \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n ",
"end": 5297,
"score": 0.9999257922172546,
"start": 5274,
"tag": "EMAIL",
"value": "takehito.0213@gmail.com"
},
{
"context": " \"takehito.0213@gmail.com\",\n \"login\": \"tenten0213\",\n \"id\": 3\n },\n \"pri",
"end": 5332,
"score": 0.9995560050010681,
"start": 5322,
"tag": "USERNAME",
"value": "tenten0213"
}
] | src/test/hubot-redmine-notifier_test.coffee | BrutalBirdie/redbot-redmine-notifier | 0 | 'use strict'
process.env.PORT = 0 # pick a random port for this test
Hubot = require('hubot')
Path = require('path')
request = require('supertest')
sinon = require('sinon')
adapterPath = Path.join Path.dirname(require.resolve 'hubot'), "src", "adapters"
robot = Hubot.loadBot adapterPath, "shell", true, "MochaHubot"
hubot_redmine_notifier = require('../scripts/hubot-redmine-notifier')(robot)
test_data = [
{
"name": "new issue",
"expected_out": "[Test] tenten0213 opened Feature#20\nSubject: Some Feature\nStatus: New\nPriority: Normal\nAssignee: tenten0213\nURL: http://example.com/issues/20",
"body": {
"payload": {
"url": "http://example.com/issues/20",
"issue": {
"assignee": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "Amanuma",
"firstname": "Takehito",
"mail": "takehito.0213@gmail.com",
"login": "tenten0213",
"id": 3
},
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "Amanuma",
"firstname": "Takehito",
"mail": "takehito.0213@gmail.com",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Feature",
"id": 2
},
"parent_id": null,
"root_id": 20,
"closed_on": null,
"updated_on": "2014-10-01T10:34:03Z",
"created_on": "2014-10-01T10:34:03Z",
"description": "test",
"subject": "Some Feature",
"id": 20,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 0,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "New",
"id": 1
}
},
"action": "opened"
}
}
},
{
"name": "new issue with no assignee",
"expected_out": "[Test] tenten0213 opened Feature#20\nSubject: Some Feature\nStatus: New\nPriority: Normal\nAssignee: \nURL: http://example.com/issues/20",
"body": {
"payload": {
"url": "http://example.com/issues/20",
"issue": {
"assignee": null,
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "Amanuma",
"firstname": "Takehito",
"mail": "takehito.0213@gmail.com",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Feature",
"id": 2
},
"parent_id": null,
"root_id": 20,
"closed_on": null,
"updated_on": "2014-10-01T10:34:03Z",
"created_on": "2014-10-01T10:34:03Z",
"description": "test",
"subject": "Some Feature",
"id": 20,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 0,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "New",
"id": 1
}
},
"action": "opened"
}
}
},
{
"name": "update issue",
"expected_out": "[Test] tenten0213 updated Bug#19\nSubject: Something is wrong\nStatus: InProgress\nPriority: Normal\nAssignee: tenten0213\nURL: http://example.com/issues/19",
"body" : {
"payload": {
"url": "http://example.com/issues/19",
"journal": {
"details": "",
"author": {
"icon_url": "http://www.gravatar.com/avatar/example"
"identity_url": null,
"lastname": "Amanuma",
"firstname": "Takehito",
"mail": "takehito.0213@gmail.com",
"login": "tenten0213",
"id": 3
},
"private_notes": false,
"created_on": null,
"notes": "",
"id": null
},
"issue": {
"assignee": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "Amanuma",
"firstname": "Takehito",
"mail": "takehito.0213@gmail.com",
"login": "tenten0213",
"id": 3
},
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "Amanuma",
"firstname": "Takehito",
"mail": "takehito.0213@gmail.com",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Bug",
"id": 1
},
"parent_id": null,
"root_id": 19,
"closed_on": null,
"updated_on": "2014-10-01T10:28:28Z",
"created_on": "2014-10-01T09:44:12Z",
"description": "",
"subject": "Something is wrong",
"id": 19,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 14,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "InProgress",
"id": 2
}
},
"action": "updated"
}
}
}
]
url = "/hubot/redmine-notify?room=tenten"
for test in test_data then do (test) ->
describe test.name, ()->
before (done) ->
robot.adapter.send = sinon.spy()
endfunc = (err, res) ->
throw err if err
do done
request(robot.router)
.post(url)
.send(JSON.stringify(test.body))
.expect(200)
.end(endfunc)
it 'Robot sent out respond', ()->
robot.adapter.send.called.should.be.true
it 'Robot sent to right room', ()->
send_arg = robot.adapter.send.getCall(0).args[0]
send_arg.room.should.eql 'tenten'
it 'Robot sent right message', ()->
robot.adapter.send.getCall(0).args[1].should.eql test.expected_out
| 28298 | 'use strict'
process.env.PORT = 0 # pick a random port for this test
Hubot = require('hubot')
Path = require('path')
request = require('supertest')
sinon = require('sinon')
adapterPath = Path.join Path.dirname(require.resolve 'hubot'), "src", "adapters"
robot = Hubot.loadBot adapterPath, "shell", true, "MochaHubot"
hubot_redmine_notifier = require('../scripts/hubot-redmine-notifier')(robot)
test_data = [
{
"name": "new issue",
"expected_out": "[Test] tenten0213 opened Feature#20\nSubject: Some Feature\nStatus: New\nPriority: Normal\nAssignee: tenten0213\nURL: http://example.com/issues/20",
"body": {
"payload": {
"url": "http://example.com/issues/20",
"issue": {
"assignee": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "<NAME>",
"firstname": "<NAME>",
"mail": "<EMAIL>",
"login": "tenten0213",
"id": 3
},
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "<NAME>",
"firstname": "<NAME>",
"mail": "<EMAIL>",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Feature",
"id": 2
},
"parent_id": null,
"root_id": 20,
"closed_on": null,
"updated_on": "2014-10-01T10:34:03Z",
"created_on": "2014-10-01T10:34:03Z",
"description": "test",
"subject": "Some Feature",
"id": 20,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 0,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "New",
"id": 1
}
},
"action": "opened"
}
}
},
{
"name": "new issue with no assignee",
"expected_out": "[Test] tenten0213 opened Feature#20\nSubject: Some Feature\nStatus: New\nPriority: Normal\nAssignee: \nURL: http://example.com/issues/20",
"body": {
"payload": {
"url": "http://example.com/issues/20",
"issue": {
"assignee": null,
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "<NAME>",
"firstname": "<NAME>",
"mail": "<EMAIL>",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Feature",
"id": 2
},
"parent_id": null,
"root_id": 20,
"closed_on": null,
"updated_on": "2014-10-01T10:34:03Z",
"created_on": "2014-10-01T10:34:03Z",
"description": "test",
"subject": "Some Feature",
"id": 20,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 0,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "New",
"id": 1
}
},
"action": "opened"
}
}
},
{
"name": "update issue",
"expected_out": "[Test] tenten0213 updated Bug#19\nSubject: Something is wrong\nStatus: InProgress\nPriority: Normal\nAssignee: tenten0213\nURL: http://example.com/issues/19",
"body" : {
"payload": {
"url": "http://example.com/issues/19",
"journal": {
"details": "",
"author": {
"icon_url": "http://www.gravatar.com/avatar/example"
"identity_url": null,
"lastname": "<NAME>",
"firstname": "<NAME>",
"mail": "<EMAIL>",
"login": "tenten0213",
"id": 3
},
"private_notes": false,
"created_on": null,
"notes": "",
"id": null
},
"issue": {
"assignee": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "<NAME>",
"firstname": "<NAME>",
"mail": "<EMAIL>",
"login": "tenten0213",
"id": 3
},
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "<NAME>",
"firstname": "<NAME>",
"mail": "<EMAIL>",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Bug",
"id": 1
},
"parent_id": null,
"root_id": 19,
"closed_on": null,
"updated_on": "2014-10-01T10:28:28Z",
"created_on": "2014-10-01T09:44:12Z",
"description": "",
"subject": "Something is wrong",
"id": 19,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 14,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "InProgress",
"id": 2
}
},
"action": "updated"
}
}
}
]
url = "/hubot/redmine-notify?room=tenten"
for test in test_data then do (test) ->
describe test.name, ()->
before (done) ->
robot.adapter.send = sinon.spy()
endfunc = (err, res) ->
throw err if err
do done
request(robot.router)
.post(url)
.send(JSON.stringify(test.body))
.expect(200)
.end(endfunc)
it 'Robot sent out respond', ()->
robot.adapter.send.called.should.be.true
it 'Robot sent to right room', ()->
send_arg = robot.adapter.send.getCall(0).args[0]
send_arg.room.should.eql 'tenten'
it 'Robot sent right message', ()->
robot.adapter.send.getCall(0).args[1].should.eql test.expected_out
| true | 'use strict'
process.env.PORT = 0 # pick a random port for this test
Hubot = require('hubot')
Path = require('path')
request = require('supertest')
sinon = require('sinon')
adapterPath = Path.join Path.dirname(require.resolve 'hubot'), "src", "adapters"
robot = Hubot.loadBot adapterPath, "shell", true, "MochaHubot"
hubot_redmine_notifier = require('../scripts/hubot-redmine-notifier')(robot)
test_data = [
{
"name": "new issue",
"expected_out": "[Test] tenten0213 opened Feature#20\nSubject: Some Feature\nStatus: New\nPriority: Normal\nAssignee: tenten0213\nURL: http://example.com/issues/20",
"body": {
"payload": {
"url": "http://example.com/issues/20",
"issue": {
"assignee": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "PI:NAME:<NAME>END_PI",
"firstname": "PI:NAME:<NAME>END_PI",
"mail": "PI:EMAIL:<EMAIL>END_PI",
"login": "tenten0213",
"id": 3
},
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "PI:NAME:<NAME>END_PI",
"firstname": "PI:NAME:<NAME>END_PI",
"mail": "PI:EMAIL:<EMAIL>END_PI",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Feature",
"id": 2
},
"parent_id": null,
"root_id": 20,
"closed_on": null,
"updated_on": "2014-10-01T10:34:03Z",
"created_on": "2014-10-01T10:34:03Z",
"description": "test",
"subject": "Some Feature",
"id": 20,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 0,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "New",
"id": 1
}
},
"action": "opened"
}
}
},
{
"name": "new issue with no assignee",
"expected_out": "[Test] tenten0213 opened Feature#20\nSubject: Some Feature\nStatus: New\nPriority: Normal\nAssignee: \nURL: http://example.com/issues/20",
"body": {
"payload": {
"url": "http://example.com/issues/20",
"issue": {
"assignee": null,
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "PI:NAME:<NAME>END_PI",
"firstname": "PI:NAME:<NAME>END_PI",
"mail": "PI:EMAIL:<EMAIL>END_PI",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Feature",
"id": 2
},
"parent_id": null,
"root_id": 20,
"closed_on": null,
"updated_on": "2014-10-01T10:34:03Z",
"created_on": "2014-10-01T10:34:03Z",
"description": "test",
"subject": "Some Feature",
"id": 20,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 0,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "New",
"id": 1
}
},
"action": "opened"
}
}
},
{
"name": "update issue",
"expected_out": "[Test] tenten0213 updated Bug#19\nSubject: Something is wrong\nStatus: InProgress\nPriority: Normal\nAssignee: tenten0213\nURL: http://example.com/issues/19",
"body" : {
"payload": {
"url": "http://example.com/issues/19",
"journal": {
"details": "",
"author": {
"icon_url": "http://www.gravatar.com/avatar/example"
"identity_url": null,
"lastname": "PI:NAME:<NAME>END_PI",
"firstname": "PI:NAME:<NAME>END_PI",
"mail": "PI:EMAIL:<EMAIL>END_PI",
"login": "tenten0213",
"id": 3
},
"private_notes": false,
"created_on": null,
"notes": "",
"id": null
},
"issue": {
"assignee": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "PI:NAME:<NAME>END_PI",
"firstname": "PI:NAME:<NAME>END_PI",
"mail": "PI:EMAIL:<EMAIL>END_PI",
"login": "tenten0213",
"id": 3
},
"author": {
"icon_url": "http://www.gravatar.com/avatar/example",
"identity_url": null,
"lastname": "PI:NAME:<NAME>END_PI",
"firstname": "PI:NAME:<NAME>END_PI",
"mail": "PI:EMAIL:<EMAIL>END_PI",
"login": "tenten0213",
"id": 3
},
"priority": {
"name": "Normal",
"id": 2
},
"tracker": {
"name": "Bug",
"id": 1
},
"parent_id": null,
"root_id": 19,
"closed_on": null,
"updated_on": "2014-10-01T10:28:28Z",
"created_on": "2014-10-01T09:44:12Z",
"description": "",
"subject": "Something is wrong",
"id": 19,
"done_ratio": 0,
"start_date": "2014-10-01",
"due_date": null,
"estimated_hours": null,
"is_private": false,
"lock_version": 14,
"project": {
"homepage": "",
"created_on": "2014-09-29T01:19:34Z",
"description": "",
"name": "Test",
"identifier": "test",
"id": 1
},
"status": {
"name": "InProgress",
"id": 2
}
},
"action": "updated"
}
}
}
]
url = "/hubot/redmine-notify?room=tenten"
for test in test_data then do (test) ->
describe test.name, ()->
before (done) ->
robot.adapter.send = sinon.spy()
endfunc = (err, res) ->
throw err if err
do done
request(robot.router)
.post(url)
.send(JSON.stringify(test.body))
.expect(200)
.end(endfunc)
it 'Robot sent out respond', ()->
robot.adapter.send.called.should.be.true
it 'Robot sent to right room', ()->
send_arg = robot.adapter.send.getCall(0).args[0]
send_arg.room.should.eql 'tenten'
it 'Robot sent right message', ()->
robot.adapter.send.getCall(0).args[1].should.eql test.expected_out
|
[
{
"context": "###\n * bag\n * getbag.io\n *\n * Copyright (c) 2015 Ryan Gaus\n * Licensed under the MIT license.\n###\n\nmongoose ",
"end": 58,
"score": 0.9998548626899719,
"start": 49,
"tag": "NAME",
"value": "Ryan Gaus"
}
] | src/models/list_model.coffee | 1egoman/bag-node | 0 | ###
* bag
* getbag.io
*
* Copyright (c) 2015 Ryan Gaus
* Licensed under the MIT license.
###
mongoose = require 'mongoose'
list = mongoose.Schema
name: String
desc: String
tags: Array
checked: Boolean
user: String
contents: Array
contentsLists: Array
list.set 'versionKey', false
module.exports = mongoose.model 'List', list
| 33257 | ###
* bag
* getbag.io
*
* Copyright (c) 2015 <NAME>
* Licensed under the MIT license.
###
mongoose = require 'mongoose'
list = mongoose.Schema
name: String
desc: String
tags: Array
checked: Boolean
user: String
contents: Array
contentsLists: Array
list.set 'versionKey', false
module.exports = mongoose.model 'List', list
| true | ###
* bag
* getbag.io
*
* Copyright (c) 2015 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
mongoose = require 'mongoose'
list = mongoose.Schema
name: String
desc: String
tags: Array
checked: Boolean
user: String
contents: Array
contentsLists: Array
list.set 'versionKey', false
module.exports = mongoose.model 'List', list
|
[
{
"context": "URRENCY\n exchange = process.env.EXCHANGE\n pair = \"#{token}-#{currency}\"\n\n console.log \"Looking up #{pair} on #{exchang",
"end": 546,
"score": 0.9490038752555847,
"start": 526,
"tag": "KEY",
"value": "\"#{token}-#{currency"
}
] | lambdas/feed-handler/src/index.coffee | DEWMRAX/rbot | 3 | liqui = require './liqui'
binance = require './binance'
bittrex = require './bittrex'
poloniex = require './poloniex'
kraken = require './kraken'
gdax = require './gdax'
itbit = require './itbit'
bitflyer = require './bitflyer'
bitstamp = require './bitstamp'
AWS = require 'aws-sdk'
AWS.config.update
region: "us-east-1"
docClient = new AWS.DynamoDB.DocumentClient()
exports.handler = (event, context, callback) ->
token = process.env.TOKEN
currency = process.env.CURRENCY
exchange = process.env.EXCHANGE
pair = "#{token}-#{currency}"
console.log "Looking up #{pair} on #{exchange}"
feed_handler = switch exchange
when 'LIQUI' then liqui
when 'BINANCE' then binance
when 'BITTREX' then bittrex
when 'POLO' then poloniex
when 'KRAKEN' then kraken
when 'GDAX' then gdax
when 'ITBIT' then itbit
when 'BITFLYER' then bitflyer
when 'BITSTAMP' then bitstamp
await feed_handler.get_book token, currency, callback, defer book
params =
TableName: 'orderbooks'
Item:
pair: pair
exchange: exchange
asks: book.asks
bids: book.bids
timestamp: Date.now()
await docClient.put params, defer err
if err
console.log 'Unable to insert quote', exchange, pair, '. Error JSON:', (JSON.stringify err, null, 2)
return callback err
else
console.log 'Quote insert succeeded', exchange, pair, '.'
return callback null, 'OK'
| 15482 | liqui = require './liqui'
binance = require './binance'
bittrex = require './bittrex'
poloniex = require './poloniex'
kraken = require './kraken'
gdax = require './gdax'
itbit = require './itbit'
bitflyer = require './bitflyer'
bitstamp = require './bitstamp'
AWS = require 'aws-sdk'
AWS.config.update
region: "us-east-1"
docClient = new AWS.DynamoDB.DocumentClient()
exports.handler = (event, context, callback) ->
token = process.env.TOKEN
currency = process.env.CURRENCY
exchange = process.env.EXCHANGE
pair = <KEY>}"
console.log "Looking up #{pair} on #{exchange}"
feed_handler = switch exchange
when 'LIQUI' then liqui
when 'BINANCE' then binance
when 'BITTREX' then bittrex
when 'POLO' then poloniex
when 'KRAKEN' then kraken
when 'GDAX' then gdax
when 'ITBIT' then itbit
when 'BITFLYER' then bitflyer
when 'BITSTAMP' then bitstamp
await feed_handler.get_book token, currency, callback, defer book
params =
TableName: 'orderbooks'
Item:
pair: pair
exchange: exchange
asks: book.asks
bids: book.bids
timestamp: Date.now()
await docClient.put params, defer err
if err
console.log 'Unable to insert quote', exchange, pair, '. Error JSON:', (JSON.stringify err, null, 2)
return callback err
else
console.log 'Quote insert succeeded', exchange, pair, '.'
return callback null, 'OK'
| true | liqui = require './liqui'
binance = require './binance'
bittrex = require './bittrex'
poloniex = require './poloniex'
kraken = require './kraken'
gdax = require './gdax'
itbit = require './itbit'
bitflyer = require './bitflyer'
bitstamp = require './bitstamp'
AWS = require 'aws-sdk'
AWS.config.update
region: "us-east-1"
docClient = new AWS.DynamoDB.DocumentClient()
exports.handler = (event, context, callback) ->
token = process.env.TOKEN
currency = process.env.CURRENCY
exchange = process.env.EXCHANGE
pair = PI:KEY:<KEY>END_PI}"
console.log "Looking up #{pair} on #{exchange}"
feed_handler = switch exchange
when 'LIQUI' then liqui
when 'BINANCE' then binance
when 'BITTREX' then bittrex
when 'POLO' then poloniex
when 'KRAKEN' then kraken
when 'GDAX' then gdax
when 'ITBIT' then itbit
when 'BITFLYER' then bitflyer
when 'BITSTAMP' then bitstamp
await feed_handler.get_book token, currency, callback, defer book
params =
TableName: 'orderbooks'
Item:
pair: pair
exchange: exchange
asks: book.asks
bids: book.bids
timestamp: Date.now()
await docClient.put params, defer err
if err
console.log 'Unable to insert quote', exchange, pair, '. Error JSON:', (JSON.stringify err, null, 2)
return callback err
else
console.log 'Quote insert succeeded', exchange, pair, '.'
return callback null, 'OK'
|
[
{
"context": "................\n when 'osntag'\n $key = '<tag'\n if tree.ukids.i_close? then typ",
"end": 9551,
"score": 0.8600507974624634,
"start": 9548,
"tag": "KEY",
"value": "tag"
}
] | src/htmlish.grammar.coffee | loveencounterflow/paragate | 0 |
'use strict'
############################################################################################################
CND = require 'cnd'
badge = 'PARAGATE/GRAMMARS/HTMLISH'
rpr = CND.rpr
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
{ assign
jr } = CND
# CHVTN = require 'chevrotain'
{ new_datom
lets
freeze } = ( new ( require 'datom' ).Datom { dirty: false, } ).export()
types = require './types'
{ isa
type_of
validate } = types
GRAMMAR = require './grammar'
{ HTMLISH: PGTH } = require 'intertext'
#===========================================================================================================
#
#-----------------------------------------------------------------------------------------------------------
@lexer_modes =
#.........................................................................................................
outside_mode:
o_escaped: { match: /\\./u, }
o_comment: { match: /<!--[\s\S]*?-->/, line_breaks: true, }
o_cdata: { match: /<!\[CDATA\[[\s\S]*?]]>/, }
o_doctype: { match: /<!DOCTYPE\s+[^>]*>/, }
o_xmldecl: { match: /<\?xml\s+[\s\S]*?\?>/, }
o_pi: { match: /<\?[\s\S]*?\?>/, }
i_slash_open: { match: /<\//, push_mode: "inside_mode", }
i_open: { match: /</, push_mode: "inside_mode", }
o_text: { match: /[^<\\]+/, }
#.........................................................................................................
inside_mode:
i_close: { match: />/, pop_mode: true, }
i_special_close: { match: /\?>/, pop_mode: true, }
i_slash_close: { match: /\/>/, pop_mode: true, }
stm_slash1: { match: /\/(?!>)/, push_mode: 'slashtext_mode', }
i_slash: { match: /\//, }
v_equals: { match: /\s*=\s*/, push_mode: 'value_mode', }
i_name: { match: /[^\s!?=\{\[\(<\/>\)\]\}'"]+/, }
i_whitespace: { match: /[ \t\r\n]/, skip: true, }
#.........................................................................................................
slashtext_mode:
stm_slash2: { match: /\//, switch_mode: "outside_mode", }
stm_text: { match: /[^\/]+/, }
#.........................................................................................................
value_mode:
v_value: { match: /"[^"]*"|'[^']*'|[^>\s\/]+/, pop_mode: true, }
#-----------------------------------------------------------------------------------------------------------
@summarize = ( t ) ->
# `t` is an object whose keys are token names and whose values are token patterns
#---------------------------------------------------------------------------------------------------------
@RULE 'document', =>
@MANY =>
@OR [
{ ALT: => @CONSUME t.o_escaped }
{ ALT: => @CONSUME t.o_doctype }
{ ALT: => @CONSUME t.o_xmldecl }
{ ALT: => @CONSUME t.o_pi }
{ ALT: => @CONSUME t.o_cdata }
{ ALT: => @CONSUME t.o_comment }
{ ALT: => @CONSUME t.o_text }
{ ALT: => @CONSUME t.stm_text }
{ ALT: => @SUBRULE @osntag }
{ ALT: => @SUBRULE @ctag }
{ ALT: => @CONSUME t.stm_slash2 }
]
#---------------------------------------------------------------------------------------------------------
@RULE 'osntag', => ### `<a b=c>`, `<a b=c/>`, or `<a b=c/` ###
@CONSUME t.i_open
@CONSUME t.i_name
@OPTION => @SUBRULE @attributes
@OR [
{ ALT: => @CONSUME t.i_close }
{ ALT: => @CONSUME t.i_slash_close }
{ ALT: => @CONSUME t.stm_slash1 }
]
#---------------------------------------------------------------------------------------------------------
@RULE 'ctag', => ### `</a>` ###
@CONSUME t.i_slash_open
@CONSUME t.i_name
@CONSUME t.i_close
#---------------------------------------------------------------------------------------------------------
@RULE 'attributes', =>
@AT_LEAST_ONE => @SUBRULE @attribute
#---------------------------------------------------------------------------------------------------------
@RULE 'attribute', =>
@CONSUME t.i_name
@OPTION =>
@CONSUME t.v_equals
@CONSUME t.v_value
#-----------------------------------------------------------------------------------------------------------
dd = ( d ) ->
### TAINT implement as optional functionality of `DATOM.new_datom()` ###
for k of d
delete d[ k ] if d[ k ] in [ undefined, null, '', ]
return d
#-----------------------------------------------------------------------------------------------------------
strip_quotes = ( x ) ->
return x unless isa.text x
return x[ 1 ... x.length - 1 ] if /^".*"$/.test x
return x[ 1 ... x.length - 1 ] if /^'.*'$/.test x
return x
#-----------------------------------------------------------------------------------------------------------
@linearize = ( source, tree, level = 0 ) ->
return null unless tree?
#.........................................................................................................
{ name: token_name
$key
start
stop
text
$vnr } = tree
#.........................................................................................................
if $key is '^token'
switch token_name
when 'o_escaped' then yield dd { $key: '^text', start, stop, text, $vnr, $: '^Ω1^', }
when 'o_text', 'stm_text' then yield dd { $key: '^text', start, stop, text, $vnr, $: '^Ω2^', }
when 'stm_slash2' then yield dd { $key: '>tag', type: 'nctag', start, stop, text, $vnr, $: '^Ω3^', }
when 'o_comment' then yield dd { $key: '^comment', start, stop, text, $vnr, $: '^Ω4^', }
when 'o_pi' then yield dd { $key: '^pi', start, stop, text, $vnr, $: '^Ω5^', }
when 'o_doctype' then yield dd { $key: '^doctype', start, stop, text, $vnr, $: '^Ω6^', }
when 'o_cdata'
start1 = start + 9
stop2 = stop - 3
text1 = source[ start ... start1 ]
text2 = source[ start1 ... stop2 ]
text3 = source[ stop2 ... stop ]
yield dd { $key: '<cdata', start, stop: start1, text: text1, $vnr, $: '^Ω7^', }
yield dd { $key: '^text', start: start1, stop: stop2, text: text2, $vnr, $: '^Ω8^', } if text2 isnt ''
yield dd { $key: '>cdata', start: stop2, stop, text: text3, $vnr, $: '^Ω9^', }
else yield dd { $key: '^unknown', $value: tree, $vnr, $: '^Ω10^', }
return null
throw new Error "^445^ unknown $key #{rpr $key}" unless $key in [ '^document', '^node', ]
#.........................................................................................................
{ ukids } = tree
### NOTE we assume that unique kids exist and that values are stored in source order ###
for _, ukid of ukids
$vnr = ukid.$vnr
break
#.........................................................................................................
if $key is '^document'
unless @settings.bare
yield dd { $key: '<document', start: 0, stop: 0, source, errors: tree.errors, $vnr: [ -Infinity, ], $: '^Ω11^', }
for subtree in tree.kids
yield from @linearize source, subtree, level + 1
x = text.length
unless @settings.bare
yield dd { $key: '>document', start: x, stop: x, $vnr: [ Infinity, ], $: '^Ω12^', }
return null
#.........................................................................................................
return null unless ( name = tree.ukids?.i_name?.text )? ### may happen when parsing errors occur ###
switch token_name
#.......................................................................................................
when 'osntag'
$key = '<tag'
if tree.ukids.i_close? then type = 'otag'
else if tree.ukids.i_slash_close? then type = 'stag'; $key = '^tag'
else if tree.ukids.stm_slash1? then type = 'ntag'
if ( attributes = tree.ukids.attributes )?
atrs = {}
for attribute in attributes.kids
k = attribute.ukids.i_name.text
v = strip_quotes attribute.ukids.v_value?.text ? true
atrs[ k ] = v
d = { $key, name, type, text, start, stop, atrs, $vnr, $: '^Ω13^', }
else
d = { $key, name, type, text, start, stop, $vnr, $: '^Ω14^', }
#.....................................................................................................
# parse compact tag name:
if d.name? and d.name isnt ''
e = @_parse_compact_tagname d.name
if e.id?
if d.id?
throw new Error "^paragate/htmlish/linearize@1^ duplicate IDs in #{rpr d}"
d.id = e.id
if e.prefix?
if d.prefix?
throw new Error "^paragate/htmlish/linearize@1^ duplicate prefixes in #{rpr d}"
d.prefix = e.prefix
if e.class?
clasz = if d.class? then ( new Set d.class.split /\s+/ ) else ( new Set() )
clasz.add c for c in e.class
d.class = [ clasz..., ]
if e.name?
d.name = e.name
#.....................................................................................................
yield dd d
#.......................................................................................................
when 'ctag'
yield dd { $key: '>tag', name, type: 'ctag', text, start, stop, $vnr, $: '^Ω15^', }
#.......................................................................................................
else yield dd { $key: '^unknown', $value: tree, $vnr, $: '^Ω16^', }
return null
#-----------------------------------------------------------------------------------------------------------
@_parse_compact_tagname = ( text ) -> PGTH.parse_compact_tagname text, true
#-----------------------------------------------------------------------------------------------------------
$parse = ( grammar = null ) ->
SP = require 'steampipes'
grammar ?= new new_grammar { bare: true, }
line_nr = 0
return SP.$ ( line, send ) ->
line_nr++
send new_datom '^newline', { $vnr: [ line_nr, 0, ], $: '^Ω17^', }
for d in grammar.parse line
send lets d, ( d ) -> d.$vnr[ 0 ] = line_nr
return null
#-----------------------------------------------------------------------------------------------------------
merge_texts = ( d1, d2 ) ->
# { '$key': '^text', start: 0, stop: 7, text: 'before ', '$vnr': [ 1, 1 ], '$': '^Ω18^' }
R =
$key: '^text'
start: d1.start
stop: d2.stop
text: d1.text + d2.text
$vnr: d1.$vnr
$: d1.$
return R
#-----------------------------------------------------------------------------------------------------------
parse = ( P... ) ->
tokens = @_parse P...
R = []
prv_d = null
for d, idx in tokens
if ( d.$key is '^text' )
if ( prv_d? ) and ( prv_d.$key is '^text' )
prv_d = merge_texts prv_d, d
continue
prv_d = d
continue
R.push prv_d if prv_d
prv_d = null
R.push d
R.push prv_d if prv_d?
return freeze R
############################################################################################################
### TAINT this seems backwards (but works?) ###
MAIN = @
new_grammar = ( settings ) ->
R = GRAMMAR.new_grammar 'Htmlish', MAIN, settings
R._parse = R.parse
R.parse = parse
return R
grammar = new_grammar()
Htmlish_grammar = grammar.constructor
module.exports = { Htmlish_grammar, grammar, new_grammar, $parse, }
| 145459 |
'use strict'
############################################################################################################
CND = require 'cnd'
badge = 'PARAGATE/GRAMMARS/HTMLISH'
rpr = CND.rpr
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
{ assign
jr } = CND
# CHVTN = require 'chevrotain'
{ new_datom
lets
freeze } = ( new ( require 'datom' ).Datom { dirty: false, } ).export()
types = require './types'
{ isa
type_of
validate } = types
GRAMMAR = require './grammar'
{ HTMLISH: PGTH } = require 'intertext'
#===========================================================================================================
#
#-----------------------------------------------------------------------------------------------------------
@lexer_modes =
#.........................................................................................................
outside_mode:
o_escaped: { match: /\\./u, }
o_comment: { match: /<!--[\s\S]*?-->/, line_breaks: true, }
o_cdata: { match: /<!\[CDATA\[[\s\S]*?]]>/, }
o_doctype: { match: /<!DOCTYPE\s+[^>]*>/, }
o_xmldecl: { match: /<\?xml\s+[\s\S]*?\?>/, }
o_pi: { match: /<\?[\s\S]*?\?>/, }
i_slash_open: { match: /<\//, push_mode: "inside_mode", }
i_open: { match: /</, push_mode: "inside_mode", }
o_text: { match: /[^<\\]+/, }
#.........................................................................................................
inside_mode:
i_close: { match: />/, pop_mode: true, }
i_special_close: { match: /\?>/, pop_mode: true, }
i_slash_close: { match: /\/>/, pop_mode: true, }
stm_slash1: { match: /\/(?!>)/, push_mode: 'slashtext_mode', }
i_slash: { match: /\//, }
v_equals: { match: /\s*=\s*/, push_mode: 'value_mode', }
i_name: { match: /[^\s!?=\{\[\(<\/>\)\]\}'"]+/, }
i_whitespace: { match: /[ \t\r\n]/, skip: true, }
#.........................................................................................................
slashtext_mode:
stm_slash2: { match: /\//, switch_mode: "outside_mode", }
stm_text: { match: /[^\/]+/, }
#.........................................................................................................
value_mode:
v_value: { match: /"[^"]*"|'[^']*'|[^>\s\/]+/, pop_mode: true, }
#-----------------------------------------------------------------------------------------------------------
@summarize = ( t ) ->
# `t` is an object whose keys are token names and whose values are token patterns
#---------------------------------------------------------------------------------------------------------
@RULE 'document', =>
@MANY =>
@OR [
{ ALT: => @CONSUME t.o_escaped }
{ ALT: => @CONSUME t.o_doctype }
{ ALT: => @CONSUME t.o_xmldecl }
{ ALT: => @CONSUME t.o_pi }
{ ALT: => @CONSUME t.o_cdata }
{ ALT: => @CONSUME t.o_comment }
{ ALT: => @CONSUME t.o_text }
{ ALT: => @CONSUME t.stm_text }
{ ALT: => @SUBRULE @osntag }
{ ALT: => @SUBRULE @ctag }
{ ALT: => @CONSUME t.stm_slash2 }
]
#---------------------------------------------------------------------------------------------------------
@RULE 'osntag', => ### `<a b=c>`, `<a b=c/>`, or `<a b=c/` ###
@CONSUME t.i_open
@CONSUME t.i_name
@OPTION => @SUBRULE @attributes
@OR [
{ ALT: => @CONSUME t.i_close }
{ ALT: => @CONSUME t.i_slash_close }
{ ALT: => @CONSUME t.stm_slash1 }
]
#---------------------------------------------------------------------------------------------------------
@RULE 'ctag', => ### `</a>` ###
@CONSUME t.i_slash_open
@CONSUME t.i_name
@CONSUME t.i_close
#---------------------------------------------------------------------------------------------------------
@RULE 'attributes', =>
@AT_LEAST_ONE => @SUBRULE @attribute
#---------------------------------------------------------------------------------------------------------
@RULE 'attribute', =>
@CONSUME t.i_name
@OPTION =>
@CONSUME t.v_equals
@CONSUME t.v_value
#-----------------------------------------------------------------------------------------------------------
dd = ( d ) ->
### TAINT implement as optional functionality of `DATOM.new_datom()` ###
for k of d
delete d[ k ] if d[ k ] in [ undefined, null, '', ]
return d
#-----------------------------------------------------------------------------------------------------------
strip_quotes = ( x ) ->
return x unless isa.text x
return x[ 1 ... x.length - 1 ] if /^".*"$/.test x
return x[ 1 ... x.length - 1 ] if /^'.*'$/.test x
return x
#-----------------------------------------------------------------------------------------------------------
@linearize = ( source, tree, level = 0 ) ->
return null unless tree?
#.........................................................................................................
{ name: token_name
$key
start
stop
text
$vnr } = tree
#.........................................................................................................
if $key is '^token'
switch token_name
when 'o_escaped' then yield dd { $key: '^text', start, stop, text, $vnr, $: '^Ω1^', }
when 'o_text', 'stm_text' then yield dd { $key: '^text', start, stop, text, $vnr, $: '^Ω2^', }
when 'stm_slash2' then yield dd { $key: '>tag', type: 'nctag', start, stop, text, $vnr, $: '^Ω3^', }
when 'o_comment' then yield dd { $key: '^comment', start, stop, text, $vnr, $: '^Ω4^', }
when 'o_pi' then yield dd { $key: '^pi', start, stop, text, $vnr, $: '^Ω5^', }
when 'o_doctype' then yield dd { $key: '^doctype', start, stop, text, $vnr, $: '^Ω6^', }
when 'o_cdata'
start1 = start + 9
stop2 = stop - 3
text1 = source[ start ... start1 ]
text2 = source[ start1 ... stop2 ]
text3 = source[ stop2 ... stop ]
yield dd { $key: '<cdata', start, stop: start1, text: text1, $vnr, $: '^Ω7^', }
yield dd { $key: '^text', start: start1, stop: stop2, text: text2, $vnr, $: '^Ω8^', } if text2 isnt ''
yield dd { $key: '>cdata', start: stop2, stop, text: text3, $vnr, $: '^Ω9^', }
else yield dd { $key: '^unknown', $value: tree, $vnr, $: '^Ω10^', }
return null
throw new Error "^445^ unknown $key #{rpr $key}" unless $key in [ '^document', '^node', ]
#.........................................................................................................
{ ukids } = tree
### NOTE we assume that unique kids exist and that values are stored in source order ###
for _, ukid of ukids
$vnr = ukid.$vnr
break
#.........................................................................................................
if $key is '^document'
unless @settings.bare
yield dd { $key: '<document', start: 0, stop: 0, source, errors: tree.errors, $vnr: [ -Infinity, ], $: '^Ω11^', }
for subtree in tree.kids
yield from @linearize source, subtree, level + 1
x = text.length
unless @settings.bare
yield dd { $key: '>document', start: x, stop: x, $vnr: [ Infinity, ], $: '^Ω12^', }
return null
#.........................................................................................................
return null unless ( name = tree.ukids?.i_name?.text )? ### may happen when parsing errors occur ###
switch token_name
#.......................................................................................................
when 'osntag'
$key = '<<KEY>'
if tree.ukids.i_close? then type = 'otag'
else if tree.ukids.i_slash_close? then type = 'stag'; $key = '^tag'
else if tree.ukids.stm_slash1? then type = 'ntag'
if ( attributes = tree.ukids.attributes )?
atrs = {}
for attribute in attributes.kids
k = attribute.ukids.i_name.text
v = strip_quotes attribute.ukids.v_value?.text ? true
atrs[ k ] = v
d = { $key, name, type, text, start, stop, atrs, $vnr, $: '^Ω13^', }
else
d = { $key, name, type, text, start, stop, $vnr, $: '^Ω14^', }
#.....................................................................................................
# parse compact tag name:
if d.name? and d.name isnt ''
e = @_parse_compact_tagname d.name
if e.id?
if d.id?
throw new Error "^paragate/htmlish/linearize@1^ duplicate IDs in #{rpr d}"
d.id = e.id
if e.prefix?
if d.prefix?
throw new Error "^paragate/htmlish/linearize@1^ duplicate prefixes in #{rpr d}"
d.prefix = e.prefix
if e.class?
clasz = if d.class? then ( new Set d.class.split /\s+/ ) else ( new Set() )
clasz.add c for c in e.class
d.class = [ clasz..., ]
if e.name?
d.name = e.name
#.....................................................................................................
yield dd d
#.......................................................................................................
when 'ctag'
yield dd { $key: '>tag', name, type: 'ctag', text, start, stop, $vnr, $: '^Ω15^', }
#.......................................................................................................
else yield dd { $key: '^unknown', $value: tree, $vnr, $: '^Ω16^', }
return null
#-----------------------------------------------------------------------------------------------------------
@_parse_compact_tagname = ( text ) -> PGTH.parse_compact_tagname text, true
#-----------------------------------------------------------------------------------------------------------
$parse = ( grammar = null ) ->
SP = require 'steampipes'
grammar ?= new new_grammar { bare: true, }
line_nr = 0
return SP.$ ( line, send ) ->
line_nr++
send new_datom '^newline', { $vnr: [ line_nr, 0, ], $: '^Ω17^', }
for d in grammar.parse line
send lets d, ( d ) -> d.$vnr[ 0 ] = line_nr
return null
#-----------------------------------------------------------------------------------------------------------
merge_texts = ( d1, d2 ) ->
# { '$key': '^text', start: 0, stop: 7, text: 'before ', '$vnr': [ 1, 1 ], '$': '^Ω18^' }
R =
$key: '^text'
start: d1.start
stop: d2.stop
text: d1.text + d2.text
$vnr: d1.$vnr
$: d1.$
return R
#-----------------------------------------------------------------------------------------------------------
parse = ( P... ) ->
tokens = @_parse P...
R = []
prv_d = null
for d, idx in tokens
if ( d.$key is '^text' )
if ( prv_d? ) and ( prv_d.$key is '^text' )
prv_d = merge_texts prv_d, d
continue
prv_d = d
continue
R.push prv_d if prv_d
prv_d = null
R.push d
R.push prv_d if prv_d?
return freeze R
############################################################################################################
### TAINT this seems backwards (but works?) ###
MAIN = @
new_grammar = ( settings ) ->
R = GRAMMAR.new_grammar 'Htmlish', MAIN, settings
R._parse = R.parse
R.parse = parse
return R
grammar = new_grammar()
Htmlish_grammar = grammar.constructor
module.exports = { Htmlish_grammar, grammar, new_grammar, $parse, }
| true |
'use strict'
############################################################################################################
CND = require 'cnd'
badge = 'PARAGATE/GRAMMARS/HTMLISH'
rpr = CND.rpr
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
{ assign
jr } = CND
# CHVTN = require 'chevrotain'
{ new_datom
lets
freeze } = ( new ( require 'datom' ).Datom { dirty: false, } ).export()
types = require './types'
{ isa
type_of
validate } = types
GRAMMAR = require './grammar'
{ HTMLISH: PGTH } = require 'intertext'
#===========================================================================================================
#
#-----------------------------------------------------------------------------------------------------------
@lexer_modes =
#.........................................................................................................
outside_mode:
o_escaped: { match: /\\./u, }
o_comment: { match: /<!--[\s\S]*?-->/, line_breaks: true, }
o_cdata: { match: /<!\[CDATA\[[\s\S]*?]]>/, }
o_doctype: { match: /<!DOCTYPE\s+[^>]*>/, }
o_xmldecl: { match: /<\?xml\s+[\s\S]*?\?>/, }
o_pi: { match: /<\?[\s\S]*?\?>/, }
i_slash_open: { match: /<\//, push_mode: "inside_mode", }
i_open: { match: /</, push_mode: "inside_mode", }
o_text: { match: /[^<\\]+/, }
#.........................................................................................................
inside_mode:
i_close: { match: />/, pop_mode: true, }
i_special_close: { match: /\?>/, pop_mode: true, }
i_slash_close: { match: /\/>/, pop_mode: true, }
stm_slash1: { match: /\/(?!>)/, push_mode: 'slashtext_mode', }
i_slash: { match: /\//, }
v_equals: { match: /\s*=\s*/, push_mode: 'value_mode', }
i_name: { match: /[^\s!?=\{\[\(<\/>\)\]\}'"]+/, }
i_whitespace: { match: /[ \t\r\n]/, skip: true, }
#.........................................................................................................
slashtext_mode:
stm_slash2: { match: /\//, switch_mode: "outside_mode", }
stm_text: { match: /[^\/]+/, }
#.........................................................................................................
value_mode:
v_value: { match: /"[^"]*"|'[^']*'|[^>\s\/]+/, pop_mode: true, }
#-----------------------------------------------------------------------------------------------------------
@summarize = ( t ) ->
# `t` is an object whose keys are token names and whose values are token patterns
#---------------------------------------------------------------------------------------------------------
@RULE 'document', =>
@MANY =>
@OR [
{ ALT: => @CONSUME t.o_escaped }
{ ALT: => @CONSUME t.o_doctype }
{ ALT: => @CONSUME t.o_xmldecl }
{ ALT: => @CONSUME t.o_pi }
{ ALT: => @CONSUME t.o_cdata }
{ ALT: => @CONSUME t.o_comment }
{ ALT: => @CONSUME t.o_text }
{ ALT: => @CONSUME t.stm_text }
{ ALT: => @SUBRULE @osntag }
{ ALT: => @SUBRULE @ctag }
{ ALT: => @CONSUME t.stm_slash2 }
]
#---------------------------------------------------------------------------------------------------------
@RULE 'osntag', => ### `<a b=c>`, `<a b=c/>`, or `<a b=c/` ###
@CONSUME t.i_open
@CONSUME t.i_name
@OPTION => @SUBRULE @attributes
@OR [
{ ALT: => @CONSUME t.i_close }
{ ALT: => @CONSUME t.i_slash_close }
{ ALT: => @CONSUME t.stm_slash1 }
]
#---------------------------------------------------------------------------------------------------------
@RULE 'ctag', => ### `</a>` ###
@CONSUME t.i_slash_open
@CONSUME t.i_name
@CONSUME t.i_close
#---------------------------------------------------------------------------------------------------------
@RULE 'attributes', =>
@AT_LEAST_ONE => @SUBRULE @attribute
#---------------------------------------------------------------------------------------------------------
@RULE 'attribute', =>
@CONSUME t.i_name
@OPTION =>
@CONSUME t.v_equals
@CONSUME t.v_value
#-----------------------------------------------------------------------------------------------------------
dd = ( d ) ->
### TAINT implement as optional functionality of `DATOM.new_datom()` ###
for k of d
delete d[ k ] if d[ k ] in [ undefined, null, '', ]
return d
#-----------------------------------------------------------------------------------------------------------
strip_quotes = ( x ) ->
return x unless isa.text x
return x[ 1 ... x.length - 1 ] if /^".*"$/.test x
return x[ 1 ... x.length - 1 ] if /^'.*'$/.test x
return x
#-----------------------------------------------------------------------------------------------------------
@linearize = ( source, tree, level = 0 ) ->
return null unless tree?
#.........................................................................................................
{ name: token_name
$key
start
stop
text
$vnr } = tree
#.........................................................................................................
if $key is '^token'
switch token_name
when 'o_escaped' then yield dd { $key: '^text', start, stop, text, $vnr, $: '^Ω1^', }
when 'o_text', 'stm_text' then yield dd { $key: '^text', start, stop, text, $vnr, $: '^Ω2^', }
when 'stm_slash2' then yield dd { $key: '>tag', type: 'nctag', start, stop, text, $vnr, $: '^Ω3^', }
when 'o_comment' then yield dd { $key: '^comment', start, stop, text, $vnr, $: '^Ω4^', }
when 'o_pi' then yield dd { $key: '^pi', start, stop, text, $vnr, $: '^Ω5^', }
when 'o_doctype' then yield dd { $key: '^doctype', start, stop, text, $vnr, $: '^Ω6^', }
when 'o_cdata'
start1 = start + 9
stop2 = stop - 3
text1 = source[ start ... start1 ]
text2 = source[ start1 ... stop2 ]
text3 = source[ stop2 ... stop ]
yield dd { $key: '<cdata', start, stop: start1, text: text1, $vnr, $: '^Ω7^', }
yield dd { $key: '^text', start: start1, stop: stop2, text: text2, $vnr, $: '^Ω8^', } if text2 isnt ''
yield dd { $key: '>cdata', start: stop2, stop, text: text3, $vnr, $: '^Ω9^', }
else yield dd { $key: '^unknown', $value: tree, $vnr, $: '^Ω10^', }
return null
throw new Error "^445^ unknown $key #{rpr $key}" unless $key in [ '^document', '^node', ]
#.........................................................................................................
{ ukids } = tree
### NOTE we assume that unique kids exist and that values are stored in source order ###
for _, ukid of ukids
$vnr = ukid.$vnr
break
#.........................................................................................................
if $key is '^document'
unless @settings.bare
yield dd { $key: '<document', start: 0, stop: 0, source, errors: tree.errors, $vnr: [ -Infinity, ], $: '^Ω11^', }
for subtree in tree.kids
yield from @linearize source, subtree, level + 1
x = text.length
unless @settings.bare
yield dd { $key: '>document', start: x, stop: x, $vnr: [ Infinity, ], $: '^Ω12^', }
return null
#.........................................................................................................
return null unless ( name = tree.ukids?.i_name?.text )? ### may happen when parsing errors occur ###
switch token_name
#.......................................................................................................
when 'osntag'
$key = '<PI:KEY:<KEY>END_PI'
if tree.ukids.i_close? then type = 'otag'
else if tree.ukids.i_slash_close? then type = 'stag'; $key = '^tag'
else if tree.ukids.stm_slash1? then type = 'ntag'
if ( attributes = tree.ukids.attributes )?
atrs = {}
for attribute in attributes.kids
k = attribute.ukids.i_name.text
v = strip_quotes attribute.ukids.v_value?.text ? true
atrs[ k ] = v
d = { $key, name, type, text, start, stop, atrs, $vnr, $: '^Ω13^', }
else
d = { $key, name, type, text, start, stop, $vnr, $: '^Ω14^', }
#.....................................................................................................
# parse compact tag name:
if d.name? and d.name isnt ''
e = @_parse_compact_tagname d.name
if e.id?
if d.id?
throw new Error "^paragate/htmlish/linearize@1^ duplicate IDs in #{rpr d}"
d.id = e.id
if e.prefix?
if d.prefix?
throw new Error "^paragate/htmlish/linearize@1^ duplicate prefixes in #{rpr d}"
d.prefix = e.prefix
if e.class?
clasz = if d.class? then ( new Set d.class.split /\s+/ ) else ( new Set() )
clasz.add c for c in e.class
d.class = [ clasz..., ]
if e.name?
d.name = e.name
#.....................................................................................................
yield dd d
#.......................................................................................................
when 'ctag'
yield dd { $key: '>tag', name, type: 'ctag', text, start, stop, $vnr, $: '^Ω15^', }
#.......................................................................................................
else yield dd { $key: '^unknown', $value: tree, $vnr, $: '^Ω16^', }
return null
#-----------------------------------------------------------------------------------------------------------
@_parse_compact_tagname = ( text ) -> PGTH.parse_compact_tagname text, true
#-----------------------------------------------------------------------------------------------------------
$parse = ( grammar = null ) ->
SP = require 'steampipes'
grammar ?= new new_grammar { bare: true, }
line_nr = 0
return SP.$ ( line, send ) ->
line_nr++
send new_datom '^newline', { $vnr: [ line_nr, 0, ], $: '^Ω17^', }
for d in grammar.parse line
send lets d, ( d ) -> d.$vnr[ 0 ] = line_nr
return null
#-----------------------------------------------------------------------------------------------------------
merge_texts = ( d1, d2 ) ->
# { '$key': '^text', start: 0, stop: 7, text: 'before ', '$vnr': [ 1, 1 ], '$': '^Ω18^' }
R =
$key: '^text'
start: d1.start
stop: d2.stop
text: d1.text + d2.text
$vnr: d1.$vnr
$: d1.$
return R
#-----------------------------------------------------------------------------------------------------------
parse = ( P... ) ->
tokens = @_parse P...
R = []
prv_d = null
for d, idx in tokens
if ( d.$key is '^text' )
if ( prv_d? ) and ( prv_d.$key is '^text' )
prv_d = merge_texts prv_d, d
continue
prv_d = d
continue
R.push prv_d if prv_d
prv_d = null
R.push d
R.push prv_d if prv_d?
return freeze R
############################################################################################################
### TAINT this seems backwards (but works?) ###
MAIN = @
new_grammar = ( settings ) ->
R = GRAMMAR.new_grammar 'Htmlish', MAIN, settings
R._parse = R.parse
R.parse = parse
return R
grammar = new_grammar()
Htmlish_grammar = grammar.constructor
module.exports = { Htmlish_grammar, grammar, new_grammar, $parse, }
|
[
{
"context": "hat the devil are you trying? You need to convince Ivan that what you are doing is okay.\"\n \n ",
"end": 671,
"score": 0.9267361760139465,
"start": 667,
"tag": "NAME",
"value": "Ivan"
}
] | source/flow-arrows/arrow.coffee | cdig/svga | 4 | Take ["FlowArrows:Config", "SVG", "TRS"], (Config, SVG, TRS)->
Make "FlowArrows:Arrow", (parentElm, segmentData, segmentPosition, vectorPosition, vectorIndex)->
vector = segmentData.vectors[vectorIndex]
element = TRS SVG.create "g", parentElm
triangle = SVG.create "polyline", element, points: "0,-16 30,0 0,16"
line = SVG.create "line", element, x1: -23, y1: 0, x2: 5, y2: 0, "stroke-width": 11, "stroke-linecap": "round"
return arrow =
update: (parentFlow, parentScale)->
# if Config.SPACING < 30 * parentScale then throw new Error "Your flow arrows are overlapping. What the devil are you trying? You need to convince Ivan that what you are doing is okay."
vectorPosition += parentFlow
segmentPosition += parentFlow
while vectorPosition > vector.dist
vectorIndex++
if vectorIndex >= segmentData.vectors.length
vectorIndex = 0
segmentPosition -= segmentData.dist
vectorPosition -= vector.dist
vector = segmentData.vectors[vectorIndex]
while vectorPosition < 0
vectorIndex--
if vectorIndex < 0
vectorIndex = segmentData.vectors.length - 1
segmentPosition += segmentData.dist
vector = segmentData.vectors[vectorIndex]
vectorPosition += vector.dist
if segmentPosition < segmentData.dist/2
scale = Math.max 0, Math.min 1, (segmentPosition / segmentData.dist) * segmentData.dist / Config.FADE_LENGTH
else
scale = Math.max 0, Math.min 1, 1 - (segmentPosition - (segmentData.dist - Config.FADE_LENGTH)) / Config.FADE_LENGTH
TRS.abs element,
x: Math.cos(vector.angle) * vectorPosition + vector.x
y: Math.sin(vector.angle) * vectorPosition + vector.y
scale: scale * parentScale
r: vector.angle / (2*Math.PI) + (if parentFlow < 0 then 0.5 else 0)
| 34080 | Take ["FlowArrows:Config", "SVG", "TRS"], (Config, SVG, TRS)->
Make "FlowArrows:Arrow", (parentElm, segmentData, segmentPosition, vectorPosition, vectorIndex)->
vector = segmentData.vectors[vectorIndex]
element = TRS SVG.create "g", parentElm
triangle = SVG.create "polyline", element, points: "0,-16 30,0 0,16"
line = SVG.create "line", element, x1: -23, y1: 0, x2: 5, y2: 0, "stroke-width": 11, "stroke-linecap": "round"
return arrow =
update: (parentFlow, parentScale)->
# if Config.SPACING < 30 * parentScale then throw new Error "Your flow arrows are overlapping. What the devil are you trying? You need to convince <NAME> that what you are doing is okay."
vectorPosition += parentFlow
segmentPosition += parentFlow
while vectorPosition > vector.dist
vectorIndex++
if vectorIndex >= segmentData.vectors.length
vectorIndex = 0
segmentPosition -= segmentData.dist
vectorPosition -= vector.dist
vector = segmentData.vectors[vectorIndex]
while vectorPosition < 0
vectorIndex--
if vectorIndex < 0
vectorIndex = segmentData.vectors.length - 1
segmentPosition += segmentData.dist
vector = segmentData.vectors[vectorIndex]
vectorPosition += vector.dist
if segmentPosition < segmentData.dist/2
scale = Math.max 0, Math.min 1, (segmentPosition / segmentData.dist) * segmentData.dist / Config.FADE_LENGTH
else
scale = Math.max 0, Math.min 1, 1 - (segmentPosition - (segmentData.dist - Config.FADE_LENGTH)) / Config.FADE_LENGTH
TRS.abs element,
x: Math.cos(vector.angle) * vectorPosition + vector.x
y: Math.sin(vector.angle) * vectorPosition + vector.y
scale: scale * parentScale
r: vector.angle / (2*Math.PI) + (if parentFlow < 0 then 0.5 else 0)
| true | Take ["FlowArrows:Config", "SVG", "TRS"], (Config, SVG, TRS)->
Make "FlowArrows:Arrow", (parentElm, segmentData, segmentPosition, vectorPosition, vectorIndex)->
vector = segmentData.vectors[vectorIndex]
element = TRS SVG.create "g", parentElm
triangle = SVG.create "polyline", element, points: "0,-16 30,0 0,16"
line = SVG.create "line", element, x1: -23, y1: 0, x2: 5, y2: 0, "stroke-width": 11, "stroke-linecap": "round"
return arrow =
update: (parentFlow, parentScale)->
# if Config.SPACING < 30 * parentScale then throw new Error "Your flow arrows are overlapping. What the devil are you trying? You need to convince PI:NAME:<NAME>END_PI that what you are doing is okay."
vectorPosition += parentFlow
segmentPosition += parentFlow
while vectorPosition > vector.dist
vectorIndex++
if vectorIndex >= segmentData.vectors.length
vectorIndex = 0
segmentPosition -= segmentData.dist
vectorPosition -= vector.dist
vector = segmentData.vectors[vectorIndex]
while vectorPosition < 0
vectorIndex--
if vectorIndex < 0
vectorIndex = segmentData.vectors.length - 1
segmentPosition += segmentData.dist
vector = segmentData.vectors[vectorIndex]
vectorPosition += vector.dist
if segmentPosition < segmentData.dist/2
scale = Math.max 0, Math.min 1, (segmentPosition / segmentData.dist) * segmentData.dist / Config.FADE_LENGTH
else
scale = Math.max 0, Math.min 1, 1 - (segmentPosition - (segmentData.dist - Config.FADE_LENGTH)) / Config.FADE_LENGTH
TRS.abs element,
x: Math.cos(vector.angle) * vectorPosition + vector.x
y: Math.sin(vector.angle) * vectorPosition + vector.y
scale: scale * parentScale
r: vector.angle / (2*Math.PI) + (if parentFlow < 0 then 0.5 else 0)
|
[
{
"context": "common/logger.coffee'\n\n# Firebase secure token for duelyst-dev.firebaseio.com\nfirebaseToken = config.get",
"end": 574,
"score": 0.611825168132782,
"start": 571,
"tag": "EMAIL",
"value": "due"
}
] | scripts/wipe/wipe_log_data.coffee | willroberts/duelyst | 5 |
###
wipe_log_data - Wipes all users inventories and gives them gold based on current gold + 100g per booster pack
Examples: (no parameters required)
# Does nothing
wipe_log_data
# Actually wipe the data
wipe_log_data commit_wipe
###
# region Requires
# Configuration object
config = require("../../config/config.js")
Promise = require 'bluebird'
Firebase = require("firebase")
_ = require("underscore")
fbRef = new Firebase(config.get("firebase"))
moment = require('moment')
Logger = require '../../app/common/logger.coffee'
# Firebase secure token for duelyst-dev.firebaseio.com
firebaseToken = config.get("firebaseToken")
UsersModule = require("../../server/lib/users_module")
DuelystFirebase = require("../../server/lib/duelyst_firebase_module")
FirebasePromises = require("../../server/lib/firebase_promises.coffee")
# endregion Requires
# Resolves to a results object filled with data representing the results of the wipe
wipe_log_data = () ->
DuelystFirebase.connect().getRootRef()
.bind({})
.then (fbRootRef) ->
@fbRootRef = fbRootRef
treeRemovalPromises = []
treeRemovalPromises.push(FirebasePromises.remove(@fbRootRef.child('user-aggregates')))
treeRemovalPromises.push(FirebasePromises.remove(@fbRootRef.child('user-logs')))
return Promise.all(treeRemovalPromises)
# Begin script execution
console.log process.argv
if process.argv[2] == 'commit_wipe'
wipe_log_data()
.then () ->
Logger.module("Script").log(("wipe_log_data() -> completed").blue)
process.exit(1);
else
Logger.module("Script").log(("call 'wipe_log_data commit_wipe' to perform wipe").blue)
process.exit(1)
| 139849 |
###
wipe_log_data - Wipes all users inventories and gives them gold based on current gold + 100g per booster pack
Examples: (no parameters required)
# Does nothing
wipe_log_data
# Actually wipe the data
wipe_log_data commit_wipe
###
# region Requires
# Configuration object
config = require("../../config/config.js")
Promise = require 'bluebird'
Firebase = require("firebase")
_ = require("underscore")
fbRef = new Firebase(config.get("firebase"))
moment = require('moment')
Logger = require '../../app/common/logger.coffee'
# Firebase secure token for <EMAIL>lyst-dev.firebaseio.com
firebaseToken = config.get("firebaseToken")
UsersModule = require("../../server/lib/users_module")
DuelystFirebase = require("../../server/lib/duelyst_firebase_module")
FirebasePromises = require("../../server/lib/firebase_promises.coffee")
# endregion Requires
# Resolves to a results object filled with data representing the results of the wipe
wipe_log_data = () ->
DuelystFirebase.connect().getRootRef()
.bind({})
.then (fbRootRef) ->
@fbRootRef = fbRootRef
treeRemovalPromises = []
treeRemovalPromises.push(FirebasePromises.remove(@fbRootRef.child('user-aggregates')))
treeRemovalPromises.push(FirebasePromises.remove(@fbRootRef.child('user-logs')))
return Promise.all(treeRemovalPromises)
# Begin script execution
console.log process.argv
if process.argv[2] == 'commit_wipe'
wipe_log_data()
.then () ->
Logger.module("Script").log(("wipe_log_data() -> completed").blue)
process.exit(1);
else
Logger.module("Script").log(("call 'wipe_log_data commit_wipe' to perform wipe").blue)
process.exit(1)
| true |
###
wipe_log_data - Wipes all users inventories and gives them gold based on current gold + 100g per booster pack
Examples: (no parameters required)
# Does nothing
wipe_log_data
# Actually wipe the data
wipe_log_data commit_wipe
###
# region Requires
# Configuration object
config = require("../../config/config.js")
Promise = require 'bluebird'
Firebase = require("firebase")
_ = require("underscore")
fbRef = new Firebase(config.get("firebase"))
moment = require('moment')
Logger = require '../../app/common/logger.coffee'
# Firebase secure token for PI:EMAIL:<EMAIL>END_PIlyst-dev.firebaseio.com
firebaseToken = config.get("firebaseToken")
UsersModule = require("../../server/lib/users_module")
DuelystFirebase = require("../../server/lib/duelyst_firebase_module")
FirebasePromises = require("../../server/lib/firebase_promises.coffee")
# endregion Requires
# Resolves to a results object filled with data representing the results of the wipe
wipe_log_data = () ->
DuelystFirebase.connect().getRootRef()
.bind({})
.then (fbRootRef) ->
@fbRootRef = fbRootRef
treeRemovalPromises = []
treeRemovalPromises.push(FirebasePromises.remove(@fbRootRef.child('user-aggregates')))
treeRemovalPromises.push(FirebasePromises.remove(@fbRootRef.child('user-logs')))
return Promise.all(treeRemovalPromises)
# Begin script execution
console.log process.argv
if process.argv[2] == 'commit_wipe'
wipe_log_data()
.then () ->
Logger.module("Script").log(("wipe_log_data() -> completed").blue)
process.exit(1);
else
Logger.module("Script").log(("call 'wipe_log_data commit_wipe' to perform wipe").blue)
process.exit(1)
|
[
{
"context": "yDataCounter\",\n start_key: startDate.format(\"GGGG-WW\").split(/-/)\n end_key: endDate.format(\"GGGG-",
"end": 1417,
"score": 0.8734613656997681,
"start": 1410,
"tag": "KEY",
"value": "GGGG-WW"
},
{
"context": "-WW\").split(/-/)\n end_key: endDate.format(\"GGGG-WW\").split(/-/)\n reduce: true\n include_doc",
"end": 1469,
"score": 0.820899248123169,
"start": 1464,
"tag": "KEY",
"value": "GG-WW"
},
{
"context": "kAndDistrict\",\n startkey: [\"#{data.year}-01\"]\n endkey: [\"#{data.year}-52\",{}]\n ",
"end": 5781,
"score": 0.8301265835762024,
"start": 5779,
"tag": "KEY",
"value": "01"
},
{
"context": "{data.year}-01\"]\n endkey: [\"#{data.year}-52\",{}]\n reduce: true\n group_l",
"end": 5821,
"score": 0.8919878005981445,
"start": 5819,
"tag": "KEY",
"value": "52"
},
{
"context": "\n # Values from https://medialab.github.io/iwanthue/\n # Colorblind friendly\n colors = d",
"end": 8840,
"score": 0.9747573733329773,
"start": 8832,
"tag": "USERNAME",
"value": "iwanthue"
}
] | _attachments/app/models/Graphs.coffee | jongoz/coconut-analytice | 3 | _ = require 'underscore'
moment = require 'moment'
$ = require 'jquery'
distinctColors = (require 'distinct-colors').default
Chart = require 'chart.js'
ChartDataLabels = require 'chartjs-plugin-datalabels'
Chart.plugins.unregister(ChartDataLabels)
camelize = require "underscore.string/camelize"
class Graphs
Graphs.render = (graphName, data, target) ->
target or= camelize(graphName)
Graphs.definitions[graphName].render(data, $("##{target}"))
Graphs.getGraphName = (nameOrCamelizedName) ->
return nameOrCamelizedName if Graphs.definitions[nameOrCamelizedName]
camelizedNameToTile = {}
for name of Graphs.definitions
camelizedNameToTile[camelize(name)] = name
if Graphs.definitions[camelizedNameToTile[nameOrCamelizedName]]
camelizedNameToTile[nameOrCamelizedName]
Graphs.weeklyDataCounter = (options) ->
startDate = options.startDate
endDate = options.endDate
if _(startDate).isString()
startDate = moment(startDate)
if _(endDate).isString()
endDate = moment(endDate)
groupLevel = 4 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
await Coconut.weeklyFacilityDatabase.query "weeklyDataCounter",
start_key: startDate.format("GGGG-WW").split(/-/)
end_key: endDate.format("GGGG-WW").split(/-/)
reduce: true
include_docs: false
group: true
group_level: groupLevel
.then (result) =>
Promise.resolve if options.administrativeName
_(result.rows).filter (row) =>
_(row.key).last() is options.administrativeName
else
result.rows
Graphs.caseCounter = (options) ->
startDate = options.startDate
endDate = options.endDate
unless _(startDate).isString()
startDate = startDate.format('YYYY-MM-DD')
unless _(endDate).isString()
endDate = endDate.format('YYYY-MM-DD')
groupLevel = 3 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
data = await Coconut.reportingDatabase.query "caseCounter",
startkey: [startDate]
endkey: [endDate,{}]
reduce: true
group_level: groupLevel
include_docs: false
.catch (error, foo) =>
console.error "This may be caused by non numeric answers"
.then (result) =>
Promise.resolve if options.administrativeName
_(result.rows).filter (row) =>
_(row.key).last() is options.administrativeName
else
result.rows
Graphs.caseCounterDetails = (options) ->
console.log options
startDate = options.startDate
endDate = options.endDate
unless _(startDate).isString()
startDate = startDate.format('YYYY-MM-DD')
unless _(endDate).isString()
endDate = endDate.format('YYYY-MM-DD')
# We can't use grouping since we want detailed case data but we still need the groupLevel to filter for the cases that correspond to the selected administrative Level/Name.
groupLevel = 2 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
# Get all of the keys,
# then query again with those keys to get case details
caseKeys = await Coconut.reportingDatabase.query "caseCounter",
startkey: [startDate]
endkey: [endDate,{}]
reduce: false
include_docs: false
.then (result) =>
caseIds = {}
if options.administrativeName
for row in result.rows
if row.key[groupLevel] is options.administrativeName
caseIds[row.id] = true
Promise.resolve(_(caseIds).keys())
else
Promise.resolve(_(result.rows).pluck "id")
return await Coconut.reportingDatabase.allDocs
keys: caseKeys
include_docs: true
.then (result) =>
Promise.resolve result.rows
# This is a big data structure that is used to create the graphs on the dashboard as well as the individual graph pages as well as to create the menu options
Graphs.definitions =
"Positive Individuals by Year":
description: "Positive Individuals by Year shows the 'classic' epidemiological curve comparing last year's total cases to this years. This is useful to see if general trends are higher or lower than the previous year."
dataQuery: (options) ->
groupLevel = 2 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
# Only care about the endDate
endDate = options.endDate
if _(endDate).isString()
endDate = moment(endDate)
for label, data of {
"#{lastYear = endDate.clone().subtract(1,'year').year()}":
year: lastYear
options:
borderColor: "rgba(255, 64, 129,1)"
backgroundColor: "rgba(255, 64, 129, 0.1)"
pointRadius: 2
"#{thisYear = endDate.year()}":
year: thisYear
options:
borderColor: "rgba(25, 118, 210, 1)"
backgroundColor: "rgba(25, 118, 210, 0.1)"
pointRadius: 2
}
await Coconut.individualIndexDatabase.query "epiCurveByWeekAndDistrict",
startkey: ["#{data.year}-01"]
endkey: ["#{data.year}-52",{}]
reduce: true
group_level: groupLevel
.then (result) =>
Promise.resolve _(data.options).extend {
label: label
data: _(for row in result.rows
if options.administrativeName and _(row.key).last() is options.administrativeName
x: parseInt(row.key[0].replace(/.*-/,""))
y: row.value
).compact()
}
render: (dataForGraph, target) ->
if dataForGraph.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "line"
data:
labels: [1..52]
datasets: dataForGraph
options:
scales:
xAxes: [
scaleLabel:
display: true
labelString: "Week"
]
"Positive Individual Classifications":
description: "Positive Individual Classifications shows classifications for all individuals that have been followed up. Note that the dates may differ slightly since this graph uses the date of testing for household members, which usually is different than the date that the index case was found positive, and which is used for other graphs on this page."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
tabulatorFields: [
"Malaria Case ID"
"Classifications By Household Member Type"
"Index Case Diagnosis Date ISO Week"
]
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
classificationsToAlwaysShow = [
"Indigenous"
"Imported"
"Introduced"
"Induced"
"Relapsing"
]
classificationsToShowIfPresent = [
"In Progress"
"Lost to Followup"
"Unclassified"
]
classifications = classificationsToAlwaysShow.concat(classificationsToShowIfPresent)
presentOptionalClassifications = {}
for data in dataForGraph
if _(classifications).contains data.key[1]
[date, classification] = data.key
week = moment(date).isoWeek()
dataAggregated[classification] or= {}
dataAggregated[classification][week] or= 0
dataAggregated[classification][week] += data.value
weeksIncluded[week] = true
presentOptionalClassifications[classification] = true if _(classificationsToShowIfPresent).contains classification
classifications = classificationsToAlwaysShow.concat(_(presentOptionalClassifications).keys())
weeksIncluded = _(weeksIncluded).keys()
firstWeek = _(weeksIncluded).min()
lastWeek = _(weeksIncluded).max()
# Values from https://medialab.github.io/iwanthue/
# Colorblind friendly
colors = distinctColors(
count: classifications.length
hueMin: 0
hueMax: 360
chromaMin: 40
chromaMax: 70
lightMin: 15
lightMax: 85
)
chartOptions = for distinctColor in colors
color = distinctColor.rgb()
{
borderColor: "rgba(#{color.join(",")},1)"
backgroundColor: "rgba(#{color.join(",")},0.5)"
borderWidth: 2
}
index = -1
dataSets = for classification in classifications
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: classification
data: for week in [firstWeek..lastWeek]
dataAggregated[classification]?[week] or 0
xAxisLabels = []
for week, index in weeksIncluded
xAxisLabels.push week
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
onClick: (event,chartElements, z) ->
if document.location.hash[0..5] is "#graph"
week = this.data.labels[chartElements[0]._index]
week = if week < 10 then "0#{week}" else "#{week}"
category = classifications[this.getElementAtEvent(event)[0]._datasetIndex]
# Using a global variable for this - ugly but works
if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and category: #{category}"
casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
casesTabulatorView.tabulator.setHeaderFilterValue("Classifications By Household Member Type", category)
"Positive Individuals by Age":
description: "Positive Individuals by Age counts all malaria positive individuals and classifies them by age. Index case date of positive is used for all individuals."
dataQuery: Graphs.caseCounter
      # NOTE(review): everything in this chunk duplicates the graph
      # definitions that appear again later in this file - the file content
      # looks concatenated more than once and should be de-duplicated at the
      # file level.
      # Weekly line chart of positive individuals by over/under five age.
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          if data.key[1] is "Number Positive Individuals Over 5" or data.key[1] is "Number Positive Individuals Under 5"
            [date,age] = data.key
            week = moment(date).isoWeek()
            dataAggregated[age] or= {}
            dataAggregated[age][week] or= 0
            dataAggregated[age][week] += data.value
            weeksIncluded[week] = true
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            pointRadius: 2
          }
          {
            borderColor: "rgba(255, 64, 129,1)"
            backgroundColor: "rgba(255, 64, 129, 0.1)"
            pointRadius: 2
          }
        ]
        index = -1
        dataSets = for age, weekValue of dataAggregated
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: age
            data: for week, value of weekValue
              x: week
              y: value
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "line"
          data:
            labels: _(weeksIncluded).keys()
            datasets: dataSets
          options:
            scales:
              xAxes: [
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
    # Duplicate of "OPD Visits By Age" defined later in this file.
    "OPD Visits By Age":
      description: "OPD Visits by Age shows data for all malaria and non-malaria visits to facilities."
      dataQuery: Graphs.weeklyDataCounter
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        mappings =
          "All OPD >= 5" : "Over 5"
          "All OPD < 5" : "Under 5"
        for data in dataForGraph
          if data.key[2] is "All OPD >= 5" or data.key[2] is "All OPD < 5"
            [year, week, dataType] = data.key
            dataType = mappings[dataType]
            week = parseInt(week)
            dataAggregated[dataType] or= {}
            dataAggregated[dataType][week] or= 0
            dataAggregated[dataType][week] += data.value
            weeksIncluded[week] = true
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            pointRadius: 2
          }
          {
            borderColor: "rgba(255, 64, 129,1)"
            backgroundColor: "rgba(255, 64, 129, 0.1)"
            pointRadius: 2
          }
        ]
        index = -1
        dataSets = for age in _(mappings).values()
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: age
            data: for week, value of dataAggregated[age]
              x: week
              y: value
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "line"
          data:
            labels: _(weeksIncluded).keys()
            datasets: dataSets
          options:
            scales:
              xAxes: [
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
    # Duplicate of the notification-delay graph defined later in this file.
    "Hours from Positive Test at Facility to Notification":
      description: "Shows how long it is taking facilities to send a notification once someone has tested positive. Target is less than 24 hours."
      dataQuery: Graphs.caseCounter
      detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
      tabulatorFields: [
        "Malaria Case ID"
        "Days Between Positive Result And Notification From Facility"
        "Index Case Diagnosis Date ISO Week"
        "Facility"
      ]
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          if (data.value is 0 and data.key[1] is "Has Notification") or (data.value >= 1 and data.key[1].match( /Between Positive Result And Notification From Facility/))
            [datePositive, timeToNotify] = data.key
            week = moment(datePositive).isoWeek()
            # NOTE(review): mapping is rebuilt every iteration and also read
            # after the loop - hoist it above the loop.
            mapping =
              "Less Than One Day Between Positive Result And Notification From Facility": "< 24"
              "One To Two Days Between Positive Result And Notification From Facility": "24 - 48"
              "Two To Three Days Between Positive Result And Notification From Facility": "48 - 72"
              "More Than Three Days Between Positive Result And Notification From Facility": "> 72"
              "Has Notification": "No notification" # This Has Notification = 0
            timeToNotify = mapping[timeToNotify]
            weeksIncluded[week] = true
            dataAggregated[timeToNotify] or= {}
            dataAggregated[timeToNotify][week] or= 0
            if timeToNotify is "No notification"
              dataAggregated[timeToNotify][week] += 1
            else
              dataAggregated[timeToNotify][week] += data.value
        chartOptions = for color in [
          [0, 128, 0] # green
          [192,192, 0] # yellow
          [255,128, 0] # orange
          [255,0, 0] # red
        ]
          {
            borderColor: "rgba(#{color.join(",")},1)"
            backgroundColor: "rgba(#{color.join(",")},0.1)"
            borderWidth: 2
          }
        index = -1
        dataSets = for type in _(mapping).values() # Do this to get the right order
          continue unless dataAggregated[type]
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: type
            data: for week, value of dataAggregated[type]
              value
        xAxisLabels = []
        onTarget = []
        for week, index in _(weeksIncluded).keys()
          xAxisLabels.push week
          total = 0
          for dataSet in dataSets
            total += dataSet.data[index] or 0
          onTarget.push Math.round(dataSets[0].data[index] / total * 100)
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          plugins: [ChartDataLabels] # Lets us put the percent over the bar
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            plugins:
              datalabels:
                align: 'top'
                anchor: 'start'
                borderRadius: 4
                color: 'green'
                formatter: (value, context) ->
                  return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
                  "#{onTarget[context.dataIndex]}%"
            onClick: (event,chartElements, z) ->
              if document.location.hash[0..5] is "#graph"
                week = this.data.labels[chartElements[0]._index]
                week = if week < 10 then "0#{week}" else "#{week}"
                # Using a global variable for this - ugly but works
                if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and sort by how many days it took?"
                  casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
                  casesTabulatorView.tabulator.setSort("Days Between Positive Result And Notification From Facility", "desc")
    # Duplicate of the followup-delay graph defined later in this file.
    "Hours From Positive Test To Complete Follow-up":
      description: "Shows how long it is taking the entire followup process to take including both the time for the facility to followup as well as time for the surveillance officer to go to the facility and then go to the household. Target is less than 48 hours."
      dataQuery: Graphs.caseCounter
      detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
      tabulatorFields: [
        "Malaria Case ID"
        "Days Between Positive Result And Complete Household"
        "Index Case Diagnosis Date ISO Week"
        "Malaria Surveillance Officers"
      ]
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          if (data.value is 0 and data.key[1] is "Complete Household Visit") or (data.value >= 1 and data.key[1].match( /Between Positive Result And Complete Household/))
            [datePositive, timeToComplete] = data.key
            week = moment(datePositive).isoWeek()
            # NOTE(review): mapping is rebuilt every iteration and also read
            # after the loop - hoist it above the loop.
            mapping =
              "Less Than One Day Between Positive Result And Complete Household": "< 48"
              "One To Two Days Between Positive Result And Complete Household": "< 48"
              "Two To Three Days Between Positive Result And Complete Household": "48 - 72"
              "More Than Three Days Between Positive Result And Complete Household": "> 72"
              "Complete Household Visit": "Not followed up" #Confusing but when followed up is 0 then it is not followed up
            timeToComplete = mapping[timeToComplete]
            weeksIncluded[week] = true
            dataAggregated[timeToComplete] or= {}
            dataAggregated[timeToComplete][week] or= 0
            if timeToComplete is "Not followed up"
              dataAggregated[timeToComplete][week] += 1
            else
              dataAggregated[timeToComplete][week] += data.value
        weeksIncluded = _(weeksIncluded).keys()
        firstWeek = _(weeksIncluded).min()
        lastWeek = _(weeksIncluded).max()
        chartOptions = for color in [
          [0, 128, 0] # green
          [192,192, 0] # yellow
          [255,128, 0] # orange
          [255,0, 0] # red
        ]
          {
            borderColor: "rgba(#{color.join(",")},1)"
            backgroundColor: "rgba(#{color.join(",")},0.1)"
            borderWidth: 2
          }
        index = -1
        dataSets = for type in _(mapping).chain().values().uniq().value()
          continue unless dataAggregated[type]
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: type
            data: for week in [firstWeek..lastWeek]
              dataAggregated[type]?[week] or 0
        xAxisLabels = []
        onTarget = []
        for week, index in weeksIncluded
          xAxisLabels.push week
          total = 0
          for dataSet in dataSets
            total += dataSet.data[index] or 0
          onTarget.push Math.round(dataSets[0].data[index] / total * 100)
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          plugins: [ChartDataLabels] # Lets us put the percent over the bar
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            plugins:
              datalabels:
                align: 'top'
                anchor: 'start'
                borderRadius: 4
                color: 'green'
                formatter: (value, context) ->
                  return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
                  "#{onTarget[context.dataIndex]}%"
            onClick: (event,chartElements, z) ->
              if document.location.hash[0..5] is "#graph"
                week = this.data.labels[chartElements[0]._index]
                week = if week < 10 then "0#{week}" else "#{week}"
                # Using a global variable for this - ugly but works
                if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and sort by how many days it took?"
                  casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
                  casesTabulatorView.tabulator.setSort("Days Between Positive Result And Complete Household", "desc")
    # Duplicate of the household positivity graph defined later in this file.
    "Household Testing and Positivity Rate":
      description: "How many tests are being given at households, and how many of those end up being positive. This does not include the index case, since it wasn't tested at the household."
      dataQuery: Graphs.caseCounter
      detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          [date, indicator] = data.key
          if indicator is "Number Household Members Tested"
            week = moment(date).isoWeek()
            weeksIncluded[week] = true
            dataAggregated["Negative"] or= {}
            dataAggregated["Negative"][week] or= -1 # Includes the index case so remove that for this graph
            dataAggregated["Negative"][week] += data.value
          else if indicator is "Number Positive Individuals At Household Excluding Index"
            week = moment(date).isoWeek()
            weeksIncluded[week] = true
            dataAggregated["Positive"] or= {}
            dataAggregated["Positive"][week] or= 0
            dataAggregated["Positive"][week] += data.value
            # Subtract these from negative count
            dataAggregated["Negative"] or= {}
            dataAggregated["Negative"][week] or= 0
            dataAggregated["Negative"][week] -= data.value
        weeksIncluded = _(weeksIncluded).keys()
        firstWeek = _(weeksIncluded).min()
        lastWeek = _(weeksIncluded).max()
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            borderWidth: 2
          }
          {
            borderColor: "rgba(255, 64, 129, 1)"
            backgroundColor: "rgba(255, 64, 129, 0.5)"
            borderWidth: 2
          }
        ]
        index = -1
        console.log dataAggregated
        dataSets = for type, weekValue of dataAggregated
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: type
            data: for week in [firstWeek..lastWeek]
              weekValue[week] or 0
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        console.log dataSets
        xAxisLabels = []
        positivityRate = []
        for week, index in weeksIncluded
          xAxisLabels.push week
          positivityRate.push Math.round((dataSets[1].data[index]/dataSets[0].data[index])*100)
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          plugins: [ChartDataLabels] # Lets us put the percent over the bar
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            plugins:
              datalabels:
                align: 'top',
                anchor: 'end',
                borderRadius: 4,
                color: 'black',
                formatter: (value, context) ->
                  return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
                  "#{positivityRate[context.dataIndex]}%"
    # Duplicate of the OPD positivity graph defined later in this file.
    "OPD Testing and Positivity Rate":
      description: "How many tests are being given at facilities, and how many of those end up positive."
      dataQuery: Graphs.weeklyDataCounter
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          [year, week, dataType] = data.key
          week = parseInt(week)
          weeksIncluded[week] = true
          if dataType.match /POS/
            dataAggregated["Positive"] or= {}
            dataAggregated["Positive"][week] or= 0
            dataAggregated["Positive"][week] += data.value
          if dataType.match /NEG/
            dataAggregated["Negative"] or= {}
            dataAggregated["Negative"][week] or= 0
            dataAggregated["Negative"][week] += data.value
        weeksIncluded = _(weeksIncluded).keys()
        firstWeek = _(weeksIncluded).min()
        lastWeek = _(weeksIncluded).max()
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            borderWidth: 2
          }
          {
            borderColor: "rgba(255, 64, 129, 1)"
            backgroundColor: "rgba(255, 64, 129, 0.5)"
            borderWidth: 2
          }
        ]
        index = -1
        dataSets = for type, weekValue of dataAggregated
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: type
            data: for week in [firstWeek..lastWeek]
              weekValue[week] or 0
        xAxisLabels = []
        positivityRate = []
        for week, index in weeksIncluded
          xAxisLabels.push week
          positivityRate.push Math.round((dataSets[1].data[index]/dataSets[0].data[index])*100)
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          plugins: [ChartDataLabels] # Lets us put the percent over the bar
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            plugins:
              datalabels:
                align: 'top',
                anchor: 'end',
                borderRadius: 4,
                color: 'black',
                formatter: (value, context) ->
                  return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
                  "#{positivityRate[context.dataIndex]}%"
module.exports = Graphs
_ = require 'underscore'
moment = require 'moment'
$ = require 'jquery'
distinctColors = (require 'distinct-colors').default
Chart = require 'chart.js'
ChartDataLabels = require 'chartjs-plugin-datalabels'
Chart.plugins.unregister(ChartDataLabels)
camelize = require "underscore.string/camelize"
class Graphs
Graphs.render = (graphName, data, target) ->
target or= camelize(graphName)
Graphs.definitions[graphName].render(data, $("##{target}"))
Graphs.getGraphName = (nameOrCamelizedName) ->
return nameOrCamelizedName if Graphs.definitions[nameOrCamelizedName]
camelizedNameToTile = {}
for name of Graphs.definitions
camelizedNameToTile[camelize(name)] = name
if Graphs.definitions[camelizedNameToTile[nameOrCamelizedName]]
camelizedNameToTile[nameOrCamelizedName]
  # Query the "weeklyDataCounter" view of the weekly facility database
  # between options.startDate and options.endDate (ISO strings or moment
  # objects), grouped down to an administrative area. When
  # administrativeLevel/administrativeName are given, the group level is
  # deepened to that level and only rows whose last key element equals
  # administrativeName are returned. Resolves to an array of view rows.
  Graphs.weeklyDataCounter = (options) ->
    startDate = options.startDate
    endDate = options.endDate
    # Accept either ISO date strings or moment objects.
    if _(startDate).isString()
      startDate = moment(startDate)
    if _(endDate).isString()
      endDate = moment(endDate)
    groupLevel = 4 # All of Zanzibar
    if options.administrativeLevel and options.administrativeName
      for level, index in GeoHierarchy.levels
        if level.name is options.administrativeLevel.toUpperCase()
          groupLevel = index + groupLevel
    # NOTE(review): the format patterns below appear redacted/garbled in this
    # copy; presumably an ISO week-year pattern (e.g. "GGGG-WW") since the
    # result is split on "-" to build an array key - TODO confirm against
    # the upstream source.
    await Coconut.weeklyFacilityDatabase.query "weeklyDataCounter",
      start_key: startDate.format("<KEY>").split(/-/)
      end_key: endDate.format("GG<KEY>").split(/-/)
      reduce: true
      include_docs: false
      group: true
      group_level: groupLevel
    .then (result) =>
      Promise.resolve if options.administrativeName
        _(result.rows).filter (row) =>
          _(row.key).last() is options.administrativeName
      else
        result.rows
  # Query the "caseCounter" view of the reporting database between
  # options.startDate and options.endDate (moment objects are formatted to
  # YYYY-MM-DD), grouped down to an administrative area, optionally filtered
  # so only rows whose last key element equals administrativeName remain.
  Graphs.caseCounter = (options) ->
    startDate = options.startDate
    endDate = options.endDate
    unless _(startDate).isString()
      startDate = startDate.format('YYYY-MM-DD')
    unless _(endDate).isString()
      endDate = endDate.format('YYYY-MM-DD')
    groupLevel = 3 # All of Zanzibar
    if options.administrativeLevel and options.administrativeName
      for level, index in GeoHierarchy.levels
        if level.name is options.administrativeLevel.toUpperCase()
          groupLevel = index + groupLevel
    # NOTE(review): the .catch below runs BEFORE the .then - if the query
    # rejects, the handler only logs and returns undefined, so the following
    # .then will throw when reading result.rows. Confirm this is intended.
    data = await Coconut.reportingDatabase.query "caseCounter",
      startkey: [startDate]
      endkey: [endDate,{}]
      reduce: true
      group_level: groupLevel
      include_docs: false
    .catch (error, foo) =>
      console.error "This may be caused by non numeric answers"
    .then (result) =>
      Promise.resolve if options.administrativeName
        _(result.rows).filter (row) =>
          _(row.key).last() is options.administrativeName
      else
        result.rows
  # Fetch full case documents for the date range. First queries the
  # "caseCounter" view (unreduced) to collect matching doc ids - filtered by
  # administrativeName when given - then fetches those docs via allDocs.
  # Resolves to an array of allDocs rows with .doc populated.
  Graphs.caseCounterDetails = (options) ->
    console.log options # NOTE(review): leftover debug logging
    startDate = options.startDate
    endDate = options.endDate
    unless _(startDate).isString()
      startDate = startDate.format('YYYY-MM-DD')
    unless _(endDate).isString()
      endDate = endDate.format('YYYY-MM-DD')
    # We can't use grouping since we want detailed case data but we still need the groupLevel to filter for the cases that correspond to the selected administrative Level/Name.
    groupLevel = 2 # All of Zanzibar
    if options.administrativeLevel and options.administrativeName
      for level, index in GeoHierarchy.levels
        if level.name is options.administrativeLevel.toUpperCase()
          groupLevel = index + groupLevel
    # Get all of the keys,
    # then query again with those keys to get case details
    caseKeys = await Coconut.reportingDatabase.query "caseCounter",
      startkey: [startDate]
      endkey: [endDate,{}]
      reduce: false
      include_docs: false
    .then (result) =>
      caseIds = {}
      if options.administrativeName
        for row in result.rows
          # groupLevel doubles as the index of the administrative name
          # within the emitted view key - presumably kept in sync with the
          # view's key layout; verify against the design document.
          if row.key[groupLevel] is options.administrativeName
            caseIds[row.id] = true
        Promise.resolve(_(caseIds).keys())
      else
        Promise.resolve(_(result.rows).pluck "id")
    return await Coconut.reportingDatabase.allDocs
      keys: caseKeys
      include_docs: true
    .then (result) =>
      Promise.resolve result.rows
# This is a big data structure that is used to create the graphs on the dashboard as well as the individual graph pages as well as to create the menu options
Graphs.definitions =
    # Epidemiological curve: weekly positive-individual counts for the end
    # date's year (blue) overlaid on the previous year (pink) as two line
    # datasets. dataQuery builds the Chart.js datasets itself.
    "Positive Individuals by Year":
      description: "Positive Individuals by Year shows the 'classic' epidemiological curve comparing last year's total cases to this years. This is useful to see if general trends are higher or lower than the previous year."
      dataQuery: (options) ->
        groupLevel = 2 # All of Zanzibar
        if options.administrativeLevel and options.administrativeName
          for level, index in GeoHierarchy.levels
            if level.name is options.administrativeLevel.toUpperCase()
              groupLevel = index + groupLevel
        # Only care about the endDate
        endDate = options.endDate
        if _(endDate).isString()
          endDate = moment(endDate)
        # One dataset per year: the key interpolation also assigns
        # lastYear/thisYear as a side effect.
        for label, data of {
          "#{lastYear = endDate.clone().subtract(1,'year').year()}":
            year: lastYear
            options:
              borderColor: "rgba(255, 64, 129,1)"
              backgroundColor: "rgba(255, 64, 129, 0.1)"
              pointRadius: 2
          "#{thisYear = endDate.year()}":
            year: thisYear
            options:
              borderColor: "rgba(25, 118, 210, 1)"
              backgroundColor: "rgba(25, 118, 210, 0.1)"
              pointRadius: 2
        }
          # NOTE(review): the key suffixes below appear redacted in this copy;
          # presumably week-of-year bounds within the year - TODO confirm.
          await Coconut.individualIndexDatabase.query "epiCurveByWeekAndDistrict",
            startkey: ["#{data.year}-<KEY>"]
            endkey: ["#{data.year}-<KEY>",{}]
            reduce: true
            group_level: groupLevel
          .then (result) =>
            Promise.resolve _(data.options).extend {
              label: label
              data: _(for row in result.rows
                if options.administrativeName and _(row.key).last() is options.administrativeName
                  x: parseInt(row.key[0].replace(/.*-/,""))
                  y: row.value
              ).compact()
            }
      render: (dataForGraph, target) ->
        # With no datasets, draw a placeholder message instead of a chart.
        if dataForGraph.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "line"
          data:
            labels: [1..52]
            datasets: dataForGraph
          options:
            scales:
              xAxes: [
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
    # Stacked weekly bar chart of case classifications. The five standard
    # classifications are always shown; In Progress / Lost to Followup /
    # Unclassified are shown only when present in the data. Clicking a bar
    # (on the graph page) offers to filter the details table.
    "Positive Individual Classifications":
      description: "Positive Individual Classifications shows classifications for all individuals that have been followed up. Note that the dates may differ slightly since this graph uses the date of testing for household members, which usually is different than the date that the index case was found positive, and which is used for other graphs on this page."
      dataQuery: Graphs.caseCounter
      detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
      tabulatorFields: [
        "Malaria Case ID"
        "Classifications By Household Member Type"
        "Index Case Diagnosis Date ISO Week"
      ]
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        classificationsToAlwaysShow = [
          "Indigenous"
          "Imported"
          "Introduced"
          "Induced"
          "Relapsing"
        ]
        classificationsToShowIfPresent = [
          "In Progress"
          "Lost to Followup"
          "Unclassified"
        ]
        classifications = classificationsToAlwaysShow.concat(classificationsToShowIfPresent)
        presentOptionalClassifications = {}
        # Sum values per classification per ISO week.
        for data in dataForGraph
          if _(classifications).contains data.key[1]
            [date, classification] = data.key
            week = moment(date).isoWeek()
            dataAggregated[classification] or= {}
            dataAggregated[classification][week] or= 0
            dataAggregated[classification][week] += data.value
            weeksIncluded[week] = true
            presentOptionalClassifications[classification] = true if _(classificationsToShowIfPresent).contains classification
        classifications = classificationsToAlwaysShow.concat(_(presentOptionalClassifications).keys())
        weeksIncluded = _(weeksIncluded).keys()
        firstWeek = _(weeksIncluded).min()
        lastWeek = _(weeksIncluded).max()
        # Values from https://medialab.github.io/iwanthue/
        # Colorblind friendly
        colors = distinctColors(
          count: classifications.length
          hueMin: 0
          hueMax: 360
          chromaMin: 40
          chromaMax: 70
          lightMin: 15
          lightMax: 85
        )
        # One border/background color pair per classification.
        chartOptions = for distinctColor in colors
          color = distinctColor.rgb()
          {
            borderColor: "rgba(#{color.join(",")},1)"
            backgroundColor: "rgba(#{color.join(",")},0.5)"
            borderWidth: 2
          }
        index = -1
        dataSets = for classification in classifications
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: classification
            data: for week in [firstWeek..lastWeek]
              dataAggregated[classification]?[week] or 0
        # NOTE(review): labels come from the sparse set of observed weeks while
        # each dataset spans the dense [firstWeek..lastWeek] range; if weeks
        # are missing in between, labels and data indexes can disagree - TODO
        # confirm intended.
        xAxisLabels = []
        for week, index in weeksIncluded
          xAxisLabels.push week
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            onClick: (event,chartElements, z) ->
              if document.location.hash[0..5] is "#graph"
                week = this.data.labels[chartElements[0]._index]
                week = if week < 10 then "0#{week}" else "#{week}"
                category = classifications[this.getElementAtEvent(event)[0]._datasetIndex]
                # Using a global variable for this - ugly but works
                if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and category: #{category}"
                  casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
                  casesTabulatorView.tabulator.setHeaderFilterValue("Classifications By Household Member Type", category)
    # Weekly line chart of positive individuals split into over/under five
    # years of age, keyed by the index case's date of positive test.
    "Positive Individuals by Age":
      description: "Positive Individuals by Age counts all malaria positive individuals and classifies them by age. Index case date of positive is used for all individuals."
      dataQuery: Graphs.caseCounter
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        # Sum values per age bucket per ISO week.
        for data in dataForGraph
          if data.key[1] is "Number Positive Individuals Over 5" or data.key[1] is "Number Positive Individuals Under 5"
            [date,age] = data.key
            week = moment(date).isoWeek()
            dataAggregated[age] or= {}
            dataAggregated[age][week] or= 0
            dataAggregated[age][week] += data.value
            weeksIncluded[week] = true
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            pointRadius: 2
          }
          {
            borderColor: "rgba(255, 64, 129,1)"
            backgroundColor: "rgba(255, 64, 129, 0.1)"
            pointRadius: 2
          }
        ]
        index = -1
        dataSets = for age, weekValue of dataAggregated
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: age
            data: for week, value of weekValue
              x: week
              y: value
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "line"
          data:
            labels: _(weeksIncluded).keys()
            datasets: dataSets
          options:
            scales:
              xAxes: [
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
    # Weekly line chart of all outpatient-department visits (malaria and
    # non-malaria) split into over/under five age buckets.
    "OPD Visits By Age":
      description: "OPD Visits by Age shows data for all malaria and non-malaria visits to facilities."
      dataQuery: Graphs.weeklyDataCounter
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        # Map raw facility data types onto display labels.
        mappings =
          "All OPD >= 5" : "Over 5"
          "All OPD < 5" : "Under 5"
        for data in dataForGraph
          if data.key[2] is "All OPD >= 5" or data.key[2] is "All OPD < 5"
            [year, week, dataType] = data.key
            dataType = mappings[dataType]
            week = parseInt(week)
            dataAggregated[dataType] or= {}
            dataAggregated[dataType][week] or= 0
            dataAggregated[dataType][week] += data.value
            weeksIncluded[week] = true
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            pointRadius: 2
          }
          {
            borderColor: "rgba(255, 64, 129,1)"
            backgroundColor: "rgba(255, 64, 129, 0.1)"
            pointRadius: 2
          }
        ]
        index = -1
        dataSets = for age in _(mappings).values()
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: age
            data: for week, value of dataAggregated[age]
              x: week
              y: value
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        new Chart target,
          type: "line"
          data:
            labels: _(weeksIncluded).keys()
            datasets: dataSets
          options:
            scales:
              xAxes: [
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
"Hours from Positive Test at Facility to Notification":
description: "Shows how long it is taking facilities to send a notification once someone has tested positive. Target is less than 24 hours."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
tabulatorFields: [
"Malaria Case ID"
"Days Between Positive Result And Notification From Facility"
"Index Case Diagnosis Date ISO Week"
"Facility"
]
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
if (data.value is 0 and data.key[1] is "Has Notification") or (data.value >= 1 and data.key[1].match( /Between Positive Result And Notification From Facility/))
[datePositive, timeToNotify] = data.key
week = moment(datePositive).isoWeek()
mapping =
"Less Than One Day Between Positive Result And Notification From Facility": "< 24"
"One To Two Days Between Positive Result And Notification From Facility": "24 - 48"
"Two To Three Days Between Positive Result And Notification From Facility": "48 - 72"
"More Than Three Days Between Positive Result And Notification From Facility": "> 72"
"Has Notification": "No notification" # This Has Notification = 0
timeToNotify = mapping[timeToNotify]
weeksIncluded[week] = true
dataAggregated[timeToNotify] or= {}
dataAggregated[timeToNotify][week] or= 0
if timeToNotify is "No notification"
dataAggregated[timeToNotify][week] += 1
else
dataAggregated[timeToNotify][week] += data.value
chartOptions = for color in [
[0, 128, 0] # green
[192,192, 0] # yellow
[255,128, 0] # orange
[255,0, 0] # red
]
{
borderColor: "rgba(#{color.join(",")},1)"
backgroundColor: "rgba(#{color.join(",")},0.1)"
borderWidth: 2
}
index = -1
dataSets = for type in _(mapping).values() # Do this to get the right order
continue unless dataAggregated[type]
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: type
data: for week, value of dataAggregated[type]
value
xAxisLabels = []
onTarget = []
for week, index in _(weeksIncluded).keys()
xAxisLabels.push week
total = 0
for dataSet in dataSets
total += dataSet.data[index] or 0
onTarget.push Math.round(dataSets[0].data[index] / total * 100)
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
plugins: [ChartDataLabels] # Lets us put the percent over the bar
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
plugins:
datalabels:
align: 'top'
anchor: 'start'
borderRadius: 4
color: 'green'
formatter: (value, context) ->
return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
"#{onTarget[context.dataIndex]}%"
onClick: (event,chartElements, z) ->
if document.location.hash[0..5] is "#graph"
week = this.data.labels[chartElements[0]._index]
week = if week < 10 then "0#{week}" else "#{week}"
# Using a global variable for this - ugly but works
if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and sort by how many days it took?"
casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
casesTabulatorView.tabulator.setSort("Days Between Positive Result And Notification From Facility", "desc")
"Hours From Positive Test To Complete Follow-up":
description: "Shows how long it is taking the entire followup process to take including both the time for the facility to followup as well as time for the surveillance officer to go to the facility and then go to the household. Target is less than 48 hours."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
tabulatorFields: [
"Malaria Case ID"
"Days Between Positive Result And Complete Household"
"Index Case Diagnosis Date ISO Week"
"Malaria Surveillance Officers"
]
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
if (data.value is 0 and data.key[1] is "Complete Household Visit") or (data.value >= 1 and data.key[1].match( /Between Positive Result And Complete Household/))
[datePositive, timeToComplete] = data.key
week = moment(datePositive).isoWeek()
mapping =
"Less Than One Day Between Positive Result And Complete Household": "< 48"
"One To Two Days Between Positive Result And Complete Household": "< 48"
"Two To Three Days Between Positive Result And Complete Household": "48 - 72"
"More Than Three Days Between Positive Result And Complete Household": "> 72"
"Complete Household Visit": "Not followed up" #Confusing but when followed up is 0 then it is not followed up
timeToComplete = mapping[timeToComplete]
weeksIncluded[week] = true
dataAggregated[timeToComplete] or= {}
dataAggregated[timeToComplete][week] or= 0
if timeToComplete is "Not followed up"
dataAggregated[timeToComplete][week] += 1
else
dataAggregated[timeToComplete][week] += data.value
weeksIncluded = _(weeksIncluded).keys()
firstWeek = _(weeksIncluded).min()
lastWeek = _(weeksIncluded).max()
chartOptions = for color in [
[0, 128, 0] # green
[192,192, 0] # yellow
[255,128, 0] # orange
[255,0, 0] # red
]
{
borderColor: "rgba(#{color.join(",")},1)"
backgroundColor: "rgba(#{color.join(",")},0.1)"
borderWidth: 2
}
index = -1
dataSets = for type in _(mapping).chain().values().uniq().value()
continue unless dataAggregated[type]
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: type
data: for week in [firstWeek..lastWeek]
dataAggregated[type]?[week] or 0
xAxisLabels = []
onTarget = []
for week, index in weeksIncluded
xAxisLabels.push week
total = 0
for dataSet in dataSets
total += dataSet.data[index] or 0
onTarget.push Math.round(dataSets[0].data[index] / total * 100)
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
plugins: [ChartDataLabels] # Lets us put the percent over the bar
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
plugins:
datalabels:
align: 'top'
anchor: 'start'
borderRadius: 4
color: 'green'
formatter: (value, context) ->
return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
"#{onTarget[context.dataIndex]}%"
onClick: (event,chartElements, z) ->
if document.location.hash[0..5] is "#graph"
week = this.data.labels[chartElements[0]._index]
week = if week < 10 then "0#{week}" else "#{week}"
# Using a global variable for this - ugly but works
if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and sort by how many days it took?"
casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
casesTabulatorView.tabulator.setSort("Days Between Positive Result And Complete Household", "desc")
    # Stacked weekly bar chart of household tests: negatives (tested minus
    # positives, minus the index case) vs positives, with the weekly
    # positivity percent drawn over each bar.
    "Household Testing and Positivity Rate":
      description: "How many tests are being given at households, and how many of those end up being positive. This does not include the index case, since it wasn't tested at the household."
      dataQuery: Graphs.caseCounter
      detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          [date, indicator] = data.key
          if indicator is "Number Household Members Tested"
            week = moment(date).isoWeek()
            weeksIncluded[week] = true
            dataAggregated["Negative"] or= {}
            dataAggregated["Negative"][week] or= -1 # Includes the index case so remove that for this graph
            dataAggregated["Negative"][week] += data.value
          else if indicator is "Number Positive Individuals At Household Excluding Index"
            week = moment(date).isoWeek()
            weeksIncluded[week] = true
            dataAggregated["Positive"] or= {}
            dataAggregated["Positive"][week] or= 0
            dataAggregated["Positive"][week] += data.value
            # Subtract these from negative count
            dataAggregated["Negative"] or= {}
            dataAggregated["Negative"][week] or= 0
            dataAggregated["Negative"][week] -= data.value
        weeksIncluded = _(weeksIncluded).keys()
        firstWeek = _(weeksIncluded).min()
        lastWeek = _(weeksIncluded).max()
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            borderWidth: 2
          }
          {
            borderColor: "rgba(255, 64, 129, 1)"
            backgroundColor: "rgba(255, 64, 129, 0.5)"
            borderWidth: 2
          }
        ]
        index = -1
        console.log dataAggregated # NOTE(review): leftover debug logging
        # NOTE(review): dataSets order follows object insertion order, and the
        # rate below divides dataSets[1] by dataSets[0]; presumably Negative is
        # always inserted first - TODO confirm.
        dataSets = for type, weekValue of dataAggregated
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: type
            data: for week in [firstWeek..lastWeek]
              weekValue[week] or 0
        if dataSets.length is 0
          canvas = target[0]
          ctx = canvas.getContext("2d");
          ctx.font = "20px Arial";
          ctx.fillText("No cases/data for area/dates", 10, 50);
          return
        console.log dataSets # NOTE(review): leftover debug logging
        xAxisLabels = []
        positivityRate = []
        for week, index in weeksIncluded
          xAxisLabels.push week
          positivityRate.push Math.round((dataSets[1].data[index]/dataSets[0].data[index])*100)
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          plugins: [ChartDataLabels] # Lets us put the percent over the bar
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            plugins:
              datalabels:
                align: 'top',
                anchor: 'end',
                borderRadius: 4,
                color: 'black',
                formatter: (value, context) ->
                  return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
                  "#{positivityRate[context.dataIndex]}%"
    # Stacked weekly bar chart of facility (OPD) tests: data types matching
    # /POS/ vs /NEG/ summed per ISO week, with the weekly positivity percent
    # drawn over each bar.
    "OPD Testing and Positivity Rate":
      description: "How many tests are being given at facilities, and how many of those end up positive."
      dataQuery: Graphs.weeklyDataCounter
      render: (dataForGraph, target) ->
        dataAggregated = {}
        weeksIncluded = {}
        for data in dataForGraph
          [year, week, dataType] = data.key
          week = parseInt(week)
          weeksIncluded[week] = true
          if dataType.match /POS/
            dataAggregated["Positive"] or= {}
            dataAggregated["Positive"][week] or= 0
            dataAggregated["Positive"][week] += data.value
          if dataType.match /NEG/
            dataAggregated["Negative"] or= {}
            dataAggregated["Negative"][week] or= 0
            dataAggregated["Negative"][week] += data.value
        weeksIncluded = _(weeksIncluded).keys()
        firstWeek = _(weeksIncluded).min()
        lastWeek = _(weeksIncluded).max()
        chartOptions = [
          {
            borderColor: "rgba(25, 118, 210, 1)"
            backgroundColor: "rgba(25, 118, 210, 0.1)"
            borderWidth: 2
          }
          {
            borderColor: "rgba(255, 64, 129, 1)"
            backgroundColor: "rgba(255, 64, 129, 0.5)"
            borderWidth: 2
          }
        ]
        index = -1
        # NOTE(review): dataSets order follows object insertion order, and the
        # rate below divides dataSets[1] by dataSets[0]; which label comes
        # first depends on which indicator appears first in the data - TODO
        # confirm this always yields the intended Positive/total-like ratio.
        dataSets = for type, weekValue of dataAggregated
          index +=1
          _(chartOptions[index]).extend # Just use an index to get different colors
            label: type
            data: for week in [firstWeek..lastWeek]
              weekValue[week] or 0
        xAxisLabels = []
        positivityRate = []
        for week, index in weeksIncluded
          xAxisLabels.push week
          positivityRate.push Math.round((dataSets[1].data[index]/dataSets[0].data[index])*100)
        new Chart target,
          type: "bar"
          data:
            labels: xAxisLabels
            datasets: dataSets
          plugins: [ChartDataLabels] # Lets us put the percent over the bar
          options:
            scales:
              xAxes: [
                stacked: true
                scaleLabel:
                  display: true
                  labelString: "Week"
              ]
              yAxes: [
                stacked: true
              ]
            plugins:
              datalabels:
                align: 'top',
                anchor: 'end',
                borderRadius: 4,
                color: 'black',
                formatter: (value, context) ->
                  return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
                  "#{positivityRate[context.dataIndex]}%"
module.exports = Graphs
_ = require 'underscore'
moment = require 'moment'
$ = require 'jquery'
distinctColors = (require 'distinct-colors').default
Chart = require 'chart.js'
ChartDataLabels = require 'chartjs-plugin-datalabels'
Chart.plugins.unregister(ChartDataLabels)
camelize = require "underscore.string/camelize"
class Graphs
  # Public: Look up the named graph in Graphs.definitions and render it into
  # the page. `target` is a DOM element id; when omitted it defaults to the
  # camelCase form of the graph name. The element is located with jQuery as
  # "#<target>".
  Graphs.render = (graphName, data, target) ->
    target or= camelize(graphName)
    Graphs.definitions[graphName].render(data, $("##{target}"))
Graphs.getGraphName = (nameOrCamelizedName) ->
return nameOrCamelizedName if Graphs.definitions[nameOrCamelizedName]
camelizedNameToTile = {}
for name of Graphs.definitions
camelizedNameToTile[camelize(name)] = name
if Graphs.definitions[camelizedNameToTile[nameOrCamelizedName]]
camelizedNameToTile[nameOrCamelizedName]
Graphs.weeklyDataCounter = (options) ->
startDate = options.startDate
endDate = options.endDate
if _(startDate).isString()
startDate = moment(startDate)
if _(endDate).isString()
endDate = moment(endDate)
groupLevel = 4 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
await Coconut.weeklyFacilityDatabase.query "weeklyDataCounter",
start_key: startDate.format("PI:KEY:<KEY>END_PI").split(/-/)
end_key: endDate.format("GGPI:KEY:<KEY>END_PI").split(/-/)
reduce: true
include_docs: false
group: true
group_level: groupLevel
.then (result) =>
Promise.resolve if options.administrativeName
_(result.rows).filter (row) =>
_(row.key).last() is options.administrativeName
else
result.rows
Graphs.caseCounter = (options) ->
startDate = options.startDate
endDate = options.endDate
unless _(startDate).isString()
startDate = startDate.format('YYYY-MM-DD')
unless _(endDate).isString()
endDate = endDate.format('YYYY-MM-DD')
groupLevel = 3 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
data = await Coconut.reportingDatabase.query "caseCounter",
startkey: [startDate]
endkey: [endDate,{}]
reduce: true
group_level: groupLevel
include_docs: false
.catch (error, foo) =>
console.error "This may be caused by non numeric answers"
.then (result) =>
Promise.resolve if options.administrativeName
_(result.rows).filter (row) =>
_(row.key).last() is options.administrativeName
else
result.rows
Graphs.caseCounterDetails = (options) ->
console.log options
startDate = options.startDate
endDate = options.endDate
unless _(startDate).isString()
startDate = startDate.format('YYYY-MM-DD')
unless _(endDate).isString()
endDate = endDate.format('YYYY-MM-DD')
# We can't use grouping since we want detailed case data but we still need the groupLevel to filter for the cases that correspond to the selected administrative Level/Name.
groupLevel = 2 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
# Get all of the keys,
# then query again with those keys to get case details
caseKeys = await Coconut.reportingDatabase.query "caseCounter",
startkey: [startDate]
endkey: [endDate,{}]
reduce: false
include_docs: false
.then (result) =>
caseIds = {}
if options.administrativeName
for row in result.rows
if row.key[groupLevel] is options.administrativeName
caseIds[row.id] = true
Promise.resolve(_(caseIds).keys())
else
Promise.resolve(_(result.rows).pluck "id")
return await Coconut.reportingDatabase.allDocs
keys: caseKeys
include_docs: true
.then (result) =>
Promise.resolve result.rows
# This is a big data structure that is used to create the graphs on the dashboard as well as the individual graph pages as well as to create the menu options
Graphs.definitions =
"Positive Individuals by Year":
description: "Positive Individuals by Year shows the 'classic' epidemiological curve comparing last year's total cases to this years. This is useful to see if general trends are higher or lower than the previous year."
dataQuery: (options) ->
groupLevel = 2 # All of Zanzibar
if options.administrativeLevel and options.administrativeName
for level, index in GeoHierarchy.levels
if level.name is options.administrativeLevel.toUpperCase()
groupLevel = index + groupLevel
# Only care about the endDate
endDate = options.endDate
if _(endDate).isString()
endDate = moment(endDate)
for label, data of {
"#{lastYear = endDate.clone().subtract(1,'year').year()}":
year: lastYear
options:
borderColor: "rgba(255, 64, 129,1)"
backgroundColor: "rgba(255, 64, 129, 0.1)"
pointRadius: 2
"#{thisYear = endDate.year()}":
year: thisYear
options:
borderColor: "rgba(25, 118, 210, 1)"
backgroundColor: "rgba(25, 118, 210, 0.1)"
pointRadius: 2
}
await Coconut.individualIndexDatabase.query "epiCurveByWeekAndDistrict",
startkey: ["#{data.year}-PI:KEY:<KEY>END_PI"]
endkey: ["#{data.year}-PI:KEY:<KEY>END_PI",{}]
reduce: true
group_level: groupLevel
.then (result) =>
Promise.resolve _(data.options).extend {
label: label
data: _(for row in result.rows
if options.administrativeName and _(row.key).last() is options.administrativeName
x: parseInt(row.key[0].replace(/.*-/,""))
y: row.value
).compact()
}
render: (dataForGraph, target) ->
if dataForGraph.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "line"
data:
labels: [1..52]
datasets: dataForGraph
options:
scales:
xAxes: [
scaleLabel:
display: true
labelString: "Week"
]
"Positive Individual Classifications":
description: "Positive Individual Classifications shows classifications for all individuals that have been followed up. Note that the dates may differ slightly since this graph uses the date of testing for household members, which usually is different than the date that the index case was found positive, and which is used for other graphs on this page."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
tabulatorFields: [
"Malaria Case ID"
"Classifications By Household Member Type"
"Index Case Diagnosis Date ISO Week"
]
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
classificationsToAlwaysShow = [
"Indigenous"
"Imported"
"Introduced"
"Induced"
"Relapsing"
]
classificationsToShowIfPresent = [
"In Progress"
"Lost to Followup"
"Unclassified"
]
classifications = classificationsToAlwaysShow.concat(classificationsToShowIfPresent)
presentOptionalClassifications = {}
for data in dataForGraph
if _(classifications).contains data.key[1]
[date, classification] = data.key
week = moment(date).isoWeek()
dataAggregated[classification] or= {}
dataAggregated[classification][week] or= 0
dataAggregated[classification][week] += data.value
weeksIncluded[week] = true
presentOptionalClassifications[classification] = true if _(classificationsToShowIfPresent).contains classification
classifications = classificationsToAlwaysShow.concat(_(presentOptionalClassifications).keys())
weeksIncluded = _(weeksIncluded).keys()
firstWeek = _(weeksIncluded).min()
lastWeek = _(weeksIncluded).max()
# Values from https://medialab.github.io/iwanthue/
# Colorblind friendly
colors = distinctColors(
count: classifications.length
hueMin: 0
hueMax: 360
chromaMin: 40
chromaMax: 70
lightMin: 15
lightMax: 85
)
chartOptions = for distinctColor in colors
color = distinctColor.rgb()
{
borderColor: "rgba(#{color.join(",")},1)"
backgroundColor: "rgba(#{color.join(",")},0.5)"
borderWidth: 2
}
index = -1
dataSets = for classification in classifications
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: classification
data: for week in [firstWeek..lastWeek]
dataAggregated[classification]?[week] or 0
xAxisLabels = []
for week, index in weeksIncluded
xAxisLabels.push week
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
onClick: (event,chartElements, z) ->
if document.location.hash[0..5] is "#graph"
week = this.data.labels[chartElements[0]._index]
week = if week < 10 then "0#{week}" else "#{week}"
category = classifications[this.getElementAtEvent(event)[0]._datasetIndex]
# Using a global variable for this - ugly but works
if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and category: #{category}"
casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
casesTabulatorView.tabulator.setHeaderFilterValue("Classifications By Household Member Type", category)
"Positive Individuals by Age":
description: "Positive Individuals by Age counts all malaria positive individuals and classifies them by age. Index case date of positive is used for all individuals."
dataQuery: Graphs.caseCounter
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
if data.key[1] is "Number Positive Individuals Over 5" or data.key[1] is "Number Positive Individuals Under 5"
[date,age] = data.key
week = moment(date).isoWeek()
dataAggregated[age] or= {}
dataAggregated[age][week] or= 0
dataAggregated[age][week] += data.value
weeksIncluded[week] = true
chartOptions = [
{
borderColor: "rgba(25, 118, 210, 1)"
backgroundColor: "rgba(25, 118, 210, 0.1)"
pointRadius: 2
}
{
borderColor: "rgba(255, 64, 129,1)"
backgroundColor: "rgba(255, 64, 129, 0.1)"
pointRadius: 2
}
]
index = -1
dataSets = for age, weekValue of dataAggregated
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: age
data: for week, value of weekValue
x: week
y: value
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "line"
data:
labels: _(weeksIncluded).keys()
datasets: dataSets
options:
scales:
xAxes: [
scaleLabel:
display: true
labelString: "Week"
]
"OPD Visits By Age":
description: "OPD Visits by Age shows data for all malaria and non-malaria visits to facilities."
dataQuery: Graphs.weeklyDataCounter
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
mappings =
"All OPD >= 5" : "Over 5"
"All OPD < 5" : "Under 5"
for data in dataForGraph
if data.key[2] is "All OPD >= 5" or data.key[2] is "All OPD < 5"
[year, week, dataType] = data.key
dataType = mappings[dataType]
week = parseInt(week)
dataAggregated[dataType] or= {}
dataAggregated[dataType][week] or= 0
dataAggregated[dataType][week] += data.value
weeksIncluded[week] = true
chartOptions = [
{
borderColor: "rgba(25, 118, 210, 1)"
backgroundColor: "rgba(25, 118, 210, 0.1)"
pointRadius: 2
}
{
borderColor: "rgba(255, 64, 129,1)"
backgroundColor: "rgba(255, 64, 129, 0.1)"
pointRadius: 2
}
]
index = -1
dataSets = for age in _(mappings).values()
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: age
data: for week, value of dataAggregated[age]
x: week
y: value
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "line"
data:
labels: _(weeksIncluded).keys()
datasets: dataSets
options:
scales:
xAxes: [
scaleLabel:
display: true
labelString: "Week"
]
"Hours from Positive Test at Facility to Notification":
description: "Shows how long it is taking facilities to send a notification once someone has tested positive. Target is less than 24 hours."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
tabulatorFields: [
"Malaria Case ID"
"Days Between Positive Result And Notification From Facility"
"Index Case Diagnosis Date ISO Week"
"Facility"
]
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
if (data.value is 0 and data.key[1] is "Has Notification") or (data.value >= 1 and data.key[1].match( /Between Positive Result And Notification From Facility/))
[datePositive, timeToNotify] = data.key
week = moment(datePositive).isoWeek()
mapping =
"Less Than One Day Between Positive Result And Notification From Facility": "< 24"
"One To Two Days Between Positive Result And Notification From Facility": "24 - 48"
"Two To Three Days Between Positive Result And Notification From Facility": "48 - 72"
"More Than Three Days Between Positive Result And Notification From Facility": "> 72"
"Has Notification": "No notification" # This Has Notification = 0
timeToNotify = mapping[timeToNotify]
weeksIncluded[week] = true
dataAggregated[timeToNotify] or= {}
dataAggregated[timeToNotify][week] or= 0
if timeToNotify is "No notification"
dataAggregated[timeToNotify][week] += 1
else
dataAggregated[timeToNotify][week] += data.value
chartOptions = for color in [
[0, 128, 0] # green
[192,192, 0] # yellow
[255,128, 0] # orange
[255,0, 0] # red
]
{
borderColor: "rgba(#{color.join(",")},1)"
backgroundColor: "rgba(#{color.join(",")},0.1)"
borderWidth: 2
}
index = -1
dataSets = for type in _(mapping).values() # Do this to get the right order
continue unless dataAggregated[type]
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: type
data: for week, value of dataAggregated[type]
value
xAxisLabels = []
onTarget = []
for week, index in _(weeksIncluded).keys()
xAxisLabels.push week
total = 0
for dataSet in dataSets
total += dataSet.data[index] or 0
onTarget.push Math.round(dataSets[0].data[index] / total * 100)
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
plugins: [ChartDataLabels] # Lets us put the percent over the bar
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
plugins:
datalabels:
align: 'top'
anchor: 'start'
borderRadius: 4
color: 'green'
formatter: (value, context) ->
return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
"#{onTarget[context.dataIndex]}%"
onClick: (event,chartElements, z) ->
if document.location.hash[0..5] is "#graph"
week = this.data.labels[chartElements[0]._index]
week = if week < 10 then "0#{week}" else "#{week}"
# Using a global variable for this - ugly but works
if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and sort by how many days it took?"
casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
casesTabulatorView.tabulator.setSort("Days Between Positive Result And Notification From Facility", "desc")
"Hours From Positive Test To Complete Follow-up":
description: "Shows how long it is taking the entire followup process to take including both the time for the facility to followup as well as time for the surveillance officer to go to the facility and then go to the household. Target is less than 48 hours."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
tabulatorFields: [
"Malaria Case ID"
"Days Between Positive Result And Complete Household"
"Index Case Diagnosis Date ISO Week"
"Malaria Surveillance Officers"
]
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
if (data.value is 0 and data.key[1] is "Complete Household Visit") or (data.value >= 1 and data.key[1].match( /Between Positive Result And Complete Household/))
[datePositive, timeToComplete] = data.key
week = moment(datePositive).isoWeek()
mapping =
"Less Than One Day Between Positive Result And Complete Household": "< 48"
"One To Two Days Between Positive Result And Complete Household": "< 48"
"Two To Three Days Between Positive Result And Complete Household": "48 - 72"
"More Than Three Days Between Positive Result And Complete Household": "> 72"
"Complete Household Visit": "Not followed up" #Confusing but when followed up is 0 then it is not followed up
timeToComplete = mapping[timeToComplete]
weeksIncluded[week] = true
dataAggregated[timeToComplete] or= {}
dataAggregated[timeToComplete][week] or= 0
if timeToComplete is "Not followed up"
dataAggregated[timeToComplete][week] += 1
else
dataAggregated[timeToComplete][week] += data.value
weeksIncluded = _(weeksIncluded).keys()
firstWeek = _(weeksIncluded).min()
lastWeek = _(weeksIncluded).max()
chartOptions = for color in [
[0, 128, 0] # green
[192,192, 0] # yellow
[255,128, 0] # orange
[255,0, 0] # red
]
{
borderColor: "rgba(#{color.join(",")},1)"
backgroundColor: "rgba(#{color.join(",")},0.1)"
borderWidth: 2
}
index = -1
dataSets = for type in _(mapping).chain().values().uniq().value()
continue unless dataAggregated[type]
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: type
data: for week in [firstWeek..lastWeek]
dataAggregated[type]?[week] or 0
xAxisLabels = []
onTarget = []
for week, index in weeksIncluded
xAxisLabels.push week
total = 0
for dataSet in dataSets
total += dataSet.data[index] or 0
onTarget.push Math.round(dataSets[0].data[index] / total * 100)
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
plugins: [ChartDataLabels] # Lets us put the percent over the bar
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
plugins:
datalabels:
align: 'top'
anchor: 'start'
borderRadius: 4
color: 'green'
formatter: (value, context) ->
return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
"#{onTarget[context.dataIndex]}%"
onClick: (event,chartElements, z) ->
if document.location.hash[0..5] is "#graph"
week = this.data.labels[chartElements[0]._index]
week = if week < 10 then "0#{week}" else "#{week}"
# Using a global variable for this - ugly but works
if casesTabulatorView.tabulator and confirm "Do you want to filter the details table to week: #{week} and sort by how many days it took?"
casesTabulatorView.tabulator.setHeaderFilterValue("Index Case Diagnosis Date ISO Week", "-#{week}")
casesTabulatorView.tabulator.setSort("Days Between Positive Result And Complete Household", "desc")
"Household Testing and Positivity Rate":
description: "How many tests are being given at households, and how many of those end up being positive. This does not include the index case, since it wasn't tested at the household."
dataQuery: Graphs.caseCounter
detailedDataQuery: (options) -> Graphs.caseCounterDetails(options)
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
[date, indicator] = data.key
if indicator is "Number Household Members Tested"
week = moment(date).isoWeek()
weeksIncluded[week] = true
dataAggregated["Negative"] or= {}
dataAggregated["Negative"][week] or= -1 # Includes the index case so remove that for this graph
dataAggregated["Negative"][week] += data.value
else if indicator is "Number Positive Individuals At Household Excluding Index"
week = moment(date).isoWeek()
weeksIncluded[week] = true
dataAggregated["Positive"] or= {}
dataAggregated["Positive"][week] or= 0
dataAggregated["Positive"][week] += data.value
# Subtract these from negative count
dataAggregated["Negative"] or= {}
dataAggregated["Negative"][week] or= 0
dataAggregated["Negative"][week] -= data.value
weeksIncluded = _(weeksIncluded).keys()
firstWeek = _(weeksIncluded).min()
lastWeek = _(weeksIncluded).max()
chartOptions = [
{
borderColor: "rgba(25, 118, 210, 1)"
backgroundColor: "rgba(25, 118, 210, 0.1)"
borderWidth: 2
}
{
borderColor: "rgba(255, 64, 129, 1)"
backgroundColor: "rgba(255, 64, 129, 0.5)"
borderWidth: 2
}
]
index = -1
console.log dataAggregated
dataSets = for type, weekValue of dataAggregated
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: type
data: for week in [firstWeek..lastWeek]
weekValue[week] or 0
if dataSets.length is 0
canvas = target[0]
ctx = canvas.getContext("2d");
ctx.font = "20px Arial";
ctx.fillText("No cases/data for area/dates", 10, 50);
return
console.log dataSets
xAxisLabels = []
positivityRate = []
for week, index in weeksIncluded
xAxisLabels.push week
positivityRate.push Math.round((dataSets[1].data[index]/dataSets[0].data[index])*100)
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
plugins: [ChartDataLabels] # Lets us put the percent over the bar
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
plugins:
datalabels:
align: 'top',
anchor: 'end',
borderRadius: 4,
color: 'black',
formatter: (value, context) ->
return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
"#{positivityRate[context.dataIndex]}%"
"OPD Testing and Positivity Rate":
description: "How many tests are being given at facilities, and how many of those end up positive."
dataQuery: Graphs.weeklyDataCounter
render: (dataForGraph, target) ->
dataAggregated = {}
weeksIncluded = {}
for data in dataForGraph
[year, week, dataType] = data.key
week = parseInt(week)
weeksIncluded[week] = true
if dataType.match /POS/
dataAggregated["Positive"] or= {}
dataAggregated["Positive"][week] or= 0
dataAggregated["Positive"][week] += data.value
if dataType.match /NEG/
dataAggregated["Negative"] or= {}
dataAggregated["Negative"][week] or= 0
dataAggregated["Negative"][week] += data.value
weeksIncluded = _(weeksIncluded).keys()
firstWeek = _(weeksIncluded).min()
lastWeek = _(weeksIncluded).max()
chartOptions = [
{
borderColor: "rgba(25, 118, 210, 1)"
backgroundColor: "rgba(25, 118, 210, 0.1)"
borderWidth: 2
}
{
borderColor: "rgba(255, 64, 129, 1)"
backgroundColor: "rgba(255, 64, 129, 0.5)"
borderWidth: 2
}
]
index = -1
dataSets = for type, weekValue of dataAggregated
index +=1
_(chartOptions[index]).extend # Just use an index to get different colors
label: type
data: for week in [firstWeek..lastWeek]
weekValue[week] or 0
xAxisLabels = []
positivityRate = []
for week, index in weeksIncluded
xAxisLabels.push week
positivityRate.push Math.round((dataSets[1].data[index]/dataSets[0].data[index])*100)
new Chart target,
type: "bar"
data:
labels: xAxisLabels
datasets: dataSets
plugins: [ChartDataLabels] # Lets us put the percent over the bar
options:
scales:
xAxes: [
stacked: true
scaleLabel:
display: true
labelString: "Week"
]
yAxes: [
stacked: true
]
plugins:
datalabels:
align: 'top',
anchor: 'end',
borderRadius: 4,
color: 'black',
formatter: (value, context) ->
return null if context.datasetIndex > 0 # Only need one percent per dataset since the calculation uses both parts of the bar
"#{positivityRate[context.dataIndex]}%"
module.exports = Graphs
|
[
{
"context": " # - `search` {String} A search phrase, such as `ben@n` or `Ben G`\n # - `options` (optional) {Object} I",
"end": 1197,
"score": 0.7414624094963074,
"start": 1192,
"tag": "USERNAME",
"value": "ben@n"
},
{
"context": "ch` {String} A search phrase, such as `ben@n` or `Ben G`\n # - `options` (optional) {Object} If you will ",
"end": 1208,
"score": 0.9983428120613098,
"start": 1203,
"tag": "NAME",
"value": "Ben G"
}
] | packages/client-app/src/flux/stores/contact-store.coffee | matt-d-brown/nylas-mail | 595 | fs = require 'fs'
path = require 'path'
Reflux = require 'reflux'
Rx = require 'rx-lite'
Actions = require('../actions').default
Contact = require('../models/contact').default
Utils = require '../models/utils'
NylasStore = require 'nylas-store'
RegExpUtils = require '../../regexp-utils'
DatabaseStore = require('./database-store').default
AccountStore = require('./account-store').default
ComponentRegistry = require('../../registries/component-registry')
ContactRankingStore = require './contact-ranking-store'
_ = require 'underscore'
WindowBridge = require '../../window-bridge'
###
Public: ContactStore provides convenience methods for searching contacts and
formatting contacts. When Contacts become editable, this store will be expanded
with additional actions.
Section: Stores
###
class ContactStore extends NylasStore
  # Build the in-memory ranked-contact cache now, and rebuild it whenever the
  # ContactRankingStore's rankings change.
  constructor: ->
    @_rankedContacts = []
    @listenTo ContactRankingStore, => @_updateRankedContactCache()
    @_updateRankedContactCache()
  # Public: Search the user's contact list for the given search term.
  # This method compares the `search` string against each Contact's
  # `name` and `email`.
  #
  # - `search` {String} A search phrase, such as `ben@n` or `Ben G`
  # - `options` (optional) {Object} If you will only be displaying a few results,
  #   you should pass a limit value. {::searchContacts} will return as soon
  #   as `limit` matches have been found.
  #
  # Returns an {Array} of matching {Contact} models
  #
  searchContacts: (search, options={}) =>
    {limit} = options
    limit ?= 5
    limit = Math.max(limit, 0)
    search = search.toLowerCase()
    accountCount = AccountStore.accounts().length
    if not search or search.length is 0
      return Promise.resolve([])
    # Search ranked contacts which are stored in order in memory
    results = []
    for contact in @_rankedContacts
      if (contact.email.toLowerCase().indexOf(search) isnt -1 or
          contact.name.toLowerCase().indexOf(search) isnt -1)
        results.push(contact)
      if results.length is limit
        break
    # If we haven't found enough items in memory, query for more from the
    # database. Note that we ask for LIMIT * accountCount because we want to
    # return contacts with distinct email addresses, and the same contact
    # could exist in every account. Rather than make SQLite do a SELECT DISTINCT
    # (which is very slow), we just ask for more items.
    query = DatabaseStore.findAll(Contact)
      .search(search)
      .limit(limit * accountCount)
    query.then (queryResults) =>
      existingEmails = _.pluck(results, 'email')
      # remove query results that were already found in ranked contacts
      queryResults = _.reject queryResults, (c) -> c.email in existingEmails
      queryResults = @_distinctByEmail(queryResults)
      results = results.concat(queryResults)
      # Let registered "ContactSearchResults" extensions append or replace
      # results before the final limit is applied.
      extensions = ComponentRegistry.findComponentsMatching({
        role: "ContactSearchResults"
      })
      return Promise.each extensions, (ext) =>
        return ext.findAdditionalContacts(search, results).then (contacts) =>
          results = contacts
      .then =>
        if (results.length > limit) then results.length = limit
        return Promise.resolve(results)
  # Public: Returns true only when `contact` is a {Contact} instance and its
  # own isValid() check passes.
  isValidContact: (contact) =>
    return false unless contact instanceof Contact
    return contact.isValid()
  # Public: Extract email addresses — and, when written as `Name <email>` or
  # `Name (email)`, their display names — from a free-form string.
  #
  # - `contactString` {String} e.g. "Ben <ben@example.com>, jim@example.com"
  # - `options` (optional) {Object} with `skipNameLookup` {Boolean}: when true,
  #   skip the contact-database lookup that fills in known names for bare
  #   addresses.
  #
  # Returns a {Promise} resolving to an {Array} of {Contact} models.
  parseContactsInString: (contactString, options={}) =>
    {skipNameLookup} = options
    detected = []
    emailRegex = RegExpUtils.emailRegex()
    lastMatchEnd = 0
    while (match = emailRegex.exec(contactString))
      email = match[0]
      name = null
      # Strip a quote pair wrapping the address, e.g. "'ben@example.com'"
      startsWithQuote = email[0] in ['\'','"']
      hasTrailingQuote = contactString[match.index+email.length] in ['\'','"']
      if startsWithQuote and hasTrailingQuote
        email = email[1..-1]
      hasLeadingParen  = contactString[match.index-1] in ['(','<']
      hasTrailingParen = contactString[match.index+email.length] in [')','>']
      if hasLeadingParen and hasTrailingParen
        # The display name is the text between the previous separator
        # (comma/semicolon/newline) and the opening paren/bracket.
        nameStart = lastMatchEnd
        for char in [',', ';', '\n', '\r']
          i = contactString.lastIndexOf(char, match.index)
          nameStart = i+1 if i+1 > nameStart
        name = contactString.substr(nameStart, match.index - 1 - nameStart).trim()
      # The "nameStart" for the next match must begin after lastMatchEnd
      lastMatchEnd = match.index+email.length
      if hasTrailingParen
        lastMatchEnd += 1
      if not name or name.length is 0
        name = email
      # If the first and last character of the name are quotation marks, remove them
      [firstChar,...,lastChar] = name
      if firstChar in ['"', "'"] and lastChar in ['"', "'"]
        name = name[1...-1]
      detected.push(new Contact({email, name}))
    if skipNameLookup
      return Promise.resolve(detected)
    # For addresses with no display name, try to find a known contact with the
    # same email and use it instead.
    Promise.all detected.map (contact) =>
      return contact if contact.name isnt contact.email
      @searchContacts(contact.email, {limit: 1}).then ([match]) =>
        return match if match and match.email is contact.email
        return contact
  # Load the Contact models for the highest-ranked email addresses (across all
  # accounts) into @_rankedContacts, sorted by descending rank.
  _updateRankedContactCache: =>
    rankings = ContactRankingStore.valuesForAllAccounts()
    emails = Object.keys(rankings)
    if emails.length is 0
      @_rankedContacts = []
      return
    # Sort the emails by rank and then clip to 400 so that our ranked cache
    # has a bounded size.
    emails = _.sortBy emails, (email) ->
      (- (rankings[email.toLowerCase()] ? 0) / 1)
    emails.length = 400 if emails.length > 400
    DatabaseStore.findAll(Contact, {email: emails}).background().then (contacts) =>
      contacts = @_distinctByEmail(contacts)
      for contact in contacts
        contact._rank = (- (rankings[contact.email.toLowerCase()] ? 0) / 1)
      @_rankedContacts = _.sortBy contacts, (contact) -> contact._rank
  # Collapse contacts that share an email (case-insensitive), preferring the
  # entry that has a real display name.
  _distinctByEmail: (contacts) =>
    # remove query results that are duplicates, prefering ones that have names
    uniq = {}
    for contact in contacts
      continue unless contact.email
      key = contact.email.toLowerCase()
      existing = uniq[key]
      if not existing or (not existing.name or existing.name is existing.email)
        uniq[key] = contact
    _.values(uniq)
  # Clear the ranked cache and the underlying ranking data, then notify
  # listeners that the store changed.
  _resetCache: =>
    @_rankedContacts = []
    ContactRankingStore.reset()
    @trigger(@)
module.exports = new ContactStore()
| 98511 | fs = require 'fs'
path = require 'path'
Reflux = require 'reflux'
Rx = require 'rx-lite'
Actions = require('../actions').default
Contact = require('../models/contact').default
Utils = require '../models/utils'
NylasStore = require 'nylas-store'
RegExpUtils = require '../../regexp-utils'
DatabaseStore = require('./database-store').default
AccountStore = require('./account-store').default
ComponentRegistry = require('../../registries/component-registry')
ContactRankingStore = require './contact-ranking-store'
_ = require 'underscore'
WindowBridge = require '../../window-bridge'
###
Public: ContactStore provides convenience methods for searching contacts and
formatting contacts. When Contacts become editable, this store will be expanded
with additional actions.
Section: Stores
###
class ContactStore extends NylasStore
constructor: ->
@_rankedContacts = []
@listenTo ContactRankingStore, => @_updateRankedContactCache()
@_updateRankedContactCache()
# Public: Search the user's contact list for the given search term.
# This method compares the `search` string against each Contact's
# `name` and `email`.
#
  # - `search` {String} A search phrase, such as `ben@n` or `Ben G`
# - `options` (optional) {Object} If you will only be displaying a few results,
# you should pass a limit value. {::searchContacts} will return as soon
# as `limit` matches have been found.
#
# Returns an {Array} of matching {Contact} models
#
searchContacts: (search, options={}) =>
{limit} = options
limit ?= 5
limit = Math.max(limit, 0)
search = search.toLowerCase()
accountCount = AccountStore.accounts().length
if not search or search.length is 0
return Promise.resolve([])
# Search ranked contacts which are stored in order in memory
results = []
for contact in @_rankedContacts
if (contact.email.toLowerCase().indexOf(search) isnt -1 or
contact.name.toLowerCase().indexOf(search) isnt -1)
results.push(contact)
if results.length is limit
break
# If we haven't found enough items in memory, query for more from the
# database. Note that we ask for LIMIT * accountCount because we want to
# return contacts with distinct email addresses, and the same contact
# could exist in every account. Rather than make SQLite do a SELECT DISTINCT
# (which is very slow), we just ask for more items.
query = DatabaseStore.findAll(Contact)
.search(search)
.limit(limit * accountCount)
query.then (queryResults) =>
existingEmails = _.pluck(results, 'email')
# remove query results that were already found in ranked contacts
queryResults = _.reject queryResults, (c) -> c.email in existingEmails
queryResults = @_distinctByEmail(queryResults)
results = results.concat(queryResults)
extensions = ComponentRegistry.findComponentsMatching({
role: "ContactSearchResults"
})
return Promise.each extensions, (ext) =>
return ext.findAdditionalContacts(search, results).then (contacts) =>
results = contacts
.then =>
if (results.length > limit) then results.length = limit
return Promise.resolve(results)
isValidContact: (contact) =>
return false unless contact instanceof Contact
return contact.isValid()
parseContactsInString: (contactString, options={}) =>
{skipNameLookup} = options
detected = []
emailRegex = RegExpUtils.emailRegex()
lastMatchEnd = 0
while (match = emailRegex.exec(contactString))
email = match[0]
name = null
startsWithQuote = email[0] in ['\'','"']
hasTrailingQuote = contactString[match.index+email.length] in ['\'','"']
if startsWithQuote and hasTrailingQuote
email = email[1..-1]
hasLeadingParen = contactString[match.index-1] in ['(','<']
hasTrailingParen = contactString[match.index+email.length] in [')','>']
if hasLeadingParen and hasTrailingParen
nameStart = lastMatchEnd
for char in [',', ';', '\n', '\r']
i = contactString.lastIndexOf(char, match.index)
nameStart = i+1 if i+1 > nameStart
name = contactString.substr(nameStart, match.index - 1 - nameStart).trim()
# The "nameStart" for the next match must begin after lastMatchEnd
lastMatchEnd = match.index+email.length
if hasTrailingParen
lastMatchEnd += 1
if not name or name.length is 0
name = email
# If the first and last character of the name are quotation marks, remove them
[firstChar,...,lastChar] = name
if firstChar in ['"', "'"] and lastChar in ['"', "'"]
name = name[1...-1]
detected.push(new Contact({email, name}))
if skipNameLookup
return Promise.resolve(detected)
Promise.all detected.map (contact) =>
return contact if contact.name isnt contact.email
@searchContacts(contact.email, {limit: 1}).then ([match]) =>
return match if match and match.email is contact.email
return contact
_updateRankedContactCache: =>
rankings = ContactRankingStore.valuesForAllAccounts()
emails = Object.keys(rankings)
if emails.length is 0
@_rankedContacts = []
return
# Sort the emails by rank and then clip to 400 so that our ranked cache
# has a bounded size.
emails = _.sortBy emails, (email) ->
(- (rankings[email.toLowerCase()] ? 0) / 1)
emails.length = 400 if emails.length > 400
DatabaseStore.findAll(Contact, {email: emails}).background().then (contacts) =>
contacts = @_distinctByEmail(contacts)
for contact in contacts
contact._rank = (- (rankings[contact.email.toLowerCase()] ? 0) / 1)
@_rankedContacts = _.sortBy contacts, (contact) -> contact._rank
_distinctByEmail: (contacts) =>
# remove query results that are duplicates, prefering ones that have names
uniq = {}
for contact in contacts
continue unless contact.email
key = contact.email.toLowerCase()
existing = uniq[key]
if not existing or (not existing.name or existing.name is existing.email)
uniq[key] = contact
_.values(uniq)
_resetCache: =>
@_rankedContacts = []
ContactRankingStore.reset()
@trigger(@)
module.exports = new ContactStore()
| true | fs = require 'fs'
path = require 'path'
Reflux = require 'reflux'
Rx = require 'rx-lite'
Actions = require('../actions').default
Contact = require('../models/contact').default
Utils = require '../models/utils'
NylasStore = require 'nylas-store'
RegExpUtils = require '../../regexp-utils'
DatabaseStore = require('./database-store').default
AccountStore = require('./account-store').default
ComponentRegistry = require('../../registries/component-registry')
ContactRankingStore = require './contact-ranking-store'
_ = require 'underscore'
WindowBridge = require '../../window-bridge'
###
Public: ContactStore provides convenience methods for searching contacts and
formatting contacts. When Contacts become editable, this store will be expanded
with additional actions.
Section: Stores
###
class ContactStore extends NylasStore
constructor: ->
@_rankedContacts = []
@listenTo ContactRankingStore, => @_updateRankedContactCache()
@_updateRankedContactCache()
# Public: Search the user's contact list for the given search term.
# This method compares the `search` string against each Contact's
# `name` and `email`.
#
# - `search` {String} A search phrase, such as `ben@n` or `PI:NAME:<NAME>END_PI`
# - `options` (optional) {Object} If you will only be displaying a few results,
# you should pass a limit value. {::searchContacts} will return as soon
# as `limit` matches have been found.
#
# Returns an {Array} of matching {Contact} models
#
searchContacts: (search, options={}) =>
{limit} = options
limit ?= 5
limit = Math.max(limit, 0)
search = search.toLowerCase()
accountCount = AccountStore.accounts().length
if not search or search.length is 0
return Promise.resolve([])
# Search ranked contacts which are stored in order in memory
results = []
for contact in @_rankedContacts
if (contact.email.toLowerCase().indexOf(search) isnt -1 or
contact.name.toLowerCase().indexOf(search) isnt -1)
results.push(contact)
if results.length is limit
break
# If we haven't found enough items in memory, query for more from the
# database. Note that we ask for LIMIT * accountCount because we want to
# return contacts with distinct email addresses, and the same contact
# could exist in every account. Rather than make SQLite do a SELECT DISTINCT
# (which is very slow), we just ask for more items.
query = DatabaseStore.findAll(Contact)
.search(search)
.limit(limit * accountCount)
query.then (queryResults) =>
existingEmails = _.pluck(results, 'email')
# remove query results that were already found in ranked contacts
queryResults = _.reject queryResults, (c) -> c.email in existingEmails
queryResults = @_distinctByEmail(queryResults)
results = results.concat(queryResults)
extensions = ComponentRegistry.findComponentsMatching({
role: "ContactSearchResults"
})
return Promise.each extensions, (ext) =>
return ext.findAdditionalContacts(search, results).then (contacts) =>
results = contacts
.then =>
if (results.length > limit) then results.length = limit
return Promise.resolve(results)
isValidContact: (contact) =>
return false unless contact instanceof Contact
return contact.isValid()
parseContactsInString: (contactString, options={}) =>
{skipNameLookup} = options
detected = []
emailRegex = RegExpUtils.emailRegex()
lastMatchEnd = 0
while (match = emailRegex.exec(contactString))
email = match[0]
name = null
startsWithQuote = email[0] in ['\'','"']
hasTrailingQuote = contactString[match.index+email.length] in ['\'','"']
if startsWithQuote and hasTrailingQuote
email = email[1..-1]
hasLeadingParen = contactString[match.index-1] in ['(','<']
hasTrailingParen = contactString[match.index+email.length] in [')','>']
if hasLeadingParen and hasTrailingParen
nameStart = lastMatchEnd
for char in [',', ';', '\n', '\r']
i = contactString.lastIndexOf(char, match.index)
nameStart = i+1 if i+1 > nameStart
name = contactString.substr(nameStart, match.index - 1 - nameStart).trim()
# The "nameStart" for the next match must begin after lastMatchEnd
lastMatchEnd = match.index+email.length
if hasTrailingParen
lastMatchEnd += 1
if not name or name.length is 0
name = email
# If the first and last character of the name are quotation marks, remove them
[firstChar,...,lastChar] = name
if firstChar in ['"', "'"] and lastChar in ['"', "'"]
name = name[1...-1]
detected.push(new Contact({email, name}))
if skipNameLookup
return Promise.resolve(detected)
Promise.all detected.map (contact) =>
return contact if contact.name isnt contact.email
@searchContacts(contact.email, {limit: 1}).then ([match]) =>
return match if match and match.email is contact.email
return contact
_updateRankedContactCache: =>
rankings = ContactRankingStore.valuesForAllAccounts()
emails = Object.keys(rankings)
if emails.length is 0
@_rankedContacts = []
return
# Sort the emails by rank and then clip to 400 so that our ranked cache
# has a bounded size.
emails = _.sortBy emails, (email) ->
(- (rankings[email.toLowerCase()] ? 0) / 1)
emails.length = 400 if emails.length > 400
DatabaseStore.findAll(Contact, {email: emails}).background().then (contacts) =>
contacts = @_distinctByEmail(contacts)
for contact in contacts
contact._rank = (- (rankings[contact.email.toLowerCase()] ? 0) / 1)
@_rankedContacts = _.sortBy contacts, (contact) -> contact._rank
_distinctByEmail: (contacts) =>
# remove query results that are duplicates, prefering ones that have names
uniq = {}
for contact in contacts
continue unless contact.email
key = contact.email.toLowerCase()
existing = uniq[key]
if not existing or (not existing.name or existing.name is existing.email)
uniq[key] = contact
_.values(uniq)
_resetCache: =>
@_rankedContacts = []
ContactRankingStore.reset()
@trigger(@)
module.exports = new ContactStore()
|
[
{
"context": "omises = []\n\npromises.push Users.create\n email: 'cseibert113@gmail.com'\n password: '123456'\n\npromises.push Snippits.cre",
"end": 229,
"score": 0.9999265074729919,
"start": 208,
"tag": "EMAIL",
"value": "cseibert113@gmail.com"
},
{
"context": "ate\n email: 'cseibert113@gmail.com'\n password: '123456'\n\npromises.push Snippits.create\n name: 'Random S",
"end": 250,
"score": 0.9993031620979309,
"start": 244,
"tag": "PASSWORD",
"value": "123456"
},
{
"context": " = models.Reports\n\n Users.create\n email: 'cseibert113@gmail.com'\n password: '123456'\n \"\"\"\n language: \"coff",
"end": 460,
"score": 0.9999263286590576,
"start": 439,
"tag": "EMAIL",
"value": "cseibert113@gmail.com"
},
{
"context": " email: 'cseibert113@gmail.com'\n password: '123456'\n \"\"\"\n language: \"coffeescript\"\n\npromises.push ",
"end": 485,
"score": 0.9993179440498352,
"start": 479,
"tag": "PASSWORD",
"value": "123456"
}
] | server/src/scripts/build_db.coffee | codyseibert/typr.io | 1 | require '../db'
models = require '../models/models'
Promise = require 'bluebird'
Users = models.Users
Snippits = models.Snippits
Reports = models.Reports
promises = []
promises.push Users.create
email: 'cseibert113@gmail.com'
password: '123456'
promises.push Snippits.create
name: 'Random Snippit 1'
code: """
Users = models.Users
Snippits = models.Snippits
Reports = models.Reports
Users.create
email: 'cseibert113@gmail.com'
password: '123456'
"""
language: "coffeescript"
promises.push Snippits.create
name: 'Random Snippit 2'
code: """
var Base64, Users;
Base64 = require('js-base64').Base64;
Users = require('../models/models').Users;
module.exports = function(req, res, next) {
}
"""
language: "javascript"
Promise.all promises
.then ->
process.exit 0
| 121973 | require '../db'
models = require '../models/models'
Promise = require 'bluebird'
Users = models.Users
Snippits = models.Snippits
Reports = models.Reports
promises = []
promises.push Users.create
email: '<EMAIL>'
password: '<PASSWORD>'
promises.push Snippits.create
name: 'Random Snippit 1'
code: """
Users = models.Users
Snippits = models.Snippits
Reports = models.Reports
Users.create
email: '<EMAIL>'
password: '<PASSWORD>'
"""
language: "coffeescript"
promises.push Snippits.create
name: 'Random Snippit 2'
code: """
var Base64, Users;
Base64 = require('js-base64').Base64;
Users = require('../models/models').Users;
module.exports = function(req, res, next) {
}
"""
language: "javascript"
Promise.all promises
.then ->
process.exit 0
| true | require '../db'
models = require '../models/models'
Promise = require 'bluebird'
Users = models.Users
Snippits = models.Snippits
Reports = models.Reports
promises = []
promises.push Users.create
email: 'PI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
promises.push Snippits.create
name: 'Random Snippit 1'
code: """
Users = models.Users
Snippits = models.Snippits
Reports = models.Reports
Users.create
email: 'PI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
"""
language: "coffeescript"
promises.push Snippits.create
name: 'Random Snippit 2'
code: """
var Base64, Users;
Base64 = require('js-base64').Base64;
Users = require('../models/models').Users;
module.exports = function(req, res, next) {
}
"""
language: "javascript"
Promise.all promises
.then ->
process.exit 0
|
[
{
"context": "in the same output', ->\n user = \n first: 'Huevo'\n last: 'Bueno'\n\n nameHelper = (user) ->\n",
"end": 201,
"score": 0.9997286200523376,
"start": 196,
"tag": "NAME",
"value": "Huevo"
},
{
"context": " ->\n user = \n first: 'Huevo'\n last: 'Bueno'\n\n nameHelper = (user) ->\n p \"#{user.firs",
"end": 221,
"score": 0.9882912635803223,
"start": 216,
"tag": "NAME",
"value": "Bueno"
},
{
"context": " expect(render template, user).to.equal '<div><p>Huevo Bueno</p></div>'\n",
"end": 415,
"score": 0.9997397065162659,
"start": 404,
"tag": "NAME",
"value": "Huevo Bueno"
}
] | test-teacup/nesting.coffee | jahbini/chalice | 0 | expect = require 'expect.js'
{render, div, p} = require '../src/halvalla-mithril'
describe 'nesting templates', ->
it 'renders nested template in the same output', ->
user =
first: 'Huevo'
last: 'Bueno'
nameHelper = (user) ->
p "#{user.first} #{user.last}"
template = (user) ->
div ->
nameHelper user
expect(render template, user).to.equal '<div><p>Huevo Bueno</p></div>'
| 206252 | expect = require 'expect.js'
{render, div, p} = require '../src/halvalla-mithril'
describe 'nesting templates', ->
it 'renders nested template in the same output', ->
user =
first: '<NAME>'
last: '<NAME>'
nameHelper = (user) ->
p "#{user.first} #{user.last}"
template = (user) ->
div ->
nameHelper user
expect(render template, user).to.equal '<div><p><NAME></p></div>'
| true | expect = require 'expect.js'
{render, div, p} = require '../src/halvalla-mithril'
describe 'nesting templates', ->
it 'renders nested template in the same output', ->
user =
first: 'PI:NAME:<NAME>END_PI'
last: 'PI:NAME:<NAME>END_PI'
nameHelper = (user) ->
p "#{user.first} #{user.last}"
template = (user) ->
div ->
nameHelper user
expect(render template, user).to.equal '<div><p>PI:NAME:<NAME>END_PI</p></div>'
|
[
{
"context": "ds.parse = ->\n id : @_id\n name : @name\n description : @description\n address : ",
"end": 2094,
"score": 0.6423300504684448,
"start": 2094,
"tag": "NAME",
"value": ""
},
{
"context": ".parse = ->\n id : @_id\n name : @name\n description : @description\n address : @add",
"end": 2100,
"score": 0.9770875573158264,
"start": 2096,
"tag": "USERNAME",
"value": "name"
}
] | common/models/place.coffee | QuteBits/boilerplate_events | 0 | "use strict"
Hope = require("zenserver").Hope
Schema = require("zenserver").Mongoose.Schema
db = require("zenserver").Mongo.connections.primary
C = require "../constants"
Place = new Schema
name : type: String
description : type: String
address : type: String
site : type: String
position : type: [Number], index: "2dsphere", default: [0,0]
updated_at : type: Date
created_at : type: Date, default: Date.now
# ------------------------------------------------------------------------------
Place.statics.register = (attributes) ->
promise = new Hope.Promise()
@findOne name: attributes.name, (error, value) ->
if value?
promise.done null, value
else
if attributes.latitude and attributes.longitude
attributes.position = [attributes.longitude, attributes.latitude]
place = db.model "Place", Place
new place(attributes).save (error, value) -> promise.done error, value
promise
Place.statics.findAround = (attributes) ->
promise = new Hope.Promise()
query =
position:
$nearSphere :
$geometry :
type : "Point"
coordinates : [attributes.longitude, attributes.latitude]
$maxDistance : attributes.radius or C.RADIUS
@find query, (error, values) -> promise.done error, values
promise
Place.statics.search = (query, limit = 0) ->
promise = new Hope.Promise()
@find(query).limit(limit).exec (error, values) ->
if limit is 1 and not error
error = code: 402, message: "Place not found." if values.length is 0
values = values[0]
promise.done error, values
promise
Place.statics.updateAttributes = (query, attributes) ->
promise = new Hope.Promise()
attributes.updated_at = new Date()
@findByIdAndUpdate query, attributes, new: true, (error, value) ->
error = code: 402, message: "Place not found" if error or value is null
promise.done error, value
promise
# ------------------------------------------------------------------------------
Place.methods.parse = ->
id : @_id
name : @name
description : @description
address : @address
site : @site
latitude : @position[1]
longitude : @position[0]
updated_at : @updated_at
created_at : @created_at
exports = module.exports = db.model "Place", Place
| 168511 | "use strict"
Hope = require("zenserver").Hope
Schema = require("zenserver").Mongoose.Schema
db = require("zenserver").Mongo.connections.primary
C = require "../constants"
Place = new Schema
name : type: String
description : type: String
address : type: String
site : type: String
position : type: [Number], index: "2dsphere", default: [0,0]
updated_at : type: Date
created_at : type: Date, default: Date.now
# ------------------------------------------------------------------------------
Place.statics.register = (attributes) ->
promise = new Hope.Promise()
@findOne name: attributes.name, (error, value) ->
if value?
promise.done null, value
else
if attributes.latitude and attributes.longitude
attributes.position = [attributes.longitude, attributes.latitude]
place = db.model "Place", Place
new place(attributes).save (error, value) -> promise.done error, value
promise
Place.statics.findAround = (attributes) ->
promise = new Hope.Promise()
query =
position:
$nearSphere :
$geometry :
type : "Point"
coordinates : [attributes.longitude, attributes.latitude]
$maxDistance : attributes.radius or C.RADIUS
@find query, (error, values) -> promise.done error, values
promise
Place.statics.search = (query, limit = 0) ->
promise = new Hope.Promise()
@find(query).limit(limit).exec (error, values) ->
if limit is 1 and not error
error = code: 402, message: "Place not found." if values.length is 0
values = values[0]
promise.done error, values
promise
Place.statics.updateAttributes = (query, attributes) ->
promise = new Hope.Promise()
attributes.updated_at = new Date()
@findByIdAndUpdate query, attributes, new: true, (error, value) ->
error = code: 402, message: "Place not found" if error or value is null
promise.done error, value
promise
# ------------------------------------------------------------------------------
Place.methods.parse = ->
id : @_id
name :<NAME> @name
description : @description
address : @address
site : @site
latitude : @position[1]
longitude : @position[0]
updated_at : @updated_at
created_at : @created_at
exports = module.exports = db.model "Place", Place
| true | "use strict"
Hope = require("zenserver").Hope
Schema = require("zenserver").Mongoose.Schema
db = require("zenserver").Mongo.connections.primary
C = require "../constants"
Place = new Schema
name : type: String
description : type: String
address : type: String
site : type: String
position : type: [Number], index: "2dsphere", default: [0,0]
updated_at : type: Date
created_at : type: Date, default: Date.now
# ------------------------------------------------------------------------------
Place.statics.register = (attributes) ->
promise = new Hope.Promise()
@findOne name: attributes.name, (error, value) ->
if value?
promise.done null, value
else
if attributes.latitude and attributes.longitude
attributes.position = [attributes.longitude, attributes.latitude]
place = db.model "Place", Place
new place(attributes).save (error, value) -> promise.done error, value
promise
Place.statics.findAround = (attributes) ->
promise = new Hope.Promise()
query =
position:
$nearSphere :
$geometry :
type : "Point"
coordinates : [attributes.longitude, attributes.latitude]
$maxDistance : attributes.radius or C.RADIUS
@find query, (error, values) -> promise.done error, values
promise
Place.statics.search = (query, limit = 0) ->
promise = new Hope.Promise()
@find(query).limit(limit).exec (error, values) ->
if limit is 1 and not error
error = code: 402, message: "Place not found." if values.length is 0
values = values[0]
promise.done error, values
promise
Place.statics.updateAttributes = (query, attributes) ->
promise = new Hope.Promise()
attributes.updated_at = new Date()
@findByIdAndUpdate query, attributes, new: true, (error, value) ->
error = code: 402, message: "Place not found" if error or value is null
promise.done error, value
promise
# ------------------------------------------------------------------------------
Place.methods.parse = ->
id : @_id
name :PI:NAME:<NAME>END_PI @name
description : @description
address : @address
site : @site
latitude : @position[1]
longitude : @position[0]
updated_at : @updated_at
created_at : @created_at
exports = module.exports = db.model "Place", Place
|
[
{
"context": "= process.env.SURESPOT_REDIS_SENTINEL_HOSTNAME ? \"127.0.0.1\"\ndontUseSSL = process.env.SURESPOT_DONT_USE_SSL i",
"end": 509,
"score": 0.9997345805168152,
"start": 500,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " \"true\"\n\npool = new helenus.ConnectionPool({host:'127.0.0.1', port:9160, keyspace:'surespot'});\nrc = if useRe",
"end": 788,
"score": 0.9997203350067139,
"start": 779,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " url: baseUri + \"/login\"\n json:\n username: username\n password: password\n authSig2: authSig\n",
"end": 2412,
"score": 0.9993190765380859,
"start": 2404,
"tag": "USERNAME",
"value": "username"
},
{
"context": " json:\n username: username\n password: password\n authSig2: authSig\n version: 60\n p",
"end": 2437,
"score": 0.9991463422775269,
"start": 2429,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "url: baseUri + \"/users2\"\n json:\n username: username\n password: password\n dhPub: keys.ecdh.p",
"end": 2728,
"score": 0.9993283748626709,
"start": 2720,
"tag": "USERNAME",
"value": "username"
},
{
"context": " json:\n username: username\n password: password\n dhPub: keys.ecdh.pem_pub\n dsaPub: keys",
"end": 2753,
"score": 0.9992570877075195,
"start": 2745,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "llback) ->\n ecdsa = new dcrypt.keypair.newECDSA 'secp521r1'\n ecdh = new dcrypt.keypair.newECDSA 'secp521r1'",
"end": 3096,
"score": 0.8610985279083252,
"start": 3087,
"tag": "KEY",
"value": "secp521r1"
},
{
"context": "'secp521r1'\n ecdh = new dcrypt.keypair.newECDSA 'secp521r1'\n\n# random = crypto.randomBytes 16\n\n# dsaPubSig",
"end": 3145,
"score": 0.8903873562812805,
"start": 3136,
"tag": "KEY",
"value": "secp521r1"
},
{
"context": "ck) ->\n ecdsa = new dcrypt.keypair.newECDSA 'secp521r1'\n ecdh = new dcrypt.keypair.newECDSA 'secp521r1'",
"end": 3818,
"score": 0.7848575711250305,
"start": 3813,
"tag": "KEY",
"value": "521r1"
},
{
"context": "1r1'\n ecdh = new dcrypt.keypair.newECDSA 'secp521r1'\n# authSig = sign ecdsa.pem_priv, new Buffer(\"te",
"end": 3867,
"score": 0.5471357703208923,
"start": 3865,
"tag": "KEY",
"value": "r1"
},
{
"context": "ase64')]).toString('base64')\n\n\nsignClient = (priv, username, version, dhPubKey) ->\n vbuffer = new Buffer(4)\n",
"end": 4437,
"score": 0.9364800453186035,
"start": 4429,
"tag": "USERNAME",
"value": "username"
},
{
"context": " (done) ->\n signup \"012345678901234567890\", \"test0\", keys[0], done, (res, body) ->\n res.statu",
"end": 5321,
"score": 0.9722940921783447,
"start": 5316,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "0 if username empty\", (done) ->\n signup '', \"test0\", keys[0], done, (res, body) ->\n res.statu",
"end": 5495,
"score": 0.963436484336853,
"start": 5490,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "5\n pw = random.toString('hex')\n signup \"test0\", pw, keys[0], done, (res, body) ->\n res.s",
"end": 5741,
"score": 0.9326353073120117,
"start": 5736,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "should respond with 201\", (done) ->\n signup \"test0\", \"test0\", keys[0], done, (res, body) ->\n ",
"end": 5898,
"score": 0.8989537954330444,
"start": 5893,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "d with 201\", (done) ->\n signup \"test0\", \"test0\", keys[0], done, (res, body) ->\n res.statu",
"end": 5907,
"score": 0.7419089674949646,
"start": 5906,
"tag": "USERNAME",
"value": "0"
},
{
"context": "+ \"/keytoken2\"\n json:\n username: \"test0\"\n password: \"test0\"\n authSig2: ",
"end": 7099,
"score": 0.9995142221450806,
"start": 7094,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "\n username: \"test0\"\n password: \"test0\"\n authSig2: keys[0].authSig\n #c",
"end": 7127,
"score": 0.9988201856613159,
"start": 7122,
"tag": "PASSWORD",
"value": "test0"
},
{
"context": " json:\n username: \"test0\"\n password: \"test0\"\n ",
"end": 7847,
"score": 0.9995424747467041,
"start": 7842,
"tag": "USERNAME",
"value": "test0"
},
{
"context": " username: \"test0\"\n password: \"test0\"\n authSig2: oldKeys[0].authSig\n ",
"end": 7883,
"score": 0.9989497661590576,
"start": 7878,
"tag": "PASSWORD",
"value": "test0"
},
{
"context": " it \"should return 401\", (done) ->\n login \"test0\", \"test0\", keys[0].authSig, done, (res, body) ->\n",
"end": 8475,
"score": 0.99882972240448,
"start": 8470,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "ould return 401\", (done) ->\n login \"test0\", \"test0\", keys[0].authSig, done, (res, body) ->\n r",
"end": 8484,
"score": 0.93115234375,
"start": 8479,
"tag": "USERNAME",
"value": "test0"
},
{
"context": " it \"should return 401\", (done) ->\n login \"test0\", \"bollocks\", oldKeys[0].sig, done, (res, body) -",
"end": 8683,
"score": 0.9991629123687744,
"start": 8678,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "ould return 401\", (done) ->\n login \"test0\", \"bollocks\", oldKeys[0].sig, done, (res, body) ->\n re",
"end": 8695,
"score": 0.9966943264007568,
"start": 8687,
"tag": "USERNAME",
"value": "bollocks"
},
{
"context": " it \"should return 401\", (done) ->\n login \"test0\", \"test0\", \"martin\", done, (res, body) ->\n ",
"end": 8891,
"score": 0.9990701675415039,
"start": 8886,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "ould return 401\", (done) ->\n login \"test0\", \"test0\", \"martin\", done, (res, body) ->\n res.stat",
"end": 8900,
"score": 0.8455966711044312,
"start": 8895,
"tag": "USERNAME",
"value": "test0"
},
{
"context": "rn 401\", (done) ->\n login \"test0\", \"test0\", \"martin\", done, (res, body) ->\n res.statusCode.sho",
"end": 8910,
"score": 0.9917302131652832,
"start": 8904,
"tag": "USERNAME",
"value": "martin"
},
{
"context": " it \"should return 401\", (done) ->\n login \"test0\", \"test0\", \"Lorem ipsum dolor sit amet, consectet",
"end": 9095,
"score": 0.999286413192749,
"start": 9090,
"tag": "USERNAME",
"value": "test0"
},
{
"context": " return 401\", (done) ->\n login \"test0\", \"test0\", \"Lorem ipsum dolor sit amet, consectetur adipis",
"end": 9104,
"score": 0.74616539478302,
"start": 9103,
"tag": "USERNAME",
"value": "0"
},
{
"context": "hould return 401\", (done) ->\n login \"your\", \"mama\", \"what kind of sig is this?\", done, (res, body) ",
"end": 9923,
"score": 0.916944146156311,
"start": 9919,
"tag": "USERNAME",
"value": "mama"
},
{
"context": " it \"should return 204\", (done) ->\n login \"test0\", \"test0\", oldKeys[0].authSig, done, (res, body) ",
"end": 10137,
"score": 0.9495998620986938,
"start": 10132,
"tag": "USERNAME",
"value": "test0"
},
{
"context": " return 204\", (done) ->\n login \"test0\", \"test0\", oldKeys[0].authSig, done, (res, body) ->\n ",
"end": 10146,
"score": 0.8096608519554138,
"start": 10145,
"tag": "USERNAME",
"value": "0"
}
] | test/test/auth2tests.coffee | SchoolOfFreelancing/SureSpot | 1 | assert = require("assert")
should = require("should")
http = require("request")
redis = require("redis")
util = require("util")
crypto = require 'crypto'
dcrypt = require 'dcrypt'
async = require 'async'
fs = require("fs")
redisSentinel = require 'redis-sentinel-client'
helenus = require 'helenus'
socketPort = process.env.SURESPOT_SOCKET ? 8080
redisSentinelPort = parseInt(process.env.SURESPOT_REDIS_SENTINEL_PORT) ? 6379
redisSentinelHostname = process.env.SURESPOT_REDIS_SENTINEL_HOSTNAME ? "127.0.0.1"
dontUseSSL = process.env.SURESPOT_DONT_USE_SSL is "true"
baseUri = process.env.SURESPOT_TEST_BASEURI
cleanupDb = process.env.SURESPOT_TEST_CLEANDB is "true"
useRedisSentinel = process.env.SURESPOT_USE_REDIS_SENTINEL is "true"
pool = new helenus.ConnectionPool({host:'127.0.0.1', port:9160, keyspace:'surespot'});
rc = if useRedisSentinel then redisSentinel.createClient(redisSentinelPort, redisSentinelHostname) else redis.createClient(redisSentinelPort, redisSentinelHostname)
port = socketPort
cleanup = (done) ->
keys = [
"u:test0",
"u:test1",
"u:test2",
"f:test0",
"f:test1",
"fi:test0",
"is:test0",
"ir:test0",
"is:test1",
"ir:test1",
"m:test0",
"c:test1",
"c:test0",
"c:test2",
"kt:test0"
"kv:test0",
"k:test0",
"kv:test1",
"k:test1",
"kv:test2",
"k:test2",
"d:test2"]
multi = rc.multi()
multi.del keys
multi.hdel "mcounters", "test0:test1"
multi.hdel "ucmcounters", "test0"
multi.hdel "ucmcounters", "test1"
multi.srem "u", "test0", "test1", "test2"
multi.exec (err, results) ->
return done err if err?
pool.connect (err, keyspace) ->
return done err if err?
cql = "begin batch
delete from chatmessages where username = ?
delete from chatmessages where username = ?
delete from usercontrolmessages where username = ?
delete from usercontrolmessages where username = ?
delete from frienddata where username = ?
delete from frienddata where username = ?
apply batch"
pool.cql cql, ["test0", "test1", "test0", "test1", "test0", "test1"], (err, results) ->
if err
done err
else
done()
login = (username, password, authSig, done, callback) ->
http.post
url: baseUri + "/login"
json:
username: username
password: password
authSig2: authSig
version: 60
platform:'android'
(err, res, body) ->
if err
done err
else
callback res, body
signup = (username, password, keys, done, callback) ->
http.post
url: baseUri + "/users2"
json:
username: username
password: password
dhPub: keys.ecdh.pem_pub
dsaPub: keys.ecdsa.pem_pub
authSig2: keys.authSig
clientSig: keys.clientSig
version: 60
platform:'android'
(err, res, body) ->
if err
done err
else
callback res, body
generateKey = (i, callback) ->
ecdsa = new dcrypt.keypair.newECDSA 'secp521r1'
ecdh = new dcrypt.keypair.newECDSA 'secp521r1'
# random = crypto.randomBytes 16
# dsaPubSig =
# crypto
# .createSign('sha256')
# .update(new Buffer("test#{i}"))
# .update(new Buffer("test#{i}"))
# .update(random)
# .sign(ecdsa.pem_priv, 'base64')
authSig = sign ecdsa.pem_priv, new Buffer("test#{i}"), new Buffer("test#{i}")
#Buffer.concat([random, new Buffer(dsaPubSig, 'base64')]).toString('base64')
clientSig = signClient ecdsa.pem_priv, "test#{i}", 1, ecdh.pem_pub
callback null, {
ecdsa: ecdsa
ecdh: ecdh
authSig: authSig
clientSig: clientSig
}
generateKeyVersion = (i, version, firstDsa, callback) ->
ecdsa = new dcrypt.keypair.newECDSA 'secp521r1'
ecdh = new dcrypt.keypair.newECDSA 'secp521r1'
# authSig = sign ecdsa.pem_priv, new Buffer("test#{i}"), new Buffer("test#{i}")
clientSig = signClient firstDsa, "test#{i}", version, ecdh.pem_pub
callback null, {
ecdsa: ecdsa
ecdh: ecdh
# authSig: authSig
clientSig: clientSig
}
sign = (priv, b1, b2) ->
random = crypto.randomBytes 16
dsaPubSig =
crypto
.createSign('sha256')
.update(b1)
.update(b2)
.update(random)
.sign(priv, 'base64')
return Buffer.concat([random, new Buffer(dsaPubSig, 'base64')]).toString('base64')
signClient = (priv, username, version, dhPubKey) ->
vbuffer = new Buffer(4)
vbuffer.writeInt32BE(version, 0)
clientSig =
crypto
.createSign('sha256')
.update(new Buffer(username))
.update(vbuffer)
.update(new Buffer(dhPubKey))
.sign(priv, 'base64')
return clientSig
makeKeys = (i) ->
return (callback) ->
generateKey i, callback
createKeys = (number, done) ->
keys = []
for i in [0..number]
keys.push makeKeys(i)
async.parallel keys, (err, results) ->
if err?
done err
else
done null, results
describe "auth 2 tests", () ->
keys = undefined
oldKeys = []
before (done) ->
createKeys 3, (err, keyss) ->
keys = keyss
if cleanupDb
cleanup done
else
done()
describe "create user", () ->
it "should respond with 400 if username invalid", (done) ->
signup "012345678901234567890", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 400 if username empty", (done) ->
signup '', "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 400 if password too long", (done) ->
random = crypto.randomBytes 1025
pw = random.toString('hex')
signup "test0", pw, keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 201", (done) ->
signup "test0", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 201
done()
it "and subsequently exist", (done) ->
http.get
url: baseUri + "/users/test0/exists",
(err, res, body) ->
if err
done err
else
body.should.equal "true"
done()
# it "even if the request case is different", (done) ->
# http.get
# url: baseUri + "/users/TEST/exists",
# (err,res,body) ->
# if err
# done err
# else
# body.should.equal "true"
# done()
it "shouldn't be allowed to be created again", (done) ->
signup "test0", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 409
done()
# it "even if the request case is different", (done) ->
# signup "tEsT", "test", keys[0].ecdh.pem_pub, keys[0].ecdsa.pem_pub, keys[0].sig, done, (res, body) ->
# res.statusCode.should.equal 409
# done()
it "should be able to roll the key pair", (done) ->
http.post
url: baseUri + "/keytoken2"
json:
username: "test0"
password: "test0"
authSig2: keys[0].authSig
#clientSig: keys[0].clientSig
(err, res, body) ->
if err
done err
else
res.statusCode.should.equal 200
body.keyversion.should.equal 2
body.token.should.exist
oldKeys[0] = keys[0]
#generate new key pairs, sign with first dsa key
generateKeyVersion 0, body.keyversion, oldKeys[0].ecdsa.pem_priv, (err, nkp) ->
keys[0] = nkp
tokenSig = sign oldKeys[0].ecdsa.pem_priv, new Buffer(body.token, 'base64'), "test0"
http.post
url: baseUri + "/keys2"
json:
username: "test0"
password: "test0"
authSig2: oldKeys[0].authSig
dhPub: keys[0].ecdh.pem_pub
dsaPub: keys[0].ecdsa.pem_pub
keyVersion: body.keyversion
tokenSig: tokenSig
clientSig: keys[0].clientSig
(err, res, body) ->
if err
done err
else
res.statusCode.should.equal 201
done()
#
describe "should not be able to login with the new signature", ->
it "should return 401", (done) ->
login "test0", "test0", keys[0].authSig, done, (res, body) ->
res.statusCode.should.equal 401
done()
#
describe "login with invalid password", ->
it "should return 401", (done) ->
login "test0", "bollocks", oldKeys[0].sig, done, (res, body) ->
res.statusCode.should.equal 401
done()
describe "login with short signature", ->
it "should return 401", (done) ->
login "test0", "test0", "martin", done, (res, body) ->
res.statusCode.should.equal 401
done()
#
#
describe "login with invalid signature", ->
it "should return 401", (done) ->
login "test0", "test0", "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean venenatis dictum viverra. Duis vel justo vel purus hendrerit consequat. Duis ac nisi at ante elementum faucibus in eget lorem. Morbi cursus blandit sollicitudin. Aenean tincidunt, turpis eu malesuada venenatis, urna eros sagittis augue, et vehicula quam turpis at risus. Sed ac orci a tellus semper tincidunt eget non lorem. In porta nisi eu elit porttitor pellentesque vestibulum purus luctus. Nam venenatis porta porta. Vestibulum eget orci massa. Fusce laoreet vestibulum lacus ut hendrerit. Proin ac eros enim, ac faucibus eros. Aliquam erat volutpat.",
done, (res, body) ->
res.statusCode.should.equal 401
done()
#
describe "login with non existant user", ->
it "should return 401", (done) ->
login "your", "mama", "what kind of sig is this?", done, (res, body) ->
res.statusCode.should.equal 401
done()
#
#
describe "login with valid credentials", ->
it "should return 204", (done) ->
login "test0", "test0", oldKeys[0].authSig, done, (res, body) ->
res.statusCode.should.equal 204
done()
#todo set filename explicitly
after (done) ->
if cleanupDb
cleanup done
else
done() | 74694 | assert = require("assert")
should = require("should")
http = require("request")
redis = require("redis")
util = require("util")
crypto = require 'crypto'
dcrypt = require 'dcrypt'
async = require 'async'
fs = require("fs")
redisSentinel = require 'redis-sentinel-client'
helenus = require 'helenus'
socketPort = process.env.SURESPOT_SOCKET ? 8080
redisSentinelPort = parseInt(process.env.SURESPOT_REDIS_SENTINEL_PORT) ? 6379
redisSentinelHostname = process.env.SURESPOT_REDIS_SENTINEL_HOSTNAME ? "127.0.0.1"
dontUseSSL = process.env.SURESPOT_DONT_USE_SSL is "true"
baseUri = process.env.SURESPOT_TEST_BASEURI
cleanupDb = process.env.SURESPOT_TEST_CLEANDB is "true"
useRedisSentinel = process.env.SURESPOT_USE_REDIS_SENTINEL is "true"
pool = new helenus.ConnectionPool({host:'127.0.0.1', port:9160, keyspace:'surespot'});
rc = if useRedisSentinel then redisSentinel.createClient(redisSentinelPort, redisSentinelHostname) else redis.createClient(redisSentinelPort, redisSentinelHostname)
port = socketPort
cleanup = (done) ->
keys = [
"u:test0",
"u:test1",
"u:test2",
"f:test0",
"f:test1",
"fi:test0",
"is:test0",
"ir:test0",
"is:test1",
"ir:test1",
"m:test0",
"c:test1",
"c:test0",
"c:test2",
"kt:test0"
"kv:test0",
"k:test0",
"kv:test1",
"k:test1",
"kv:test2",
"k:test2",
"d:test2"]
multi = rc.multi()
multi.del keys
multi.hdel "mcounters", "test0:test1"
multi.hdel "ucmcounters", "test0"
multi.hdel "ucmcounters", "test1"
multi.srem "u", "test0", "test1", "test2"
multi.exec (err, results) ->
return done err if err?
pool.connect (err, keyspace) ->
return done err if err?
cql = "begin batch
delete from chatmessages where username = ?
delete from chatmessages where username = ?
delete from usercontrolmessages where username = ?
delete from usercontrolmessages where username = ?
delete from frienddata where username = ?
delete from frienddata where username = ?
apply batch"
pool.cql cql, ["test0", "test1", "test0", "test1", "test0", "test1"], (err, results) ->
if err
done err
else
done()
login = (username, password, authSig, done, callback) ->
http.post
url: baseUri + "/login"
json:
username: username
password: <PASSWORD>
authSig2: authSig
version: 60
platform:'android'
(err, res, body) ->
if err
done err
else
callback res, body
signup = (username, password, keys, done, callback) ->
http.post
url: baseUri + "/users2"
json:
username: username
password: <PASSWORD>
dhPub: keys.ecdh.pem_pub
dsaPub: keys.ecdsa.pem_pub
authSig2: keys.authSig
clientSig: keys.clientSig
version: 60
platform:'android'
(err, res, body) ->
if err
done err
else
callback res, body
generateKey = (i, callback) ->
ecdsa = new dcrypt.keypair.newECDSA '<KEY>'
ecdh = new dcrypt.keypair.newECDSA '<KEY>'
# random = crypto.randomBytes 16
# dsaPubSig =
# crypto
# .createSign('sha256')
# .update(new Buffer("test#{i}"))
# .update(new Buffer("test#{i}"))
# .update(random)
# .sign(ecdsa.pem_priv, 'base64')
authSig = sign ecdsa.pem_priv, new Buffer("test#{i}"), new Buffer("test#{i}")
#Buffer.concat([random, new Buffer(dsaPubSig, 'base64')]).toString('base64')
clientSig = signClient ecdsa.pem_priv, "test#{i}", 1, ecdh.pem_pub
callback null, {
ecdsa: ecdsa
ecdh: ecdh
authSig: authSig
clientSig: clientSig
}
generateKeyVersion = (i, version, firstDsa, callback) ->
ecdsa = new dcrypt.keypair.newECDSA 'secp<KEY>'
ecdh = new dcrypt.keypair.newECDSA 'secp521<KEY>'
# authSig = sign ecdsa.pem_priv, new Buffer("test#{i}"), new Buffer("test#{i}")
clientSig = signClient firstDsa, "test#{i}", version, ecdh.pem_pub
callback null, {
ecdsa: ecdsa
ecdh: ecdh
# authSig: authSig
clientSig: clientSig
}
sign = (priv, b1, b2) ->
random = crypto.randomBytes 16
dsaPubSig =
crypto
.createSign('sha256')
.update(b1)
.update(b2)
.update(random)
.sign(priv, 'base64')
return Buffer.concat([random, new Buffer(dsaPubSig, 'base64')]).toString('base64')
signClient = (priv, username, version, dhPubKey) ->
vbuffer = new Buffer(4)
vbuffer.writeInt32BE(version, 0)
clientSig =
crypto
.createSign('sha256')
.update(new Buffer(username))
.update(vbuffer)
.update(new Buffer(dhPubKey))
.sign(priv, 'base64')
return clientSig
makeKeys = (i) ->
return (callback) ->
generateKey i, callback
createKeys = (number, done) ->
keys = []
for i in [0..number]
keys.push makeKeys(i)
async.parallel keys, (err, results) ->
if err?
done err
else
done null, results
describe "auth 2 tests", () ->
keys = undefined
oldKeys = []
before (done) ->
createKeys 3, (err, keyss) ->
keys = keyss
if cleanupDb
cleanup done
else
done()
describe "create user", () ->
it "should respond with 400 if username invalid", (done) ->
signup "012345678901234567890", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 400 if username empty", (done) ->
signup '', "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 400 if password too long", (done) ->
random = crypto.randomBytes 1025
pw = random.toString('hex')
signup "test0", pw, keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 201", (done) ->
signup "test0", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 201
done()
it "and subsequently exist", (done) ->
http.get
url: baseUri + "/users/test0/exists",
(err, res, body) ->
if err
done err
else
body.should.equal "true"
done()
# it "even if the request case is different", (done) ->
# http.get
# url: baseUri + "/users/TEST/exists",
# (err,res,body) ->
# if err
# done err
# else
# body.should.equal "true"
# done()
it "shouldn't be allowed to be created again", (done) ->
signup "test0", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 409
done()
# it "even if the request case is different", (done) ->
# signup "tEsT", "test", keys[0].ecdh.pem_pub, keys[0].ecdsa.pem_pub, keys[0].sig, done, (res, body) ->
# res.statusCode.should.equal 409
# done()
it "should be able to roll the key pair", (done) ->
http.post
url: baseUri + "/keytoken2"
json:
username: "test0"
password: "<PASSWORD>"
authSig2: keys[0].authSig
#clientSig: keys[0].clientSig
(err, res, body) ->
if err
done err
else
res.statusCode.should.equal 200
body.keyversion.should.equal 2
body.token.should.exist
oldKeys[0] = keys[0]
#generate new key pairs, sign with first dsa key
generateKeyVersion 0, body.keyversion, oldKeys[0].ecdsa.pem_priv, (err, nkp) ->
keys[0] = nkp
tokenSig = sign oldKeys[0].ecdsa.pem_priv, new Buffer(body.token, 'base64'), "test0"
http.post
url: baseUri + "/keys2"
json:
username: "test0"
password: "<PASSWORD>"
authSig2: oldKeys[0].authSig
dhPub: keys[0].ecdh.pem_pub
dsaPub: keys[0].ecdsa.pem_pub
keyVersion: body.keyversion
tokenSig: tokenSig
clientSig: keys[0].clientSig
(err, res, body) ->
if err
done err
else
res.statusCode.should.equal 201
done()
#
describe "should not be able to login with the new signature", ->
it "should return 401", (done) ->
login "test0", "test0", keys[0].authSig, done, (res, body) ->
res.statusCode.should.equal 401
done()
#
describe "login with invalid password", ->
it "should return 401", (done) ->
login "test0", "bollocks", oldKeys[0].sig, done, (res, body) ->
res.statusCode.should.equal 401
done()
describe "login with short signature", ->
it "should return 401", (done) ->
login "test0", "test0", "martin", done, (res, body) ->
res.statusCode.should.equal 401
done()
#
#
describe "login with invalid signature", ->
it "should return 401", (done) ->
login "test0", "test0", "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean venenatis dictum viverra. Duis vel justo vel purus hendrerit consequat. Duis ac nisi at ante elementum faucibus in eget lorem. Morbi cursus blandit sollicitudin. Aenean tincidunt, turpis eu malesuada venenatis, urna eros sagittis augue, et vehicula quam turpis at risus. Sed ac orci a tellus semper tincidunt eget non lorem. In porta nisi eu elit porttitor pellentesque vestibulum purus luctus. Nam venenatis porta porta. Vestibulum eget orci massa. Fusce laoreet vestibulum lacus ut hendrerit. Proin ac eros enim, ac faucibus eros. Aliquam erat volutpat.",
done, (res, body) ->
res.statusCode.should.equal 401
done()
#
describe "login with non existant user", ->
it "should return 401", (done) ->
login "your", "mama", "what kind of sig is this?", done, (res, body) ->
res.statusCode.should.equal 401
done()
#
#
describe "login with valid credentials", ->
it "should return 204", (done) ->
login "test0", "test0", oldKeys[0].authSig, done, (res, body) ->
res.statusCode.should.equal 204
done()
#todo set filename explicitly
after (done) ->
if cleanupDb
cleanup done
else
done() | true | assert = require("assert")
should = require("should")
http = require("request")
redis = require("redis")
util = require("util")
crypto = require 'crypto'
dcrypt = require 'dcrypt'
async = require 'async'
fs = require("fs")
redisSentinel = require 'redis-sentinel-client'
helenus = require 'helenus'
socketPort = process.env.SURESPOT_SOCKET ? 8080
redisSentinelPort = parseInt(process.env.SURESPOT_REDIS_SENTINEL_PORT) ? 6379
redisSentinelHostname = process.env.SURESPOT_REDIS_SENTINEL_HOSTNAME ? "127.0.0.1"
dontUseSSL = process.env.SURESPOT_DONT_USE_SSL is "true"
baseUri = process.env.SURESPOT_TEST_BASEURI
cleanupDb = process.env.SURESPOT_TEST_CLEANDB is "true"
useRedisSentinel = process.env.SURESPOT_USE_REDIS_SENTINEL is "true"
pool = new helenus.ConnectionPool({host:'127.0.0.1', port:9160, keyspace:'surespot'});
rc = if useRedisSentinel then redisSentinel.createClient(redisSentinelPort, redisSentinelHostname) else redis.createClient(redisSentinelPort, redisSentinelHostname)
port = socketPort
cleanup = (done) ->
keys = [
"u:test0",
"u:test1",
"u:test2",
"f:test0",
"f:test1",
"fi:test0",
"is:test0",
"ir:test0",
"is:test1",
"ir:test1",
"m:test0",
"c:test1",
"c:test0",
"c:test2",
"kt:test0"
"kv:test0",
"k:test0",
"kv:test1",
"k:test1",
"kv:test2",
"k:test2",
"d:test2"]
multi = rc.multi()
multi.del keys
multi.hdel "mcounters", "test0:test1"
multi.hdel "ucmcounters", "test0"
multi.hdel "ucmcounters", "test1"
multi.srem "u", "test0", "test1", "test2"
multi.exec (err, results) ->
return done err if err?
pool.connect (err, keyspace) ->
return done err if err?
cql = "begin batch
delete from chatmessages where username = ?
delete from chatmessages where username = ?
delete from usercontrolmessages where username = ?
delete from usercontrolmessages where username = ?
delete from frienddata where username = ?
delete from frienddata where username = ?
apply batch"
pool.cql cql, ["test0", "test1", "test0", "test1", "test0", "test1"], (err, results) ->
if err
done err
else
done()
login = (username, password, authSig, done, callback) ->
http.post
url: baseUri + "/login"
json:
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
authSig2: authSig
version: 60
platform:'android'
(err, res, body) ->
if err
done err
else
callback res, body
signup = (username, password, keys, done, callback) ->
http.post
url: baseUri + "/users2"
json:
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
dhPub: keys.ecdh.pem_pub
dsaPub: keys.ecdsa.pem_pub
authSig2: keys.authSig
clientSig: keys.clientSig
version: 60
platform:'android'
(err, res, body) ->
if err
done err
else
callback res, body
generateKey = (i, callback) ->
ecdsa = new dcrypt.keypair.newECDSA 'PI:KEY:<KEY>END_PI'
ecdh = new dcrypt.keypair.newECDSA 'PI:KEY:<KEY>END_PI'
# random = crypto.randomBytes 16
# dsaPubSig =
# crypto
# .createSign('sha256')
# .update(new Buffer("test#{i}"))
# .update(new Buffer("test#{i}"))
# .update(random)
# .sign(ecdsa.pem_priv, 'base64')
authSig = sign ecdsa.pem_priv, new Buffer("test#{i}"), new Buffer("test#{i}")
#Buffer.concat([random, new Buffer(dsaPubSig, 'base64')]).toString('base64')
clientSig = signClient ecdsa.pem_priv, "test#{i}", 1, ecdh.pem_pub
callback null, {
ecdsa: ecdsa
ecdh: ecdh
authSig: authSig
clientSig: clientSig
}
generateKeyVersion = (i, version, firstDsa, callback) ->
ecdsa = new dcrypt.keypair.newECDSA 'secpPI:KEY:<KEY>END_PI'
ecdh = new dcrypt.keypair.newECDSA 'secp521PI:KEY:<KEY>END_PI'
# authSig = sign ecdsa.pem_priv, new Buffer("test#{i}"), new Buffer("test#{i}")
clientSig = signClient firstDsa, "test#{i}", version, ecdh.pem_pub
callback null, {
ecdsa: ecdsa
ecdh: ecdh
# authSig: authSig
clientSig: clientSig
}
sign = (priv, b1, b2) ->
random = crypto.randomBytes 16
dsaPubSig =
crypto
.createSign('sha256')
.update(b1)
.update(b2)
.update(random)
.sign(priv, 'base64')
return Buffer.concat([random, new Buffer(dsaPubSig, 'base64')]).toString('base64')
signClient = (priv, username, version, dhPubKey) ->
vbuffer = new Buffer(4)
vbuffer.writeInt32BE(version, 0)
clientSig =
crypto
.createSign('sha256')
.update(new Buffer(username))
.update(vbuffer)
.update(new Buffer(dhPubKey))
.sign(priv, 'base64')
return clientSig
makeKeys = (i) ->
return (callback) ->
generateKey i, callback
createKeys = (number, done) ->
keys = []
for i in [0..number]
keys.push makeKeys(i)
async.parallel keys, (err, results) ->
if err?
done err
else
done null, results
describe "auth 2 tests", () ->
keys = undefined
oldKeys = []
before (done) ->
createKeys 3, (err, keyss) ->
keys = keyss
if cleanupDb
cleanup done
else
done()
describe "create user", () ->
it "should respond with 400 if username invalid", (done) ->
signup "012345678901234567890", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 400 if username empty", (done) ->
signup '', "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 400 if password too long", (done) ->
random = crypto.randomBytes 1025
pw = random.toString('hex')
signup "test0", pw, keys[0], done, (res, body) ->
res.statusCode.should.equal 400
done()
it "should respond with 201", (done) ->
signup "test0", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 201
done()
it "and subsequently exist", (done) ->
http.get
url: baseUri + "/users/test0/exists",
(err, res, body) ->
if err
done err
else
body.should.equal "true"
done()
# it "even if the request case is different", (done) ->
# http.get
# url: baseUri + "/users/TEST/exists",
# (err,res,body) ->
# if err
# done err
# else
# body.should.equal "true"
# done()
it "shouldn't be allowed to be created again", (done) ->
signup "test0", "test0", keys[0], done, (res, body) ->
res.statusCode.should.equal 409
done()
# it "even if the request case is different", (done) ->
# signup "tEsT", "test", keys[0].ecdh.pem_pub, keys[0].ecdsa.pem_pub, keys[0].sig, done, (res, body) ->
# res.statusCode.should.equal 409
# done()
it "should be able to roll the key pair", (done) ->
http.post
url: baseUri + "/keytoken2"
json:
username: "test0"
password: "PI:PASSWORD:<PASSWORD>END_PI"
authSig2: keys[0].authSig
#clientSig: keys[0].clientSig
(err, res, body) ->
if err
done err
else
res.statusCode.should.equal 200
body.keyversion.should.equal 2
body.token.should.exist
oldKeys[0] = keys[0]
#generate new key pairs, sign with first dsa key
generateKeyVersion 0, body.keyversion, oldKeys[0].ecdsa.pem_priv, (err, nkp) ->
keys[0] = nkp
tokenSig = sign oldKeys[0].ecdsa.pem_priv, new Buffer(body.token, 'base64'), "test0"
http.post
url: baseUri + "/keys2"
json:
username: "test0"
password: "PI:PASSWORD:<PASSWORD>END_PI"
authSig2: oldKeys[0].authSig
dhPub: keys[0].ecdh.pem_pub
dsaPub: keys[0].ecdsa.pem_pub
keyVersion: body.keyversion
tokenSig: tokenSig
clientSig: keys[0].clientSig
(err, res, body) ->
if err
done err
else
res.statusCode.should.equal 201
done()
#
describe "should not be able to login with the new signature", ->
it "should return 401", (done) ->
login "test0", "test0", keys[0].authSig, done, (res, body) ->
res.statusCode.should.equal 401
done()
#
describe "login with invalid password", ->
it "should return 401", (done) ->
login "test0", "bollocks", oldKeys[0].sig, done, (res, body) ->
res.statusCode.should.equal 401
done()
describe "login with short signature", ->
it "should return 401", (done) ->
login "test0", "test0", "martin", done, (res, body) ->
res.statusCode.should.equal 401
done()
#
#
describe "login with invalid signature", ->
it "should return 401", (done) ->
login "test0", "test0", "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean venenatis dictum viverra. Duis vel justo vel purus hendrerit consequat. Duis ac nisi at ante elementum faucibus in eget lorem. Morbi cursus blandit sollicitudin. Aenean tincidunt, turpis eu malesuada venenatis, urna eros sagittis augue, et vehicula quam turpis at risus. Sed ac orci a tellus semper tincidunt eget non lorem. In porta nisi eu elit porttitor pellentesque vestibulum purus luctus. Nam venenatis porta porta. Vestibulum eget orci massa. Fusce laoreet vestibulum lacus ut hendrerit. Proin ac eros enim, ac faucibus eros. Aliquam erat volutpat.",
done, (res, body) ->
res.statusCode.should.equal 401
done()
#
describe "login with non existant user", ->
it "should return 401", (done) ->
login "your", "mama", "what kind of sig is this?", done, (res, body) ->
res.statusCode.should.equal 401
done()
#
#
describe "login with valid credentials", ->
it "should return 204", (done) ->
login "test0", "test0", oldKeys[0].authSig, done, (res, body) ->
res.statusCode.should.equal 204
done()
#todo set filename explicitly
after (done) ->
if cleanupDb
cleanup done
else
done() |
[
{
"context": ".inputs\n\n options =\n password : password.getValue()\n disable : yes\n\n @handleProcessOf2",
"end": 2707,
"score": 0.8594573140144348,
"start": 2690,
"tag": "PASSWORD",
"value": "password.getValue"
},
{
"context": " key : @_activeKey\n password : password.getValue()\n verification : tfcode.getValue()\n\n @ha",
"end": 3956,
"score": 0.8705041408538818,
"start": 3939,
"tag": "PASSWORD",
"value": "password.getValue"
}
] | client/home/lib/account/homeaccountsecurityview.coffee | lionheart1022/koding | 0 | kd = require 'kd'
whoami = require 'app/util/whoami'
CustomLinkView = require 'app/customlinkview'
module.exports = class HomeAccountSecurityView extends kd.CustomHTMLView
constructor: (options = {}, data) ->
options.cssClass = kd.utils.curry \
'AppModal--account tfauth', options.cssClass
super options, data
viewAppended: ->
@buildInitialView()
showError: (err) ->
return unless err
console.warn err
new kd.NotificationView
type : 'mini'
title : err.message
cssClass : 'error'
return err
buildInitialView: ->
@destroySubViews()
@addSubView loader = @getLoaderView()
kd.singletons.mainController.ready =>
me = whoami()
me.generate2FactorAuthKey (err, authInfo) =>
loader.hide()
if err
if err.name is 'ALREADY_INUSE'
@addSubView @getEnabledView()
return
return @showError err
{ key, qrcode } = authInfo
@_activeKey = key
instructionsView = @getInstructionsView()
instructionsView.addSubView
@addSubView @getFormView()
@enableForm.addSubView @getQrCodeView qrcode
@addSubView instructionsView
getEnabledView: ->
@addSubView new kd.CustomHTMLView
cssClass : 'enabled-intro'
partial : "
<div>
<p class='status'><strong>ACTIVE</strong> Your 2-factor status</p>
<p class='info'>To change your code generator you must first disable the current authentication.</p>
</div>
<div>
<a class='learn-link HomeAppView--link primary' href='https://koding.com/docs/2-factor-auth/' target=_blank>
LEARN MORE</a>
</div>
"
@setHeight 240
@setClass 'enabled2Factor'
@addSubView @disableForm = new kd.FormViewWithFields
cssClass : 'AppModal-form enabled-form'
fields :
password :
cssClass : 'Formline--half'
placeholder : 'Enter your Koding password...'
name : 'password'
type : 'password'
label : 'Password'
button :
type : 'submit'
label : ' '
cssClass : 'Formline--half'
itemClass : kd.ButtonView
title : 'DISABLE 2-FACTOR'
style : 'GenericButton disable-tf'
callback : @bound 'handleDisableFormButton'
handleDisableFormButton: ->
{ password } = @disableForm.inputs
options =
password : password.getValue()
disable : yes
@handleProcessOf2FactorAuth options, 'Successfully Disabled!'
handleProcessOf2FactorAuth: (options, message) ->
me = whoami()
me.setup2FactorAuth options, (err) =>
return if @showError err
new kd.NotificationView
title : message
type : 'mini'
@buildInitialView()
@unsetClass 'enabled2Factor'
@setHeight 360
getFormView: ->
@addSubView @enableForm = new kd.FormViewWithFields
cssClass : 'AppModal-form'
fields :
tfcode :
name : 'tfcode'
label : 'Verification Code'
password :
name : 'password'
type : 'password'
label : 'Password'
buttons :
Enable :
type : 'submit'
title : 'ENABLE 2-FACTOR AUTH'
style : 'GenericButton enable-tf'
callback : @bound 'handleEnableFormButton'
handleEnableFormButton: ->
{ password, tfcode } = @enableForm.inputs
options =
key : @_activeKey
password : password.getValue()
verification : tfcode.getValue()
@handleProcessOf2FactorAuth options, 'Successfully Enabled!'
getQrCodeView: (url) ->
view = new kd.CustomHTMLView
cssClass : 'qrcode-view'
view.addSubView new kd.CustomHTMLView
tagName : 'label'
cssClass : 'qrcode-label'
partial : 'QR Code'
view.addSubView @imageView = new kd.CustomHTMLView
tagName : 'img'
attributes : { src : url }
view.addSubView @getRenewQRCodeLink()
return view
getRenewQRCodeLink: ->
new CustomLinkView
title : 'Renew QR Code'
cssClass : 'HomeAppView--link primary refresh'
click : =>
me = whoami()
me.generate2FactorAuthKey (err, authInfo) =>
@showError err
if authInfo
@_activeKey = authInfo.key
@imageView.setAttribute 'src', authInfo.qrcode
kd.utils.defer button.bound 'hideLoader'
getLoaderView: ->
new kd.LoaderView
cssClass : 'main-loader'
showLoader : yes
size :
width : 25
height : 25
getLearnLink: ->
"
<a class='learn-link HomeAppView--link primary' href='https://koding.com/docs/2-factor-auth/' target=_blank>
LEARN MORE</a>
"
getInstructionsView: ->
new kd.CustomHTMLView
cssClass : 'instructions'
partial : """
<div class='intro'>
Use your Keychain or Authenticator App to generate a 6-digit
Verification Code by scanning this QR: <br />
</div>
<ul>
<li>Open Keychain or App</li>
<li>Scan QR code</li>
<li>Enter Verification Code & account password</li>
</ul>
#{@getLearnLink()}
"""
| 54335 | kd = require 'kd'
whoami = require 'app/util/whoami'
CustomLinkView = require 'app/customlinkview'
module.exports = class HomeAccountSecurityView extends kd.CustomHTMLView
constructor: (options = {}, data) ->
options.cssClass = kd.utils.curry \
'AppModal--account tfauth', options.cssClass
super options, data
viewAppended: ->
@buildInitialView()
showError: (err) ->
return unless err
console.warn err
new kd.NotificationView
type : 'mini'
title : err.message
cssClass : 'error'
return err
buildInitialView: ->
@destroySubViews()
@addSubView loader = @getLoaderView()
kd.singletons.mainController.ready =>
me = whoami()
me.generate2FactorAuthKey (err, authInfo) =>
loader.hide()
if err
if err.name is 'ALREADY_INUSE'
@addSubView @getEnabledView()
return
return @showError err
{ key, qrcode } = authInfo
@_activeKey = key
instructionsView = @getInstructionsView()
instructionsView.addSubView
@addSubView @getFormView()
@enableForm.addSubView @getQrCodeView qrcode
@addSubView instructionsView
getEnabledView: ->
@addSubView new kd.CustomHTMLView
cssClass : 'enabled-intro'
partial : "
<div>
<p class='status'><strong>ACTIVE</strong> Your 2-factor status</p>
<p class='info'>To change your code generator you must first disable the current authentication.</p>
</div>
<div>
<a class='learn-link HomeAppView--link primary' href='https://koding.com/docs/2-factor-auth/' target=_blank>
LEARN MORE</a>
</div>
"
@setHeight 240
@setClass 'enabled2Factor'
@addSubView @disableForm = new kd.FormViewWithFields
cssClass : 'AppModal-form enabled-form'
fields :
password :
cssClass : 'Formline--half'
placeholder : 'Enter your Koding password...'
name : 'password'
type : 'password'
label : 'Password'
button :
type : 'submit'
label : ' '
cssClass : 'Formline--half'
itemClass : kd.ButtonView
title : 'DISABLE 2-FACTOR'
style : 'GenericButton disable-tf'
callback : @bound 'handleDisableFormButton'
handleDisableFormButton: ->
{ password } = @disableForm.inputs
options =
password : <PASSWORD>()
disable : yes
@handleProcessOf2FactorAuth options, 'Successfully Disabled!'
handleProcessOf2FactorAuth: (options, message) ->
me = whoami()
me.setup2FactorAuth options, (err) =>
return if @showError err
new kd.NotificationView
title : message
type : 'mini'
@buildInitialView()
@unsetClass 'enabled2Factor'
@setHeight 360
getFormView: ->
@addSubView @enableForm = new kd.FormViewWithFields
cssClass : 'AppModal-form'
fields :
tfcode :
name : 'tfcode'
label : 'Verification Code'
password :
name : 'password'
type : 'password'
label : 'Password'
buttons :
Enable :
type : 'submit'
title : 'ENABLE 2-FACTOR AUTH'
style : 'GenericButton enable-tf'
callback : @bound 'handleEnableFormButton'
handleEnableFormButton: ->
{ password, tfcode } = @enableForm.inputs
options =
key : @_activeKey
password : <PASSWORD>()
verification : tfcode.getValue()
@handleProcessOf2FactorAuth options, 'Successfully Enabled!'
getQrCodeView: (url) ->
view = new kd.CustomHTMLView
cssClass : 'qrcode-view'
view.addSubView new kd.CustomHTMLView
tagName : 'label'
cssClass : 'qrcode-label'
partial : 'QR Code'
view.addSubView @imageView = new kd.CustomHTMLView
tagName : 'img'
attributes : { src : url }
view.addSubView @getRenewQRCodeLink()
return view
getRenewQRCodeLink: ->
new CustomLinkView
title : 'Renew QR Code'
cssClass : 'HomeAppView--link primary refresh'
click : =>
me = whoami()
me.generate2FactorAuthKey (err, authInfo) =>
@showError err
if authInfo
@_activeKey = authInfo.key
@imageView.setAttribute 'src', authInfo.qrcode
kd.utils.defer button.bound 'hideLoader'
getLoaderView: ->
new kd.LoaderView
cssClass : 'main-loader'
showLoader : yes
size :
width : 25
height : 25
getLearnLink: ->
"
<a class='learn-link HomeAppView--link primary' href='https://koding.com/docs/2-factor-auth/' target=_blank>
LEARN MORE</a>
"
getInstructionsView: ->
new kd.CustomHTMLView
cssClass : 'instructions'
partial : """
<div class='intro'>
Use your Keychain or Authenticator App to generate a 6-digit
Verification Code by scanning this QR: <br />
</div>
<ul>
<li>Open Keychain or App</li>
<li>Scan QR code</li>
<li>Enter Verification Code & account password</li>
</ul>
#{@getLearnLink()}
"""
| true | kd = require 'kd'
whoami = require 'app/util/whoami'
CustomLinkView = require 'app/customlinkview'
module.exports = class HomeAccountSecurityView extends kd.CustomHTMLView
constructor: (options = {}, data) ->
options.cssClass = kd.utils.curry \
'AppModal--account tfauth', options.cssClass
super options, data
viewAppended: ->
@buildInitialView()
showError: (err) ->
return unless err
console.warn err
new kd.NotificationView
type : 'mini'
title : err.message
cssClass : 'error'
return err
buildInitialView: ->
@destroySubViews()
@addSubView loader = @getLoaderView()
kd.singletons.mainController.ready =>
me = whoami()
me.generate2FactorAuthKey (err, authInfo) =>
loader.hide()
if err
if err.name is 'ALREADY_INUSE'
@addSubView @getEnabledView()
return
return @showError err
{ key, qrcode } = authInfo
@_activeKey = key
instructionsView = @getInstructionsView()
instructionsView.addSubView
@addSubView @getFormView()
@enableForm.addSubView @getQrCodeView qrcode
@addSubView instructionsView
getEnabledView: ->
@addSubView new kd.CustomHTMLView
cssClass : 'enabled-intro'
partial : "
<div>
<p class='status'><strong>ACTIVE</strong> Your 2-factor status</p>
<p class='info'>To change your code generator you must first disable the current authentication.</p>
</div>
<div>
<a class='learn-link HomeAppView--link primary' href='https://koding.com/docs/2-factor-auth/' target=_blank>
LEARN MORE</a>
</div>
"
@setHeight 240
@setClass 'enabled2Factor'
@addSubView @disableForm = new kd.FormViewWithFields
cssClass : 'AppModal-form enabled-form'
fields :
password :
cssClass : 'Formline--half'
placeholder : 'Enter your Koding password...'
name : 'password'
type : 'password'
label : 'Password'
button :
type : 'submit'
label : ' '
cssClass : 'Formline--half'
itemClass : kd.ButtonView
title : 'DISABLE 2-FACTOR'
style : 'GenericButton disable-tf'
callback : @bound 'handleDisableFormButton'
handleDisableFormButton: ->
{ password } = @disableForm.inputs
options =
password : PI:PASSWORD:<PASSWORD>END_PI()
disable : yes
@handleProcessOf2FactorAuth options, 'Successfully Disabled!'
handleProcessOf2FactorAuth: (options, message) ->
me = whoami()
me.setup2FactorAuth options, (err) =>
return if @showError err
new kd.NotificationView
title : message
type : 'mini'
@buildInitialView()
@unsetClass 'enabled2Factor'
@setHeight 360
getFormView: ->
@addSubView @enableForm = new kd.FormViewWithFields
cssClass : 'AppModal-form'
fields :
tfcode :
name : 'tfcode'
label : 'Verification Code'
password :
name : 'password'
type : 'password'
label : 'Password'
buttons :
Enable :
type : 'submit'
title : 'ENABLE 2-FACTOR AUTH'
style : 'GenericButton enable-tf'
callback : @bound 'handleEnableFormButton'
handleEnableFormButton: ->
{ password, tfcode } = @enableForm.inputs
options =
key : @_activeKey
password : PI:PASSWORD:<PASSWORD>END_PI()
verification : tfcode.getValue()
@handleProcessOf2FactorAuth options, 'Successfully Enabled!'
getQrCodeView: (url) ->
view = new kd.CustomHTMLView
cssClass : 'qrcode-view'
view.addSubView new kd.CustomHTMLView
tagName : 'label'
cssClass : 'qrcode-label'
partial : 'QR Code'
view.addSubView @imageView = new kd.CustomHTMLView
tagName : 'img'
attributes : { src : url }
view.addSubView @getRenewQRCodeLink()
return view
getRenewQRCodeLink: ->
new CustomLinkView
title : 'Renew QR Code'
cssClass : 'HomeAppView--link primary refresh'
click : =>
me = whoami()
me.generate2FactorAuthKey (err, authInfo) =>
@showError err
if authInfo
@_activeKey = authInfo.key
@imageView.setAttribute 'src', authInfo.qrcode
kd.utils.defer button.bound 'hideLoader'
getLoaderView: ->
new kd.LoaderView
cssClass : 'main-loader'
showLoader : yes
size :
width : 25
height : 25
getLearnLink: ->
"
<a class='learn-link HomeAppView--link primary' href='https://koding.com/docs/2-factor-auth/' target=_blank>
LEARN MORE</a>
"
getInstructionsView: ->
new kd.CustomHTMLView
cssClass : 'instructions'
partial : """
<div class='intro'>
Use your Keychain or Authenticator App to generate a 6-digit
Verification Code by scanning this QR: <br />
</div>
<ul>
<li>Open Keychain or App</li>
<li>Scan QR code</li>
<li>Enter Verification Code & account password</li>
</ul>
#{@getLearnLink()}
"""
|
[
{
"context": "name = \"World\"\nconsole.log \"Hello\"+name+\" Welcome to CoffeeScri",
"end": 13,
"score": 0.9987039566040039,
"start": 8,
"tag": "NAME",
"value": "World"
}
] | src/HelloCoffee.coffee | RayMiles94/CoffeeScriptests | 0 | name = "World"
console.log "Hello"+name+" Welcome to CoffeeScript" | 102502 | name = "<NAME>"
console.log "Hello"+name+" Welcome to CoffeeScript" | true | name = "PI:NAME:<NAME>END_PI"
console.log "Hello"+name+" Welcome to CoffeeScript" |
[
{
"context": "us: 422, error: '这个电话号码已经被注册了'}\n password = bbCrypto.encodePassword password\n\n docs = {phone_nu",
"end": 1024,
"score": 0.7098840475082397,
"start": 1016,
"tag": "PASSWORD",
"value": "bbCrypto"
},
{
"context": "经被注册了'}\n password = bbCrypto.encodePassword password\n\n docs = {phone_number, password, jpush_re",
"end": 1048,
"score": 0.6380863785743713,
"start": 1040,
"tag": "PASSWORD",
"value": "password"
}
] | controller/sign.coffee | icarusysuper/imageDistribution | 0 | proxy = require '../proxy'
User = proxy.User
Album = proxy.Album
validator = require 'validator'
Eventproxy = require 'eventproxy'
bbCrypto = require '../helper/bbCrypto'
userMiddleware = require './user'
exports.signup = (req, res, next)->
phone_number = validator.trim(req.body.phone_number) || ''
password = validator.trim(req.body.password) || ''
re_password = validator.trim(req.body.re_password) || ''
relation = validator.trim(req.body.relation) || ''
jpush_registration_id = req.body.jpush_registration_id
if ([phone_number, password, re_password, relation].some (value)-> value is '')
return next {status: 422, error: '信息不完整'}
if password isnt re_password
return next {status: 422, error: '两次输入的密码不一致'}
ep = new Eventproxy()
ep.fail next
User.findOne {phone_number}, ep.done('gotUser')
ep.once 'gotUser', (user)->
if user then return next {status: 422, error: '这个电话号码已经被注册了'}
password = bbCrypto.encodePassword password
docs = {phone_number, password, jpush_registration_id}
User.insert docs, ep.done('newUserDone')
ep.once 'newUserDone', (user)->
docs =
followers: {}
docs.followers[relation] =
id: user._id.toHexString()
phone_number: user.phone_number
is_creator: true
Album.insert docs, ep.done('newAlbumDone', (album)-> {album, user})
ep.once 'newAlbumDone', (data)->
data.user.album =
id: data.album._id.toHexString()
relation: relation
data.user.save ep.doneLater('updateUserDone')
ep.once 'updateUserDone', (user)->
res.json
flag: true
exports.signin = (req, res, next)->
phone_number = validator.trim(req.body.phone_number)
password = bbCrypto.encodePassword validator.trim(req.body.password)
jpush_registration_id = req.body.jpush_registration_id
ep = new Eventproxy()
ep.fail next
User.findOne {phone_number}, ep.done('gotUser')
ep.once 'gotUser', (user)->
if not user then return next {error: '没有这个用户'}
if password isnt user.password then return next {error: '密码错误'}
if jpush_registration_id and jpush_registration_id isnt user.jpush_registration_id
user.jpush_registration_id = jpush_registration_id
user.save ep.doneLater('authDone')
else ep.emitLater('authDone', user)
ep.once 'authDone', (user)->
req.session.user = user
ep.emitLater 'loadUser', user
userMiddleware.loadAlbumData user.album.id, ep.done('loadAlbum')
userMiddleware.loadPhotosDataByAlbumId user.album.id, ep.done('loadPhotos')
ep.all 'loadUser', 'loadAlbum', 'loadPhotos', (user, album, photos)->
res.json
flag: true
session_id: req.sessionID
user: User.getUserPersonalInfo user
album: album
photos: photos
exports.signout = (req, res, next)->
req.session.destroy (err)->
if err then return next err
console.log 'session destroy'
res.json {flag: true}
# 第三方登录
| 139265 | proxy = require '../proxy'
User = proxy.User
Album = proxy.Album
validator = require 'validator'
Eventproxy = require 'eventproxy'
bbCrypto = require '../helper/bbCrypto'
userMiddleware = require './user'
exports.signup = (req, res, next)->
phone_number = validator.trim(req.body.phone_number) || ''
password = validator.trim(req.body.password) || ''
re_password = validator.trim(req.body.re_password) || ''
relation = validator.trim(req.body.relation) || ''
jpush_registration_id = req.body.jpush_registration_id
if ([phone_number, password, re_password, relation].some (value)-> value is '')
return next {status: 422, error: '信息不完整'}
if password isnt re_password
return next {status: 422, error: '两次输入的密码不一致'}
ep = new Eventproxy()
ep.fail next
User.findOne {phone_number}, ep.done('gotUser')
ep.once 'gotUser', (user)->
if user then return next {status: 422, error: '这个电话号码已经被注册了'}
password = <PASSWORD>.encodePassword <PASSWORD>
docs = {phone_number, password, jpush_registration_id}
User.insert docs, ep.done('newUserDone')
ep.once 'newUserDone', (user)->
docs =
followers: {}
docs.followers[relation] =
id: user._id.toHexString()
phone_number: user.phone_number
is_creator: true
Album.insert docs, ep.done('newAlbumDone', (album)-> {album, user})
ep.once 'newAlbumDone', (data)->
data.user.album =
id: data.album._id.toHexString()
relation: relation
data.user.save ep.doneLater('updateUserDone')
ep.once 'updateUserDone', (user)->
res.json
flag: true
exports.signin = (req, res, next)->
phone_number = validator.trim(req.body.phone_number)
password = bbCrypto.encodePassword validator.trim(req.body.password)
jpush_registration_id = req.body.jpush_registration_id
ep = new Eventproxy()
ep.fail next
User.findOne {phone_number}, ep.done('gotUser')
ep.once 'gotUser', (user)->
if not user then return next {error: '没有这个用户'}
if password isnt user.password then return next {error: '密码错误'}
if jpush_registration_id and jpush_registration_id isnt user.jpush_registration_id
user.jpush_registration_id = jpush_registration_id
user.save ep.doneLater('authDone')
else ep.emitLater('authDone', user)
ep.once 'authDone', (user)->
req.session.user = user
ep.emitLater 'loadUser', user
userMiddleware.loadAlbumData user.album.id, ep.done('loadAlbum')
userMiddleware.loadPhotosDataByAlbumId user.album.id, ep.done('loadPhotos')
ep.all 'loadUser', 'loadAlbum', 'loadPhotos', (user, album, photos)->
res.json
flag: true
session_id: req.sessionID
user: User.getUserPersonalInfo user
album: album
photos: photos
exports.signout = (req, res, next)->
req.session.destroy (err)->
if err then return next err
console.log 'session destroy'
res.json {flag: true}
# 第三方登录
| true | proxy = require '../proxy'
User = proxy.User
Album = proxy.Album
validator = require 'validator'
Eventproxy = require 'eventproxy'
bbCrypto = require '../helper/bbCrypto'
userMiddleware = require './user'
exports.signup = (req, res, next)->
phone_number = validator.trim(req.body.phone_number) || ''
password = validator.trim(req.body.password) || ''
re_password = validator.trim(req.body.re_password) || ''
relation = validator.trim(req.body.relation) || ''
jpush_registration_id = req.body.jpush_registration_id
if ([phone_number, password, re_password, relation].some (value)-> value is '')
return next {status: 422, error: '信息不完整'}
if password isnt re_password
return next {status: 422, error: '两次输入的密码不一致'}
ep = new Eventproxy()
ep.fail next
User.findOne {phone_number}, ep.done('gotUser')
ep.once 'gotUser', (user)->
if user then return next {status: 422, error: '这个电话号码已经被注册了'}
password = PI:PASSWORD:<PASSWORD>END_PI.encodePassword PI:PASSWORD:<PASSWORD>END_PI
docs = {phone_number, password, jpush_registration_id}
User.insert docs, ep.done('newUserDone')
ep.once 'newUserDone', (user)->
docs =
followers: {}
docs.followers[relation] =
id: user._id.toHexString()
phone_number: user.phone_number
is_creator: true
Album.insert docs, ep.done('newAlbumDone', (album)-> {album, user})
ep.once 'newAlbumDone', (data)->
data.user.album =
id: data.album._id.toHexString()
relation: relation
data.user.save ep.doneLater('updateUserDone')
ep.once 'updateUserDone', (user)->
res.json
flag: true
exports.signin = (req, res, next)->
phone_number = validator.trim(req.body.phone_number)
password = bbCrypto.encodePassword validator.trim(req.body.password)
jpush_registration_id = req.body.jpush_registration_id
ep = new Eventproxy()
ep.fail next
User.findOne {phone_number}, ep.done('gotUser')
ep.once 'gotUser', (user)->
if not user then return next {error: '没有这个用户'}
if password isnt user.password then return next {error: '密码错误'}
if jpush_registration_id and jpush_registration_id isnt user.jpush_registration_id
user.jpush_registration_id = jpush_registration_id
user.save ep.doneLater('authDone')
else ep.emitLater('authDone', user)
ep.once 'authDone', (user)->
req.session.user = user
ep.emitLater 'loadUser', user
userMiddleware.loadAlbumData user.album.id, ep.done('loadAlbum')
userMiddleware.loadPhotosDataByAlbumId user.album.id, ep.done('loadPhotos')
ep.all 'loadUser', 'loadAlbum', 'loadPhotos', (user, album, photos)->
res.json
flag: true
session_id: req.sessionID
user: User.getUserPersonalInfo user
album: album
photos: photos
exports.signout = (req, res, next)->
req.session.destroy (err)->
if err then return next err
console.log 'session destroy'
res.json {flag: true}
# 第三方登录
|
[
{
"context": "d = 0\n\npeople = [\n\t{\"id\": \"#{nextId++}\", \"name\": \"Saasha\", \"age\": \"5\"}\n\t{\"id\": \"#{nextId++}\", \"name\": \"Pla",
"end": 215,
"score": 0.9997945427894592,
"start": 209,
"tag": "NAME",
"value": "Saasha"
},
{
"context": "sha\", \"age\": \"5\"}\n\t{\"id\": \"#{nextId++}\", \"name\": \"Planet\", \"age\": \"7\"}\n]\n\nisUniqueName = (name) ->\n\t(name ",
"end": 268,
"score": 0.9555016756057739,
"start": 262,
"tag": "NAME",
"value": "Planet"
},
{
"context": "name\n\n\t\tperson =\n\t\t\t\"id\": \"#{nextId++}\"\n\t\t\t\"name\": \"#{name}\"\n\t\t\t\"age\": \"#{age}\"\n\n\t\tpeople.push person\n\t\t",
"end": 717,
"score": 0.8235641717910767,
"start": 717,
"tag": "NAME",
"value": ""
},
{
"context": "\n\n\t\tperson =\n\t\t\t\"id\": \"#{nextId++}\"\n\t\t\t\"name\": \"#{name}\"\n\t\t\t\"age\": \"#{age}\"\n\n\t\tpeople.push person\n\t\tres.",
"end": 725,
"score": 0.5131711363792419,
"start": 721,
"tag": "NAME",
"value": "name"
}
] | routes.coffee | gmp26/Apps1 | 6 | module.exports = (app, dir) ->
app.get '/', (req, res) ->
res.render "#{dir}/index.html"
###
#
# Original AngularFun app server - useful reference
#
nextId = 0
people = [
{"id": "#{nextId++}", "name": "Saasha", "age": "5"}
{"id": "#{nextId++}", "name": "Planet", "age": "7"}
]
isUniqueName = (name) ->
(name for person in people when person.name is name).length is 0
app.get '/people', (req, res) ->
res.json people
app.post '/people', (req, res) ->
name = req.body.name
age = req.body.age
message =
"title": "Duplicate!"
"message": "#{name} is a duplicate. Please enter a new name."
return res.send(message, 403) if not isUniqueName name
person =
"id": "#{nextId++}"
"name": "#{name}"
"age": "#{age}"
people.push person
res.json person
app.get '/people/:id', (req, res) ->
id = req.params.id
current = person for person in people when parseInt(person.id, 10) is parseInt(id, 10)
res.json current
### | 46358 | module.exports = (app, dir) ->
app.get '/', (req, res) ->
res.render "#{dir}/index.html"
###
#
# Original AngularFun app server - useful reference
#
nextId = 0
people = [
{"id": "#{nextId++}", "name": "<NAME>", "age": "5"}
{"id": "#{nextId++}", "name": "<NAME>", "age": "7"}
]
isUniqueName = (name) ->
(name for person in people when person.name is name).length is 0
app.get '/people', (req, res) ->
res.json people
app.post '/people', (req, res) ->
name = req.body.name
age = req.body.age
message =
"title": "Duplicate!"
"message": "#{name} is a duplicate. Please enter a new name."
return res.send(message, 403) if not isUniqueName name
person =
"id": "#{nextId++}"
"name":<NAME> "#{<NAME>}"
"age": "#{age}"
people.push person
res.json person
app.get '/people/:id', (req, res) ->
id = req.params.id
current = person for person in people when parseInt(person.id, 10) is parseInt(id, 10)
res.json current
### | true | module.exports = (app, dir) ->
app.get '/', (req, res) ->
res.render "#{dir}/index.html"
###
#
# Original AngularFun app server - useful reference
#
nextId = 0
people = [
{"id": "#{nextId++}", "name": "PI:NAME:<NAME>END_PI", "age": "5"}
{"id": "#{nextId++}", "name": "PI:NAME:<NAME>END_PI", "age": "7"}
]
isUniqueName = (name) ->
(name for person in people when person.name is name).length is 0
app.get '/people', (req, res) ->
res.json people
app.post '/people', (req, res) ->
name = req.body.name
age = req.body.age
message =
"title": "Duplicate!"
"message": "#{name} is a duplicate. Please enter a new name."
return res.send(message, 403) if not isUniqueName name
person =
"id": "#{nextId++}"
"name":PI:NAME:<NAME>END_PI "#{PI:NAME:<NAME>END_PI}"
"age": "#{age}"
people.push person
res.json person
app.get '/people/:id', (req, res) ->
id = req.params.id
current = person for person in people when parseInt(person.id, 10) is parseInt(id, 10)
res.json current
### |
[
{
"context": " ->\n\n beforeEach ->\n @h = R.h({\n plato: 'greek',\n witgenstein: ['austrian', 'british'],\n ",
"end": 79,
"score": 0.9526156783103943,
"start": 74,
"tag": "NAME",
"value": "greek"
},
{
"context": "Each ->\n @h = R.h({\n plato: 'greek',\n witgenstein: ['austrian', 'british'],\n russell: 'we",
"end": 94,
"score": 0.7786096930503845,
"start": 88,
"tag": "NAME",
"value": "witgen"
},
{
"context": " R.h({\n plato: 'greek',\n witgenstein: ['austrian', 'british'],\n russell: 'welsh'\n })\n\n",
"end": 107,
"score": 0.6253781318664551,
"start": 103,
"tag": "NAME",
"value": "aust"
},
{
"context": "\n witgenstein: ['austrian', 'british'],\n russell: 'welsh'\n })\n\n it \"returns an Array\", ->\n ",
"end": 139,
"score": 0.9190279245376587,
"start": 132,
"tag": "NAME",
"value": "russell"
},
{
"context": "enstein: ['austrian', 'british'],\n russell: 'welsh'\n })\n\n it \"returns an Array\", ->\n expect( ",
"end": 147,
"score": 0.9841732382774353,
"start": 142,
"tag": "NAME",
"value": "welsh"
},
{
"context": "flatten().to_native()\n expect( a[0] ).toEqual 'plato'\n expect( a[2] ).toEqual 'witgenstein'\n exp",
"end": 473,
"score": 0.7662695646286011,
"start": 468,
"tag": "NAME",
"value": "plato"
},
{
"context": "[0] ).toEqual 'plato'\n expect( a[2] ).toEqual 'witgenstein'\n expect( a[4] ).toEqual 'russell'\n\n it \"sets",
"end": 514,
"score": 0.9769759178161621,
"start": 503,
"tag": "NAME",
"value": "witgenstein"
},
{
"context": "toEqual 'witgenstein'\n expect( a[4] ).toEqual 'russell'\n\n it \"sets each odd index of the Array to the v",
"end": 551,
"score": 0.9773690700531006,
"start": 544,
"tag": "NAME",
"value": "russell"
},
{
"context": "flatten().to_native()\n expect( a[1] ).toEqual 'greek'\n expect( a[3] ).toEqual ['austrian', 'british",
"end": 715,
"score": 0.9638020396232605,
"start": 710,
"tag": "NAME",
"value": "greek"
},
{
"context": "es when called without arguments\", ->\n @h.set('russell', {born: 'wales', influenced_by: 'mill' })\n a ",
"end": 1073,
"score": 0.9542744159698486,
"start": 1066,
"tag": "NAME",
"value": "russell"
},
{
"context": " Array values to the given depth\", ->\n @h.set('russell', [['born', 'wales'], ['influenced_by', 'mill']])",
"end": 1564,
"score": 0.719933807849884,
"start": 1557,
"tag": "NAME",
"value": "russell"
}
] | spec/lib/corelib/hash/flatten_spec.coffee | hasclass/core-lib | 1 | describe "Hash#flatten", ->
beforeEach ->
@h = R.h({
plato: 'greek',
witgenstein: ['austrian', 'british'],
russell: 'welsh'
})
it "returns an Array", ->
expect( R.h({}).flatten() ).toBeInstanceOf(R.Array)
it "returns an empty Array for an empty Hash", ->
expect( R.h({}).flatten() ).toEqual R([])
it "sets each even index of the Array to a key of the Hash", ->
a = @h.flatten().to_native()
expect( a[0] ).toEqual 'plato'
expect( a[2] ).toEqual 'witgenstein'
expect( a[4] ).toEqual 'russell'
it "sets each odd index of the Array to the value corresponding to the previous element", ->
a = @h.flatten().to_native()
expect( a[1] ).toEqual 'greek'
expect( a[3] ).toEqual ['austrian', 'british']
expect( a[5] ).toEqual 'welsh'
it "does not recursively flatten Array values when called without arguments", ->
a = @h.flatten().to_native()
expect( a[3] ).toEqual ['austrian', 'british']
it "does not recursively flatten Hash values when called without arguments", ->
@h.set('russell', {born: 'wales', influenced_by: 'mill' })
a = @h.flatten().to_native()
expect( a[5] ).toNotEqual ['born', 'wales', 'influenced_by', 'mill' ]
expect( a[5] ).toEqual {born: 'wales', influenced_by: 'mill' }
it "recursively flattens Array values when called with an argument >= 2", ->
a = @h.flatten(2).to_native()
expect( a[3] ).toEqual 'austrian'
expect( a[4] ).toEqual 'british'
it "recursively flattens Array values to the given depth", ->
@h.set('russell', [['born', 'wales'], ['influenced_by', 'mill']])
a = @h.flatten(2).to_native()
expect( a[6] ).toEqual ['born', 'wales']
expect( a[7] ).toEqual ['influenced_by', 'mill']
# it "raises an TypeError if given a non-Integer argument", ->
# lambda do
# @h.flatten(Object.new)
# end.should raise_error(TypeError)
| 187395 | describe "Hash#flatten", ->
beforeEach ->
@h = R.h({
plato: '<NAME>',
<NAME>stein: ['<NAME>rian', 'british'],
<NAME>: '<NAME>'
})
it "returns an Array", ->
expect( R.h({}).flatten() ).toBeInstanceOf(R.Array)
it "returns an empty Array for an empty Hash", ->
expect( R.h({}).flatten() ).toEqual R([])
it "sets each even index of the Array to a key of the Hash", ->
a = @h.flatten().to_native()
expect( a[0] ).toEqual '<NAME>'
expect( a[2] ).toEqual '<NAME>'
expect( a[4] ).toEqual '<NAME>'
it "sets each odd index of the Array to the value corresponding to the previous element", ->
a = @h.flatten().to_native()
expect( a[1] ).toEqual '<NAME>'
expect( a[3] ).toEqual ['austrian', 'british']
expect( a[5] ).toEqual 'welsh'
it "does not recursively flatten Array values when called without arguments", ->
a = @h.flatten().to_native()
expect( a[3] ).toEqual ['austrian', 'british']
it "does not recursively flatten Hash values when called without arguments", ->
@h.set('<NAME>', {born: 'wales', influenced_by: 'mill' })
a = @h.flatten().to_native()
expect( a[5] ).toNotEqual ['born', 'wales', 'influenced_by', 'mill' ]
expect( a[5] ).toEqual {born: 'wales', influenced_by: 'mill' }
it "recursively flattens Array values when called with an argument >= 2", ->
a = @h.flatten(2).to_native()
expect( a[3] ).toEqual 'austrian'
expect( a[4] ).toEqual 'british'
it "recursively flattens Array values to the given depth", ->
@h.set('<NAME>', [['born', 'wales'], ['influenced_by', 'mill']])
a = @h.flatten(2).to_native()
expect( a[6] ).toEqual ['born', 'wales']
expect( a[7] ).toEqual ['influenced_by', 'mill']
# it "raises an TypeError if given a non-Integer argument", ->
# lambda do
# @h.flatten(Object.new)
# end.should raise_error(TypeError)
| true | describe "Hash#flatten", ->
beforeEach ->
@h = R.h({
plato: 'PI:NAME:<NAME>END_PI',
PI:NAME:<NAME>END_PIstein: ['PI:NAME:<NAME>END_PIrian', 'british'],
PI:NAME:<NAME>END_PI: 'PI:NAME:<NAME>END_PI'
})
it "returns an Array", ->
expect( R.h({}).flatten() ).toBeInstanceOf(R.Array)
it "returns an empty Array for an empty Hash", ->
expect( R.h({}).flatten() ).toEqual R([])
it "sets each even index of the Array to a key of the Hash", ->
a = @h.flatten().to_native()
expect( a[0] ).toEqual 'PI:NAME:<NAME>END_PI'
expect( a[2] ).toEqual 'PI:NAME:<NAME>END_PI'
expect( a[4] ).toEqual 'PI:NAME:<NAME>END_PI'
it "sets each odd index of the Array to the value corresponding to the previous element", ->
a = @h.flatten().to_native()
expect( a[1] ).toEqual 'PI:NAME:<NAME>END_PI'
expect( a[3] ).toEqual ['austrian', 'british']
expect( a[5] ).toEqual 'welsh'
it "does not recursively flatten Array values when called without arguments", ->
a = @h.flatten().to_native()
expect( a[3] ).toEqual ['austrian', 'british']
it "does not recursively flatten Hash values when called without arguments", ->
@h.set('PI:NAME:<NAME>END_PI', {born: 'wales', influenced_by: 'mill' })
a = @h.flatten().to_native()
expect( a[5] ).toNotEqual ['born', 'wales', 'influenced_by', 'mill' ]
expect( a[5] ).toEqual {born: 'wales', influenced_by: 'mill' }
it "recursively flattens Array values when called with an argument >= 2", ->
a = @h.flatten(2).to_native()
expect( a[3] ).toEqual 'austrian'
expect( a[4] ).toEqual 'british'
it "recursively flattens Array values to the given depth", ->
@h.set('PI:NAME:<NAME>END_PI', [['born', 'wales'], ['influenced_by', 'mill']])
a = @h.flatten(2).to_native()
expect( a[6] ).toEqual ['born', 'wales']
expect( a[7] ).toEqual ['influenced_by', 'mill']
# it "raises an TypeError if given a non-Integer argument", ->
# lambda do
# @h.flatten(Object.new)
# end.should raise_error(TypeError)
|
[
{
"context": "ail: (data, callback) ->\n\n { profile : { email, firstName, lastName } } = data\n invitations = [ { email,",
"end": 1203,
"score": 0.9993669986724854,
"start": 1194,
"tag": "NAME",
"value": "firstName"
},
{
"context": " callback) ->\n\n { profile : { email, firstName, lastName } } = data\n invitations = [ { email, firstName",
"end": 1213,
"score": 0.9991422891616821,
"start": 1205,
"tag": "NAME",
"value": "lastName"
},
{
"context": ", lastName } } = data\n invitations = [ { email, firstName, lastName, role : 'member' } ]\n\n remote.api.JI",
"end": 1263,
"score": 0.9993670582771301,
"start": 1254,
"tag": "NAME",
"value": "firstName"
},
{
"context": "} } = data\n invitations = [ { email, firstName, lastName, role : 'member' } ]\n\n remote.api.JInvitation.",
"end": 1273,
"score": 0.9985679388046265,
"start": 1265,
"tag": "NAME",
"value": "lastName"
},
{
"context": "llname\">#{fullname}</p>\n <p class=\"nickname\">@#{nickname}</p>\n </div>\n <p title=\"#{ema",
"end": 2780,
"score": 0.7462486624717712,
"start": 2780,
"tag": "USERNAME",
"value": ""
}
] | client/admin/lib/views/members/blockedmemberitemview.coffee | ezgikaysi/koding | 1 | kd = require 'kd'
JView = require 'app/jview'
remote = require('app/remote').getInstance()
AvatarView = require 'app/commonviews/avatarviews/avatarview'
getFullnameFromAccount = require 'app/util/getFullnameFromAccount'
module.exports = class BlockedMemberItemView extends kd.ListItemView
JView.mixin @prototype
constructor: (options = {}, data) ->
options.type or= 'member'
super options, data
@avatar = new AvatarView
size : { width: 40, height : 40 }
, @getData()
@roleLabel = new kd.CustomHTMLView
cssClass : 'role'
partial : "Disabled <span class='settings-icon'></span>"
click : @bound 'toggleSettings'
@createSettingsView()
createSettingsView: ->
unless @settings
@settings = new kd.CustomHTMLView
cssClass : 'settings hidden'
@settings.addSubView @unblockButton = new kd.ButtonView
cssClass : kd.utils.curry 'solid compact outline blocked'
title : 'Enable User'
loader : { color: '#444444' }
callback : @bound 'unblockUser'
invitationWithNoEmail: (data, callback) ->
{ profile : { email, firstName, lastName } } = data
invitations = [ { email, firstName, lastName, role : 'member' } ]
remote.api.JInvitation.create
invitations : invitations
noEmail : yes
returnCodes : yes
, (err, res) ->
return callback err if err
return callback { message: 'Something went wrong, please try again!' } unless res
invite = res[0]
invite.status = 'accepted'
invite.accept().then (response) ->
callback null, response
.catch (err) -> callback err
unblockUser: ->
id = @getData().getId()
currentGroup = kd.singletons.groupsController.getCurrentGroup()
@invitationWithNoEmail @getData(), (err, result) =>
if err
customErr = new Error 'Something went wrong, please try again!'
return @handleError @unblockButton, customErr
options =
id: id
removeUserFromTeam: no
currentGroup.unblockMember options, (err) =>
if err
customErr = new Error 'Failed to unblock user. Please try again.'
return @handleError @unblockButton, customErr
kd.singletons.notificationController.emit 'NewMemberJoinedToGroup'
@destroy()
toggleSettings: ->
@settings.toggleClass 'hidden'
@roleLabel.toggleClass 'active'
pistachio: ->
data = @getData()
fullname = getFullnameFromAccount data
nickname = data.profile.nickname
email = data.profile.email
return """
<div class="details">
{{> @avatar}}
<p class="fullname">#{fullname}</p>
<p class="nickname">@#{nickname}</p>
</div>
<p title="#{email}" class="email">#{email}</p>
{{> @roleLabel}}
<div class='clear'></div>
{{> @settings}}
"""
| 146527 | kd = require 'kd'
JView = require 'app/jview'
remote = require('app/remote').getInstance()
AvatarView = require 'app/commonviews/avatarviews/avatarview'
getFullnameFromAccount = require 'app/util/getFullnameFromAccount'
module.exports = class BlockedMemberItemView extends kd.ListItemView
JView.mixin @prototype
constructor: (options = {}, data) ->
options.type or= 'member'
super options, data
@avatar = new AvatarView
size : { width: 40, height : 40 }
, @getData()
@roleLabel = new kd.CustomHTMLView
cssClass : 'role'
partial : "Disabled <span class='settings-icon'></span>"
click : @bound 'toggleSettings'
@createSettingsView()
createSettingsView: ->
unless @settings
@settings = new kd.CustomHTMLView
cssClass : 'settings hidden'
@settings.addSubView @unblockButton = new kd.ButtonView
cssClass : kd.utils.curry 'solid compact outline blocked'
title : 'Enable User'
loader : { color: '#444444' }
callback : @bound 'unblockUser'
invitationWithNoEmail: (data, callback) ->
{ profile : { email, <NAME>, <NAME> } } = data
invitations = [ { email, <NAME>, <NAME>, role : 'member' } ]
remote.api.JInvitation.create
invitations : invitations
noEmail : yes
returnCodes : yes
, (err, res) ->
return callback err if err
return callback { message: 'Something went wrong, please try again!' } unless res
invite = res[0]
invite.status = 'accepted'
invite.accept().then (response) ->
callback null, response
.catch (err) -> callback err
unblockUser: ->
id = @getData().getId()
currentGroup = kd.singletons.groupsController.getCurrentGroup()
@invitationWithNoEmail @getData(), (err, result) =>
if err
customErr = new Error 'Something went wrong, please try again!'
return @handleError @unblockButton, customErr
options =
id: id
removeUserFromTeam: no
currentGroup.unblockMember options, (err) =>
if err
customErr = new Error 'Failed to unblock user. Please try again.'
return @handleError @unblockButton, customErr
kd.singletons.notificationController.emit 'NewMemberJoinedToGroup'
@destroy()
toggleSettings: ->
@settings.toggleClass 'hidden'
@roleLabel.toggleClass 'active'
pistachio: ->
data = @getData()
fullname = getFullnameFromAccount data
nickname = data.profile.nickname
email = data.profile.email
return """
<div class="details">
{{> @avatar}}
<p class="fullname">#{fullname}</p>
<p class="nickname">@#{nickname}</p>
</div>
<p title="#{email}" class="email">#{email}</p>
{{> @roleLabel}}
<div class='clear'></div>
{{> @settings}}
"""
| true | kd = require 'kd'
JView = require 'app/jview'
remote = require('app/remote').getInstance()
AvatarView = require 'app/commonviews/avatarviews/avatarview'
getFullnameFromAccount = require 'app/util/getFullnameFromAccount'
module.exports = class BlockedMemberItemView extends kd.ListItemView
JView.mixin @prototype
constructor: (options = {}, data) ->
options.type or= 'member'
super options, data
@avatar = new AvatarView
size : { width: 40, height : 40 }
, @getData()
@roleLabel = new kd.CustomHTMLView
cssClass : 'role'
partial : "Disabled <span class='settings-icon'></span>"
click : @bound 'toggleSettings'
@createSettingsView()
createSettingsView: ->
unless @settings
@settings = new kd.CustomHTMLView
cssClass : 'settings hidden'
@settings.addSubView @unblockButton = new kd.ButtonView
cssClass : kd.utils.curry 'solid compact outline blocked'
title : 'Enable User'
loader : { color: '#444444' }
callback : @bound 'unblockUser'
invitationWithNoEmail: (data, callback) ->
{ profile : { email, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI } } = data
invitations = [ { email, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, role : 'member' } ]
remote.api.JInvitation.create
invitations : invitations
noEmail : yes
returnCodes : yes
, (err, res) ->
return callback err if err
return callback { message: 'Something went wrong, please try again!' } unless res
invite = res[0]
invite.status = 'accepted'
invite.accept().then (response) ->
callback null, response
.catch (err) -> callback err
unblockUser: ->
id = @getData().getId()
currentGroup = kd.singletons.groupsController.getCurrentGroup()
@invitationWithNoEmail @getData(), (err, result) =>
if err
customErr = new Error 'Something went wrong, please try again!'
return @handleError @unblockButton, customErr
options =
id: id
removeUserFromTeam: no
currentGroup.unblockMember options, (err) =>
if err
customErr = new Error 'Failed to unblock user. Please try again.'
return @handleError @unblockButton, customErr
kd.singletons.notificationController.emit 'NewMemberJoinedToGroup'
@destroy()
toggleSettings: ->
@settings.toggleClass 'hidden'
@roleLabel.toggleClass 'active'
pistachio: ->
data = @getData()
fullname = getFullnameFromAccount data
nickname = data.profile.nickname
email = data.profile.email
return """
<div class="details">
{{> @avatar}}
<p class="fullname">#{fullname}</p>
<p class="nickname">@#{nickname}</p>
</div>
<p title="#{email}" class="email">#{email}</p>
{{> @roleLabel}}
<div class='clear'></div>
{{> @settings}}
"""
|
[
{
"context": " callback: callback\n json:\n name: name\n email: email\n requester_id: reques",
"end": 3027,
"score": 0.7161269187927246,
"start": 3023,
"tag": "NAME",
"value": "name"
}
] | src/PagerDuty.coffee | atlassian/node-pagerduty | 0 | request = require 'request'
_object = (kvpairs) ->
res = {}
for kv in kvpairs
res[kv[0]] = kv[1]
res
_stripUndefined = (obj) ->
_object([k, v] for k, v of obj when v isnt undefined)
_expect = (expectedStatusCode, callback) ->
(err, response, body) ->
try
body = JSON.parse body
if err or response.statusCode != expectedStatusCode
callback err || if body && body.error then new Error(body.error.errors[0]) else new Error('Unexpected HTTP code: ' + response.statusCode)
else
callback null, body
class PagerDuty
module.exports = PagerDuty
constructor: ({@serviceKey, @subdomain}) ->
throw new Error 'PagerDuty.constructor: Need serviceKey!' unless @serviceKey?
create: ({description, incidentKey, details, callback}) ->
throw new Error 'PagerDuty.create: Need description!' unless description?
@_eventRequest arguments[0] extends eventType: 'trigger'
acknowledge: ({incidentKey, details, description, callback}) ->
throw new Error 'PagerDuty.acknowledge: Need incidentKey!' unless incidentKey?
@_eventRequest arguments[0] extends eventType: 'acknowledge'
resolve: ({incidentKey, details, description, callback}) ->
throw new Error 'PagerDuty.resolve: Need incidentKey!' unless incidentKey?
@_eventRequest arguments[0] extends eventType: 'resolve'
_eventRequest: ({description, incidentKey, eventType, details, callback}) ->
throw new Error 'PagerDuty._request: Need eventType!' unless eventType?
details ||= {}
callback ||= ->
json =
service_key: @serviceKey
event_type: eventType
description: description
details: details
incident_key: incidentKey
request
method: 'POST'
uri: 'https://events.pagerduty.com/generic/2010-04-15/create_event.json'
json: _stripUndefined json
, (err, response, body) ->
if err or response.statusCode != 200
callback err || new Error(body.errors[0])
else
callback null, body
getEscalationPolicies: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'escalation_policies'
callback: callback
qs:
query: query
offset: offset
limit: limit
getEscalationPoliciesOnCall: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'escalation_policies/on_call'
callback: callback
qs:
query: query
offset: offset
limit: limit
createEscalationPolicy: ({name, escalationRules, callback}) ->
@_postRequest
resource: 'escalation_policies'
callback: callback
json:
name: name
escalation_rules: escalationRules
getUsers: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'users'
callback: callback
qs:
query: query
offset: offset
limit: limit
createUser: ({name, email, requesterId, callback}) ->
@_postRequest
resource: 'users'
callback: callback
json:
name: name
email: email
requester_id: requesterId
getServices: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'services'
callback: callback
qs:
query: query
offset: offset
limit: limit
createService: ({name, escalationPolicyId, type, serviceKey, callback}) ->
@_postRequest
resource: 'services'
callback: callback
json:
service:
name: name
escalation_policy_id: escalationPolicyId
type: type
service_key: serviceKey
_getRequest: ({resource, qs, offset, limit, callback}) ->
callback ||= ->
uri = 'https://' + @subdomain + '.pagerduty.com/api/v1/' + resource
request
method: 'GET'
uri: uri
qs: _stripUndefined qs
headers: { 'Authorization': 'Token token=' + @serviceKey }
, _expect(200, callback)
_postRequest: ({resource, json, callback}) ->
callback ||= ->
uri = 'https://' + @subdomain + '.pagerduty.com/api/v1/' + resource
request
method: 'POST'
uri: uri
json: _stripUndefined json
headers: { 'Authorization': 'Token token=' + @serviceKey }
, _expect(201, callback)
| 158372 | request = require 'request'
_object = (kvpairs) ->
res = {}
for kv in kvpairs
res[kv[0]] = kv[1]
res
_stripUndefined = (obj) ->
_object([k, v] for k, v of obj when v isnt undefined)
_expect = (expectedStatusCode, callback) ->
(err, response, body) ->
try
body = JSON.parse body
if err or response.statusCode != expectedStatusCode
callback err || if body && body.error then new Error(body.error.errors[0]) else new Error('Unexpected HTTP code: ' + response.statusCode)
else
callback null, body
class PagerDuty
module.exports = PagerDuty
constructor: ({@serviceKey, @subdomain}) ->
throw new Error 'PagerDuty.constructor: Need serviceKey!' unless @serviceKey?
create: ({description, incidentKey, details, callback}) ->
throw new Error 'PagerDuty.create: Need description!' unless description?
@_eventRequest arguments[0] extends eventType: 'trigger'
acknowledge: ({incidentKey, details, description, callback}) ->
throw new Error 'PagerDuty.acknowledge: Need incidentKey!' unless incidentKey?
@_eventRequest arguments[0] extends eventType: 'acknowledge'
resolve: ({incidentKey, details, description, callback}) ->
throw new Error 'PagerDuty.resolve: Need incidentKey!' unless incidentKey?
@_eventRequest arguments[0] extends eventType: 'resolve'
_eventRequest: ({description, incidentKey, eventType, details, callback}) ->
throw new Error 'PagerDuty._request: Need eventType!' unless eventType?
details ||= {}
callback ||= ->
json =
service_key: @serviceKey
event_type: eventType
description: description
details: details
incident_key: incidentKey
request
method: 'POST'
uri: 'https://events.pagerduty.com/generic/2010-04-15/create_event.json'
json: _stripUndefined json
, (err, response, body) ->
if err or response.statusCode != 200
callback err || new Error(body.errors[0])
else
callback null, body
getEscalationPolicies: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'escalation_policies'
callback: callback
qs:
query: query
offset: offset
limit: limit
getEscalationPoliciesOnCall: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'escalation_policies/on_call'
callback: callback
qs:
query: query
offset: offset
limit: limit
createEscalationPolicy: ({name, escalationRules, callback}) ->
@_postRequest
resource: 'escalation_policies'
callback: callback
json:
name: name
escalation_rules: escalationRules
getUsers: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'users'
callback: callback
qs:
query: query
offset: offset
limit: limit
createUser: ({name, email, requesterId, callback}) ->
@_postRequest
resource: 'users'
callback: callback
json:
name: <NAME>
email: email
requester_id: requesterId
getServices: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'services'
callback: callback
qs:
query: query
offset: offset
limit: limit
createService: ({name, escalationPolicyId, type, serviceKey, callback}) ->
@_postRequest
resource: 'services'
callback: callback
json:
service:
name: name
escalation_policy_id: escalationPolicyId
type: type
service_key: serviceKey
_getRequest: ({resource, qs, offset, limit, callback}) ->
callback ||= ->
uri = 'https://' + @subdomain + '.pagerduty.com/api/v1/' + resource
request
method: 'GET'
uri: uri
qs: _stripUndefined qs
headers: { 'Authorization': 'Token token=' + @serviceKey }
, _expect(200, callback)
_postRequest: ({resource, json, callback}) ->
callback ||= ->
uri = 'https://' + @subdomain + '.pagerduty.com/api/v1/' + resource
request
method: 'POST'
uri: uri
json: _stripUndefined json
headers: { 'Authorization': 'Token token=' + @serviceKey }
, _expect(201, callback)
| true | request = require 'request'
_object = (kvpairs) ->
res = {}
for kv in kvpairs
res[kv[0]] = kv[1]
res
_stripUndefined = (obj) ->
_object([k, v] for k, v of obj when v isnt undefined)
_expect = (expectedStatusCode, callback) ->
(err, response, body) ->
try
body = JSON.parse body
if err or response.statusCode != expectedStatusCode
callback err || if body && body.error then new Error(body.error.errors[0]) else new Error('Unexpected HTTP code: ' + response.statusCode)
else
callback null, body
class PagerDuty
module.exports = PagerDuty
constructor: ({@serviceKey, @subdomain}) ->
throw new Error 'PagerDuty.constructor: Need serviceKey!' unless @serviceKey?
create: ({description, incidentKey, details, callback}) ->
throw new Error 'PagerDuty.create: Need description!' unless description?
@_eventRequest arguments[0] extends eventType: 'trigger'
acknowledge: ({incidentKey, details, description, callback}) ->
throw new Error 'PagerDuty.acknowledge: Need incidentKey!' unless incidentKey?
@_eventRequest arguments[0] extends eventType: 'acknowledge'
resolve: ({incidentKey, details, description, callback}) ->
throw new Error 'PagerDuty.resolve: Need incidentKey!' unless incidentKey?
@_eventRequest arguments[0] extends eventType: 'resolve'
_eventRequest: ({description, incidentKey, eventType, details, callback}) ->
throw new Error 'PagerDuty._request: Need eventType!' unless eventType?
details ||= {}
callback ||= ->
json =
service_key: @serviceKey
event_type: eventType
description: description
details: details
incident_key: incidentKey
request
method: 'POST'
uri: 'https://events.pagerduty.com/generic/2010-04-15/create_event.json'
json: _stripUndefined json
, (err, response, body) ->
if err or response.statusCode != 200
callback err || new Error(body.errors[0])
else
callback null, body
getEscalationPolicies: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'escalation_policies'
callback: callback
qs:
query: query
offset: offset
limit: limit
getEscalationPoliciesOnCall: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'escalation_policies/on_call'
callback: callback
qs:
query: query
offset: offset
limit: limit
createEscalationPolicy: ({name, escalationRules, callback}) ->
@_postRequest
resource: 'escalation_policies'
callback: callback
json:
name: name
escalation_rules: escalationRules
getUsers: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'users'
callback: callback
qs:
query: query
offset: offset
limit: limit
createUser: ({name, email, requesterId, callback}) ->
@_postRequest
resource: 'users'
callback: callback
json:
name: PI:NAME:<NAME>END_PI
email: email
requester_id: requesterId
getServices: ({query, offset, limit, callback}) ->
@_getRequest
resource: 'services'
callback: callback
qs:
query: query
offset: offset
limit: limit
createService: ({name, escalationPolicyId, type, serviceKey, callback}) ->
@_postRequest
resource: 'services'
callback: callback
json:
service:
name: name
escalation_policy_id: escalationPolicyId
type: type
service_key: serviceKey
_getRequest: ({resource, qs, offset, limit, callback}) ->
callback ||= ->
uri = 'https://' + @subdomain + '.pagerduty.com/api/v1/' + resource
request
method: 'GET'
uri: uri
qs: _stripUndefined qs
headers: { 'Authorization': 'Token token=' + @serviceKey }
, _expect(200, callback)
_postRequest: ({resource, json, callback}) ->
callback ||= ->
uri = 'https://' + @subdomain + '.pagerduty.com/api/v1/' + resource
request
method: 'POST'
uri: uri
json: _stripUndefined json
headers: { 'Authorization': 'Token token=' + @serviceKey }
, _expect(201, callback)
|
[
{
"context": "js\n\n PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.",
"end": 215,
"score": 0.9998324513435364,
"start": 198,
"tag": "NAME",
"value": "Benjamin Blundell"
},
{
"context": " PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.com\n\n This so",
"end": 231,
"score": 0.9999305009841919,
"start": 218,
"tag": "EMAIL",
"value": "ben@pxljs.com"
}
] | src/gl/shader.coffee | OniDaito/pxljs | 1 | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
Benjamin Blundell - ben@pxljs.com
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
###
{Matrix4, Vec2, Vec3, Vec4} = require '../math/math'
{Light} = require '../light/light'
{PXLError, PXLWarning, PXLLog} = require '../util/log'
{Contract} = require './contract'
# ## Shader
# The master shader class. Represents a shader that can be bound to a context or attached to a node
# chunks - a list [] of ShaderChunks that create our shader - order is important - it defines what
# chunks take precidence. chunks later on in the line rely and may overwrite these earlier in line.
# user_roles = passed onto the contract
class Shader
# **@constructor** Designed so an object can be built with no parameters
# via functions such as shaderFromText
# We keep hold of the chunks incase we need to rebuild the shader (changing various defines
# for example)
# - **vertex_source** - a String - Required
# - **fragment_source** - a String - Required
# - **user_roles** - an Object with named attributes mapping to String
constructor : (@vertex_source, @fragment_source, user_roles) ->
if PXL?
if PXL.Context.gl?
@_compile @vertex_source, @fragment_source
@contract = new Contract( @_getAttributes(), @_getUniforms(), user_roles)
@_uber = false
@
_compile: (sv, sf) ->
gl = PXL.Context.gl # Global / Current context
# Create the Vertex Shader
@vertexShader = gl.createShader(gl.VERTEX_SHADER)
if not @vertexShader
PXLError "No vertex shader object could be created"
# Fragment Shader
@fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)
if not @fragmentShader
PXLError "No Fragment shader object could be created"
gl.shaderSource @vertexShader, sv
gl.compileShader @vertexShader
if not gl.getShaderParameter @vertexShader, gl.COMPILE_STATUS
@_printLog @vertexShader, sv, "vertex"
PXLError "Failed to compiled Vertex Shader"
gl.shaderSource @fragmentShader, sf
gl.compileShader @fragmentShader
if not gl.getShaderParameter @fragmentShader, gl.COMPILE_STATUS
@_printLog @fragmentShader, sf, "fragment"
PXLError "Failed to compile Fragment Shader"
@shaderProgram = gl.createProgram()
gl.attachShader(@shaderProgram, @vertexShader)
gl.attachShader(@shaderProgram, @fragmentShader)
# Naughty hack - we check for the attribute aVertexPosition to bind it to 0!
# TODO - might need to do something better here but it stops attrib 0 being blank which is good
gl.bindAttribLocation(@shaderProgram, 0, "aVertexPosition")
gl.linkProgram(@shaderProgram)
gl.validateProgram(@shaderProgram)
success = gl.getProgramParameter(@shaderProgram, gl.LINK_STATUS)
if not success
PXLWarning gl.getProgramInfoLog @shaderProgram
PXLError "Failed to Link Shader"
WebGLActiveInfo
# We grab the attributes here and we set their positions to these
# in the contract - order dependent
attrs = @_getAttributes()
for attr in attrs
gl.bindAttribLocation @shaderProgram, attr.pos, attr.name
@
_printLog : (shader, source, kind) ->
compilationLog = PXL.Context.gl.getShaderInfoLog shader
PXLLog 'Shader compiler log: ' + compilationLog
tsf = ""
ln = 1
for l in source.split "\n"
tsf += ln + ": " + l + "\n"
ln++
PXLLog tsf
@_splitError compilationLog, source
PXLError "Could not compile " + kind + " shader"
_addToNode : (node) ->
node.shader = @
@
_splitError : (s, data) ->
lines = s.split('\n')
for line in lines
match = line.match(/ERROR: (\d+):(\d+): (.*)/)
if match
fileno = parseInt(match[1],10)-1
lineno = parseInt(match[2],10)-1
message = match[3]
datalines = data.split('\n')
PXLLog "Shader Error Log: " + fileno + ", " + lineno + ", " + message + "," + datalines[lineno]
@
# _getLocation: Return the location of a variable inside a compiled shader
_getLocation : (name) ->
PXL.Context.gl.getUniformLocation(@shaderProgram, name)
# **bind** - bind the shader to the current context
# - returns this
bind: ->
PXL.Context.gl.useProgram(@shaderProgram);
PXL.Context.shader = @
@
# **unbind** - Clear the current context so there are no shaders
# - returns this
unbind: ->
PXL.Context.gl.useProgram(null)
@
# **washUp** - Remove this shader and destroy it
# - returns this
washUp : ->
gl = PXL.Context.gl
gl.detachShader(@shaderProgram, @vertexShader)
gl.detachShader(@shaderProgram, @fragmentShader)
gl.deleteProgram(@shaderProgram)
gl.deleteShader(@vertexShader)
gl.deleteShader(@fragmentShader)
@
_getAttributes : () ->
# Cache attributes as they hopefully wont change!
if not @attributes?
gl = PXL.Context.gl
num_attributes = gl.getProgramParameter @shaderProgram, GL.ACTIVE_ATTRIBUTES
@attributes = []
# we set and use the positions here. aVertexPosition must always be zero
for i in [0..num_attributes-1]
a = gl.getActiveAttrib @shaderProgram, i
attribute =
name : a.name
pos : gl.getAttribLocation(@shaderProgram, a.name)
type : a.type # TODO - test this
size : a.size # Pretty much always 1 it seems for attributes
@attributes.push attribute
@attributes
# **_getUniforms** - Find all the active uniforms in a shader
# - returns an Array of objects
# { name, type, pos, size }
# Types are listed thusly and will need to be changed
# Uniform Types
# - const GLenum FLOAT_VEC2 = 0x8B50;
# - const GLenum FLOAT_VEC3 = 0x8B51;
# - const GLenum FLOAT_VEC4 = 0x8B52;
# - const GLenum INT_VEC2 = 0x8B53;
# - const GLenum INT_VEC3 = 0x8B54;
# - const GLenum INT_VEC4 = 0x8B55;
# - const GLenum BOOL = 0x8B56;
# - const GLenum BOOL_VEC2 = 0x8B57;
# - const GLenum BOOL_VEC3 = 0x8B58;
# - const GLenum BOOL_VEC4 = 0x8B59;
# - const GLenum FLOAT_MAT2 = 0x8B5A;
# - const GLenum FLOAT_MAT3 = 0x8B5B;
# - const GLenum FLOAT_MAT4 = 0x8B5C;
# - const GLenum SAMPLER_2D = 0x8B5E;
# - const GLenum SAMPLER_CUBE = 0x8B60;
_getUniforms : () ->
# Cache uniforms as they hopefully wont change!
if not @uniforms?
gl = PXL.Context.gl
num_uniforms = gl.getProgramParameter @shaderProgram, GL.ACTIVE_UNIFORMS
@uniforms = []
for i in [0..num_uniforms-1]
u = gl.getActiveUniform @shaderProgram, i
# It appears that active uniform arrays seem to have their name with '[0]' so remove
# TODO - must check to make sure this is the actual behaviour
tn = u.name
i = tn.indexOf("[")
if i > 0
tn = u.name.slice(0,i)
uniform =
name : tn
pos : @._getLocation u.name
type : u.type # TODO - test this
size : u.size # TODO - test this
@uniforms.push uniform
@uniforms
# _getTextures - find all the texture in a shader
# TODO - other samplers? Texture Cube for example?
_getTextures : () ->
d = @._parseShader("uniform")
x = []
for a in d
if a.type == "sampler2D"
p = @._getLocation(a.name)
if p? and p != -1
a.pos = p
x.push(a)
x
# _parseShader - Given a token, a line basically, parse the line and get the types (vec3 etc)
_parseShader : (token) ->
data = []
lines = @sv.split(";").concat(@sf.split(";"))
for l in lines
re = RegExp("\\b" + token + "\\b")
if l.match(re)?
tokens = l.split(" ")
finals = []
for t in tokens
t = t.replace /\n/, ""
t = t.replace /\s/, ""
if t.length != 0
finals.push t
finals.push 1
matches = finals[2].match(/\[([0-9]+)\]/)
if matches?
finals[3] = matches[1]
finals[2] = finals[2].match(/([a-zA-Z]+)/g)[0]
if finals.length == 4
attr = {}
attr.name = finals[2]
attr.type = finals[1]
attr.pos = -1
attr.size = finals[3]
data.push attr
data
# **setUniform1f** - Given a uniform name and one float, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - returns this
setUniform1f: (name,a) ->
gl = PXL.Context.gl
gl.uniform1f(@_getLocation(name),a)
@
# **setUniform1i** - Given a uniform name and one integer, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Integer - Required
# - returns this
setUniform1i: (name,a) ->
gl = PXL.Context.gl
gl.uniform1i(@_getLocation(name),a)
@
# **setUniform1fv** - Given a uniform name and an array of floats, not grouped, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform1fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform1fv(@_getLocation(name), a)
@
# **setUniform2fv** - Given a uniform name and an array of floats, grouped in pairs, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform2fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform2fv(@_getLocation(name), a)
@
# **setUniform2v** - Given a uniform name and a Vec2, set this uniform
# - **name** - a String - Required
# - **v** - a Vec2 - Required
# - returns this
setUniform2v: (name,v) ->
gl = PXL.Context.gl
gl.uniform2f(@_getLocation(name),v.x,v.y)
@
# **setUniform3f** - Given a uniform name and three floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - returns this
setUniform3f: (name,a,b,c) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),a,b,c)
@
# **setUniform3fv** - Given a uniform name and an array of floats, grouped in threes, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform3fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform3fv(@_getLocation(name), a)
@
# **setUniform3v** - Given a uniform name and a Vec3, set this uniform
# - **name** - a String - Required
# - **v** - a Vec3 - Required
# - returns this
setUniform3v: (name,v) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),v.x,v.y,v.z)
@
# **setUniform3f** - Given 3 floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - returns this
setUniform3f: (name,a,b,c) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),a,b,c)
@
# **setUniform4f** - Given a uniform name and four floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - **d** - a Number - Required
# - returns this
setUniform4f: (name,a,b,c,d) ->
gl = PXL.Context.gl
gl.uniform4f(@_getLocation(name),a,b,c,d)
@
# **setUniform4v** - Given a uniform name and a Vec4, set this uniform
# - **name** - a String - Required
# - **v** - a Vec4 - Required
# - returns this
setUniform4v: (name,v) ->
gl = PXL.Context.gl
gl.uniform4f(@_getLocation(name),v.x,v.y,v.z,v.w)
@
# **setUniform4fv** - Given a uniform name and an array of floats, grouped in fours, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform4fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform4fv(@_getLocation(name), a)
@
# setMatrix4f - Given a uniform name and a matrix3, set this uniform
# - **name** - a String - Required
# - **m** - a Matrix3 - Required
# - returns this
setMatrix3f: (name, m) ->
gl = PXL.Context.gl
gl.uniformMatrix3fv(@_getLocation(name), false, m.a)
@
# setMatrix4f - Given a uniform name and a matrix4, set this uniform
# - **name** - a String - Required
# - **m** - a Matrix4 - Required
# - returns this
setMatrix4f: (name, m) ->
gl = PXL.Context.gl
gl.uniformMatrix4fv(@_getLocation(name), false, m.a)
@
# **enableAttribArray** - Enable an attribute array by name
# - **name** - a String - Required
# - returns this
enableAttribArray: (name) ->
gl = PXL.Context.gl
position = gl.getAttribLocation(@shaderProgram, name)
gl.enableVertexAttribArray(position)
@
# getAttribLocation - Get the location of an attribute
# - **name** - a String - Required
# - returns a Number
getAttribLocation: (name) ->
gl = PXL.Context.gl
gl.getAttribLocation(@shaderProgram, name)
# ## shaderFromText
# Create a shader from a block of text and an optional contract
# - **text** - A String - Required
# - **user_roles** - An Object with named attributes mapping to Strings
# - returns a Shader
shaderFromText = ( text, user_roles ) ->
_splitShader = (s) ->
sv = sf = ""
pv = s.indexOf("##>VERTEX")
pf = s.indexOf("##>FRAGMENT")
if pv != -1
if (pf != -1 && pf > pv)
sv = s.slice(pv + 9, pf)
else if (pf != -1 && pf < pv)
sv = s.slice(pv + 9)
if pf != -1
if pv != -1 && pv > pf
sf = s.slice(pf + 11, pv)
else if pf != -1 && pv < pf
sf = s.slice(pf + 11)
return [sv,sf]
_javascriptize = (shader_txt,var_name) ->
shader_js = "var " + var_name + "=" + "\n"
lines = shader_txt.split("\n")
for lidx in [0..lines.length-1]
newline = lines[lidx]
newline = newline.replace("\n","")
newline = newline.replace("\r","")
shader_js = shader_js + '\"' + newline + '\\n"'
if (lidx + 1) < lines.length
shader_js = shader_js + ' +\n'
shader_js = shader_js + ";"
shader_js
# We assume that chunks are in a subdir relative to the glsl files called 'chunks'
# TODO - need someway to check it against a list of chunks that could already be in memory maybe?
parts =_splitShader(text);
shader_vertex = parts[0];
shader_fragment = parts[1];
#shader_vertex = _javascriptize(shader_vertex, "shader_vertex");
#shader_fragment = _javascriptize(shader_fragment, "shader_fragment");
new Shader(shader_vertex, shader_fragment, user_roles)
module.exports =
Shader : Shader
shaderFromText : shaderFromText
| 17446 | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
<NAME> - <EMAIL>
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
###
{Matrix4, Vec2, Vec3, Vec4} = require '../math/math'
{Light} = require '../light/light'
{PXLError, PXLWarning, PXLLog} = require '../util/log'
{Contract} = require './contract'
# ## Shader
# The master shader class. Represents a shader that can be bound to a context or attached to a node
# chunks - a list [] of ShaderChunks that create our shader - order is important - it defines what
# chunks take precidence. chunks later on in the line rely and may overwrite these earlier in line.
# user_roles = passed onto the contract
class Shader
# **@constructor** Designed so an object can be built with no parameters
# via functions such as shaderFromText
# We keep hold of the chunks incase we need to rebuild the shader (changing various defines
# for example)
# - **vertex_source** - a String - Required
# - **fragment_source** - a String - Required
# - **user_roles** - an Object with named attributes mapping to String
constructor : (@vertex_source, @fragment_source, user_roles) ->
if PXL?
if PXL.Context.gl?
@_compile @vertex_source, @fragment_source
@contract = new Contract( @_getAttributes(), @_getUniforms(), user_roles)
@_uber = false
@
_compile: (sv, sf) ->
gl = PXL.Context.gl # Global / Current context
# Create the Vertex Shader
@vertexShader = gl.createShader(gl.VERTEX_SHADER)
if not @vertexShader
PXLError "No vertex shader object could be created"
# Fragment Shader
@fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)
if not @fragmentShader
PXLError "No Fragment shader object could be created"
gl.shaderSource @vertexShader, sv
gl.compileShader @vertexShader
if not gl.getShaderParameter @vertexShader, gl.COMPILE_STATUS
@_printLog @vertexShader, sv, "vertex"
PXLError "Failed to compiled Vertex Shader"
gl.shaderSource @fragmentShader, sf
gl.compileShader @fragmentShader
if not gl.getShaderParameter @fragmentShader, gl.COMPILE_STATUS
@_printLog @fragmentShader, sf, "fragment"
PXLError "Failed to compile Fragment Shader"
@shaderProgram = gl.createProgram()
gl.attachShader(@shaderProgram, @vertexShader)
gl.attachShader(@shaderProgram, @fragmentShader)
# Naughty hack - we check for the attribute aVertexPosition to bind it to 0!
# TODO - might need to do something better here but it stops attrib 0 being blank which is good
gl.bindAttribLocation(@shaderProgram, 0, "aVertexPosition")
gl.linkProgram(@shaderProgram)
gl.validateProgram(@shaderProgram)
success = gl.getProgramParameter(@shaderProgram, gl.LINK_STATUS)
if not success
PXLWarning gl.getProgramInfoLog @shaderProgram
PXLError "Failed to Link Shader"
WebGLActiveInfo
# We grab the attributes here and we set their positions to these
# in the contract - order dependent
attrs = @_getAttributes()
for attr in attrs
gl.bindAttribLocation @shaderProgram, attr.pos, attr.name
@
_printLog : (shader, source, kind) ->
compilationLog = PXL.Context.gl.getShaderInfoLog shader
PXLLog 'Shader compiler log: ' + compilationLog
tsf = ""
ln = 1
for l in source.split "\n"
tsf += ln + ": " + l + "\n"
ln++
PXLLog tsf
@_splitError compilationLog, source
PXLError "Could not compile " + kind + " shader"
_addToNode : (node) ->
node.shader = @
@
_splitError : (s, data) ->
lines = s.split('\n')
for line in lines
match = line.match(/ERROR: (\d+):(\d+): (.*)/)
if match
fileno = parseInt(match[1],10)-1
lineno = parseInt(match[2],10)-1
message = match[3]
datalines = data.split('\n')
PXLLog "Shader Error Log: " + fileno + ", " + lineno + ", " + message + "," + datalines[lineno]
@
# _getLocation: Return the location of a variable inside a compiled shader
_getLocation : (name) ->
PXL.Context.gl.getUniformLocation(@shaderProgram, name)
# **bind** - bind the shader to the current context
# - returns this
bind: ->
PXL.Context.gl.useProgram(@shaderProgram);
PXL.Context.shader = @
@
# **unbind** - Clear the current context so there are no shaders
# - returns this
unbind: ->
PXL.Context.gl.useProgram(null)
@
# **washUp** - Remove this shader and destroy it
# - returns this
washUp : ->
gl = PXL.Context.gl
gl.detachShader(@shaderProgram, @vertexShader)
gl.detachShader(@shaderProgram, @fragmentShader)
gl.deleteProgram(@shaderProgram)
gl.deleteShader(@vertexShader)
gl.deleteShader(@fragmentShader)
@
_getAttributes : () ->
# Cache attributes as they hopefully wont change!
if not @attributes?
gl = PXL.Context.gl
num_attributes = gl.getProgramParameter @shaderProgram, GL.ACTIVE_ATTRIBUTES
@attributes = []
# we set and use the positions here. aVertexPosition must always be zero
for i in [0..num_attributes-1]
a = gl.getActiveAttrib @shaderProgram, i
attribute =
name : a.name
pos : gl.getAttribLocation(@shaderProgram, a.name)
type : a.type # TODO - test this
size : a.size # Pretty much always 1 it seems for attributes
@attributes.push attribute
@attributes
# **_getUniforms** - Find all the active uniforms in a shader
# - returns an Array of objects
# { name, type, pos, size }
# Types are listed thusly and will need to be changed
# Uniform Types
# - const GLenum FLOAT_VEC2 = 0x8B50;
# - const GLenum FLOAT_VEC3 = 0x8B51;
# - const GLenum FLOAT_VEC4 = 0x8B52;
# - const GLenum INT_VEC2 = 0x8B53;
# - const GLenum INT_VEC3 = 0x8B54;
# - const GLenum INT_VEC4 = 0x8B55;
# - const GLenum BOOL = 0x8B56;
# - const GLenum BOOL_VEC2 = 0x8B57;
# - const GLenum BOOL_VEC3 = 0x8B58;
# - const GLenum BOOL_VEC4 = 0x8B59;
# - const GLenum FLOAT_MAT2 = 0x8B5A;
# - const GLenum FLOAT_MAT3 = 0x8B5B;
# - const GLenum FLOAT_MAT4 = 0x8B5C;
# - const GLenum SAMPLER_2D = 0x8B5E;
# - const GLenum SAMPLER_CUBE = 0x8B60;
_getUniforms : () ->
# Cache uniforms as they hopefully wont change!
if not @uniforms?
gl = PXL.Context.gl
num_uniforms = gl.getProgramParameter @shaderProgram, GL.ACTIVE_UNIFORMS
@uniforms = []
for i in [0..num_uniforms-1]
u = gl.getActiveUniform @shaderProgram, i
# It appears that active uniform arrays seem to have their name with '[0]' so remove
# TODO - must check to make sure this is the actual behaviour
tn = u.name
i = tn.indexOf("[")
if i > 0
tn = u.name.slice(0,i)
uniform =
name : tn
pos : @._getLocation u.name
type : u.type # TODO - test this
size : u.size # TODO - test this
@uniforms.push uniform
@uniforms
# _getTextures - find all the texture in a shader
# TODO - other samplers? Texture Cube for example?
_getTextures : () ->
d = @._parseShader("uniform")
x = []
for a in d
if a.type == "sampler2D"
p = @._getLocation(a.name)
if p? and p != -1
a.pos = p
x.push(a)
x
# _parseShader - Given a token, a line basically, parse the line and get the types (vec3 etc)
_parseShader : (token) ->
data = []
lines = @sv.split(";").concat(@sf.split(";"))
for l in lines
re = RegExp("\\b" + token + "\\b")
if l.match(re)?
tokens = l.split(" ")
finals = []
for t in tokens
t = t.replace /\n/, ""
t = t.replace /\s/, ""
if t.length != 0
finals.push t
finals.push 1
matches = finals[2].match(/\[([0-9]+)\]/)
if matches?
finals[3] = matches[1]
finals[2] = finals[2].match(/([a-zA-Z]+)/g)[0]
if finals.length == 4
attr = {}
attr.name = finals[2]
attr.type = finals[1]
attr.pos = -1
attr.size = finals[3]
data.push attr
data
# **setUniform1f** - Given a uniform name and one float, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - returns this
setUniform1f: (name,a) ->
gl = PXL.Context.gl
gl.uniform1f(@_getLocation(name),a)
@
# **setUniform1i** - Given a uniform name and one integer, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Integer - Required
# - returns this
setUniform1i: (name,a) ->
gl = PXL.Context.gl
gl.uniform1i(@_getLocation(name),a)
@
# **setUniform1fv** - Given a uniform name and an array of floats, not grouped, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform1fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform1fv(@_getLocation(name), a)
@
# **setUniform2fv** - Given a uniform name and an array of floats, grouped in pairs, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform2fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform2fv(@_getLocation(name), a)
@
# **setUniform2v** - Given a uniform name and a Vec2, set this uniform
# - **name** - a String - Required
# - **v** - a Vec2 - Required
# - returns this
setUniform2v: (name,v) ->
gl = PXL.Context.gl
gl.uniform2f(@_getLocation(name),v.x,v.y)
@
# **setUniform3f** - Given a uniform name and three floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - returns this
setUniform3f: (name,a,b,c) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),a,b,c)
@
# **setUniform3fv** - Given a uniform name and an array of floats, grouped in threes, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform3fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform3fv(@_getLocation(name), a)
@
# **setUniform3v** - Given a uniform name and a Vec3, set this uniform
# - **name** - a String - Required
# - **v** - a Vec3 - Required
# - returns this
setUniform3v: (name,v) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),v.x,v.y,v.z)
@
# **setUniform3f** - Given 3 floats, set this uniform
# NOTE(review): this is an exact duplicate of the `setUniform3f` defined a few
# methods above; in a CoffeeScript class body this later definition silently
# replaces the earlier one. One of the two should be deleted.
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - returns this
setUniform3f: (name,a,b,c) ->
  gl = PXL.Context.gl
  gl.uniform3f(@_getLocation(name),a,b,c)
  @
# **setUniform4f** - Given a uniform name and four floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - **d** - a Number - Required
# - returns this
setUniform4f: (name,a,b,c,d) ->
gl = PXL.Context.gl
gl.uniform4f(@_getLocation(name),a,b,c,d)
@
# **setUniform4v** - Given a uniform name and a Vec4, set this uniform
# - **name** - a String - Required
# - **v** - a Vec4 - Required
# - returns this
setUniform4v: (name,v) ->
gl = PXL.Context.gl
gl.uniform4f(@_getLocation(name),v.x,v.y,v.z,v.w)
@
# **setUniform4fv** - Given a uniform name and an array of floats, grouped in fours, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform4fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform4fv(@_getLocation(name), a)
@
# setMatrix3f - Given a uniform name and a Matrix3, set this uniform
# (header previously said "setMatrix4f" — copy/paste slip)
# - **name** - a String - Required
# - **m** - a Matrix3 - Required
# - returns this
setMatrix3f: (name, m) ->
  gl = PXL.Context.gl
  gl.uniformMatrix3fv(@_getLocation(name), false, m.a)
  @
# setMatrix4f - Given a uniform name and a matrix4, set this uniform
# - **name** - a String - Required
# - **m** - a Matrix4 - Required
# - returns this
setMatrix4f: (name, m) ->
gl = PXL.Context.gl
gl.uniformMatrix4fv(@_getLocation(name), false, m.a)
@
# **enableAttribArray** - Enable an attribute array by name
# - **name** - a String - Required
# - returns this
enableAttribArray: (name) ->
gl = PXL.Context.gl
position = gl.getAttribLocation(@shaderProgram, name)
gl.enableVertexAttribArray(position)
@
# getAttribLocation - Get the location of an attribute
# - **name** - a String - Required
# - returns a Number
getAttribLocation: (name) ->
gl = PXL.Context.gl
gl.getAttribLocation(@shaderProgram, name)
# ## shaderFromText
# Create a shader from a block of text and an optional contract.
# The text holds both stages, delimited by the markers "##>VERTEX" (9 chars)
# and "##>FRAGMENT" (11 chars); either marker may appear first.
# - **text** - A String - Required
# - **user_roles** - An Object with named attributes mapping to Strings
# - returns a Shader
shaderFromText = ( text, user_roles ) ->
  # Split the combined source at the stage markers. A stage whose marker is
  # missing yields the empty string.
  _splitShader = (s) ->
    sv = sf = ""
    pv = s.indexOf("##>VERTEX")
    pf = s.indexOf("##>FRAGMENT")
    if pv != -1
      if pf > pv
        sv = s.slice(pv + 9, pf)
      else
        # Fragment marker precedes the vertex marker *or is absent*; the
        # previous version dropped the vertex source entirely when pf == -1.
        sv = s.slice(pv + 9)
    if pf != -1
      if pv > pf
        sf = s.slice(pf + 11, pv)
      else
        sf = s.slice(pf + 11)
    return [sv,sf]
  # Turn raw shader text into a JS string-literal assignment; kept only for
  # the commented-out debugging path below.
  _javascriptize = (shader_txt,var_name) ->
    shader_js = "var " + var_name + "=" + "\n"
    lines = shader_txt.split("\n")
    # exclusive range: safe even for a zero-line input (the inclusive
    # `[0..length-1]` form iterates descending when length is 0)
    for lidx in [0...lines.length]
      newline = lines[lidx]
      newline = newline.replace("\n","")
      newline = newline.replace("\r","")
      shader_js = shader_js + '\"' + newline + '\\n"'
      if (lidx + 1) < lines.length
        shader_js = shader_js + ' +\n'
    shader_js = shader_js + ";"
    shader_js
  # We assume that chunks are in a subdir relative to the glsl files called 'chunks'
  # TODO - need someway to check it against a list of chunks that could already be in memory maybe?
  parts =_splitShader(text);
  shader_vertex = parts[0];
  shader_fragment = parts[1];
  #shader_vertex = _javascriptize(shader_vertex, "shader_vertex");
  #shader_fragment = _javascriptize(shader_fragment, "shader_fragment");
  new Shader(shader_vertex, shader_fragment, user_roles)
module.exports =
Shader : Shader
shaderFromText : shaderFromText
| true | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
PI:NAME:<NAME>END_PI - PI:EMAIL:<EMAIL>END_PI
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
###
{Matrix4, Vec2, Vec3, Vec4} = require '../math/math'
{Light} = require '../light/light'
{PXLError, PXLWarning, PXLLog} = require '../util/log'
{Contract} = require './contract'
# ## Shader
# The master shader class. Represents a shader that can be bound to a context or attached to a node
# chunks - a list [] of ShaderChunks that create our shader - order is important - it defines what
# chunks take precedence: chunks later in the line rely on, and may overwrite, those earlier in the line.
# user_roles = passed onto the contract
class Shader
# **@constructor** Designed so an object can be built with no parameters
# via functions such as shaderFromText
# We keep hold of the chunks incase we need to rebuild the shader (changing various defines
# for example)
# - **vertex_source** - a String - Required
# - **fragment_source** - a String - Required
# - **user_roles** - an Object with named attributes mapping to String
constructor : (@vertex_source, @fragment_source, user_roles) ->
if PXL?
if PXL.Context.gl?
@_compile @vertex_source, @fragment_source
@contract = new Contract( @_getAttributes(), @_getUniforms(), user_roles)
@_uber = false
@
# Compile both stages, link them into a program, and record the program plus
# shader handles on the instance. Raises (via PXLError) on any GL failure.
# - **sv** - vertex-stage GLSL source - a String
# - **sf** - fragment-stage GLSL source - a String
# - returns this
_compile: (sv, sf) ->
  gl = PXL.Context.gl # Global / Current context
  # Create the Vertex Shader
  @vertexShader = gl.createShader(gl.VERTEX_SHADER)
  if not @vertexShader
    PXLError "No vertex shader object could be created"
  # Fragment Shader
  @fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)
  if not @fragmentShader
    PXLError "No Fragment shader object could be created"
  gl.shaderSource @vertexShader, sv
  gl.compileShader @vertexShader
  if not gl.getShaderParameter @vertexShader, gl.COMPILE_STATUS
    @_printLog @vertexShader, sv, "vertex"
    PXLError "Failed to compile Vertex Shader"
  gl.shaderSource @fragmentShader, sf
  gl.compileShader @fragmentShader
  if not gl.getShaderParameter @fragmentShader, gl.COMPILE_STATUS
    @_printLog @fragmentShader, sf, "fragment"
    PXLError "Failed to compile Fragment Shader"
  @shaderProgram = gl.createProgram()
  gl.attachShader(@shaderProgram, @vertexShader)
  gl.attachShader(@shaderProgram, @fragmentShader)
  # Naughty hack - we check for the attribute aVertexPosition to bind it to 0!
  # TODO - might need to do something better here but it stops attrib 0 being blank which is good
  gl.bindAttribLocation(@shaderProgram, 0, "aVertexPosition")
  gl.linkProgram(@shaderProgram)
  gl.validateProgram(@shaderProgram)
  success = gl.getProgramParameter(@shaderProgram, gl.LINK_STATUS)
  if not success
    PXLWarning gl.getProgramInfoLog @shaderProgram
    PXLError "Failed to Link Shader"
  # (a stray bare `WebGLActiveInfo` expression used to sit here — it did
  # nothing in browsers and raised a ReferenceError elsewhere; removed)
  # We grab the attributes here and we set their positions to these
  # in the contract - order dependent
  # NOTE(review): per the WebGL spec, bindAttribLocation only takes effect on
  # the *next* linkProgram call; these bindings are inert until a relink —
  # confirm whether a relink was intended here.
  attrs = @_getAttributes()
  for attr in attrs
    gl.bindAttribLocation @shaderProgram, attr.pos, attr.name
  @
_printLog : (shader, source, kind) ->
compilationLog = PXL.Context.gl.getShaderInfoLog shader
PXLLog 'Shader compiler log: ' + compilationLog
tsf = ""
ln = 1
for l in source.split "\n"
tsf += ln + ": " + l + "\n"
ln++
PXLLog tsf
@_splitError compilationLog, source
PXLError "Could not compile " + kind + " shader"
_addToNode : (node) ->
node.shader = @
@
_splitError : (s, data) ->
lines = s.split('\n')
for line in lines
match = line.match(/ERROR: (\d+):(\d+): (.*)/)
if match
fileno = parseInt(match[1],10)-1
lineno = parseInt(match[2],10)-1
message = match[3]
datalines = data.split('\n')
PXLLog "Shader Error Log: " + fileno + ", " + lineno + ", " + message + "," + datalines[lineno]
@
# _getLocation: Return the location of a variable inside a compiled shader
_getLocation : (name) ->
PXL.Context.gl.getUniformLocation(@shaderProgram, name)
# **bind** - bind the shader to the current context
# - returns this
bind: ->
PXL.Context.gl.useProgram(@shaderProgram);
PXL.Context.shader = @
@
# **unbind** - Clear the current context so there are no shaders
# - returns this
unbind: ->
PXL.Context.gl.useProgram(null)
@
# **washUp** - Remove this shader and destroy it
# - returns this
washUp : ->
gl = PXL.Context.gl
gl.detachShader(@shaderProgram, @vertexShader)
gl.detachShader(@shaderProgram, @fragmentShader)
gl.deleteProgram(@shaderProgram)
gl.deleteShader(@vertexShader)
gl.deleteShader(@fragmentShader)
@
# Enumerate the program's active attributes (name / bound position / GL type /
# size) and memoize the result on the instance.
# - returns an Array of { name, pos, type, size }
_getAttributes : () ->
  # Cache attributes as they hopefully wont change!
  if not @attributes?
    gl = PXL.Context.gl
    # NOTE(fix): was `GL.ACTIVE_ATTRIBUTES` — no `GL` is defined in this
    # module; the parameter enum lives on the context object `gl`.
    num_attributes = gl.getProgramParameter @shaderProgram, gl.ACTIVE_ATTRIBUTES
    @attributes = []
    # we set and use the positions here. aVertexPosition must always be zero
    # NOTE(fix): exclusive range — the inclusive `[0..n-1]` form iterates
    # *descending* (0, -1) when a program has zero active attributes.
    for i in [0...num_attributes]
      a = gl.getActiveAttrib @shaderProgram, i
      attribute =
        name : a.name
        pos : gl.getAttribLocation(@shaderProgram, a.name)
        type : a.type # TODO - test this
        size : a.size # Pretty much always 1 it seems for attributes
      @attributes.push attribute
  @attributes
# **_getUniforms** - Find all the active uniforms in a shader
# - returns an Array of objects
# { name, type, pos, size }
# Types are listed thusly and will need to be changed
# Uniform Types
# - const GLenum FLOAT_VEC2 = 0x8B50;
# - const GLenum FLOAT_VEC3 = 0x8B51;
# - const GLenum FLOAT_VEC4 = 0x8B52;
# - const GLenum INT_VEC2 = 0x8B53;
# - const GLenum INT_VEC3 = 0x8B54;
# - const GLenum INT_VEC4 = 0x8B55;
# - const GLenum BOOL = 0x8B56;
# - const GLenum BOOL_VEC2 = 0x8B57;
# - const GLenum BOOL_VEC3 = 0x8B58;
# - const GLenum BOOL_VEC4 = 0x8B59;
# - const GLenum FLOAT_MAT2 = 0x8B5A;
# - const GLenum FLOAT_MAT3 = 0x8B5B;
# - const GLenum FLOAT_MAT4 = 0x8B5C;
# - const GLenum SAMPLER_2D = 0x8B5E;
# - const GLenum SAMPLER_CUBE = 0x8B60;
# Enumerate the program's active uniforms (see the GLenum table above for the
# `type` values) and memoize the result on the instance.
# - returns an Array of { name, pos, type, size }
_getUniforms : () ->
  # Cache uniforms as they hopefully wont change!
  if not @uniforms?
    gl = PXL.Context.gl
    # NOTE(fix): was `GL.ACTIVE_UNIFORMS` — use the enum on the context `gl`.
    num_uniforms = gl.getProgramParameter @shaderProgram, gl.ACTIVE_UNIFORMS
    @uniforms = []
    # NOTE(fix): exclusive range so zero active uniforms means zero iterations.
    for i in [0...num_uniforms]
      u = gl.getActiveUniform @shaderProgram, i
      # It appears that active uniform arrays seem to have their name with '[0]' so remove
      # TODO - must check to make sure this is the actual behaviour
      # NOTE(fix): the bracket offset previously reused the loop variable `i`,
      # clobbering the iteration whenever an array uniform was encountered.
      tn = u.name
      bracket_idx = tn.indexOf("[")
      if bracket_idx > 0
        tn = u.name.slice(0,bracket_idx)
      uniform =
        name : tn
        pos : @._getLocation u.name
        type : u.type # TODO - test this
        size : u.size # TODO - test this
      @uniforms.push uniform
  @uniforms
# _getTextures - find all the texture in a shader
# TODO - other samplers? Texture Cube for example?
_getTextures : () ->
d = @._parseShader("uniform")
x = []
for a in d
if a.type == "sampler2D"
p = @._getLocation(a.name)
if p? and p != -1
a.pos = p
x.push(a)
x
# _parseShader - Scan both GLSL sources for declarations containing `token`
# (e.g. "uniform") and return their parsed name / type / size.
# - **token** - a String matched as a whole word - Required
# - returns an Array of { name, type, pos, size } with pos always -1
_parseShader : (token) ->
  data = []
  # NOTE(fix): the constructor stores the sources as @vertex_source /
  # @fragment_source; the previous `@sv` / `@sf` are never assigned anywhere
  # in this class and were undefined at this point.
  lines = @vertex_source.split(";").concat(@fragment_source.split(";"))
  # whole-word match; hoisted out of the loop since it never changes
  re = RegExp("\\b" + token + "\\b")
  for l in lines
    if l.match(re)?
      tokens = l.split(" ")
      finals = []
      for t in tokens
        t = t.replace /\n/, ""
        t = t.replace /\s/, ""
        if t.length != 0
          finals.push t
      # default size of 1; overwritten below for array declarations
      finals.push 1
      matches = finals[2].match(/\[([0-9]+)\]/)
      if matches?
        finals[3] = matches[1]
        finals[2] = finals[2].match(/([a-zA-Z]+)/g)[0]
      if finals.length == 4
        attr = {}
        attr.name = finals[2]
        attr.type = finals[1]
        attr.pos = -1
        attr.size = finals[3]
        data.push attr
  data
# **setUniform1f** - Given a uniform name and one float, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - returns this
setUniform1f: (name,a) ->
gl = PXL.Context.gl
gl.uniform1f(@_getLocation(name),a)
@
# **setUniform1i** - Given a uniform name and one integer, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Integer - Required
# - returns this
setUniform1i: (name,a) ->
gl = PXL.Context.gl
gl.uniform1i(@_getLocation(name),a)
@
# **setUniform1fv** - Given a uniform name and an array of floats, not grouped, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform1fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform1fv(@_getLocation(name), a)
@
# **setUniform2fv** - Given a uniform name and an array of floats, grouped in pairs, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform2fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform2fv(@_getLocation(name), a)
@
# **setUniform2v** - Given a uniform name and a Vec2, set this uniform
# - **name** - a String - Required
# - **v** - a Vec2 - Required
# - returns this
setUniform2v: (name,v) ->
gl = PXL.Context.gl
gl.uniform2f(@_getLocation(name),v.x,v.y)
@
# **setUniform3f** - Given a uniform name and three floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - returns this
setUniform3f: (name,a,b,c) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),a,b,c)
@
# **setUniform3fv** - Given a uniform name and an array of floats, grouped in threes, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform3fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform3fv(@_getLocation(name), a)
@
# **setUniform3v** - Given a uniform name and a Vec3, set this uniform
# - **name** - a String - Required
# - **v** - a Vec3 - Required
# - returns this
setUniform3v: (name,v) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),v.x,v.y,v.z)
@
# **setUniform3f** - Given 3 floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - returns this
setUniform3f: (name,a,b,c) ->
gl = PXL.Context.gl
gl.uniform3f(@_getLocation(name),a,b,c)
@
# **setUniform4f** - Given a uniform name and four floats, set this uniform
# - **name** - a String - Required
# - **a** - a Number - Required
# - **b** - a Number - Required
# - **c** - a Number - Required
# - **d** - a Number - Required
# - returns this
setUniform4f: (name,a,b,c,d) ->
gl = PXL.Context.gl
gl.uniform4f(@_getLocation(name),a,b,c,d)
@
# **setUniform4v** - Given a uniform name and a Vec4, set this uniform
# - **name** - a String - Required
# - **v** - a Vec4 - Required
# - returns this
setUniform4v: (name,v) ->
gl = PXL.Context.gl
gl.uniform4f(@_getLocation(name),v.x,v.y,v.z,v.w)
@
# **setUniform4fv** - Given a uniform name and an array of floats, grouped in fours, set this uniform
# - **name** - a String - Required
# - **a** - an Array of Number - Required
# - returns this
setUniform4fv : (name, a) ->
gl = PXL.Context.gl
gl.uniform4fv(@_getLocation(name), a)
@
# setMatrix3f - Given a uniform name and a Matrix3, set this uniform
# (header previously said "setMatrix4f" — copy/paste slip)
# - **name** - a String - Required
# - **m** - a Matrix3 - Required
# - returns this
setMatrix3f: (name, m) ->
  gl = PXL.Context.gl
  gl.uniformMatrix3fv(@_getLocation(name), false, m.a)
  @
# setMatrix4f - Given a uniform name and a matrix4, set this uniform
# - **name** - a String - Required
# - **m** - a Matrix4 - Required
# - returns this
setMatrix4f: (name, m) ->
gl = PXL.Context.gl
gl.uniformMatrix4fv(@_getLocation(name), false, m.a)
@
# **enableAttribArray** - Enable an attribute array by name
# - **name** - a String - Required
# - returns this
enableAttribArray: (name) ->
gl = PXL.Context.gl
position = gl.getAttribLocation(@shaderProgram, name)
gl.enableVertexAttribArray(position)
@
# getAttribLocation - Get the location of an attribute
# - **name** - a String - Required
# - returns a Number
getAttribLocation: (name) ->
gl = PXL.Context.gl
gl.getAttribLocation(@shaderProgram, name)
# ## shaderFromText
# Create a shader from a block of text and an optional contract.
# The text holds both stages, delimited by the markers "##>VERTEX" (9 chars)
# and "##>FRAGMENT" (11 chars); either marker may appear first.
# - **text** - A String - Required
# - **user_roles** - An Object with named attributes mapping to Strings
# - returns a Shader
shaderFromText = ( text, user_roles ) ->
  # Split the combined source at the stage markers. A stage whose marker is
  # missing yields the empty string.
  _splitShader = (s) ->
    sv = sf = ""
    pv = s.indexOf("##>VERTEX")
    pf = s.indexOf("##>FRAGMENT")
    if pv != -1
      if pf > pv
        sv = s.slice(pv + 9, pf)
      else
        # Fragment marker precedes the vertex marker *or is absent*; the
        # previous version dropped the vertex source entirely when pf == -1.
        sv = s.slice(pv + 9)
    if pf != -1
      if pv > pf
        sf = s.slice(pf + 11, pv)
      else
        sf = s.slice(pf + 11)
    return [sv,sf]
  # Turn raw shader text into a JS string-literal assignment; kept only for
  # the commented-out debugging path below.
  _javascriptize = (shader_txt,var_name) ->
    shader_js = "var " + var_name + "=" + "\n"
    lines = shader_txt.split("\n")
    # exclusive range: safe even for a zero-line input
    for lidx in [0...lines.length]
      newline = lines[lidx]
      newline = newline.replace("\n","")
      newline = newline.replace("\r","")
      shader_js = shader_js + '\"' + newline + '\\n"'
      if (lidx + 1) < lines.length
        shader_js = shader_js + ' +\n'
    shader_js = shader_js + ";"
    shader_js
  # We assume that chunks are in a subdir relative to the glsl files called 'chunks'
  # TODO - need someway to check it against a list of chunks that could already be in memory maybe?
  parts =_splitShader(text);
  shader_vertex = parts[0];
  shader_fragment = parts[1];
  #shader_vertex = _javascriptize(shader_vertex, "shader_vertex");
  #shader_fragment = _javascriptize(shader_fragment, "shader_fragment");
  new Shader(shader_vertex, shader_fragment, user_roles)
module.exports =
Shader : Shader
shaderFromText : shaderFromText
|
[
{
"context": " binary encoding with a custom https://github.com/deanlandolt/bytewise layer; the current\n Jizura DB version",
"end": 3606,
"score": 0.9996566772460938,
"start": 3595,
"tag": "USERNAME",
"value": "deanlandolt"
},
{
"context": ", ok, rank, sk, glyph, ] = key\n sub_key = \"so|glyph:#{glyph}|pod:\"\n return db[ '%self' ].createV",
"end": 4144,
"score": 0.9007153511047363,
"start": 4134,
"tag": "KEY",
"value": "so|glyph:#"
},
{
"context": "glyph, ] = key\n sub_key = \"so|glyph:#{glyph}|pod:\"\n return db[ '%self' ].createValueStream { ",
"end": 4157,
"score": 0.8203828930854797,
"start": 4152,
"tag": "KEY",
"value": "pod:\""
}
] | src/demo.coffee | loveencounterflow/hollerith-legacy | 0 |
############################################################################################################
njs_path = require 'path'
# njs_fs = require 'fs'
join = njs_path.join
#...........................................................................................................
CND = require 'cnd'
rpr = CND.rpr
badge = 'HOLLERITH/demo'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
step = ( require 'coffeenode-suspend' ).step
D = require 'pipedreams'
$ = D.remit.bind D
$async = D.remit_async.bind D
ASYNC = require 'async'
CHR = require 'coffeenode-chr'
KWIC = require 'kwic'
#...........................................................................................................
new_db = require 'level'
HOLLERITH = require './main'
ƒ = CND.format_number.bind CND
#...........................................................................................................
options = null
#-----------------------------------------------------------------------------------------------------------
@_misfit = Symbol 'misfit'
#===========================================================================================================
# PIPEDREAMS
#-----------------------------------------------------------------------------------------------------------
D.new_indexer = ( idx = 0 ) -> ( data ) => [ idx++, data, ]
#===========================================================================================================
#
#-----------------------------------------------------------------------------------------------------------
# Open the Hollerith DB named by `options['route']`, stash the handle under
# `options['db']`, and signal completion node-style (always without error;
# `new_db` failures propagate as throws).
# NOTE(review): `options` is initialized to `null` at module level above —
# presumably a caller assigns it before `initialize` runs; confirm.
@initialize = ( handler ) ->
  options[ 'db' ] = HOLLERITH.new_db options[ 'route' ]
  handler null
#-----------------------------------------------------------------------------------------------------------
@main = ( first_query ) ->
first_query ?= { gte: 'os|rank/cjt:0', lte: 'os|rank/cjt:9', }
step ( resume ) =>
yield @initialize resume
db = options[ 'db' ]
count_chrs = ( text ) -> ( CHR.chrs_from_text text, input: 'xncr' ).length
#.......................................................................................................
input = db[ '%self' ].createKeyStream first_query
# k = "so|glyph:繼|pod:"
# input = db[ '%self' ].createKeyStream db, { gte: k, lte: k + '\uffff' }
# debug '©cW8tK', HOLLERITH.new_key db, 'os', 'rank/cjt', '00000'
#.......................................................................................................
### TAINT We can currently not use `HOLLERITH2.read_sub` because HOLLERITH2 assumes a key-only
DB that uses binary encoding with a custom https://github.com/deanlandolt/bytewise layer; the current
Jizura DB version uses UTF-8 strings and is a key/value DB. ###
#.......................................................................................................
input
.pipe @_$split_bkey()
#.....................................................................................................
# .pipe HOLLERITH.read_sub db, indexed: yes, ( key ) =>
.pipe @read_sub db, indexed: yes, ( key ) =>
[ pt, ok, rank, sk, glyph, ] = key
sub_key = "so|glyph:#{glyph}|pod:"
return db[ '%self' ].createValueStream { gte: sub_key, lte: sub_key + '\uffff' }
#.....................................................................................................
.pipe D.$densort 0, 0, true
#.....................................................................................................
.pipe $ ( [ idx, [ pod, ], ], send ) =>
debug '©jd5cE', pod
unless pod[ 'strokeorder/short' ]?
warn '©9YXoq', pod
else
glyph = pod[ 'glyph/uchr' ]
strokeorder = pod[ 'strokeorder/short' ][ 0 ].length
lineup = pod[ 'guide/lineup/uchr' ].replace /\u3000/g, ''
send [ glyph, strokeorder, lineup, ]
#.....................................................................................................
.pipe $ ( [ glyph, strokeorder, lineup, ], send ) =>
send [ glyph, strokeorder, count_chrs lineup, ]
#.....................................................................................................
.pipe D.$sort ( a, b ) ->
idx = 1
return +1 if a[ idx ] > b[ idx ]
return -1 if a[ idx ] < b[ idx ]
return 0
#.....................................................................................................
.pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@_$split_bkey = -> $ ( bkey, send ) => send @_split_bkey bkey
#-----------------------------------------------------------------------------------------------------------
@_split_bkey = ( bkey ) ->
  ### Decompose a binary key into its leading phrase-type segment followed by
  the colon-split halves of its next two pipe-separated fields. ###
  [ phrase_type, field_a, field_b ] = ( bkey.toString 'utf-8' ).split( '|' )[ .. 2 ]
  return [ phrase_type, ( field_a.split ':' )..., ( field_b.split ':' )..., ]
#-----------------------------------------------------------------------------------------------------------
@_$split_so_bkey = -> $ ( bkey, send ) => send @_split_so_bkey bkey
#-----------------------------------------------------------------------------------------------------------
# Decompose an 'so'-style binary key: returns the value half of field 1, the
# colon-split parts of field 2, and — when present and non-empty — field 3
# parsed as a base-10 integer index.
@_split_so_bkey = ( bkey ) ->
  R = bkey.toString 'utf-8'
  R = R.split '|'
  # optional trailing index segment (may be absent or empty)
  idx_txt = R[ 3 ]
  R = [ ( R[ 1 ].split ':' )[ 1 ], ( R[ 2 ].split ':' )..., ]
  R.push ( parseInt idx_txt, 10 ) if idx_txt? and idx_txt.length > 0
  return R
#-----------------------------------------------------------------------------------------------------------
# Derive the inclusive upper bound for a prefix range query: the prefix's
# UTF-8 bytes followed by a single 0xff byte, so every key starting with
# `gte` sorts at or below the result.
# - **gte** - a String (the range's lower-bound prefix) - Required
# - returns a Buffer of byteLength(gte) + 1 bytes
@_lte_from_gte = ( gte ) ->
  # NOTE(fix): `new Buffer n` is deprecated and may expose uninitialized
  # memory; `Buffer.alloc` zero-fills before we overwrite every byte anyway.
  R = Buffer.alloc ( last_idx = Buffer.byteLength gte ) + 1
  R.write gte
  R[ last_idx ] = 0xff
  return R
#-----------------------------------------------------------------------------------------------------------
@$lineup_from_glyph = ( db ) ->
settings =
indexed: no
single: yes
return @read_sub db, settings, ( glyph ) =>
lte = "so|glyph:#{glyph}|guide/lineup/uchr:"
sub_input = db[ '%self' ].createKeyStream { gte: lte, lte: @_lte_from_gte lte, }
return sub_input
#-----------------------------------------------------------------------------------------------------------
@$shapeclass_wbf_from_glyph_and_lineup = ( db ) ->
### TAINT wrong ###
settings =
indexed: no
single: yes
return @read_sub db, settings, ( [ glyph, lineup_glyphs, ] ) =>
for lineup_glyph in lineup_glyphs
do ( lineup_glyph ) =>
gte = "so|glyph:#{lineup_glyph}|factor/strokeclass/wbf:"
sub_input = db[ '%self' ].createKeyStream { gte: gte, lte: @_lte_from_gte gte, }
return sub_input
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_subject = ->
return $ ( lkey, send ) =>
[ pt, _, v0, _, v1, ] = lkey
send if pt is 'so' then v0 else v1
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_object = ->
return $ ( lkey, send ) =>
[ pt, _, v0, _, v1, ] = lkey
send if pt is 'so' then v1 else v0
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_values = ->
  ### Stream transform: emit the two value fields of a logical key as a pair,
  ordered subject-first regardless of whether the key is 'so' or reversed. ###
  return $ ( lkey, send ) =>
    [ phrase_type, _, first, _, second, ] = lkey
    pair = if phrase_type is 'so' then [ first, second, ] else [ second, first, ]
    send pair
#-----------------------------------------------------------------------------------------------------------
# Dump all phrases under a hard-coded 'spo' prefix of a local Jizura
# Hollerith2 DB as JSON lines, reporting the key count at the end.
@dump_jizura_db = ->
  source_db = HOLLERITH.new_db '/Volumes/Storage/temp/jizura-hollerith2'
  # NOTE(review): the first `prefix` assignment is dead — immediately
  # overwritten by the next line; looks like a leftover dev toggle.
  prefix = [ 'spo', '𡏠', ]
  prefix = [ 'spo', '㔰', ]
  input = HOLLERITH.create_phrasestream source_db, prefix
  #.........................................................................................................
  input
    .pipe D.$count ( count ) -> help "read #{count} keys"
    .pipe $ ( data, send ) => send JSON.stringify data
    .pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@find_good_kwic_sample_glyphs_3 = ( db ) ->
### version for Hollerith2 DBs; using `HOLLERITH.remit_async` instead of `HOLLERITH.read_sub`. ###
###
* ▶ '[["勷",5,9907,["亠","吅","𠀎","𧘇","力"]],"41","25","11","35","53"]'
* ▶ '[["噿",5,13090,["口","羽","亠","从","十"]],"25","54","41","34","12"]'
* ▶ '[["塾",5,3818,["亠","口","子","丸","土"]],"41","25","51","35","12"]'
* ▶ '[["墩",5,5457,["土","亠","口","子","夊"]],"12","41","25","51","35"]'
* ▶ '[["孃",5,7225,["女","亠","吅","𠀎","𧘇"]],"53","41","25","11","35"]'
* ▶ '[["寡",5,3412,["宀","丆","且","八","刀"]],"44","13","25","34","53"]'
* ▶ '[["巕",5,13586,["山","卄","𠂤","辛","女"]],"25","12","32","41","53"]'
* ▶ '[["橔",5,13883,["木","亠","口","子","夊"]],"12","41","25","51","35"]'
* ▶ '[["灂",5,12349,["氵","爫","罒","","寸"]],"44","34","25","51","12"]'
* ▶ '[["纏",5,3421,["糹","广","里","八","土"]],"55","41","25","34","12"]'
* ▶ '[["纕",5,8882,["糹","亠","吅","𠀎","𧘇"]],"55","41","25","11","35"]'
* ▶ '[["鄸",5,8392,["卄","罒","冖","夕","阝"]],"12","25","45","35","52"]'
* ▶ '[["韽",5,10377,["亽","𠃌","酉","立","日"]],"34","5","12","41","25"]'
* ▶ '[["頀",5,8385,["立","日","卄","隹","又"]],"41","25","12","32","54"]'
* ▶ '[["驐",5,12644,["馬","亠","口","子","夊"]],"12","41","25","51","35"]'
* ▶ '[["骧",5,6010,["马","亠","吅","𠀎","𧘇"]],"55","41","25","11","35"]'
###
#.........................................................................................................
step ( resume ) =>
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
help "using DB at #{db[ '%self' ][ 'location' ]}"
#.......................................................................................................
CHR = require join __dirname, '../../coffeenode-chr'
chrs_from_text = ( text ) -> CHR.chrs_from_text text, input: 'xncr'
#.......................................................................................................
prefix = [ 'pos', 'guide/lineup/length', 5, ]
query = { prefix, }
input = HOLLERITH.create_phrasestream db, query
#.......................................................................................................
decode_lineup = ( lineup ) =>
return chrs_from_text lineup.replace /\u3000/g, ''
#.......................................................................................................
xncr_from_uchr = ( uchr ) =>
return if ( CHR.as_rsg uchr ) is 'u-pua' then ( CHR.as_xncr uchr, csg: 'jzr' ) else uchr
#.......................................................................................................
input
#.....................................................................................................
.pipe $async ( phrase, done ) =>
[ _, _, lineup_length, glyph, ] = phrase
sub_prefix = [ 'spo', glyph, 'rank/cjt', ]
sub_fallback = [ null, null, null, Infinity, ]
sub_query = { prefix: sub_prefix, fallback: sub_fallback, }
debug '©zfQhm', phrase, sub_prefix if glyph is '公'
HOLLERITH.read_one_phrase db, sub_query, ( error, sub_phrase ) =>
return done.error error if error?
# debug '©FST09', sub_phrase unless sub_phrase[ sub_phrase.length - 1 ] is Infinity
[ _, _, _, rank, ] = sub_phrase
done [ glyph, { lineup_length, rank, }, ]
#.....................................................................................................
# .pipe D.$show()
.pipe D.$filter ( [ glyph, { lineup_length, rank, }, ] ) -> rank < 15000
#.....................................................................................................
.pipe $async ( entry, done ) =>
[ glyph, { lineup_length, rank, }, ] = entry
sub_prefix = [ 'spo', glyph, 'guide/lineup/uchr', ]
sub_query = { prefix: sub_prefix, star: '*', fallback: null, }
HOLLERITH.read_one_phrase db, sub_query, ( error, sub_phrase ) =>
# debug '©h4GY2', sub_phrase
return done.error error if error?
return done() unless sub_phrase?
[ _, _, _, guides, ] = sub_phrase
guides = decode_lineup guides
done [ glyph, { lineup_length, rank, guides, }, ]
#.....................................................................................................
.pipe $async ( entry, done ) =>
[ glyph, { lineup_length, rank, guides, }, ] = entry
tasks = []
#...................................................................................................
for guide in guides
do ( guide ) =>
guide_xncr = xncr_from_uchr guide
sub_prefix = [ 'spo', guide_xncr, 'factor/shapeclass/wbf', ]
sub_fallback = [ null, null, null, 'X', ]
sub_query = { prefix: sub_prefix, fallback: sub_fallback, }
tasks.push ( handler ) -> HOLLERITH.read_one_phrase db, sub_query, handler
#...................................................................................................
ASYNC.parallelLimit tasks, 10, ( error, sub_phrases ) =>
return done.error error if error?
strokeclasses = []
for sub_phrase, sub_idx in sub_phrases
[ _, _, _, strokeorder, ] = sub_phrase
strokeclasses[ sub_idx ] = strokeorder[ 0 ]
done [ glyph, { lineup_length, rank, guides, strokeclasses, }, ]
#.....................................................................................................
.pipe D.$filter ( entry ) =>
[ glyph, { lineup_length, rank, guides, strokeclasses, }, ] = entry
return ( strokeclasses[ .. ].sort().join '' ) is '12345'
#.....................................................................................................
.pipe $ ( [ glyph, { lineup_length, rank, guides, strokeclasses, }, ], send ) ->
guides = guides.join ''
strokeclasses = strokeclasses.join ''
send [ glyph, { lineup_length, rank, guides, strokeclasses, }, ]
#.....................................................................................................
# .pipe D.$filter ( entry ) => entry[ 1 ][ 'strokeclasses' ] is '12345'
.pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@read_factors = ( db, handler ) ->
#.........................................................................................................
step ( resume ) =>
Z = {}
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
#.......................................................................................................
prefix = [ 'pos', 'factor/', ]
query = { prefix, star: '*', }
input = HOLLERITH.create_phrasestream db, query
#.......................................................................................................
input
.pipe do =>
last_sbj = null
target = null
#...................................................................................................
return $ ( phrase, send, end ) =>
#.................................................................................................
if phrase?
[ _, prd, obj, sbj, ] = phrase
prd = prd.replace /^factor\//g, ''
sbj = CHR.as_uchr sbj, input: 'xncr'
if sbj isnt last_sbj
send target if target?
target = Z[ sbj ]?= { glyph: sbj, }
last_sbj = sbj
target[ prd ] = obj
Z[ obj ] = target if prd is 'sortcode'
#.................................................................................................
if end?
send target if target?
end()
.pipe D.$on_end -> handler null, Z
#-----------------------------------------------------------------------------------------------------------
@read_sample = ( db, limit_or_list, handler ) ->
### Return a gamut of select glyphs from the DB. `limit_or_list` may be a list of glyphs or a number
representing an upper bound to the usage rank recorded as `rank/cjt`. If `limit_or_list` is a list,
a POD whose keys are the glyphs in the list is returned; if it is a number, a similar POD with all the
glyphs whose rank is not worse than the given limit is returned. If `limit_or_list` is smaller than zero
or equals infinity, `null` is returned to indicate absence of a filter. ###
Z = {}
#.......................................................................................................
if CND.isa_list limit_or_list
Z[ glyph ] = 1 for glyph in limit_or_list
return handler null, Z
#.......................................................................................................
return handler null, null if limit_or_list < 0 or limit_or_list is Infinity
#.......................................................................................................
throw new Error "expected list or number, got a #{type}" unless CND.isa_number limit_or_list
#.......................................................................................................
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
#.......................................................................................................
lo = [ 'pos', 'rank/cjt', 0, ]
hi = [ 'pos', 'rank/cjt', limit_or_list, ]
query = { lo, hi, }
input = HOLLERITH.create_phrasestream db, query
#.......................................................................................................
input
.pipe $ ( phrase, send ) =>
[ _, _, _, glyph, ] = phrase
Z[ glyph ] = 1
.pipe D.$on_end -> handler null, Z
#-----------------------------------------------------------------------------------------------------------
@show_kwic_v2_and_v3_sample = ( db ) ->
#.........................................................................................................
step ( resume ) =>
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
help "using DB at #{db[ '%self' ][ 'location' ]}"
factor_infos = yield @read_factors db, resume
# debug '©g5bVR', factors; process.exit()
help "read #{( Object.keys factor_infos ).length} entries for factor_infos"
ranks = {}
include = Infinity
include = 15000
include = 50
include = 5000
# include = [ '寿', '邦', '帮', '畴', '铸', '筹', '涛', '祷', '绑', '綁', ]
# include = Array.from '未釐犛剺味昧眛魅鮇沬妹業寐鄴澲末抹茉枺沫袜妺'
# 'guide/hierarchy/uchr'
#.........................................................................................................
sample = yield @read_sample db, include, resume
#.........................................................................................................
$reorder_phrase = =>
return $ ( phrase, send ) =>
### extract sortcode ###
[ _, _, sortcode, glyph, _, ] = phrase
send [ glyph, sortcode, ]
#.........................................................................................................
$exclude_gaiji = =>
return D.$filter ( [ glyph, sortcode ] ) =>
return ( not glyph.startsWith '&' ) or ( glyph.startsWith '&jzr#' )
#.........................................................................................................
$include_sample = =>
return D.$filter ( [ glyph, sortcode ] ) => if sample? then ( glyph of sample ) else true
#.........................................................................................................
$extract_lineup = =>
return $ ( [ glyph, sortcode ], send ) =>
[ _, lineup, ] = sortcode.split ';'
[ infix, suffix, prefix, ] = lineup.split ','
lineup = prefix + '|' + infix + suffix
send [ glyph, lineup, ]
#.........................................................................................................
$format_sortcode_v3 = =>
return $ ( [ glyph, sortcode_plus, ], send ) =>
[ sortcode, infix, suffix, prefix, ] = sortcode_plus
prefix.unshift '\u3000' until prefix.length >= 6
suffix.push '\u3000' until suffix.length >= 6
prefix = prefix.join ''
suffix = suffix.join ''
lineup = prefix + '|' + infix + suffix
send [ glyph, lineup, sortcode, ]
#.........................................................................................................
$unpack = =>
return $ ( [ [ v1, v2, ], v3, ], send ) =>
send [ v1, v2, v3, ]
#.........................................................................................................
$transform_v1_v2 = => D.combine [
$reorder_phrase()
$exclude_gaiji()
$include_sample()
$extract_lineup()
]
#.........................................................................................................
$transform_v3 = => D.combine [
$reorder_phrase()
$exclude_gaiji()
$include_sample()
$format_sortcode_v3()
]
#.........................................................................................................
query_v1 = { prefix: [ 'pos', 'guide/kwic/v1/sortcode', ], }
query_v2 = { prefix: [ 'pos', 'guide/kwic/v2/sortcode', ], }
query_v3 = { prefix: [ 'pos', 'guide/kwic/v3/sortcode', ], }
input_v1 = ( HOLLERITH.create_phrasestream db, query_v1 ).pipe $transform_v1_v2()
input_v2 = ( HOLLERITH.create_phrasestream db, query_v2 ).pipe $transform_v1_v2()
input_v3 = ( HOLLERITH.create_phrasestream db, query_v3 ).pipe $transform_v3()
# .pipe D.$observe ( [ glyph, lineup, ] ) -> help glyph, lineup if glyph is '畴'
#.........................................................................................................
input_v1
.pipe D.$lockstep input_v2, fallback: [ null, null, ]
.pipe D.$lockstep input_v3
.pipe $unpack()
#.....................................................................................................
.pipe do =>
last_guide = null
return $ ( [ v1, v2, v3, ], send ) =>
[ glyph_v1
lineup_v1 ] = v1
this_guide = ( Array.from lineup_v1 )[ 7 ]
return send [ v1, v2, v3, ] if this_guide is last_guide
last_guide = this_guide
linup = " |#{this_guide} "
sortcode_v3 = v3[ 2 ]
send [ [ this_guide, linup, ], [ this_guide, linup, ], [ this_guide, linup, sortcode_v3, ], ]
send [ v1, v2, v3, ]
#.....................................................................................................
.pipe do =>
count = 0
wspc = '\u3000'
nspc = '\u3000'
# style = 'A'
# style = 'B'
style = 'C'
switch style
when 'A'
vsep = '◉'
include_v1 = no
include_sortcode = no
for_mkts = no
ldiff_0 = ' '
ldiff_1 = '<'
rdiff_0 = ' '
rdiff_1 = '>'
when 'B'
vsep = '║'
include_v1 = yes
for_mkts = yes
include_sortcode = no
ldiff_0 = wspc
ldiff_1 = '<'
rdiff_0 = wspc
rdiff_1 = '>'
when 'C'
vsep = '║'
include_v1 = yes
for_mkts = yes
include_sortcode = no
ldiff_0 = wspc
ldiff_1 = '<'
rdiff_0 = wspc
rdiff_1 = '>'
else throw new Error "unknown style #{rpr style}"
#...................................................................................................
return D.$observe ( [ v1, v2, v3, ] ) =>
[ glyph_v1, lineup_v1, ] = v1
[ glyph_v2, lineup_v2, ] = v2
[ glyph_v3, lineup_v3, sortcode_v3, ] = v3
#.................................................................................................
if include_sortcode
( sortcode_v3[ idx ] = '________' if code is null ) for code, idx in sortcode_v3
sortcode_v3 = sortcode_v3.join ' '
#.................................................................................................
diff = []
diff_v1 = if glyph_v1 is glyph_v2 then ldiff_0 else ldiff_1
diff_v2 = if glyph_v2 is glyph_v3 then rdiff_0 else rdiff_1
# debug '©28420', rpr lineup_v3
#.................................................................................................
if include_v1
line = lineup_v1 + nspc + glyph_v1
line += nspc + diff_v1 + vsep + nspc + lineup_v2 + nspc + glyph_v2
line += nspc + vsep + diff_v2 + nspc + lineup_v3 + nspc + glyph_v3
#.................................................................................................
else
line = lineup_v2 + nspc + glyph_v2
line += nspc + vsep + diff_v2 + nspc + lineup_v3 + nspc + glyph_v3
#.................................................................................................
if for_mkts
line += '<<< >>>'
line += sortcode_v3 if include_sortcode
#.................................................................................................
else
line += spc + spc + spc
#.................................................................................................
count += 1
help ƒ count if count % 500 is 0
echo line
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@show_codepoints_with_missing_predicates = ( v2_db, prd = 'guide/kwic/v1/lineup' ) ->
#.........................................................................................................
home = join __dirname, '../../jizura-datasources'
v1_route = join home, 'data/leveldb'
v2_route = join home, 'data/leveldb-v2'
v1_db = HOLLERITH.new_db v1_route, create: no
v2_db ?= HOLLERITH.new_db v2_route, create: no
help "using DB at #{v1_db[ '%self' ][ 'location' ]}"
help "using DB at #{v2_db[ '%self' ][ 'location' ]}"
rank_limit = Infinity
rank_limit = 100
#.........................................................................................................
$extract_glyph = => $ ( xpos, send ) => send xpos[ 3 ]
$exclude_gaiji = => D.$filter ( glyph ) => ( not glyph.startsWith '&' ) or ( glyph.startsWith '&jzr#' )
#.........................................................................................................
$show_progress = =>
count = 0
return D.$observe =>
count += +1
echo ƒ count if count % 10000 is 0
#.........................................................................................................
$show = => D.$observe ( [ glyph, keys, ] ) =>
echo ( CHR.as_fncr glyph, input: 'xncr' ), CHR.as_uchr glyph, input: 'xncr'
for key in keys
echo ' ' + key
#.........................................................................................................
$exclude_rare_glyphs = =>
### TAINT code duplication; factor out ###
ranks = {}
return $async ( glyph, done ) =>
### filter out 'uncommon' glyphs (whose rank exceeds rank limit) ###
# debug '©72bFK', glyph, rank if ( rank = ranks[ glyph ] )?
return done glyph if rank_limit < 0 or rank_limit is Infinity
return done glyph if ( rank = ranks[ glyph ] )? and rank < rank_limit
sub_prefix = [ 'spo', glyph, 'rank/cjt', ]
sub_query = { prefix: sub_prefix, fallback: null, }
HOLLERITH.read_one_phrase v2_db, sub_query, ( error, sub_phrase ) =>
return done.error error if error?
if sub_phrase is null
ranks[ glyph ] = Infinity
return done()
[ _, _, _, rank, ] = sub_phrase
ranks[ glyph ] = rank
return done() unless rank < rank_limit
done glyph
#.........................................................................................................
$test_for_predicate = ( prd ) =>
return $async ( glyph, done ) =>
sub_prefix = [ 'spo', glyph, prd, ]
sub_query = { prefix: sub_prefix, fallback: null, }
HOLLERITH.read_one_phrase v2_db, sub_query, ( error, sub_phrase ) =>
return done.error error if error?
return done glyph if sub_phrase is null
done()
#.........................................................................................................
v1_lte_from_gte = ( gte ) ->
R = new Buffer ( last_idx = Buffer.byteLength gte ) + 1
R.write gte
R[ last_idx ] = 0xff
return R
#.........................................................................................................
$fetch_v1_entries = =>
return $async ( glyph, done ) =>
gte = "so|glyph:#{glyph}"
lte = v1_lte_from_gte gte
sub_input = v1_db[ '%self' ].createKeyStream { gte, lte, }
Z = []
# sub_input.on 'data', ( data ) -> debug '©9Wqdh', data
sub_input
.pipe $ ( key, send, end ) =>
if key?
Z.push key.toString 'utf-8'
if end?
end()
done [ glyph, Z, ]
#.........................................................................................................
# prefix = { prefix: [ 'pos', 'cp/cid', ], }
prefix = { prefix: [ 'pos', 'cp/inner/original', ], }
input = HOLLERITH.create_phrasestream v2_db, prefix
#.........................................................................................................
input
.pipe $extract_glyph()
.pipe $exclude_gaiji()
# .pipe $show_progress()
# .pipe $exclude_rare_glyphs()
.pipe $test_for_predicate prd
.pipe $fetch_v1_entries()
.pipe D.$show()
.pipe $show()
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@show_encoding_sample = ->
encoding = HOLLERITH.CODEC.encodings[ 'dbcs2' ]
encoding = HOLLERITH.CODEC.encodings[ 'aleph' ]
encoding = HOLLERITH.CODEC.encodings[ 'rdctn' ]
phrases = [
[ '丁', 'strokecount', 2, ]
[ '三', 'strokecount', 3, ]
[ '夫', 'strokecount', 5, ]
[ '國', 'strokecount', 11, ]
[ '形', 'strokecount', 7, ]
[ '丁', 'componentcount', 1, ]
[ '三', 'componentcount', 1, ]
[ '夫', 'componentcount', 1, ]
[ '國', 'componentcount', 4, ]
[ '形', 'componentcount', 2, ]
[ '丁', 'components', [ '丁', ], ]
[ '三', 'components', [ '三', ], ]
[ '夫', 'components', [ '夫', ], ]
[ '國', 'components', [ '囗', '戈', '口', '一', ], ]
[ '形', 'components', [ '开', '彡', ], ]
]
for [ sbj, prd, obj, ] in phrases
key = ( HOLLERITH.CODEC.encode [ sbj, prd, ], )
value = ( new Buffer JSON.stringify obj )
key_rpr = ( encoding[ key[ idx ] ] for idx in [ 0 ... key.length ] ).join ''
value_rpr = ( encoding[ value[ idx ] ] for idx in [ 0 ... value.length ] ).join ''
urge key_rpr, '┊', value_rpr
b = new Buffer '一x丁x丂'
# text = new Buffer '一'
# text_rpr =
# help b, text_rpr
help HOLLERITH.CODEC.rpr_of_buffer HOLLERITH.CODEC.encode [ true, -1 / 7, ]
# chrs = []
# for cid in [ 0 .. 255 ]
# chrs.push String.fromCodePoint cid
# chrs.push '\n' if cid > 0 and cid % 32 is 0
# debug '©ZgY4D', chrs
# help chrs.join ''
# urge ( String.fromCodePoint cid for cid in [ 0x2400 .. 0x2426 ] ).join ''
# urge ( String.fromCodePoint cid for cid in [ 0x24b6 .. 0x24e9 ] ).join ''
# urge ( String.fromCodePoint cid for cid in [ 0xff01 .. 0xff60 ] ).join ''
############################################################################################################
unless module.parent?
  ### Demo entry point: runs only when this file is executed directly (not `require`d). ###
  #---------------------------------------------------------------------------------------------------------
  options =
    #.......................................................................................................
    # 'route': njs_path.join __dirname, '../dbs/demo'
    'route': njs_path.resolve __dirname, '../../jizura-datasources/data/leveldb-v2'
    # 'route': '/tmp/leveldb'
  #---------------------------------------------------------------------------------------------------------
  debug '©AoOAS', options
  # Pick one of the demo routines; the others are kept commented for convenience:
  # @find_good_kwic_sample_glyphs_3()
  @show_kwic_v2_and_v3_sample()
  # @show_codepoints_with_missing_predicates()
  # @show_encoding_sample()
  # @compile_encodings()
############################################################################################################
njs_path = require 'path'
# njs_fs = require 'fs'
join = njs_path.join
#...........................................................................................................
CND = require 'cnd'
rpr = CND.rpr
badge = 'HOLLERITH/demo'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
step = ( require 'coffeenode-suspend' ).step
D = require 'pipedreams'
$ = D.remit.bind D
$async = D.remit_async.bind D
ASYNC = require 'async'
CHR = require 'coffeenode-chr'
KWIC = require 'kwic'
#...........................................................................................................
new_db = require 'level'
HOLLERITH = require './main'
ƒ = CND.format_number.bind CND
#...........................................................................................................
options = null
#-----------------------------------------------------------------------------------------------------------
### Private sentinel; presumably used to distinguish "no value" from legitimate `null` /
`undefined` results (not referenced in this part of the file — TODO confirm). ###
@_misfit = Symbol 'misfit'
#===========================================================================================================
# PIPEDREAMS
#-----------------------------------------------------------------------------------------------------------
D.new_indexer = ( idx = 0 ) ->
  ### Return a stateful labeller that pairs each datum with a running index (starting at `idx`,
  incremented after every call): `f( d ) -> [ n, d, ]`. ###
  return ( data ) =>
    pair  = [ idx, data, ]
    idx  += 1
    return pair
#===========================================================================================================
#
#-----------------------------------------------------------------------------------------------------------
@initialize = ( handler ) ->
  ### Open the demo DB at the module-level `options[ 'route' ]`, store the handle in
  `options[ 'db' ]`, and call back node-style with no result. ###
  options[ 'db' ] = HOLLERITH.new_db options[ 'route' ]
  handler null
#-----------------------------------------------------------------------------------------------------------
@main = ( first_query ) ->
  ### Demo: walk `os|rank/cjt:…` keys in `first_query`'s range, join each glyph with its `pod:`
  record, then print `[ glyph, strokeorder-length, lineup-glyph-count ]` sorted by the middle
  column. ###
  first_query ?= { gte: 'os|rank/cjt:0', lte: 'os|rank/cjt:9', }
  step ( resume ) =>
    yield @initialize resume
    db = options[ 'db' ]
    count_chrs = ( text ) -> ( CHR.chrs_from_text text, input: 'xncr' ).length
    #.......................................................................................................
    input = db[ '%self' ].createKeyStream first_query
    # k = "so|glyph:繼|pod:"
    # input = db[ '%self' ].createKeyStream db, { gte: k, lte: k + '\uffff' }
    # debug '©cW8tK', HOLLERITH.new_key db, 'os', 'rank/cjt', '00000'
    #.......................................................................................................
    ### TAINT We can currently not use `HOLLERITH2.read_sub` because HOLLERITH2 assumes a key-only
    DB that uses binary encoding with a custom https://github.com/deanlandolt/bytewise layer; the current
    Jizura DB version uses UTF-8 strings and is a key/value DB. ###
    #.......................................................................................................
    input
      .pipe @_$split_bkey()
      #.....................................................................................................
      # .pipe HOLLERITH.read_sub db, indexed: yes, ( key ) =>
      .pipe @read_sub db, indexed: yes, ( key ) =>
        [ pt, ok, rank, sk, glyph, ] = key
        ### NOTE(review): this line had been corrupted; key template restored from the example key
        `so|glyph:繼|pod:` commented above — confirm the `pod:` sub-key prefix. ###
        sub_key = "so|glyph:#{glyph}|pod:"
        return db[ '%self' ].createValueStream { gte: sub_key, lte: sub_key + '\uffff' }
      #.....................................................................................................
      .pipe D.$densort 0, 0, true
      #.....................................................................................................
      .pipe $ ( [ idx, [ pod, ], ], send ) =>
        debug '©jd5cE', pod
        unless pod[ 'strokeorder/short' ]?
          warn '©9YXoq', pod
        else
          glyph = pod[ 'glyph/uchr' ]
          strokeorder = pod[ 'strokeorder/short' ][ 0 ].length
          # Ideographic spaces are padding, not part of the lineup proper.
          lineup = pod[ 'guide/lineup/uchr' ].replace /\u3000/g, ''
          send [ glyph, strokeorder, lineup, ]
      #.....................................................................................................
      .pipe $ ( [ glyph, strokeorder, lineup, ], send ) =>
        send [ glyph, strokeorder, count_chrs lineup, ]
      #.....................................................................................................
      .pipe D.$sort ( a, b ) ->
        idx = 1
        return +1 if a[ idx ] > b[ idx ]
        return -1 if a[ idx ] < b[ idx ]
        return 0
      #.....................................................................................................
      .pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
# Stream-transform wrapper around `@_split_bkey`: decodes each incoming raw key buffer.
@_$split_bkey = -> $ ( bkey, send ) => send @_split_bkey bkey
#-----------------------------------------------------------------------------------------------------------
@_split_bkey = ( bkey ) ->
  ### Decode a UTF-8 LevelDB key of the shape `pt|a:b|c:d(|…)` into the flat list
  `[ pt, a, b, c, d, ]`; anything after the third `|`-segment is ignored. ###
  [ table, pair_a, pair_b, ] = ( bkey.toString 'utf-8' ).split( '|' )[ .. 2 ]
  return [ table, ( pair_a.split ':' )..., ( pair_b.split ':' )..., ]
#-----------------------------------------------------------------------------------------------------------
# Stream-transform wrapper around `@_split_so_bkey`: decodes each incoming raw `so|…` key buffer.
@_$split_so_bkey = -> $ ( bkey, send ) => send @_split_so_bkey bkey
#-----------------------------------------------------------------------------------------------------------
@_split_so_bkey = ( bkey ) ->
  ### Decode a `so|tag:sbj|prd:obj(|idx)` key into `[ sbj, prd, obj(, idx) ]`: the leading table
  name and the subject's tag are dropped, and a non-empty trailing segment is appended as an
  integer index. ###
  [ _table, sbj_pair, prd_pair, idx_txt, ] = ( bkey.toString 'utf-8' ).split '|'
  R = [ ( sbj_pair.split ':' )[ 1 ], ( prd_pair.split ':' )..., ]
  R.push parseInt idx_txt, 10 if idx_txt? and idx_txt.length > 0
  return R
#-----------------------------------------------------------------------------------------------------------
@_lte_from_gte = ( gte ) ->
  ### Given an inclusive lower-bound key `gte`, derive the matching upper bound: the same bytes
  followed by a single `0xff` byte, so every key with prefix `gte` falls inside `[ gte, lte ]`.
  `Buffer.alloc` replaces the deprecated `new Buffer( size )` constructor (DEP0005). ###
  last_idx = Buffer.byteLength gte
  R = Buffer.alloc last_idx + 1
  R.write gte
  R[ last_idx ] = 0xff
  return R
#-----------------------------------------------------------------------------------------------------------
@$lineup_from_glyph = ( db ) ->
  ### Stream-transform factory: for each incoming glyph, open a keys-only sub-stream over
  `so|glyph:<glyph>|guide/lineup/uchr:…` in `db`; merging is delegated to `@read_sub`
  (`single: yes` presumably means one result per glyph — TODO confirm against `read_sub`). ###
  settings =
    indexed: no
    single: yes
  return @read_sub db, settings, ( glyph ) =>
    # NOTE(review): despite its name, `lte` here is the *lower* bound (`gte`); the actual upper
    # bound is derived from it via `@_lte_from_gte`.
    lte = "so|glyph:#{glyph}|guide/lineup/uchr:"
    sub_input = db[ '%self' ].createKeyStream { gte: lte, lte: @_lte_from_gte lte, }
    return sub_input
#-----------------------------------------------------------------------------------------------------------
@$shapeclass_wbf_from_glyph_and_lineup = ( db ) ->
  ### TAINT wrong ###
  ### NOTE(review): flagged wrong by the author, and indeed broken: `sub_input` is only ever
  assigned inside the `do ( lineup_glyph )` closure, so each per-glyph key stream is created and
  then discarded, while the trailing `return sub_input` reads a name that is never assigned in
  its own scope. Needs a spec of the intended merge behavior before it can be fixed. ###
  settings =
    indexed: no
    single: yes
  return @read_sub db, settings, ( [ glyph, lineup_glyphs, ] ) =>
    for lineup_glyph in lineup_glyphs
      do ( lineup_glyph ) =>
        gte = "so|glyph:#{lineup_glyph}|factor/strokeclass/wbf:"
        sub_input = db[ '%self' ].createKeyStream { gte: gte, lte: @_lte_from_gte gte, }
    return sub_input
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_subject = ->
  ### Stream transform: from each listed key, emit element 2 when the key's table is `so`,
  element 4 otherwise. ###
  return $ ( lkey, send ) =>
    [ table, _skip_a, first_value, _skip_b, second_value, ] = lkey
    if table is 'so' then send first_value else send second_value
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_object = ->
  ### Stream transform: from each listed key, emit element 4 when the key's table is `so`,
  element 2 otherwise (the mirror image of `$pick_subject`). ###
  return $ ( lkey, send ) =>
    [ table, _skip_a, first_value, _skip_b, second_value, ] = lkey
    if table is 'so' then send second_value else send first_value
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_values = ->
  ### Stream transform: emit both payload values of each listed key as a pair, ordered as
  `[ subject-position, object-position ]` (i.e. swapped for non-`so` keys). ###
  return $ ( lkey, send ) =>
    [ table, _skip_a, first_value, _skip_b, second_value, ] = lkey
    if table is 'so'
      send [ first_value, second_value, ]
    else
      send [ second_value, first_value, ]
#-----------------------------------------------------------------------------------------------------------
@dump_jizura_db = ->
  ### Ad-hoc debugging dump: print (as JSON lines) all `spo` phrases recorded for a single test
  glyph, preceded by a key count. ###
  source_db = HOLLERITH.new_db '/Volumes/Storage/temp/jizura-hollerith2'
  # NOTE(review): the first assignment is immediately overridden, so only '㔰' is dumped. Also,
  # other call sites wrap prefixes as `{ prefix, }` pods — confirm `create_phrasestream` accepts
  # a bare list here.
  prefix = [ 'spo', '𡏠', ]
  prefix = [ 'spo', '㔰', ]
  input = HOLLERITH.create_phrasestream source_db, prefix
  #.........................................................................................................
  input
    .pipe D.$count ( count ) -> help "read #{count} keys"
    .pipe $ ( data, send ) => send JSON.stringify data
    .pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@find_good_kwic_sample_glyphs_3 = ( db ) ->
  ### version for Hollerith2 DBs; using `HOLLERITH.remit_async` instead of `HOLLERITH.read_sub`. ###
  ### Scan all glyphs whose guide lineup has exactly 5 elements, join each with its usage rank and
  the WBF shape class of every guide, and show those whose five shape classes are exactly
  {1,2,3,4,5}. The block comment below preserves sample output: ###
  ###
  * ▶ '[["勷",5,9907,["亠","吅","𠀎","𧘇","力"]],"41","25","11","35","53"]'
  * ▶ '[["噿",5,13090,["口","羽","亠","从","十"]],"25","54","41","34","12"]'
  * ▶ '[["塾",5,3818,["亠","口","子","丸","土"]],"41","25","51","35","12"]'
  * ▶ '[["墩",5,5457,["土","亠","口","子","夊"]],"12","41","25","51","35"]'
  * ▶ '[["孃",5,7225,["女","亠","吅","𠀎","𧘇"]],"53","41","25","11","35"]'
  * ▶ '[["寡",5,3412,["宀","丆","且","八","刀"]],"44","13","25","34","53"]'
  * ▶ '[["巕",5,13586,["山","卄","𠂤","辛","女"]],"25","12","32","41","53"]'
  * ▶ '[["橔",5,13883,["木","亠","口","子","夊"]],"12","41","25","51","35"]'
  * ▶ '[["灂",5,12349,["氵","爫","罒","",""寸"]],"44","34","25","51","12"]'
  * ▶ '[["纏",5,3421,["糹","广","里","八","土"]],"55","41","25","34","12"]'
  * ▶ '[["纕",5,8882,["糹","亠","吅","𠀎","𧘇"]],"55","41","25","11","35"]'
  * ▶ '[["鄸",5,8392,["卄","罒","冖","夕","阝"]],"12","25","45","35","52"]'
  * ▶ '[["韽",5,10377,["亽","𠃌","酉","立","日"]],"34","5","12","41","25"]'
  * ▶ '[["頀",5,8385,["立","日","卄","隹","又"]],"41","25","12","32","54"]'
  * ▶ '[["驐",5,12644,["馬","亠","口","子","夊"]],"12","41","25","51","35"]'
  * ▶ '[["骧",5,6010,["马","亠","吅","𠀎","𧘇"]],"55","41","25","11","35"]'
  ###
  #.........................................................................................................
  step ( resume ) =>
    db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
    db ?= HOLLERITH.new_db db_route, create: no
    help "using DB at #{db[ '%self' ][ 'location' ]}"
    #.......................................................................................................
    # NOTE(review): shadows the module-level `CHR` with a sibling checkout of coffeenode-chr.
    CHR = require join __dirname, '../../coffeenode-chr'
    chrs_from_text = ( text ) -> CHR.chrs_from_text text, input: 'xncr'
    #.......................................................................................................
    prefix = [ 'pos', 'guide/lineup/length', 5, ]
    query = { prefix, }
    input = HOLLERITH.create_phrasestream db, query
    #.......................................................................................................
    # Lineups are stored with ideographic-space padding; strip it, then split into glyphs.
    decode_lineup = ( lineup ) =>
      return chrs_from_text lineup.replace /\u3000/g, ''
    #.......................................................................................................
    # PUA codepoints are stored under their `jzr` XNCR form; pass other glyphs through.
    xncr_from_uchr = ( uchr ) =>
      return if ( CHR.as_rsg uchr ) is 'u-pua' then ( CHR.as_xncr uchr, csg: 'jzr' ) else uchr
    #.......................................................................................................
    input
      #.....................................................................................................
      # Join each glyph with its `rank/cjt` usage rank (fallback: Infinity when absent).
      .pipe $async ( phrase, done ) =>
        [ _, _, lineup_length, glyph, ] = phrase
        sub_prefix = [ 'spo', glyph, 'rank/cjt', ]
        sub_fallback = [ null, null, null, Infinity, ]
        sub_query = { prefix: sub_prefix, fallback: sub_fallback, }
        debug '©zfQhm', phrase, sub_prefix if glyph is '公'
        HOLLERITH.read_one_phrase db, sub_query, ( error, sub_phrase ) =>
          return done.error error if error?
          # debug '©FST09', sub_phrase unless sub_phrase[ sub_phrase.length - 1 ] is Infinity
          [ _, _, _, rank, ] = sub_phrase
          done [ glyph, { lineup_length, rank, }, ]
      #.....................................................................................................
      # .pipe D.$show()
      .pipe D.$filter ( [ glyph, { lineup_length, rank, }, ] ) -> rank < 15000
      #.....................................................................................................
      # Join each surviving glyph with its decoded guide lineup; drop glyphs without one.
      .pipe $async ( entry, done ) =>
        [ glyph, { lineup_length, rank, }, ] = entry
        sub_prefix = [ 'spo', glyph, 'guide/lineup/uchr', ]
        sub_query = { prefix: sub_prefix, star: '*', fallback: null, }
        HOLLERITH.read_one_phrase db, sub_query, ( error, sub_phrase ) =>
          # debug '©h4GY2', sub_phrase
          return done.error error if error?
          return done() unless sub_phrase?
          [ _, _, _, guides, ] = sub_phrase
          guides = decode_lineup guides
          done [ glyph, { lineup_length, rank, guides, }, ]
      #.....................................................................................................
      # Look up the WBF shape class of every guide (fallback 'X'), up to 10 lookups in parallel.
      .pipe $async ( entry, done ) =>
        [ glyph, { lineup_length, rank, guides, }, ] = entry
        tasks = []
        #...................................................................................................
        for guide in guides
          do ( guide ) =>
            guide_xncr = xncr_from_uchr guide
            sub_prefix = [ 'spo', guide_xncr, 'factor/shapeclass/wbf', ]
            sub_fallback = [ null, null, null, 'X', ]
            sub_query = { prefix: sub_prefix, fallback: sub_fallback, }
            tasks.push ( handler ) -> HOLLERITH.read_one_phrase db, sub_query, handler
        #...................................................................................................
        ASYNC.parallelLimit tasks, 10, ( error, sub_phrases ) =>
          return done.error error if error?
          strokeclasses = []
          for sub_phrase, sub_idx in sub_phrases
            [ _, _, _, strokeorder, ] = sub_phrase
            # Only the first character of the shape class value is kept.
            strokeclasses[ sub_idx ] = strokeorder[ 0 ]
          done [ glyph, { lineup_length, rank, guides, strokeclasses, }, ]
      #.....................................................................................................
      # Keep glyphs whose five shape classes are exactly 1..5 (sorted copy; order-insensitive).
      .pipe D.$filter ( entry ) =>
        [ glyph, { lineup_length, rank, guides, strokeclasses, }, ] = entry
        return ( strokeclasses[ .. ].sort().join '' ) is '12345'
      #.....................................................................................................
      .pipe $ ( [ glyph, { lineup_length, rank, guides, strokeclasses, }, ], send ) ->
        guides = guides.join ''
        strokeclasses = strokeclasses.join ''
        send [ glyph, { lineup_length, rank, guides, strokeclasses, }, ]
      #.....................................................................................................
      # .pipe D.$filter ( entry ) => entry[ 1 ][ 'strokeclasses' ] is '12345'
      .pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@read_factors = ( db, handler ) ->
  ### Collect all `factor/*` phrases of the DB into a single POD `Z`, keyed by subject glyph
  (and, for `sortcode` predicates, additionally keyed by the sortcode value, so pods can be
  looked up either way); `Z` is delivered node-style via `handler` when the stream ends. ###
  #.........................................................................................................
  step ( resume ) =>
    Z = {}
    db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
    db ?= HOLLERITH.new_db db_route, create: no
    #.......................................................................................................
    prefix = [ 'pos', 'factor/', ]
    query = { prefix, star: '*', }
    input = HOLLERITH.create_phrasestream db, query
    #.......................................................................................................
    input
      .pipe do =>
        last_sbj = null
        target = null
        #...................................................................................................
        return $ ( phrase, send, end ) =>
          #.................................................................................................
          if phrase?
            [ _, prd, obj, sbj, ] = phrase
            # Strip the `factor/` namespace so pods carry bare predicate names.
            prd = prd.replace /^factor\//g, ''
            sbj = CHR.as_uchr sbj, input: 'xncr'
            if sbj isnt last_sbj
              send target if target?
              # `?=` re-uses an existing pod when the same subject re-appears under another
              # predicate (with `pos` ordering, phrases for one subject are not contiguous).
              target = Z[ sbj ]?= { glyph: sbj, }
              last_sbj = sbj
            target[ prd ] = obj
            Z[ obj ] = target if prd is 'sortcode'
          #.................................................................................................
          # Sent pods go nowhere downstream (only `$on_end` follows); the result is `Z` itself.
          if end?
            send target if target?
            end()
      .pipe D.$on_end -> handler null, Z
#-----------------------------------------------------------------------------------------------------------
@read_sample = ( db, limit_or_list, handler ) ->
  ### Return a gamut of select glyphs from the DB. `limit_or_list` may be a list of glyphs or a number
  representing an upper bound to the usage rank recorded as `rank/cjt`. If `limit_or_list` is a list,
  a POD whose keys are the glyphs in the list is returned; if it is a number, a similar POD with all the
  glyphs whose rank is not worse than the given limit is returned. If `limit_or_list` is smaller than zero
  or equals infinity, `null` is returned to indicate absence of a filter. ###
  Z = {}
  #.......................................................................................................
  if CND.isa_list limit_or_list
    Z[ glyph ] = 1 for glyph in limit_or_list
    return handler null, Z
  #.......................................................................................................
  return handler null, null if limit_or_list < 0 or limit_or_list is Infinity
  #.......................................................................................................
  ### NOTE was `#{type}`, an undeclared name that would itself raise `ReferenceError` instead of
  producing the intended message; `rpr` renders the offending value. ###
  throw new Error "expected list or number, got a #{rpr limit_or_list}" unless CND.isa_number limit_or_list
  #.......................................................................................................
  db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
  db ?= HOLLERITH.new_db db_route, create: no
  #.......................................................................................................
  lo = [ 'pos', 'rank/cjt', 0, ]
  hi = [ 'pos', 'rank/cjt', limit_or_list, ]
  query = { lo, hi, }
  input = HOLLERITH.create_phrasestream db, query
  #.......................................................................................................
  input
    .pipe $ ( phrase, send ) =>
      [ _, _, _, glyph, ] = phrase
      Z[ glyph ] = 1
    .pipe D.$on_end -> handler null, Z
#-----------------------------------------------------------------------------------------------------------
@show_kwic_v2_and_v3_sample = ( db ) ->
  ### Print a side-by-side, line-by-line comparison of the three KWIC sortcode indices
  (`guide/kwic/v1/sortcode`, `guide/kwic/v2/sortcode`, `guide/kwic/v3/sortcode`) for a sample of glyphs,
  so the index versions can be proofread against each other. Comparison lines go to `echo` (stdout);
  stats and progress go to the loggers. Uses the Jizura v2 LevelDB unless a `db` handle is passed in. ###
  #.........................................................................................................
  step ( resume ) =>
    db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
    db ?= HOLLERITH.new_db db_route, create: no
    help "using DB at #{db[ '%self' ][ 'location' ]}"
    # NOTE(review): `factor_infos` is only used for the entry count reported below.
    factor_infos = yield @read_factors db, resume
    # debug '©g5bVR', factors; process.exit()
    help "read #{( Object.keys factor_infos ).length} entries for factor_infos"
    # NOTE(review): `ranks` is never used anywhere in this method.
    ranks = {}
    # Sample-size toggles: only the last assignment takes effect (currently 5000). `include` is handed to
    # `@read_sample` below; it may be a rank limit or an explicit list of glyphs.
    include = Infinity
    include = 15000
    include = 50
    include = 5000
    # include = [ '寿', '邦', '帮', '畴', '铸', '筹', '涛', '祷', '绑', '綁', ]
    # include = Array.from '未釐犛剺味昧眛魅鮇沬妹業寐鄴澲末抹茉枺沫袜妺'
    # 'guide/hierarchy/uchr'
    #.........................................................................................................
    sample = yield @read_sample db, include, resume
    #.........................................................................................................
    # Turn a `pos` phrase into a `[ glyph, sortcode, ]` pair.
    $reorder_phrase = =>
      return $ ( phrase, send ) =>
        ### extract sortcode ###
        [ _, _, sortcode, glyph, _, ] = phrase
        send [ glyph, sortcode, ]
    #.........................................................................................................
    # Drop gaiji (references starting with `&`) except those in the private `jzr` namespace.
    $exclude_gaiji = =>
      return D.$filter ( [ glyph, sortcode ] ) =>
        return ( not glyph.startsWith '&' ) or ( glyph.startsWith '&jzr#' )
    #.........................................................................................................
    # Keep only glyphs in the sample; a `null` sample means 'no filter' (see `@read_sample`).
    $include_sample = =>
      return D.$filter ( [ glyph, sortcode ] ) => if sample? then ( glyph of sample ) else true
    #.........................................................................................................
    # v1/v2 sortcodes: the part after the first `;` is `infix,suffix,prefix`; reassemble for display as
    # `prefix|infixsuffix`.
    $extract_lineup = =>
      return $ ( [ glyph, sortcode ], send ) =>
        [ _, lineup, ] = sortcode.split ';'
        [ infix, suffix, prefix, ] = lineup.split ','
        lineup = prefix + '|' + infix + suffix
        send [ glyph, lineup, ]
    #.........................................................................................................
    # v3 sortcodes arrive as `[ sortcode, infix, suffix, prefix, ]` with prefix/suffix as lists; pad both
    # to six positions with ideographic spaces so the columns line up, then reassemble as above.
    $format_sortcode_v3 = =>
      return $ ( [ glyph, sortcode_plus, ], send ) =>
        [ sortcode, infix, suffix, prefix, ] = sortcode_plus
        prefix.unshift '\u3000' until prefix.length >= 6
        suffix.push '\u3000' until suffix.length >= 6
        prefix = prefix.join ''
        suffix = suffix.join ''
        lineup = prefix + '|' + infix + suffix
        send [ glyph, lineup, sortcode, ]
    #.........................................................................................................
    # Flatten the nested pairs produced by the two `$lockstep`s below into a flat triplet.
    $unpack = =>
      return $ ( [ [ v1, v2, ], v3, ], send ) =>
        send [ v1, v2, v3, ]
    #.........................................................................................................
    $transform_v1_v2 = => D.combine [
      $reorder_phrase()
      $exclude_gaiji()
      $include_sample()
      $extract_lineup()
      ]
    #.........................................................................................................
    $transform_v3 = => D.combine [
      $reorder_phrase()
      $exclude_gaiji()
      $include_sample()
      $format_sortcode_v3()
      ]
    #.........................................................................................................
    query_v1 = { prefix: [ 'pos', 'guide/kwic/v1/sortcode', ], }
    query_v2 = { prefix: [ 'pos', 'guide/kwic/v2/sortcode', ], }
    query_v3 = { prefix: [ 'pos', 'guide/kwic/v3/sortcode', ], }
    input_v1 = ( HOLLERITH.create_phrasestream db, query_v1 ).pipe $transform_v1_v2()
    input_v2 = ( HOLLERITH.create_phrasestream db, query_v2 ).pipe $transform_v1_v2()
    input_v3 = ( HOLLERITH.create_phrasestream db, query_v3 ).pipe $transform_v3()
    # .pipe D.$observe ( [ glyph, lineup, ] ) -> help glyph, lineup if glyph is '畴'
    #.........................................................................................................
    # Walk the three streams in lockstep so the n-th record of each index version travels together.
    input_v1
      .pipe D.$lockstep input_v2, fallback: [ null, null, ]
      .pipe D.$lockstep input_v3
      .pipe $unpack()
      #.....................................................................................................
      # Whenever the guide character changes (taken from position 7 of the v1 lineup — presumably the
      # glyph right after the `|` separator; TODO confirm), emit a synthetic header row before the data
      # row so the output is visually grouped by guide.
      .pipe do =>
        last_guide = null
        return $ ( [ v1, v2, v3, ], send ) =>
          [ glyph_v1
            lineup_v1 ] = v1
          this_guide = ( Array.from lineup_v1 )[ 7 ]
          return send [ v1, v2, v3, ] if this_guide is last_guide
          last_guide = this_guide
          # NOTE(review): `linup` (sic, missing the `e`) here and below; harmless but confusing.
          linup = " |#{this_guide} "
          sortcode_v3 = v3[ 2 ]
          send [ [ this_guide, linup, ], [ this_guide, linup, ], [ this_guide, linup, sortcode_v3, ], ]
          send [ v1, v2, v3, ]
      #.....................................................................................................
      # Formatting stage: render each triplet as one output line. Three hard-coded styles; only the last
      # assignment to `style` (currently 'C') takes effect.
      .pipe do =>
        count = 0
        wspc = '\u3000'
        nspc = '\u3000'
        # style = 'A'
        # style = 'B'
        style = 'C'
        switch style
          when 'A'
            vsep = '◉'
            include_v1 = no
            include_sortcode = no
            for_mkts = no
            ldiff_0 = ' '
            ldiff_1 = '<'
            rdiff_0 = ' '
            rdiff_1 = '>'
          when 'B'
            vsep = '║'
            include_v1 = yes
            for_mkts = yes
            include_sortcode = no
            ldiff_0 = wspc
            ldiff_1 = '<'
            rdiff_0 = wspc
            rdiff_1 = '>'
          when 'C'
            vsep = '║'
            include_v1 = yes
            for_mkts = yes
            include_sortcode = no
            ldiff_0 = wspc
            ldiff_1 = '<'
            rdiff_0 = wspc
            rdiff_1 = '>'
          else throw new Error "unknown style #{rpr style}"
        #...................................................................................................
        return D.$observe ( [ v1, v2, v3, ] ) =>
          [ glyph_v1, lineup_v1, ] = v1
          [ glyph_v2, lineup_v2, ] = v2
          [ glyph_v3, lineup_v3, sortcode_v3, ] = v3
          #.................................................................................................
          # When sortcodes are shown, replace `null` codes with a placeholder and join for display.
          if include_sortcode
            ( sortcode_v3[ idx ] = '________' if code is null ) for code, idx in sortcode_v3
            sortcode_v3 = sortcode_v3.join ' '
          #.................................................................................................
          # Difference markers: flag positions where adjacent index versions list different glyphs.
          # NOTE(review): `diff` is assigned but never used.
          diff = []
          diff_v1 = if glyph_v1 is glyph_v2 then ldiff_0 else ldiff_1
          diff_v2 = if glyph_v2 is glyph_v3 then rdiff_0 else rdiff_1
          # debug '©28420', rpr lineup_v3
          #.................................................................................................
          if include_v1
            line = lineup_v1 + nspc + glyph_v1
            line += nspc + diff_v1 + vsep + nspc + lineup_v2 + nspc + glyph_v2
            line += nspc + vsep + diff_v2 + nspc + lineup_v3 + nspc + glyph_v3
          #.................................................................................................
          else
            line = lineup_v2 + nspc + glyph_v2
            line += nspc + vsep + diff_v2 + nspc + lineup_v3 + nspc + glyph_v3
          #.................................................................................................
          if for_mkts
            line += '<<< >>>'
            line += sortcode_v3 if include_sortcode
          #.................................................................................................
          else
            # NOTE(review): `spc` is not defined anywhere in this method (only `wspc` / `nspc` are); this
            # branch is unreachable with styles 'B' / 'C' but would throw a ReferenceError for style 'A'.
            line += spc + spc + spc
          #.................................................................................................
          count += 1
          help ƒ count if count % 500 is 0
          echo line
    #.........................................................................................................
    return null
#-----------------------------------------------------------------------------------------------------------
@show_codepoints_with_missing_predicates = ( v2_db, prd = 'guide/kwic/v1/lineup' ) ->
  ### List, on stdout, all codepoints in the v2 DB (subjects of `cp/inner/original` phrases) that have no
  phrase for predicate `prd`, together with the keys recorded for each of them in the legacy v1 DB.
  Useful for spotting entries that were not carried over from v1 to v2. ###
  #.........................................................................................................
  home = join __dirname, '../../jizura-datasources'
  v1_route = join home, 'data/leveldb'
  v2_route = join home, 'data/leveldb-v2'
  v1_db = HOLLERITH.new_db v1_route, create: no
  v2_db ?= HOLLERITH.new_db v2_route, create: no
  help "using DB at #{v1_db[ '%self' ][ 'location' ]}"
  help "using DB at #{v2_db[ '%self' ][ 'location' ]}"
  # Rank-limit toggles: only the last assignment takes effect; consumed by `$exclude_rare_glyphs`, which
  # is itself currently commented out of the pipeline below.
  rank_limit = Infinity
  rank_limit = 100
  #.........................................................................................................
  # A `pos` phrase carries its subject (the glyph) at index 3.
  $extract_glyph = => $ ( xpos, send ) => send xpos[ 3 ]
  # Drop gaiji (references starting with `&`) except those in the private `jzr` namespace.
  $exclude_gaiji = => D.$filter ( glyph ) => ( not glyph.startsWith '&' ) or ( glyph.startsWith '&jzr#' )
  #.........................................................................................................
  # Report throughput every 10000 items; currently not in the pipeline.
  $show_progress = =>
    count = 0
    return D.$observe =>
      count += +1
      echo ƒ count if count % 10000 is 0
  #.........................................................................................................
  # Print one glyph (as FNCR and as UCHR) followed by its v1 DB keys, indented.
  $show = => D.$observe ( [ glyph, keys, ] ) =>
    echo ( CHR.as_fncr glyph, input: 'xncr' ), CHR.as_uchr glyph, input: 'xncr'
    for key in keys
      echo '  ' + key
  #.........................................................................................................
  # Pass a glyph through only if its `rank/cjt` is below `rank_limit`; ranks are cached in `ranks`.
  # NOTE(review): when a cached rank is >= `rank_limit`, the cache check falls through and the DB is
  # queried again; the result is still correct, just redundant work.
  $exclude_rare_glyphs = =>
    ### TAINT code duplication; factor out ###
    ranks = {}
    return $async ( glyph, done ) =>
      ### filter out 'uncommon' glyphs (whose rank exceeds rank limit) ###
      # debug '©72bFK', glyph, rank if ( rank = ranks[ glyph ] )?
      return done glyph if rank_limit < 0 or rank_limit is Infinity
      return done glyph if ( rank = ranks[ glyph ] )? and rank < rank_limit
      sub_prefix = [ 'spo', glyph, 'rank/cjt', ]
      sub_query = { prefix: sub_prefix, fallback: null, }
      HOLLERITH.read_one_phrase v2_db, sub_query, ( error, sub_phrase ) =>
        return done.error error if error?
        # Glyphs without a recorded rank are treated as infinitely rare.
        if sub_phrase is null
          ranks[ glyph ] = Infinity
          return done()
        [ _, _, _, rank, ] = sub_phrase
        ranks[ glyph ] = rank
        return done() unless rank < rank_limit
        done glyph
  #.........................................................................................................
  # Pass a glyph through only if it has NO phrase for predicate `prd` in the v2 DB.
  $test_for_predicate = ( prd ) =>
    return $async ( glyph, done ) =>
      sub_prefix = [ 'spo', glyph, prd, ]
      sub_query = { prefix: sub_prefix, fallback: null, }
      HOLLERITH.read_one_phrase v2_db, sub_query, ( error, sub_phrase ) =>
        return done.error error if error?
        return done glyph if sub_phrase is null
        done()
  #.........................................................................................................
  # Build an upper bound for a v1 key-range scan by appending a 0xff byte to the prefix.
  v1_lte_from_gte = ( gte ) ->
    R = new Buffer ( last_idx = Buffer.byteLength gte ) + 1
    R.write gte
    R[ last_idx ] = 0xff
    return R
  #.........................................................................................................
  # Collect all keys recorded for `glyph` in the legacy v1 DB and emit `[ glyph, keys, ]`.
  $fetch_v1_entries = =>
    return $async ( glyph, done ) =>
      gte = "so|glyph:#{glyph}"
      lte = v1_lte_from_gte gte
      sub_input = v1_db[ '%self' ].createKeyStream { gte, lte, }
      Z = []
      # sub_input.on 'data', ( data ) -> debug '©9Wqdh', data
      sub_input
        .pipe $ ( key, send, end ) =>
          if key?
            Z.push key.toString 'utf-8'
          if end?
            end()
            done [ glyph, Z, ]
  #.........................................................................................................
  # prefix = { prefix: [ 'pos', 'cp/cid', ], }
  prefix = { prefix: [ 'pos', 'cp/inner/original', ], }
  input = HOLLERITH.create_phrasestream v2_db, prefix
  #.........................................................................................................
  # NOTE(review): both `D.$show()` and `$show()` sit in the pipeline, so every entry is printed twice —
  # once raw, once formatted.
  input
    .pipe $extract_glyph()
    .pipe $exclude_gaiji()
    # .pipe $show_progress()
    # .pipe $exclude_rare_glyphs()
    .pipe $test_for_predicate prd
    .pipe $fetch_v1_entries()
    .pipe D.$show()
    .pipe $show()
  #.........................................................................................................
  return null
#-----------------------------------------------------------------------------------------------------------
@show_encoding_sample = ->
  ### Demo: render the HOLLERITH codec's key/value encoding of a handful of sample phrases in
  human-readable form. Each phrase `[ subject, predicate, object, ]` is encoded the way the DB would
  encode it; the resulting key and value buffers are printed byte-by-byte through one of the codec's
  display encodings. Output goes to the loggers; nothing is returned or written to a DB. ###
  # Display-encoding toggles; only the last assignment takes effect.
  encoding = HOLLERITH.CODEC.encodings[ 'dbcs2' ]
  encoding = HOLLERITH.CODEC.encodings[ 'aleph' ]
  encoding = HOLLERITH.CODEC.encodings[ 'rdctn' ]
  #.........................................................................................................
  sample_phrases = [
    [ '丁', 'strokecount', 2, ]
    [ '三', 'strokecount', 3, ]
    [ '夫', 'strokecount', 5, ]
    [ '國', 'strokecount', 11, ]
    [ '形', 'strokecount', 7, ]
    [ '丁', 'componentcount', 1, ]
    [ '三', 'componentcount', 1, ]
    [ '夫', 'componentcount', 1, ]
    [ '國', 'componentcount', 4, ]
    [ '形', 'componentcount', 2, ]
    [ '丁', 'components', [ '丁', ], ]
    [ '三', 'components', [ '三', ], ]
    [ '夫', 'components', [ '夫', ], ]
    [ '國', 'components', [ '囗', '戈', '口', '一', ], ]
    [ '形', 'components', [ '开', '彡', ], ]
    ]
  #.........................................................................................................
  # Map each byte of a buffer to its display glyph in the selected encoding.
  rpr_bytes = ( buffer ) -> ( encoding[ buffer[ idx ] ] for idx in [ 0 ... buffer.length ] ).join ''
  #.........................................................................................................
  for [ subject, predicate, object, ] in sample_phrases
    key_bfr = HOLLERITH.CODEC.encode [ subject, predicate, ]
    value_bfr = new Buffer JSON.stringify object
    urge ( rpr_bytes key_bfr ), '┊', rpr_bytes value_bfr
  #.........................................................................................................
  demo_bfr = new Buffer '一x丁x丂' # NOTE(review): kept from the original; currently unused.
  # Show how the codec renders an encoded non-text phrase (a boolean and a float):
  help HOLLERITH.CODEC.rpr_of_buffer HOLLERITH.CODEC.encode [ true, -1 / 7, ]
############################################################################################################
unless module.parent?
  #---------------------------------------------------------------------------------------------------------
  ### Script entry point: runs only when this file is executed directly, not when `require`d. Overwrites
  the module-level `options` with the route of the Jizura v2 LevelDB, then runs one of the demos. ###
  options =
    #.......................................................................................................
    # 'route': njs_path.join __dirname, '../dbs/demo'
    'route': njs_path.resolve __dirname, '../../jizura-datasources/data/leveldb-v2'
    # 'route': '/tmp/leveldb'
  #---------------------------------------------------------------------------------------------------------
  debug '©AoOAS', options
  # Exactly one demo is enabled at a time; the others are kept around as toggles.
  # @find_good_kwic_sample_glyphs_3()
  @show_kwic_v2_and_v3_sample()
  # @show_codepoints_with_missing_predicates()
  # @show_encoding_sample()
  # @compile_encodings()
| true |
############################################################################################################
njs_path = require 'path'
# njs_fs = require 'fs'
join = njs_path.join
#...........................................................................................................
CND = require 'cnd'
rpr = CND.rpr
badge = 'HOLLERITH/demo'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
step = ( require 'coffeenode-suspend' ).step
D = require 'pipedreams'
$ = D.remit.bind D
$async = D.remit_async.bind D
ASYNC = require 'async'
CHR = require 'coffeenode-chr'
KWIC = require 'kwic'
#...........................................................................................................
new_db = require 'level'
HOLLERITH = require './main'
ƒ = CND.format_number.bind CND
#...........................................................................................................
options = null
#-----------------------------------------------------------------------------------------------------------
@_misfit = Symbol 'misfit'
#===========================================================================================================
# PIPEDREAMS
#-----------------------------------------------------------------------------------------------------------
D.new_indexer = ( idx = 0 ) -> ( data ) => [ idx++, data, ]
#===========================================================================================================
#
#-----------------------------------------------------------------------------------------------------------
@initialize = ( handler ) ->
options[ 'db' ] = HOLLERITH.new_db options[ 'route' ]
handler null
#-----------------------------------------------------------------------------------------------------------
@main = ( first_query ) ->
first_query ?= { gte: 'os|rank/cjt:0', lte: 'os|rank/cjt:9', }
step ( resume ) =>
yield @initialize resume
db = options[ 'db' ]
count_chrs = ( text ) -> ( CHR.chrs_from_text text, input: 'xncr' ).length
#.......................................................................................................
input = db[ '%self' ].createKeyStream first_query
# k = "so|glyph:繼|pod:"
# input = db[ '%self' ].createKeyStream db, { gte: k, lte: k + '\uffff' }
# debug '©cW8tK', HOLLERITH.new_key db, 'os', 'rank/cjt', '00000'
#.......................................................................................................
### TAINT We can currently not use `HOLLERITH2.read_sub` because HOLLERITH2 assumes a key-only
DB that uses binary encoding with a custom https://github.com/deanlandolt/bytewise layer; the current
Jizura DB version uses UTF-8 strings and is a key/value DB. ###
#.......................................................................................................
input
.pipe @_$split_bkey()
#.....................................................................................................
# .pipe HOLLERITH.read_sub db, indexed: yes, ( key ) =>
.pipe @read_sub db, indexed: yes, ( key ) =>
[ pt, ok, rank, sk, glyph, ] = key
sub_key = "PI:KEY:<KEY>END_PI{glyph}|PI:KEY:<KEY>END_PI
return db[ '%self' ].createValueStream { gte: sub_key, lte: sub_key + '\uffff' }
#.....................................................................................................
.pipe D.$densort 0, 0, true
#.....................................................................................................
.pipe $ ( [ idx, [ pod, ], ], send ) =>
debug '©jd5cE', pod
unless pod[ 'strokeorder/short' ]?
warn '©9YXoq', pod
else
glyph = pod[ 'glyph/uchr' ]
strokeorder = pod[ 'strokeorder/short' ][ 0 ].length
lineup = pod[ 'guide/lineup/uchr' ].replace /\u3000/g, ''
send [ glyph, strokeorder, lineup, ]
#.....................................................................................................
.pipe $ ( [ glyph, strokeorder, lineup, ], send ) =>
send [ glyph, strokeorder, count_chrs lineup, ]
#.....................................................................................................
.pipe D.$sort ( a, b ) ->
idx = 1
return +1 if a[ idx ] > b[ idx ]
return -1 if a[ idx ] < b[ idx ]
return 0
#.....................................................................................................
.pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@_$split_bkey = -> $ ( bkey, send ) => send @_split_bkey bkey
#-----------------------------------------------------------------------------------------------------------
@_split_bkey = ( bkey ) ->
R = bkey.toString 'utf-8'
R = ( R.split '|' )[ .. 2 ]
R = [ R[ 0 ], ( R[ 1 ].split ':' )..., ( R[ 2 ].split ':' )..., ]
return R
#-----------------------------------------------------------------------------------------------------------
@_$split_so_bkey = -> $ ( bkey, send ) => send @_split_so_bkey bkey
#-----------------------------------------------------------------------------------------------------------
@_split_so_bkey = ( bkey ) ->
R = bkey.toString 'utf-8'
R = R.split '|'
idx_txt = R[ 3 ]
R = [ ( R[ 1 ].split ':' )[ 1 ], ( R[ 2 ].split ':' )..., ]
R.push ( parseInt idx_txt, 10 ) if idx_txt? and idx_txt.length > 0
return R
#-----------------------------------------------------------------------------------------------------------
@_lte_from_gte = ( gte ) ->
R = new Buffer ( last_idx = Buffer.byteLength gte ) + 1
R.write gte
R[ last_idx ] = 0xff
return R
#-----------------------------------------------------------------------------------------------------------
@$lineup_from_glyph = ( db ) ->
settings =
indexed: no
single: yes
return @read_sub db, settings, ( glyph ) =>
lte = "so|glyph:#{glyph}|guide/lineup/uchr:"
sub_input = db[ '%self' ].createKeyStream { gte: lte, lte: @_lte_from_gte lte, }
return sub_input
#-----------------------------------------------------------------------------------------------------------
@$shapeclass_wbf_from_glyph_and_lineup = ( db ) ->
### TAINT wrong ###
settings =
indexed: no
single: yes
return @read_sub db, settings, ( [ glyph, lineup_glyphs, ] ) =>
for lineup_glyph in lineup_glyphs
do ( lineup_glyph ) =>
gte = "so|glyph:#{lineup_glyph}|factor/strokeclass/wbf:"
sub_input = db[ '%self' ].createKeyStream { gte: gte, lte: @_lte_from_gte gte, }
return sub_input
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_subject = ->
return $ ( lkey, send ) =>
[ pt, _, v0, _, v1, ] = lkey
send if pt is 'so' then v0 else v1
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_object = ->
return $ ( lkey, send ) =>
[ pt, _, v0, _, v1, ] = lkey
send if pt is 'so' then v1 else v0
#-----------------------------------------------------------------------------------------------------------
HOLLERITH.$pick_values = ->
return $ ( lkey, send ) =>
[ pt, _, v0, _, v1, ] = lkey
send if pt is 'so' then [ v0, v1, ] else [ v1, v0, ]
#-----------------------------------------------------------------------------------------------------------
@dump_jizura_db = ->
source_db = HOLLERITH.new_db '/Volumes/Storage/temp/jizura-hollerith2'
prefix = [ 'spo', '𡏠', ]
prefix = [ 'spo', '㔰', ]
input = HOLLERITH.create_phrasestream source_db, prefix
#.........................................................................................................
input
.pipe D.$count ( count ) -> help "read #{count} keys"
.pipe $ ( data, send ) => send JSON.stringify data
.pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@find_good_kwic_sample_glyphs_3 = ( db ) ->
### version for Hollerith2 DBs; using `HOLLERITH.remit_async` instead of `HOLLERITH.read_sub`. ###
###
* ▶ '[["勷",5,9907,["亠","吅","𠀎","𧘇","力"]],"41","25","11","35","53"]'
* ▶ '[["噿",5,13090,["口","羽","亠","从","十"]],"25","54","41","34","12"]'
* ▶ '[["塾",5,3818,["亠","口","子","丸","土"]],"41","25","51","35","12"]'
* ▶ '[["墩",5,5457,["土","亠","口","子","夊"]],"12","41","25","51","35"]'
* ▶ '[["孃",5,7225,["女","亠","吅","𠀎","𧘇"]],"53","41","25","11","35"]'
* ▶ '[["寡",5,3412,["宀","丆","且","八","刀"]],"44","13","25","34","53"]'
* ▶ '[["巕",5,13586,["山","卄","𠂤","辛","女"]],"25","12","32","41","53"]'
* ▶ '[["橔",5,13883,["木","亠","口","子","夊"]],"12","41","25","51","35"]'
* ▶ '[["灂",5,12349,["氵","爫","罒","","寸"]],"44","34","25","51","12"]'
* ▶ '[["纏",5,3421,["糹","广","里","八","土"]],"55","41","25","34","12"]'
* ▶ '[["纕",5,8882,["糹","亠","吅","𠀎","𧘇"]],"55","41","25","11","35"]'
* ▶ '[["鄸",5,8392,["卄","罒","冖","夕","阝"]],"12","25","45","35","52"]'
* ▶ '[["韽",5,10377,["亽","𠃌","酉","立","日"]],"34","5","12","41","25"]'
* ▶ '[["頀",5,8385,["立","日","卄","隹","又"]],"41","25","12","32","54"]'
* ▶ '[["驐",5,12644,["馬","亠","口","子","夊"]],"12","41","25","51","35"]'
* ▶ '[["骧",5,6010,["马","亠","吅","𠀎","𧘇"]],"55","41","25","11","35"]'
###
#.........................................................................................................
step ( resume ) =>
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
help "using DB at #{db[ '%self' ][ 'location' ]}"
#.......................................................................................................
CHR = require join __dirname, '../../coffeenode-chr'
chrs_from_text = ( text ) -> CHR.chrs_from_text text, input: 'xncr'
#.......................................................................................................
prefix = [ 'pos', 'guide/lineup/length', 5, ]
query = { prefix, }
input = HOLLERITH.create_phrasestream db, query
#.......................................................................................................
decode_lineup = ( lineup ) =>
return chrs_from_text lineup.replace /\u3000/g, ''
#.......................................................................................................
xncr_from_uchr = ( uchr ) =>
return if ( CHR.as_rsg uchr ) is 'u-pua' then ( CHR.as_xncr uchr, csg: 'jzr' ) else uchr
#.......................................................................................................
input
#.....................................................................................................
.pipe $async ( phrase, done ) =>
[ _, _, lineup_length, glyph, ] = phrase
sub_prefix = [ 'spo', glyph, 'rank/cjt', ]
sub_fallback = [ null, null, null, Infinity, ]
sub_query = { prefix: sub_prefix, fallback: sub_fallback, }
debug '©zfQhm', phrase, sub_prefix if glyph is '公'
HOLLERITH.read_one_phrase db, sub_query, ( error, sub_phrase ) =>
return done.error error if error?
# debug '©FST09', sub_phrase unless sub_phrase[ sub_phrase.length - 1 ] is Infinity
[ _, _, _, rank, ] = sub_phrase
done [ glyph, { lineup_length, rank, }, ]
#.....................................................................................................
# .pipe D.$show()
.pipe D.$filter ( [ glyph, { lineup_length, rank, }, ] ) -> rank < 15000
#.....................................................................................................
.pipe $async ( entry, done ) =>
[ glyph, { lineup_length, rank, }, ] = entry
sub_prefix = [ 'spo', glyph, 'guide/lineup/uchr', ]
sub_query = { prefix: sub_prefix, star: '*', fallback: null, }
HOLLERITH.read_one_phrase db, sub_query, ( error, sub_phrase ) =>
# debug '©h4GY2', sub_phrase
return done.error error if error?
return done() unless sub_phrase?
[ _, _, _, guides, ] = sub_phrase
guides = decode_lineup guides
done [ glyph, { lineup_length, rank, guides, }, ]
#.....................................................................................................
.pipe $async ( entry, done ) =>
[ glyph, { lineup_length, rank, guides, }, ] = entry
tasks = []
#...................................................................................................
for guide in guides
do ( guide ) =>
guide_xncr = xncr_from_uchr guide
sub_prefix = [ 'spo', guide_xncr, 'factor/shapeclass/wbf', ]
sub_fallback = [ null, null, null, 'X', ]
sub_query = { prefix: sub_prefix, fallback: sub_fallback, }
tasks.push ( handler ) -> HOLLERITH.read_one_phrase db, sub_query, handler
#...................................................................................................
ASYNC.parallelLimit tasks, 10, ( error, sub_phrases ) =>
return done.error error if error?
strokeclasses = []
for sub_phrase, sub_idx in sub_phrases
[ _, _, _, strokeorder, ] = sub_phrase
strokeclasses[ sub_idx ] = strokeorder[ 0 ]
done [ glyph, { lineup_length, rank, guides, strokeclasses, }, ]
#.....................................................................................................
.pipe D.$filter ( entry ) =>
[ glyph, { lineup_length, rank, guides, strokeclasses, }, ] = entry
return ( strokeclasses[ .. ].sort().join '' ) is '12345'
#.....................................................................................................
.pipe $ ( [ glyph, { lineup_length, rank, guides, strokeclasses, }, ], send ) ->
guides = guides.join ''
strokeclasses = strokeclasses.join ''
send [ glyph, { lineup_length, rank, guides, strokeclasses, }, ]
#.....................................................................................................
# .pipe D.$filter ( entry ) => entry[ 1 ][ 'strokeclasses' ] is '12345'
.pipe D.$show()
#-----------------------------------------------------------------------------------------------------------
@read_factors = ( db, handler ) ->
#.........................................................................................................
step ( resume ) =>
Z = {}
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
#.......................................................................................................
prefix = [ 'pos', 'factor/', ]
query = { prefix, star: '*', }
input = HOLLERITH.create_phrasestream db, query
#.......................................................................................................
input
.pipe do =>
last_sbj = null
target = null
#...................................................................................................
return $ ( phrase, send, end ) =>
#.................................................................................................
if phrase?
[ _, prd, obj, sbj, ] = phrase
prd = prd.replace /^factor\//g, ''
sbj = CHR.as_uchr sbj, input: 'xncr'
if sbj isnt last_sbj
send target if target?
target = Z[ sbj ]?= { glyph: sbj, }
last_sbj = sbj
target[ prd ] = obj
Z[ obj ] = target if prd is 'sortcode'
#.................................................................................................
if end?
send target if target?
end()
.pipe D.$on_end -> handler null, Z
#-----------------------------------------------------------------------------------------------------------
@read_sample = ( db, limit_or_list, handler ) ->
### Return a gamut of select glyphs from the DB. `limit_or_list` may be a list of glyphs or a number
representing an upper bound to the usage rank recorded as `rank/cjt`. If `limit_or_list` is a list,
a POD whose keys are the glyphs in the list is returned; if it is a number, a similar POD with all the
glyphs whose rank is not worse than the given limit is returned. If `limit_or_list` is smaller than zero
or equals infinity, `null` is returned to indicate absence of a filter. ###
Z = {}
#.......................................................................................................
if CND.isa_list limit_or_list
Z[ glyph ] = 1 for glyph in limit_or_list
return handler null, Z
#.......................................................................................................
return handler null, null if limit_or_list < 0 or limit_or_list is Infinity
#.......................................................................................................
throw new Error "expected list or number, got a #{type}" unless CND.isa_number limit_or_list
#.......................................................................................................
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
#.......................................................................................................
lo = [ 'pos', 'rank/cjt', 0, ]
hi = [ 'pos', 'rank/cjt', limit_or_list, ]
query = { lo, hi, }
input = HOLLERITH.create_phrasestream db, query
#.......................................................................................................
input
.pipe $ ( phrase, send ) =>
[ _, _, _, glyph, ] = phrase
Z[ glyph ] = 1
.pipe D.$on_end -> handler null, Z
#-----------------------------------------------------------------------------------------------------------
@show_kwic_v2_and_v3_sample = ( db ) ->
#.........................................................................................................
step ( resume ) =>
db_route = join __dirname, '../../jizura-datasources/data/leveldb-v2'
db ?= HOLLERITH.new_db db_route, create: no
help "using DB at #{db[ '%self' ][ 'location' ]}"
factor_infos = yield @read_factors db, resume
# debug '©g5bVR', factors; process.exit()
help "read #{( Object.keys factor_infos ).length} entries for factor_infos"
ranks = {}
include = Infinity
include = 15000
include = 50
include = 5000
# include = [ '寿', '邦', '帮', '畴', '铸', '筹', '涛', '祷', '绑', '綁', ]
# include = Array.from '未釐犛剺味昧眛魅鮇沬妹業寐鄴澲末抹茉枺沫袜妺'
# 'guide/hierarchy/uchr'
#.........................................................................................................
sample = yield @read_sample db, include, resume
#.........................................................................................................
$reorder_phrase = =>
return $ ( phrase, send ) =>
### extract sortcode ###
[ _, _, sortcode, glyph, _, ] = phrase
send [ glyph, sortcode, ]
#.........................................................................................................
$exclude_gaiji = =>
return D.$filter ( [ glyph, sortcode ] ) =>
return ( not glyph.startsWith '&' ) or ( glyph.startsWith '&jzr#' )
#.........................................................................................................
$include_sample = =>
return D.$filter ( [ glyph, sortcode ] ) => if sample? then ( glyph of sample ) else true
#.........................................................................................................
$extract_lineup = =>
return $ ( [ glyph, sortcode ], send ) =>
[ _, lineup, ] = sortcode.split ';'
[ infix, suffix, prefix, ] = lineup.split ','
lineup = prefix + '|' + infix + suffix
send [ glyph, lineup, ]
#.........................................................................................................
$format_sortcode_v3 = =>
return $ ( [ glyph, sortcode_plus, ], send ) =>
[ sortcode, infix, suffix, prefix, ] = sortcode_plus
prefix.unshift '\u3000' until prefix.length >= 6
suffix.push '\u3000' until suffix.length >= 6
prefix = prefix.join ''
suffix = suffix.join ''
lineup = prefix + '|' + infix + suffix
send [ glyph, lineup, sortcode, ]
#.........................................................................................................
$unpack = =>
return $ ( [ [ v1, v2, ], v3, ], send ) =>
send [ v1, v2, v3, ]
#.........................................................................................................
$transform_v1_v2 = => D.combine [
$reorder_phrase()
$exclude_gaiji()
$include_sample()
$extract_lineup()
]
#.........................................................................................................
$transform_v3 = => D.combine [
$reorder_phrase()
$exclude_gaiji()
$include_sample()
$format_sortcode_v3()
]
#.........................................................................................................
query_v1 = { prefix: [ 'pos', 'guide/kwic/v1/sortcode', ], }
query_v2 = { prefix: [ 'pos', 'guide/kwic/v2/sortcode', ], }
query_v3 = { prefix: [ 'pos', 'guide/kwic/v3/sortcode', ], }
input_v1 = ( HOLLERITH.create_phrasestream db, query_v1 ).pipe $transform_v1_v2()
input_v2 = ( HOLLERITH.create_phrasestream db, query_v2 ).pipe $transform_v1_v2()
input_v3 = ( HOLLERITH.create_phrasestream db, query_v3 ).pipe $transform_v3()
# .pipe D.$observe ( [ glyph, lineup, ] ) -> help glyph, lineup if glyph is '畴'
#.........................................................................................................
input_v1
.pipe D.$lockstep input_v2, fallback: [ null, null, ]
.pipe D.$lockstep input_v3
.pipe $unpack()
#.....................................................................................................
.pipe do =>
last_guide = null
return $ ( [ v1, v2, v3, ], send ) =>
[ glyph_v1
lineup_v1 ] = v1
this_guide = ( Array.from lineup_v1 )[ 7 ]
return send [ v1, v2, v3, ] if this_guide is last_guide
last_guide = this_guide
linup = " |#{this_guide} "
sortcode_v3 = v3[ 2 ]
send [ [ this_guide, linup, ], [ this_guide, linup, ], [ this_guide, linup, sortcode_v3, ], ]
send [ v1, v2, v3, ]
#.....................................................................................................
.pipe do =>
count = 0
wspc = '\u3000'
nspc = '\u3000'
# style = 'A'
# style = 'B'
style = 'C'
switch style
when 'A'
vsep = '◉'
include_v1 = no
include_sortcode = no
for_mkts = no
ldiff_0 = ' '
ldiff_1 = '<'
rdiff_0 = ' '
rdiff_1 = '>'
when 'B'
vsep = '║'
include_v1 = yes
for_mkts = yes
include_sortcode = no
ldiff_0 = wspc
ldiff_1 = '<'
rdiff_0 = wspc
rdiff_1 = '>'
when 'C'
vsep = '║'
include_v1 = yes
for_mkts = yes
include_sortcode = no
ldiff_0 = wspc
ldiff_1 = '<'
rdiff_0 = wspc
rdiff_1 = '>'
else throw new Error "unknown style #{rpr style}"
#...................................................................................................
return D.$observe ( [ v1, v2, v3, ] ) =>
[ glyph_v1, lineup_v1, ] = v1
[ glyph_v2, lineup_v2, ] = v2
[ glyph_v3, lineup_v3, sortcode_v3, ] = v3
#.................................................................................................
if include_sortcode
( sortcode_v3[ idx ] = '________' if code is null ) for code, idx in sortcode_v3
sortcode_v3 = sortcode_v3.join ' '
#.................................................................................................
diff = []
diff_v1 = if glyph_v1 is glyph_v2 then ldiff_0 else ldiff_1
diff_v2 = if glyph_v2 is glyph_v3 then rdiff_0 else rdiff_1
# debug '©28420', rpr lineup_v3
#.................................................................................................
if include_v1
line = lineup_v1 + nspc + glyph_v1
line += nspc + diff_v1 + vsep + nspc + lineup_v2 + nspc + glyph_v2
line += nspc + vsep + diff_v2 + nspc + lineup_v3 + nspc + glyph_v3
#.................................................................................................
else
line = lineup_v2 + nspc + glyph_v2
line += nspc + vsep + diff_v2 + nspc + lineup_v3 + nspc + glyph_v3
#.................................................................................................
if for_mkts
line += '<<< >>>'
line += sortcode_v3 if include_sortcode
#.................................................................................................
else
line += spc + spc + spc
#.................................................................................................
count += 1
help ƒ count if count % 500 is 0
echo line
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@show_codepoints_with_missing_predicates = ( v2_db, prd = 'guide/kwic/v1/lineup' ) ->
#.........................................................................................................
home = join __dirname, '../../jizura-datasources'
v1_route = join home, 'data/leveldb'
v2_route = join home, 'data/leveldb-v2'
v1_db = HOLLERITH.new_db v1_route, create: no
v2_db ?= HOLLERITH.new_db v2_route, create: no
help "using DB at #{v1_db[ '%self' ][ 'location' ]}"
help "using DB at #{v2_db[ '%self' ][ 'location' ]}"
rank_limit = Infinity
rank_limit = 100
#.........................................................................................................
$extract_glyph = => $ ( xpos, send ) => send xpos[ 3 ]
$exclude_gaiji = => D.$filter ( glyph ) => ( not glyph.startsWith '&' ) or ( glyph.startsWith '&jzr#' )
#.........................................................................................................
$show_progress = =>
count = 0
return D.$observe =>
count += +1
echo ƒ count if count % 10000 is 0
#.........................................................................................................
$show = => D.$observe ( [ glyph, keys, ] ) =>
echo ( CHR.as_fncr glyph, input: 'xncr' ), CHR.as_uchr glyph, input: 'xncr'
for key in keys
echo ' ' + key
#.........................................................................................................
$exclude_rare_glyphs = =>
### TAINT code duplication; factor out ###
ranks = {}
return $async ( glyph, done ) =>
### filter out 'uncommon' glyphs (whose rank exceeds rank limit) ###
# debug '©72bFK', glyph, rank if ( rank = ranks[ glyph ] )?
return done glyph if rank_limit < 0 or rank_limit is Infinity
return done glyph if ( rank = ranks[ glyph ] )? and rank < rank_limit
sub_prefix = [ 'spo', glyph, 'rank/cjt', ]
sub_query = { prefix: sub_prefix, fallback: null, }
HOLLERITH.read_one_phrase v2_db, sub_query, ( error, sub_phrase ) =>
return done.error error if error?
if sub_phrase is null
ranks[ glyph ] = Infinity
return done()
[ _, _, _, rank, ] = sub_phrase
ranks[ glyph ] = rank
return done() unless rank < rank_limit
done glyph
#.........................................................................................................
$test_for_predicate = ( prd ) =>
return $async ( glyph, done ) =>
sub_prefix = [ 'spo', glyph, prd, ]
sub_query = { prefix: sub_prefix, fallback: null, }
HOLLERITH.read_one_phrase v2_db, sub_query, ( error, sub_phrase ) =>
return done.error error if error?
return done glyph if sub_phrase is null
done()
#.........................................................................................................
v1_lte_from_gte = ( gte ) ->
R = new Buffer ( last_idx = Buffer.byteLength gte ) + 1
R.write gte
R[ last_idx ] = 0xff
return R
#.........................................................................................................
$fetch_v1_entries = =>
return $async ( glyph, done ) =>
gte = "so|glyph:#{glyph}"
lte = v1_lte_from_gte gte
sub_input = v1_db[ '%self' ].createKeyStream { gte, lte, }
Z = []
# sub_input.on 'data', ( data ) -> debug '©9Wqdh', data
sub_input
.pipe $ ( key, send, end ) =>
if key?
Z.push key.toString 'utf-8'
if end?
end()
done [ glyph, Z, ]
#.........................................................................................................
# prefix = { prefix: [ 'pos', 'cp/cid', ], }
prefix = { prefix: [ 'pos', 'cp/inner/original', ], }
input = HOLLERITH.create_phrasestream v2_db, prefix
#.........................................................................................................
input
.pipe $extract_glyph()
.pipe $exclude_gaiji()
# .pipe $show_progress()
# .pipe $exclude_rare_glyphs()
.pipe $test_for_predicate prd
.pipe $fetch_v1_entries()
.pipe D.$show()
.pipe $show()
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@show_encoding_sample = ->
encoding = HOLLERITH.CODEC.encodings[ 'dbcs2' ]
encoding = HOLLERITH.CODEC.encodings[ 'aleph' ]
encoding = HOLLERITH.CODEC.encodings[ 'rdctn' ]
phrases = [
[ '丁', 'strokecount', 2, ]
[ '三', 'strokecount', 3, ]
[ '夫', 'strokecount', 5, ]
[ '國', 'strokecount', 11, ]
[ '形', 'strokecount', 7, ]
[ '丁', 'componentcount', 1, ]
[ '三', 'componentcount', 1, ]
[ '夫', 'componentcount', 1, ]
[ '國', 'componentcount', 4, ]
[ '形', 'componentcount', 2, ]
[ '丁', 'components', [ '丁', ], ]
[ '三', 'components', [ '三', ], ]
[ '夫', 'components', [ '夫', ], ]
[ '國', 'components', [ '囗', '戈', '口', '一', ], ]
[ '形', 'components', [ '开', '彡', ], ]
]
for [ sbj, prd, obj, ] in phrases
key = ( HOLLERITH.CODEC.encode [ sbj, prd, ], )
value = ( new Buffer JSON.stringify obj )
key_rpr = ( encoding[ key[ idx ] ] for idx in [ 0 ... key.length ] ).join ''
value_rpr = ( encoding[ value[ idx ] ] for idx in [ 0 ... value.length ] ).join ''
urge key_rpr, '┊', value_rpr
b = new Buffer '一x丁x丂'
# text = new Buffer '一'
# text_rpr =
# help b, text_rpr
help HOLLERITH.CODEC.rpr_of_buffer HOLLERITH.CODEC.encode [ true, -1 / 7, ]
# chrs = []
# for cid in [ 0 .. 255 ]
# chrs.push String.fromCodePoint cid
# chrs.push '\n' if cid > 0 and cid % 32 is 0
# debug '©ZgY4D', chrs
# help chrs.join ''
# urge ( String.fromCodePoint cid for cid in [ 0x2400 .. 0x2426 ] ).join ''
# urge ( String.fromCodePoint cid for cid in [ 0x24b6 .. 0x24e9 ] ).join ''
# urge ( String.fromCodePoint cid for cid in [ 0xff01 .. 0xff60 ] ).join ''
############################################################################################################
unless module.parent?
#---------------------------------------------------------------------------------------------------------
options =
#.......................................................................................................
# 'route': njs_path.join __dirname, '../dbs/demo'
'route': njs_path.resolve __dirname, '../../jizura-datasources/data/leveldb-v2'
# 'route': '/tmp/leveldb'
#---------------------------------------------------------------------------------------------------------
debug '©AoOAS', options
# @find_good_kwic_sample_glyphs_3()
@show_kwic_v2_and_v3_sample()
# @show_codepoints_with_missing_predicates()
# @show_encoding_sample()
# @compile_encodings()
|
[
{
"context": "[[table.column('id'), 1], [table.column('name'), 'carl']]\n assert.equal manager.toSql(), 'INSERT IN",
"end": 1829,
"score": 0.999201774597168,
"start": 1825,
"tag": "NAME",
"value": "carl"
},
{
"context": " 'INSERT INTO \"users\" (\"id\", \"name\") VALUES (1, \\'carl\\')'\n\n it 'defaults the table', ->\n table ",
"end": 1922,
"score": 0.9989029169082642,
"start": 1918,
"tag": "NAME",
"value": "carl"
},
{
"context": "[[table.column('id'), 1], [table.column('name'), 'carl']]\n assert.equal manager.toSql(), 'INSERT IN",
"end": 2103,
"score": 0.999788761138916,
"start": 2099,
"tag": "NAME",
"value": "carl"
},
{
"context": " 'INSERT INTO \"users\" (\"id\", \"name\") VALUES (1, \\'carl\\')'\n\n it 'it takes an empty list', ->\n ma",
"end": 2196,
"score": 0.9997738003730774,
"start": 2192,
"tag": "NAME",
"value": "carl"
},
{
"context": "ble\n\n manager.values(new Nodes.Values([1, 'carl']))\n manager.columns().push table.column('",
"end": 3322,
"score": 0.9996207356452942,
"start": 3318,
"tag": "NAME",
"value": "carl"
},
{
"context": " 'INSERT INTO \"users\" (\"id\", \"name\") VALUES (1, \\'carl\\')'\n",
"end": 3521,
"score": 0.9997690916061401,
"start": 3517,
"tag": "NAME",
"value": "carl"
}
] | test/insert-manager-test.coffee | arthurschreiber/rel | 1 | assert = require('chai').assert
SelectManager = require '../src/select-manager'
InsertManager = require '../src/insert-manager'
Table = require '../src/table'
SqlLiteral = require('../src/nodes/sql-literal')
Rel = require('../src/rel')
Nodes = require '../src/nodes/nodes'
describe 'Inserting stuff', ->
describe 'An insert manager', ->
it 'can create a Values node', ->
table = new Table 'users'
manager = new InsertManager()
values = manager.createValues ['a', 'b'], ['c', 'd']
assert.equal values.left.length, ['a', 'b'].length
assert.equal values.right.length, ['c', 'd'].length
it 'allows sql literals', ->
table = new Table 'users'
manager = new InsertManager()
manager.values(manager.createValues [Rel.star()], ['a'])
assert.equal manager.toSql(), 'INSERT INTO NULL VALUES (*)'
it 'inserts false', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('bool'), false]]
assert.equal manager.toSql(), 'INSERT INTO "users" ("bool") VALUES (false)'
it 'inserts null', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('id'), null]]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id") VALUES (NULL)'
it 'inserts time', ->
table = new Table 'users'
manager = new InsertManager()
time = new Date()
attribute = table.column('created_at')
manager.insert [[attribute, time]]
assert.equal manager.toSql(), "INSERT INTO \"users\" (\"created_at\") VALUES ('#{time.toISOString()}')"
it 'takes a list of lists', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.insert [[table.column('id'), 1], [table.column('name'), 'carl']]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'carl\')'
it 'defaults the table', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('id'), 1], [table.column('name'), 'carl']]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'carl\')'
it 'it takes an empty list', ->
manager = new InsertManager()
manager.insert []
assert.strictEqual manager.ast.values, null
describe 'into', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
assert.equal manager.toSql(), 'INSERT INTO "users"'
describe 'columns', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.columns().push table.column('id')
assert.equal manager.toSql(), 'INSERT INTO "users" ("id")'
describe 'values', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.values(new Nodes.Values([1]))
assert.equal manager.toSql(), 'INSERT INTO "users" VALUES (1)'
describe 'combo', ->
it 'puts shit together', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.values(new Nodes.Values([1, 'carl']))
manager.columns().push table.column('id')
manager.columns().push table.column('name')
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'carl\')'
| 71585 | assert = require('chai').assert
SelectManager = require '../src/select-manager'
InsertManager = require '../src/insert-manager'
Table = require '../src/table'
SqlLiteral = require('../src/nodes/sql-literal')
Rel = require('../src/rel')
Nodes = require '../src/nodes/nodes'
describe 'Inserting stuff', ->
describe 'An insert manager', ->
it 'can create a Values node', ->
table = new Table 'users'
manager = new InsertManager()
values = manager.createValues ['a', 'b'], ['c', 'd']
assert.equal values.left.length, ['a', 'b'].length
assert.equal values.right.length, ['c', 'd'].length
it 'allows sql literals', ->
table = new Table 'users'
manager = new InsertManager()
manager.values(manager.createValues [Rel.star()], ['a'])
assert.equal manager.toSql(), 'INSERT INTO NULL VALUES (*)'
it 'inserts false', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('bool'), false]]
assert.equal manager.toSql(), 'INSERT INTO "users" ("bool") VALUES (false)'
it 'inserts null', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('id'), null]]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id") VALUES (NULL)'
it 'inserts time', ->
table = new Table 'users'
manager = new InsertManager()
time = new Date()
attribute = table.column('created_at')
manager.insert [[attribute, time]]
assert.equal manager.toSql(), "INSERT INTO \"users\" (\"created_at\") VALUES ('#{time.toISOString()}')"
it 'takes a list of lists', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.insert [[table.column('id'), 1], [table.column('name'), '<NAME>']]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'<NAME>\')'
it 'defaults the table', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('id'), 1], [table.column('name'), '<NAME>']]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'<NAME>\')'
it 'it takes an empty list', ->
manager = new InsertManager()
manager.insert []
assert.strictEqual manager.ast.values, null
describe 'into', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
assert.equal manager.toSql(), 'INSERT INTO "users"'
describe 'columns', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.columns().push table.column('id')
assert.equal manager.toSql(), 'INSERT INTO "users" ("id")'
describe 'values', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.values(new Nodes.Values([1]))
assert.equal manager.toSql(), 'INSERT INTO "users" VALUES (1)'
describe 'combo', ->
it 'puts shit together', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.values(new Nodes.Values([1, '<NAME>']))
manager.columns().push table.column('id')
manager.columns().push table.column('name')
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'<NAME>\')'
| true | assert = require('chai').assert
SelectManager = require '../src/select-manager'
InsertManager = require '../src/insert-manager'
Table = require '../src/table'
SqlLiteral = require('../src/nodes/sql-literal')
Rel = require('../src/rel')
Nodes = require '../src/nodes/nodes'
describe 'Inserting stuff', ->
describe 'An insert manager', ->
it 'can create a Values node', ->
table = new Table 'users'
manager = new InsertManager()
values = manager.createValues ['a', 'b'], ['c', 'd']
assert.equal values.left.length, ['a', 'b'].length
assert.equal values.right.length, ['c', 'd'].length
it 'allows sql literals', ->
table = new Table 'users'
manager = new InsertManager()
manager.values(manager.createValues [Rel.star()], ['a'])
assert.equal manager.toSql(), 'INSERT INTO NULL VALUES (*)'
it 'inserts false', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('bool'), false]]
assert.equal manager.toSql(), 'INSERT INTO "users" ("bool") VALUES (false)'
it 'inserts null', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('id'), null]]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id") VALUES (NULL)'
it 'inserts time', ->
table = new Table 'users'
manager = new InsertManager()
time = new Date()
attribute = table.column('created_at')
manager.insert [[attribute, time]]
assert.equal manager.toSql(), "INSERT INTO \"users\" (\"created_at\") VALUES ('#{time.toISOString()}')"
it 'takes a list of lists', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.insert [[table.column('id'), 1], [table.column('name'), 'PI:NAME:<NAME>END_PI']]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'PI:NAME:<NAME>END_PI\')'
it 'defaults the table', ->
table = new Table 'users'
manager = new InsertManager()
manager.insert [[table.column('id'), 1], [table.column('name'), 'PI:NAME:<NAME>END_PI']]
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'PI:NAME:<NAME>END_PI\')'
it 'it takes an empty list', ->
manager = new InsertManager()
manager.insert []
assert.strictEqual manager.ast.values, null
describe 'into', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
assert.equal manager.toSql(), 'INSERT INTO "users"'
describe 'columns', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.columns().push table.column('id')
assert.equal manager.toSql(), 'INSERT INTO "users" ("id")'
describe 'values', ->
it 'converts to sql', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.values(new Nodes.Values([1]))
assert.equal manager.toSql(), 'INSERT INTO "users" VALUES (1)'
describe 'combo', ->
it 'puts shit together', ->
table = new Table 'users'
manager = new InsertManager()
manager.into table
manager.values(new Nodes.Values([1, 'PI:NAME:<NAME>END_PI']))
manager.columns().push table.column('id')
manager.columns().push table.column('name')
assert.equal manager.toSql(), 'INSERT INTO "users" ("id", "name") VALUES (1, \'PI:NAME:<NAME>END_PI\')'
|
[
{
"context": " #\n # Tag.\n #\n # Created by hector spc <hector@aerstudio.com>\n # Aer Studio \n # http:/",
"end": 40,
"score": 0.976398229598999,
"start": 30,
"tag": "NAME",
"value": "hector spc"
},
{
"context": " #\n # Tag.\n #\n # Created by hector spc <hector@aerstudio.com>\n # Aer Studio \n # http://www.aerstudio.com\n #\n",
"end": 62,
"score": 0.9999329447746277,
"start": 42,
"tag": "EMAIL",
"value": "hector@aerstudio.com"
}
] | src/collections/tags_collection.coffee | aerstudio/Phallanxpress | 1 | #
# Tag.
#
# Created by hector spc <hector@aerstudio.com>
# Aer Studio
# http://www.aerstudio.com
#
# Sun Mar 04 2012
#
# collections/tags_collection.js.coffee
#
class Phallanxpress.Tags extends Phallanxpress.Collection
model: Phallanxpress.Tag
parseTag: 'tags'
tagList: (options)->
@_wpAPI('get_tag_index', options)
| 82551 | #
# Tag.
#
# Created by <NAME> <<EMAIL>>
# Aer Studio
# http://www.aerstudio.com
#
# Sun Mar 04 2012
#
# collections/tags_collection.js.coffee
#
class Phallanxpress.Tags extends Phallanxpress.Collection
model: Phallanxpress.Tag
parseTag: 'tags'
tagList: (options)->
@_wpAPI('get_tag_index', options)
| true | #
# Tag.
#
# Created by PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Aer Studio
# http://www.aerstudio.com
#
# Sun Mar 04 2012
#
# collections/tags_collection.js.coffee
#
class Phallanxpress.Tags extends Phallanxpress.Collection
model: Phallanxpress.Tag
parseTag: 'tags'
tagList: (options)->
@_wpAPI('get_tag_index', options)
|
[
{
"context": "###\n backbone-orm.js 0.5.12\n Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm\n Lice",
"end": 58,
"score": 0.998772144317627,
"start": 50,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": " Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm\n License: MIT (http://www.opensourc",
"end": 88,
"score": 0.9997255802154541,
"start": 80,
"tag": "USERNAME",
"value": "vidigami"
}
] | src/node/utils.coffee | michaelBenin/backbone-orm | 1 | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
fs = require 'fs'
path = require 'path'
_ = require 'underscore'
Backbone = require 'backbone'
Queue = require '../queue'
Utils = require '../utils'
# @private
module.exports = class NodeUtils
@findModels = (directory, options, callback) ->
model_types = []
findModelsInDirectory = (directory, options, callback) ->
fs.readdir directory, (err, files) ->
return callback(err) if err
return callback(null, model_types) unless files
queue = new Queue(1)
for file in files
do (file) -> queue.defer (callback) ->
pathed_file = path.join(directory, file)
fs.stat pathed_file, (err, stat) ->
return callback(err) if err
return findModelsInDirectory(pathed_file, options, callback) if stat.isDirectory() # a directory
extension = path.extname(pathed_file)
return callback() unless (extension is '.js' or extension is '.coffee')
try
model_path = path.join(directory, file)
model_type = require(model_path)
return callback() unless (model_type and _.isFunction(model_type) and Utils.isModel(new model_type()) and model_type.resetSchema)
model_type.path = model_path if options.append_path
model_types.push(model_type)
callback()
catch err
console.log "findModels: skipping: #{err}" if options.verbose
callback()
queue.await (err) ->
callback(err) if err
callback(null, model_types)
findModelsInDirectory(directory, options, callback)
@resetSchemasByDirectory: (directory, options, callback) =>
[options, callback] = [{}, options] if arguments.length is 2
@findModels directory, options, (err, model_types) ->
return callback(err) if err
queue = new Queue(1)
for model_type in model_types
do (model_type) -> queue.defer (callback) -> model_type.resetSchema options, callback
queue.await (err) ->
console.log "resetSchemasByDirectory: failed to reset schemas: #{err}" if err
callback(err)
| 104 | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 <NAME> - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
fs = require 'fs'
path = require 'path'
_ = require 'underscore'
Backbone = require 'backbone'
Queue = require '../queue'
Utils = require '../utils'
# @private
module.exports = class NodeUtils
@findModels = (directory, options, callback) ->
model_types = []
findModelsInDirectory = (directory, options, callback) ->
fs.readdir directory, (err, files) ->
return callback(err) if err
return callback(null, model_types) unless files
queue = new Queue(1)
for file in files
do (file) -> queue.defer (callback) ->
pathed_file = path.join(directory, file)
fs.stat pathed_file, (err, stat) ->
return callback(err) if err
return findModelsInDirectory(pathed_file, options, callback) if stat.isDirectory() # a directory
extension = path.extname(pathed_file)
return callback() unless (extension is '.js' or extension is '.coffee')
try
model_path = path.join(directory, file)
model_type = require(model_path)
return callback() unless (model_type and _.isFunction(model_type) and Utils.isModel(new model_type()) and model_type.resetSchema)
model_type.path = model_path if options.append_path
model_types.push(model_type)
callback()
catch err
console.log "findModels: skipping: #{err}" if options.verbose
callback()
queue.await (err) ->
callback(err) if err
callback(null, model_types)
findModelsInDirectory(directory, options, callback)
@resetSchemasByDirectory: (directory, options, callback) =>
[options, callback] = [{}, options] if arguments.length is 2
@findModels directory, options, (err, model_types) ->
return callback(err) if err
queue = new Queue(1)
for model_type in model_types
do (model_type) -> queue.defer (callback) -> model_type.resetSchema options, callback
queue.await (err) ->
console.log "resetSchemasByDirectory: failed to reset schemas: #{err}" if err
callback(err)
| true | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 PI:NAME:<NAME>END_PI - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
fs = require 'fs'
path = require 'path'
_ = require 'underscore'
Backbone = require 'backbone'
Queue = require '../queue'
Utils = require '../utils'
# @private
module.exports = class NodeUtils
@findModels = (directory, options, callback) ->
model_types = []
findModelsInDirectory = (directory, options, callback) ->
fs.readdir directory, (err, files) ->
return callback(err) if err
return callback(null, model_types) unless files
queue = new Queue(1)
for file in files
do (file) -> queue.defer (callback) ->
pathed_file = path.join(directory, file)
fs.stat pathed_file, (err, stat) ->
return callback(err) if err
return findModelsInDirectory(pathed_file, options, callback) if stat.isDirectory() # a directory
extension = path.extname(pathed_file)
return callback() unless (extension is '.js' or extension is '.coffee')
try
model_path = path.join(directory, file)
model_type = require(model_path)
return callback() unless (model_type and _.isFunction(model_type) and Utils.isModel(new model_type()) and model_type.resetSchema)
model_type.path = model_path if options.append_path
model_types.push(model_type)
callback()
catch err
console.log "findModels: skipping: #{err}" if options.verbose
callback()
queue.await (err) ->
callback(err) if err
callback(null, model_types)
findModelsInDirectory(directory, options, callback)
@resetSchemasByDirectory: (directory, options, callback) =>
[options, callback] = [{}, options] if arguments.length is 2
@findModels directory, options, (err, model_types) ->
return callback(err) if err
queue = new Queue(1)
for model_type in model_types
do (model_type) -> queue.defer (callback) -> model_type.resetSchema options, callback
queue.await (err) ->
console.log "resetSchemasByDirectory: failed to reset schemas: #{err}" if err
callback(err)
|
[
{
"context": "plication/xml;q=0.9,*/*;q=0.8'\n 'Cookie': 'visid_nxg=CMS4MlQAAAAABQdallWJwB5iSDr0b8sm1J+UV2m8qms6Dib2+",
"end": 1334,
"score": 0.6833659410476685,
"start": 1325,
"tag": "KEY",
"value": "visid_nxg"
},
{
"context": "xml;q=0.9,*/*;q=0.8'\n 'Cookie': 'visid_nxg=CMS4MlQAAAAABQdallWJwB5iSDr0b8sm1J+UV2m8qms6Dib2+4LZG1v/1+58mxYJkudELIcW77d8tZz6Ss23KRmrgDaf4Z/gpA3G5Ci9ttj54aY='\n 'Accept-Language': 'en-US,en;q=0.5'\n\n ",
"end": 1448,
"score": 0.7411609888076782,
"start": 1335,
"tag": "KEY",
"value": "CMS4MlQAAAAABQdallWJwB5iSDr0b8sm1J+UV2m8qms6Dib2+4LZG1v/1+58mxYJkudELIcW77d8tZz6Ss23KRmrgDaf4Z/gpA3G5Ci9ttj54aY='"
}
] | api/oversea_rate.coffee | indiejoseph/hkpost-api-server | 0 | 'use strict'
router = (require 'express').Router()
LRU = require 'lru-cache'
config = require '../config'
request = require 'request'
cheerio = require 'cheerio'
# Cache of computed rates; entries expire after the configured max age.
cache = LRU { maxAge: config.cache.maxAge }

# Routes
#
# GET /oversea_rate?weight=<grams>&dest=<code>&mail=<type>
#
# Posts the query to the Hongkong Post oversea-rate calculator, scrapes the
# returned page and responds with the parsed postage prices as JSON.
# Results are cached per (mail, dest, weight) combination.
router.route('/oversea_rate')
  .get (req, res, next) ->
    # mail = ['letter_postcard', 'printed_papers', 'small_packet', 'parcel']
    weight = req.query.weight
    dest = req.query.dest
    mail = req.query.mail

    # validation: all three query parameters are required
    errors = []
    unless dest
      errors.push 'Please select destination.'
    unless mail
      errors.push 'Please select mail type.'
    unless weight
      errors.push 'Please enter correct number for weight (Numeric Value).'
    if errors.length
      return res.status(500).json { errors: errors }

    key = "mail:#{ mail },dest:#{ dest },weight:#{ weight }"

    # serve from cache when possible to avoid hitting the upstream service
    if cache.has key
      return res.json cache.get(key)

    options =
      url: config.hkpost.overseaRate
      method: 'POST'
      form:
        weight: weight
        mail: mail
        destination: dest
      headers:
        'Host': 'app1.hongkongpost.hk'
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0'
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
        'Cookie': 'visid_nxg=CMS4MlQAAAAABQdallWJwB5iSDr0b8sm1J+UV2m8qms6Dib2+4LZG1v/1+58mxYJkudELIcW77d8tZz6Ss23KRmrgDaf4Z/gpA3G5Ci9ttj54aY='
        'Accept-Language': 'en-US,en;q=0.5'

    request options, (err, response, html) ->
      # BUG FIX: this branch previously called `response.status(500).json(...)`,
      # but `response` is the incoming response object from the request module
      # and has no `.status()` method — any upstream failure crashed the
      # handler. Reply through the express `res` object instead, and guard
      # `err.message` since `err` is null when only the status code is bad.
      if err or response?.statusCode isnt 200
        message = if err then err.message else "Unexpected response status: #{response?.statusCode}"
        return res.status(500).json { errors: [ message ] }
      prices = {}
      # parse the returned page; each rate is exposed as a hidden input field
      $ = cheerio.load html
      selectors = [
        's_rate'
        'a_rate'
        'spt_rate'
        'm05_rate'
        'm10_rate'
        'm20_rate'
        'm30_rate'
      ]
      for selector in selectors
        $input = $ '#' + selector
        if $input.length
          # NOTE(review): parseInt truncates decimal rates (e.g. "3.40" -> 3);
          # parseFloat may be intended — kept as-is for compatibility.
          prices[selector] = parseInt $input.val()
      # add to cache
      cache.set key, prices
      # return
      res.json prices

module.exports = router
| 109675 | 'use strict'
router = (require 'express').Router()
LRU = require 'lru-cache'
config = require '../config'
request = require 'request'
cheerio = require 'cheerio'
cache = LRU { maxAge: config.cache.maxAge }
#routes
router.route('/oversea_rate')
.get (req, res, next) ->
# mail = ['letter_postcard', 'printed_papers', 'small_packet', 'parcel']
weight = req.query.weight
dest = req.query.dest
mail = req.query.mail
# validation
errors = []
unless dest
errors.push 'Please select destination.'
unless mail
errors.push 'Please select mail type.'
unless weight
errors.push 'Please enter correct number for weight (Numeric Value).'
if errors.length
return res.status(500).json { errors: errors }
key = "mail:#{ mail },dest:#{ dest },weight:#{ weight }"
# check cache
if cache.has key
return res.json cache.get(key)
options =
url: config.hkpost.overseaRate
method: 'POST'
form:
weight: weight
mail: mail
destination: dest
headers:
'Host': 'app1.hongkongpost.hk'
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0'
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
'Cookie': '<KEY>=<KEY>
'Accept-Language': 'en-US,en;q=0.5'
request options, (err, response, html) ->
if err or response.statusCode isnt 200
return response.status(500).json { errors: [ err.message ] }
prices = {}
# initial cheerio
$ = cheerio.load html
selectors = [
's_rate'
'a_rate'
'spt_rate'
'm05_rate'
'm10_rate'
'm20_rate'
'm30_rate'
]
for selector in selectors
$input = $ '#' + selector
if $input.length
prices[selector] = parseInt $input.val()
# add to cache
cache.set key, prices
# return
res.json prices
module.exports = router
| true | 'use strict'
router = (require 'express').Router()
LRU = require 'lru-cache'
config = require '../config'
request = require 'request'
cheerio = require 'cheerio'
cache = LRU { maxAge: config.cache.maxAge }
#routes
router.route('/oversea_rate')
.get (req, res, next) ->
# mail = ['letter_postcard', 'printed_papers', 'small_packet', 'parcel']
weight = req.query.weight
dest = req.query.dest
mail = req.query.mail
# validation
errors = []
unless dest
errors.push 'Please select destination.'
unless mail
errors.push 'Please select mail type.'
unless weight
errors.push 'Please enter correct number for weight (Numeric Value).'
if errors.length
return res.status(500).json { errors: errors }
key = "mail:#{ mail },dest:#{ dest },weight:#{ weight }"
# check cache
if cache.has key
return res.json cache.get(key)
options =
url: config.hkpost.overseaRate
method: 'POST'
form:
weight: weight
mail: mail
destination: dest
headers:
'Host': 'app1.hongkongpost.hk'
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0'
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
'Cookie': 'PI:KEY:<KEY>END_PI=PI:KEY:<KEY>END_PI
'Accept-Language': 'en-US,en;q=0.5'
request options, (err, response, html) ->
if err or response.statusCode isnt 200
return response.status(500).json { errors: [ err.message ] }
prices = {}
# initial cheerio
$ = cheerio.load html
selectors = [
's_rate'
'a_rate'
'spt_rate'
'm05_rate'
'm10_rate'
'm20_rate'
'm30_rate'
]
for selector in selectors
$input = $ '#' + selector
if $input.length
prices[selector] = parseInt $input.val()
# add to cache
cache.set key, prices
# return
res.json prices
module.exports = router
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9991912841796875,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-http-client-agent.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Issues one GET against the local test server and watches the underlying
# socket. Every socket "close" bumps the shared `count`; on the final close we
# assert the agent holds no sockets or pending requests for this host and shut
# the server down, otherwise we assert the closed socket has left the pool.
request = (i) ->
  clientReq = http.get(
    port: common.PORT
    path: "/" + i
  , (clientRes) ->
    sock = clientReq.socket
    sock.on "close", ->
      count += 1
      if count is max
        assert not http.globalAgent.sockets.hasOwnProperty(name)
        assert not http.globalAgent.requests.hasOwnProperty(name)
        server.close()
      else
        assert.equal http.globalAgent.sockets[name].indexOf(sock), -1
      return
    clientRes.resume()
    return
  )
  return
common = require("../common")
assert = require("assert")
http = require("http")
# Agent pool key for connections to the test server
name = http.globalAgent.getName(port: common.PORT)
# Total number of client requests to issue
max = 3
# Number of client socket "close" events observed so far
count = 0
# Test server: "/0" responds after a 100ms delay, every other path responds
# immediately, so the client sockets close at different times.
server = http.Server((req, res) ->
  if req.url is "/0"
    setTimeout (->
      res.writeHead 200
      res.end "Hello, World!"
      return
    ), 100
  else
    res.writeHead 200
    res.end "Hello, World!"
    return
)
# fire all requests once the server is listening
server.listen common.PORT, ->
  i = 0
  while i < max
    request i
    ++i
  return
# the test passes only if every socket close was observed
process.on "exit", ->
  assert.equal count, max
  return
| 44288 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
request = (i) ->
req = http.get(
port: common.PORT
path: "/" + i
, (res) ->
socket = req.socket
socket.on "close", ->
++count
if count < max
assert.equal http.globalAgent.sockets[name].indexOf(socket), -1
else
assert not http.globalAgent.sockets.hasOwnProperty(name)
assert not http.globalAgent.requests.hasOwnProperty(name)
server.close()
return
res.resume()
return
)
return
common = require("../common")
assert = require("assert")
http = require("http")
name = http.globalAgent.getName(port: common.PORT)
max = 3
count = 0
server = http.Server((req, res) ->
if req.url is "/0"
setTimeout (->
res.writeHead 200
res.end "Hello, World!"
return
), 100
else
res.writeHead 200
res.end "Hello, World!"
return
)
server.listen common.PORT, ->
i = 0
while i < max
request i
++i
return
process.on "exit", ->
assert.equal count, max
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
request = (i) ->
req = http.get(
port: common.PORT
path: "/" + i
, (res) ->
socket = req.socket
socket.on "close", ->
++count
if count < max
assert.equal http.globalAgent.sockets[name].indexOf(socket), -1
else
assert not http.globalAgent.sockets.hasOwnProperty(name)
assert not http.globalAgent.requests.hasOwnProperty(name)
server.close()
return
res.resume()
return
)
return
common = require("../common")
assert = require("assert")
http = require("http")
name = http.globalAgent.getName(port: common.PORT)
max = 3
count = 0
server = http.Server((req, res) ->
if req.url is "/0"
setTimeout (->
res.writeHead 200
res.end "Hello, World!"
return
), 100
else
res.writeHead 200
res.end "Hello, World!"
return
)
server.listen common.PORT, ->
i = 0
while i < max
request i
++i
return
process.on "exit", ->
assert.equal count, max
return
|
[
{
"context": " options = {} if not options\n processKey = command.join(\"_\")\n\n for directory in atom.project.getDirec",
"end": 779,
"score": 0.9916990995407104,
"start": 764,
"tag": "KEY",
"value": "command.join(\"_"
}
] | lib/services/php-proxy.coffee | ll-xzx/ll-php-support | 0 | exec = require "child_process"
process = require "process"
config = require "../config.coffee"
md5 = require 'md5'
fs = require 'fs'
module.exports =
  # Cached parser results; wiped by clearCache()
  data:
    methods: [],
    autocomplete: [],
    composer: null
  # Tracks in-flight parser commands keyed by the joined command string, so
  # the same command is never started twice. (Declared as an array but used
  # as a plain key/value map.)
  currentProcesses: []
  ###*
   * Executes a command to PHP proxy
   * @param {string} command Command to execute
   * @param {boolean} async Must be async or not
   * @param {array} options Options for the command
   * @param {boolean} noparser Do not use php/parser.php
   * @return {array} Json of the response
  ###
  execute: (command, async, options, noparser, editor) ->
    if !editor
      console.log 'empty'
    options = {} if not options
    processKey = command.join("_")
    # NOTE(review): the synchronous branch returns from inside this loop, so
    # only the first project directory is ever consulted there.
    for directory in atom.project.getDirectories()
      for c in command
        # NOTE(review): replace() returns a new string and the result is
        # discarded (strings are immutable), so this escaping is a no-op.
        c.replace(/\\/g, '\\\\')
      if not async
        try
          # avoid multiple processes of the same command
          if not @currentProcesses[processKey]?
            @currentProcesses[processKey] = true
            args = [__dirname + "/../../php/parser.php", directory.path].concat(command).concat([editor.getPath()])
            if noparser
              args = command
            # console.log args.join(' ')
            stdout = exec.spawnSync(config.config.php, args, options).output[1].toString('ascii')
            delete @currentProcesses[processKey]
            if noparser
              # raw parser output; the caller interprets it
              res =
                result: stdout
            else
              res = JSON.parse(stdout)
        catch err
          console.log err
          res =
            error: err
        if !res
          return []
        if res.error?
          @printError(res.error)
        return res
      else
        if not @currentProcesses[processKey]?
          if processKey.indexOf("--refresh") != -1
            config.statusInProgress.update("Indexing...", true)
          args = [__dirname + "/../../php/parser.php", directory.path].concat(command).concat([editor.getPath()])
          if noparser
            args = command
          # console.log args.join(' ')
          @currentProcesses[processKey] = exec.exec(config.config.php + " " + args.join(" "), options, (error, stdout, stderr) =>
            delete @currentProcesses[processKey]
            if processKey.indexOf("--refresh") != -1
              config.statusInProgress.update("Indexing...", false)
            return stdout
          )
  ###*
   * Reads an index by its name (file in indexes/index.[name].json)
   * @param {string} name Name of the index to read
  ###
  readIndex: (name) ->
    for directory in atom.project.getDirectories()
      crypt = md5(directory.path)
      path = __dirname + "/../../indexes/" + crypt + "/index." + name + ".json"
      try
        fs.accessSync(path, fs.F_OK | fs.R_OK)
      catch err
        # missing or unreadable index: report an empty result
        return []
      options =
        encoding: 'UTF-8'
      return JSON.parse(fs.readFileSync(path, options))
      # NOTE(review): unreachable — the return above always exits first, so
      # only the first project directory is ever read.
      break
  ###*
   * Open and read the composer.json file in the current folder
  ###
  readComposer: () ->
    for directory in atom.project.getDirectories()
      path = "#{directory.path}/composer.json"
      try
        fs.accessSync(path, fs.F_OK | fs.R_OK)
      catch err
        # no composer.json in this project directory, try the next one
        continue
      options =
        encoding: 'UTF-8'
      @data.composer = JSON.parse(fs.readFileSync(path, options))
      return @data.composer
    console.log 'Unable to find composer.json file or to open it. The plugin will not work as expected. It only works on composer project'
    throw "Error"
  ###*
   * Throw a formatted error
   * @param {object} error Error to show
  ###
  printError:(error) ->
    @data.error = true
    # NOTE(review): `message` is computed but unused since the throw below is
    # commented out, so this method currently only sets the error flag.
    message = error.message
    #if error.file? and error.line?
    #message = message + ' [from file ' + error.file + ' - Line ' + error.line + ']'
    #throw new Error(message)
  ###*
   * Clear all cache of the plugin
  ###
  clearCache: () ->
    @data =
      error: false,
      autocomplete: [],
      methods: [],
      composer: null
  ###*
   * Autocomplete for classes name
   * @return {array}
  ###
  classes: () ->
    return @readIndex('classes')
  ###*
   * Returns composer.json file
   * @return {Object}
  ###
  composer: () ->
    return @readComposer()
  ###*
   * Autocomplete for internal PHP constants
   * @return {array}
  ###
  constants: (editor) ->
    # memoized: the parser is invoked only on the first call
    if not @data.constants?
      res = @execute(["--constants"], false, {}, false, editor)
      @data.constants = res
    return @data.constants
  ###*
   * Autocomplete for internal PHP functions
   * @return {array}
  ###
  functions: (editor) ->
    # memoized: the parser is invoked only on the first call
    if not @data.functions?
      res = @execute(["--functions"], false, {}, false,editor)
      @data.functions = res
    return @data.functions
  ###*
   * Autocomplete for methods & properties of a class
   * @param {string} className Class complete name (with namespace)
   * @return {array}
  ###
  methods: (className, editor) ->
    if not @data.methods[className]?
      res = @execute(["--methods","#{className}"], false, {}, false,editor)
      @data.methods[className] = res
    return @data.methods[className]
  ###*
   * Autocomplete for methods & properties of a class
   * @param {string} className Class complete name (with namespace)
   * @return {array}
  ###
  autocomplete: (className, name, editor) ->
    # cached per (class, prefix) pair
    cacheKey = className + "." + name
    if not @data.autocomplete[cacheKey]?
      res = @execute(["--autocomplete", className, name], false, {}, false, editor)
      @data.autocomplete[cacheKey] = res
    return @data.autocomplete[cacheKey]
  ###*
   * Returns params from the documentation of the given function
   *
   * @param {string} className
   * @param {string} functionName
  ###
  docParams: (className, functionName, editor) ->
    res = @execute(["--doc-params", "#{className}", "#{functionName}"], false, {}, false,editor)
    return res
  ###*
   * Refresh the full index or only for the given classPath
   * @param {string} classPath Full path (dir) of the class to refresh
  ###
  refresh: (classPath, editor) ->
    if not classPath?
      @execute(["--refresh"], true, {}, false,editor)
    else
      @execute(["--refresh", "#{classPath}"], true, {}, false,editor)
  ###*
   * Method called on plugin activation
  ###
  init: () ->
    @refresh()
    atom.workspace.observeTextEditors (editor) =>
      editor.onDidSave((event) =>
        # Only .php file
        if editor.getGrammar().scopeName.match /text.html.php$/
          @clearCache()
          # For Windows - Replace \ in class namespace to / because
          # composer use / instead of \
          path = event.path
          for directory in atom.project.getDirectories()
            if path.indexOf(directory.path) == 0
              classPath = path.substr(0, directory.path.length+1)
              path = path.substr(directory.path.length+1)
              break
          @refresh(classPath + path.replace(/\\/g, '/'))
      )
    # any settings change invalidates everything we have cached
    atom.config.onDidChange 'atom-autocomplete-php.binPhp', () =>
      @clearCache()
    atom.config.onDidChange 'atom-autocomplete-php.binComposer', () =>
      @clearCache()
    atom.config.onDidChange 'atom-autocomplete-php.autoloadPaths', () =>
      @clearCache()
| 133432 | exec = require "child_process"
process = require "process"
config = require "../config.coffee"
md5 = require 'md5'
fs = require 'fs'
module.exports =
data:
methods: [],
autocomplete: [],
composer: null
currentProcesses: []
###*
* Executes a command to PHP proxy
* @param {string} command Command to execute
* @param {boolean} async Must be async or not
* @param {array} options Options for the command
* @param {boolean} noparser Do not use php/parser.php
* @return {array} Json of the response
###
execute: (command, async, options, noparser, editor) ->
if !editor
console.log 'empty'
options = {} if not options
processKey = <KEY>")
for directory in atom.project.getDirectories()
for c in command
c.replace(/\\/g, '\\\\')
if not async
try
# avoid multiple processes of the same command
if not @currentProcesses[processKey]?
@currentProcesses[processKey] = true
args = [__dirname + "/../../php/parser.php", directory.path].concat(command).concat([editor.getPath()])
if noparser
args = command
# console.log args.join(' ')
stdout = exec.spawnSync(config.config.php, args, options).output[1].toString('ascii')
delete @currentProcesses[processKey]
if noparser
res =
result: stdout
else
res = JSON.parse(stdout)
catch err
console.log err
res =
error: err
if !res
return []
if res.error?
@printError(res.error)
return res
else
if not @currentProcesses[processKey]?
if processKey.indexOf("--refresh") != -1
config.statusInProgress.update("Indexing...", true)
args = [__dirname + "/../../php/parser.php", directory.path].concat(command).concat([editor.getPath()])
if noparser
args = command
# console.log args.join(' ')
@currentProcesses[processKey] = exec.exec(config.config.php + " " + args.join(" "), options, (error, stdout, stderr) =>
delete @currentProcesses[processKey]
if processKey.indexOf("--refresh") != -1
config.statusInProgress.update("Indexing...", false)
return stdout
)
###*
* Reads an index by its name (file in indexes/index.[name].json)
* @param {string} name Name of the index to read
###
readIndex: (name) ->
for directory in atom.project.getDirectories()
crypt = md5(directory.path)
path = __dirname + "/../../indexes/" + crypt + "/index." + name + ".json"
try
fs.accessSync(path, fs.F_OK | fs.R_OK)
catch err
return []
options =
encoding: 'UTF-8'
return JSON.parse(fs.readFileSync(path, options))
break
###*
* Open and read the composer.json file in the current folder
###
readComposer: () ->
for directory in atom.project.getDirectories()
path = "#{directory.path}/composer.json"
try
fs.accessSync(path, fs.F_OK | fs.R_OK)
catch err
continue
options =
encoding: 'UTF-8'
@data.composer = JSON.parse(fs.readFileSync(path, options))
return @data.composer
console.log 'Unable to find composer.json file or to open it. The plugin will not work as expected. It only works on composer project'
throw "Error"
###*
* Throw a formatted error
* @param {object} error Error to show
###
printError:(error) ->
@data.error = true
message = error.message
#if error.file? and error.line?
#message = message + ' [from file ' + error.file + ' - Line ' + error.line + ']'
#throw new Error(message)
###*
* Clear all cache of the plugin
###
clearCache: () ->
@data =
error: false,
autocomplete: [],
methods: [],
composer: null
###*
* Autocomplete for classes name
* @return {array}
###
classes: () ->
return @readIndex('classes')
###*
* Returns composer.json file
* @return {Object}
###
composer: () ->
return @readComposer()
###*
* Autocomplete for internal PHP constants
* @return {array}
###
constants: (editor) ->
if not @data.constants?
res = @execute(["--constants"], false, {}, false, editor)
@data.constants = res
return @data.constants
###*
* Autocomplete for internal PHP functions
* @return {array}
###
functions: (editor) ->
if not @data.functions?
res = @execute(["--functions"], false, {}, false,editor)
@data.functions = res
return @data.functions
###*
* Autocomplete for methods & properties of a class
* @param {string} className Class complete name (with namespace)
* @return {array}
###
methods: (className, editor) ->
if not @data.methods[className]?
res = @execute(["--methods","#{className}"], false, {}, false,editor)
@data.methods[className] = res
return @data.methods[className]
###*
* Autocomplete for methods & properties of a class
* @param {string} className Class complete name (with namespace)
* @return {array}
###
autocomplete: (className, name, editor) ->
cacheKey = className + "." + name
if not @data.autocomplete[cacheKey]?
res = @execute(["--autocomplete", className, name], false, {}, false, editor)
@data.autocomplete[cacheKey] = res
return @data.autocomplete[cacheKey]
###*
* Returns params from the documentation of the given function
*
* @param {string} className
* @param {string} functionName
###
docParams: (className, functionName, editor) ->
res = @execute(["--doc-params", "#{className}", "#{functionName}"], false, {}, false,editor)
return res
###*
* Refresh the full index or only for the given classPath
* @param {string} classPath Full path (dir) of the class to refresh
###
refresh: (classPath, editor) ->
if not classPath?
@execute(["--refresh"], true, {}, false,editor)
else
@execute(["--refresh", "#{classPath}"], true, {}, false,editor)
###*
* Method called on plugin activation
###
init: () ->
@refresh()
atom.workspace.observeTextEditors (editor) =>
editor.onDidSave((event) =>
# Only .php file
if editor.getGrammar().scopeName.match /text.html.php$/
@clearCache()
# For Windows - Replace \ in class namespace to / because
# composer use / instead of \
path = event.path
for directory in atom.project.getDirectories()
if path.indexOf(directory.path) == 0
classPath = path.substr(0, directory.path.length+1)
path = path.substr(directory.path.length+1)
break
@refresh(classPath + path.replace(/\\/g, '/'))
)
atom.config.onDidChange 'atom-autocomplete-php.binPhp', () =>
@clearCache()
atom.config.onDidChange 'atom-autocomplete-php.binComposer', () =>
@clearCache()
atom.config.onDidChange 'atom-autocomplete-php.autoloadPaths', () =>
@clearCache()
| true | exec = require "child_process"
process = require "process"
config = require "../config.coffee"
md5 = require 'md5'
fs = require 'fs'
module.exports =
data:
methods: [],
autocomplete: [],
composer: null
currentProcesses: []
###*
* Executes a command to PHP proxy
* @param {string} command Command to execute
* @param {boolean} async Must be async or not
* @param {array} options Options for the command
* @param {boolean} noparser Do not use php/parser.php
* @return {array} Json of the response
###
execute: (command, async, options, noparser, editor) ->
if !editor
console.log 'empty'
options = {} if not options
processKey = PI:KEY:<KEY>END_PI")
for directory in atom.project.getDirectories()
for c in command
c.replace(/\\/g, '\\\\')
if not async
try
# avoid multiple processes of the same command
if not @currentProcesses[processKey]?
@currentProcesses[processKey] = true
args = [__dirname + "/../../php/parser.php", directory.path].concat(command).concat([editor.getPath()])
if noparser
args = command
# console.log args.join(' ')
stdout = exec.spawnSync(config.config.php, args, options).output[1].toString('ascii')
delete @currentProcesses[processKey]
if noparser
res =
result: stdout
else
res = JSON.parse(stdout)
catch err
console.log err
res =
error: err
if !res
return []
if res.error?
@printError(res.error)
return res
else
if not @currentProcesses[processKey]?
if processKey.indexOf("--refresh") != -1
config.statusInProgress.update("Indexing...", true)
args = [__dirname + "/../../php/parser.php", directory.path].concat(command).concat([editor.getPath()])
if noparser
args = command
# console.log args.join(' ')
@currentProcesses[processKey] = exec.exec(config.config.php + " " + args.join(" "), options, (error, stdout, stderr) =>
delete @currentProcesses[processKey]
if processKey.indexOf("--refresh") != -1
config.statusInProgress.update("Indexing...", false)
return stdout
)
###*
* Reads an index by its name (file in indexes/index.[name].json)
* @param {string} name Name of the index to read
###
readIndex: (name) ->
for directory in atom.project.getDirectories()
crypt = md5(directory.path)
path = __dirname + "/../../indexes/" + crypt + "/index." + name + ".json"
try
fs.accessSync(path, fs.F_OK | fs.R_OK)
catch err
return []
options =
encoding: 'UTF-8'
return JSON.parse(fs.readFileSync(path, options))
break
###*
* Open and read the composer.json file in the current folder
###
readComposer: () ->
for directory in atom.project.getDirectories()
path = "#{directory.path}/composer.json"
try
fs.accessSync(path, fs.F_OK | fs.R_OK)
catch err
continue
options =
encoding: 'UTF-8'
@data.composer = JSON.parse(fs.readFileSync(path, options))
return @data.composer
console.log 'Unable to find composer.json file or to open it. The plugin will not work as expected. It only works on composer project'
throw "Error"
###*
* Throw a formatted error
* @param {object} error Error to show
###
printError:(error) ->
@data.error = true
message = error.message
#if error.file? and error.line?
#message = message + ' [from file ' + error.file + ' - Line ' + error.line + ']'
#throw new Error(message)
###*
* Clear all cache of the plugin
###
clearCache: () ->
@data =
error: false,
autocomplete: [],
methods: [],
composer: null
###*
* Autocomplete for classes name
* @return {array}
###
classes: () ->
return @readIndex('classes')
###*
* Returns composer.json file
* @return {Object}
###
composer: () ->
return @readComposer()
###*
* Autocomplete for internal PHP constants
* @return {array}
###
constants: (editor) ->
if not @data.constants?
res = @execute(["--constants"], false, {}, false, editor)
@data.constants = res
return @data.constants
###*
* Autocomplete for internal PHP functions
* @return {array}
###
functions: (editor) ->
if not @data.functions?
res = @execute(["--functions"], false, {}, false,editor)
@data.functions = res
return @data.functions
###*
* Autocomplete for methods & properties of a class
* @param {string} className Class complete name (with namespace)
* @return {array}
###
methods: (className, editor) ->
if not @data.methods[className]?
res = @execute(["--methods","#{className}"], false, {}, false,editor)
@data.methods[className] = res
return @data.methods[className]
###*
* Autocomplete for methods & properties of a class
* @param {string} className Class complete name (with namespace)
* @return {array}
###
autocomplete: (className, name, editor) ->
cacheKey = className + "." + name
if not @data.autocomplete[cacheKey]?
res = @execute(["--autocomplete", className, name], false, {}, false, editor)
@data.autocomplete[cacheKey] = res
return @data.autocomplete[cacheKey]
###*
* Returns params from the documentation of the given function
*
* @param {string} className
* @param {string} functionName
###
docParams: (className, functionName, editor) ->
res = @execute(["--doc-params", "#{className}", "#{functionName}"], false, {}, false,editor)
return res
###*
* Refresh the full index or only for the given classPath
* @param {string} classPath Full path (dir) of the class to refresh
###
refresh: (classPath, editor) ->
if not classPath?
@execute(["--refresh"], true, {}, false,editor)
else
@execute(["--refresh", "#{classPath}"], true, {}, false,editor)
###*
* Method called on plugin activation
###
init: () ->
@refresh()
atom.workspace.observeTextEditors (editor) =>
editor.onDidSave((event) =>
# Only .php file
if editor.getGrammar().scopeName.match /text.html.php$/
@clearCache()
# For Windows - Replace \ in class namespace to / because
# composer use / instead of \
path = event.path
for directory in atom.project.getDirectories()
if path.indexOf(directory.path) == 0
classPath = path.substr(0, directory.path.length+1)
path = path.substr(directory.path.length+1)
break
@refresh(classPath + path.replace(/\\/g, '/'))
)
atom.config.onDidChange 'atom-autocomplete-php.binPhp', () =>
@clearCache()
atom.config.onDidChange 'atom-autocomplete-php.binComposer', () =>
@clearCache()
atom.config.onDidChange 'atom-autocomplete-php.autoloadPaths', () =>
@clearCache()
|
[
{
"context": "google.com/feeds/download/spreadsheets/Export?key=0AvVyfy1LBTe3dEVHWk9GbTdWSWkyZFBJRldaMDJQVmc&exportFormat=csv\" #HACK\n hasChanged = true\n\n ",
"end": 6363,
"score": 0.9997476935386658,
"start": 6319,
"tag": "KEY",
"value": "0AvVyfy1LBTe3dEVHWk9GbTdWSWkyZFBJRldaMDJQVmc"
}
] | app-src/console_app.coffee | hyperbotic/crowdgame-trivially | 0 | # ==================================================================================================================
#
# IT IS REALLY IMPORTANT THAT App-level event handlers return null.
# Ti.App.addEventListener("eventName", (event)=>null)
#
#
# ConsoleApp: top-level application controller for the "console" (host) device.
# Owns startup sequencing, page transitions (via Hy.Pages.PageState), the remote
# player network, contest (game) lifecycle, and analytics logging.
class ConsoleApp extends Hy.UI.Application

  # Module-private singleton reference, set in the constructor.
  gInstance = null

  # ----------------------------------------------------------------------------------------------------------------
  # Returns the singleton ConsoleApp instance (null until constructed).
  @get: ()-> gInstance

  # ----------------------------------------------------------------------------------------------------------------
  constructor: (backgroundWindow, tempImage)->
    gInstance = this
    @singleUser = false
    super backgroundWindow, tempImage
    this.initSetup()
    Hy.Pages.StartPage.addObserver this
    Hy.Trace.debug "ConsoleApp::constructor (Clipboard=#{Ti.UI.Clipboard.getText()})" #2.6.2
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Builds the ordered chain of initialization steps; executed one-by-one in init().
  # Each entry has a label (for timing/trace) and an init closure.
  initSetup: ()->
    @initFnChain = [
      {label: "Trace",           init: ()=>Hy.Trace.init(this)}
      {label: "Analytics",       init: ()=>@analytics = Hy.Analytics.Analytics.init()}
#      {label: "CommerceManager", init: ()=>Hy.Commerce.CommerceManager.init()} # As of 2.7
      {label: "Page/Video",      init: ()=>@pageState = Hy.Pages.PageState.init(this)}
      {label: "Splash Page",     init: ()=>this.showSplashPage()}
      {label: "SoundManager",    init: ()=>Hy.Media.SoundManager.init()}
      {label: "Network Service", init: ()=>this.initNetwork()}
      {label: "Player Network",  init: ()=>this.initPlayerNetwork()}
      {label: "Update Service",  init: ()=>Hy.Update.UpdateService.init()}
      {label: "ContentManager",  init: ()=>Hy.Content.ContentManager.init(this.checkURLArg())}
      {label: "AvatarSet",       init: ()=>Hy.Avatars.AvatarSet.init()}
      {label: "Console Player",  init: ()=>Hy.Player.ConsolePlayer.init()}
#      {label: "CommerceManagerInventory", init: ()=>Hy.Commerce.CommerceManager.inventoryManagedFeatures()} # As of 2.7
    ]
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Runs (and drains) the init chain built by initSetup(), timing each step.
  # Also called from resumed(); the chain is empty on subsequent calls, so steps run once.
  init: ()->
    super

    Hy.Utils.MemInfo.init()
    Hy.Utils.MemInfo.log "INITIALIZING (init #=#{_.size(@initFnChain)})"

    @timedOperation = new Hy.Utils.TimedOperation("INITIALIZATION")

    fnExecute = ()=>
      while _.size(@initFnChain) > 0
        fnSpec = _.first(@initFnChain)
        fnSpec.init()
        @initFnChain.shift()
        @timedOperation.mark(fnSpec.label)
      null

    fnExecute()
    this

  # ----------------------------------------------------------------------------------------------------------------
  start: ()->
    Hy.Trace.debug "ConsoleApp::start"
    super
    @analytics?.logApplicationLaunch()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Triggered when the app is backgrounded. Have to work quick here. Do the important stuff first:
  # tell remotes to suspend, pause the player network, then the network service.
  pause: (evt)->
    Hy.Trace.debug "ConsoleApp::pause (ENTER)"
    this.getPage()?.pause()
    @playerNetwork?.sendAll('suspend', {})
    @playerNetwork?.pause()
    @httpPort = null
    Hy.Network.NetworkService.get().pause()
    super evt
    Hy.Trace.debug "ConsoleApp::pause (EXIT)"

  # ----------------------------------------------------------------------------------------------------------------
  # Triggered at the start of the process of being foregrounded. Nothing to do here.
  resume: (evt)->
    Hy.Trace.debug "ConsoleApp::resume (ENTER page=#{this.getPage()?.constructor.name})"
    super evt
    Hy.Trace.debug "ConsoleApp::resume (EXIT page=#{this.getPage()?.constructor.name})"

  # ----------------------------------------------------------------------------------------------------------------
  # Triggered when app is fully foregrounded: re-run any pending init steps,
  # resume networking, then restore the page state.
  resumed: (evt)->
    Hy.Trace.debug "ConsoleApp::resumed (ENTER page=#{this.getPage()?.constructor.name})"
    super
    this.init()
    Hy.Network.NetworkService.get().resumed()
    Hy.Network.NetworkService.get().setImmediate()
    @playerNetwork?.resumed()

    if false #(newUrl = this.checkURLArg())?
      null # do something
    else
      this.resumedPage()

    Hy.Trace.debug "ConsoleApp::resumed (EXIT page=#{this.getPage()?.constructor.name})"

  # ----------------------------------------------------------------------------------------------------------------
  #
  # To handle the edge case where we were backgrounded while transitioning to a new page.
  # In the more complex cases, the tactic for handling this is to simply go back to the previous page.
  # This approach seems to be needed when the page is based on a web view and requires images to load
  #
  resumedPage: ()->
    Hy.Trace.debug "ConsoleApp::resumedPage (ENTER (transitioning=#{@pageState.isTransitioning()?})"

    # Default: let the page state machine resume where it left off
    fn = ()=>@pageState.resumed()

    if @pageState.isTransitioning()?
      stopTransitioning = true
      switch (oldPageState = @pageState.getOldPageState())
        when Hy.Pages.PageState.Intro, Hy.Pages.PageState.Start, null, Hy.Pages.PageState.Splash
          fn = ()=>this.showStartPage()
        when Hy.Pages.PageState.Any, Hy.Pages.PageState.Unknown
          new Hy.Utils.ErrorMessage("fatal", "Console App", "Unexpected state \"#{oldPageState}\" in resumedPage") #will display popup dialog
          fn = ()=>this.showStartPage()
        else # About, UCCInfo, Join, Answer, Scoreboard, Completed
          stopTransitioning = false
          null
      if stopTransitioning
        @pageState.stopTransitioning()
    else
      # No page at all: fall back to the start page
      if not this.getPage()?
        fn = ()=>this.showStartPage()

    Hy.Trace.debug "ConsoleApp::resumedPage (EXIT: \"#{fn}\")"
    fn?()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Checks the app launch arguments for a content URL. Returns the URL only when it
  # differs from the one seen previously, else null.
  #
  # NOTE(review): the HACK below overrides any real URL argument with a hard-coded
  # spreadsheet URL and forces hasChanged, so the real args are effectively ignored.
  # Remove before release. Also note that without the HACK, the *first* URL seen would
  # return null (hasChanged is false when @argURL is not yet set) — TODO confirm intent.
  checkURLArg: ()->
    args = Ti.App.getArguments()
    hasChanged = false
    if (url = args.url)?
      hasChanged = @argURL? and (@argURL != url)
      # HACK
      url = "https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=0AvVyfy1LBTe3dEVHWk9GbTdWSWkyZFBJRldaMDJQVmc&exportFormat=csv" #HACK
      hasChanged = true
      @argURL = url
    if hasChanged then @argURL else null

  # ----------------------------------------------------------------------------------------------------------------
  # Requests a transition to newPageState. If a different transition is already in
  # flight, the request is queued as a post-transition action; otherwise it is
  # scheduled immediately (via a 0-delay deferral).
  showPage: (newPageState, fn_newPageInit, postFunctions = [])->
    Hy.Trace.debug "ConsoleApp::showPage (ENTER #{newPageState} #{@pageState?.display()})"

    fn_showPage = ()=>
      Hy.Trace.debug "ConsoleApp::showPage (FIRING #{newPageState} #{@pageState.display()})"
      @pageState?.showPage(newPageState, fn_newPageInit, postFunctions)

    f = ()=> Hy.Utils.Deferral.create(0, ()=>fn_showPage())

    if (newPageState1 = @pageState.isTransitioning())?
      if newPageState1 isnt newPageState
        @pageState.addPostTransitionAction(f)
    else
      f()

    Hy.Trace.debug "ConsoleApp::showPage (EXIT #{newPageState} #{@pageState.display()})"
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Shows the splash page exactly once per app lifetime.
  showSplashPage: ()->
    if not @splashShown?
      this.showPage(Hy.Pages.PageState.Splash, (page)=>page.initialize())
      @splashShown = true
    null

  # ----------------------------------------------------------------------------------------------------------------
  initNetwork: ()->
    Hy.Network.NetworkService.init().setImmediate()
    Hy.Network.NetworkService.addObserver this

  # ----------------------------------------------------------------------------------------------------------------
  # Called by NetworkService when there's a change in the network scenery (since ConsoleApp is an "observer")
  #
  obs_networkChanged: (reason)->
    super
    if not @pageState.isTransitioning()?
      this.getPage()?.obs_networkChanged(reason)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Creates the player network proxy (unless @singleUser) and wires up its callbacks.
  # Idempotent: returns early if the network already exists.
  initPlayerNetwork: ()->
    if @playerNetwork?
      return

    Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (ENTER)"

    # Invoked once the network is up; syncs the current page out to remotes.
    fnReady = (httpPort)=>
      Hy.Trace.debug "ConsoleApp::Network Ready (port=#{httpPort})"
      Hy.Trace.debug "ConsoleApp::Network Ready (Clipboard=#{Ti.UI.Clipboard.getText()})" #2.6.2
      @timedOperation.mark("Network Ready")
      @httpPort = httpPort
      if this.getPage()?
        if this.getPage().getState() is Hy.Pages.PageState.Splash
          this.showIntroPage()
        else
          this.getPage().resumed()
          remotePage = this.remotePlayerMapPage()
          @playerNetwork?.sendAll(remotePage.op, remotePage.data)
      else
        if (newPageState = @pageState.isTransitioning())?
          if newPageState is Hy.Pages.PageState.Splash
            this.showIntroPage()
      Hy.Network.NetworkService.setConsoleHTTPPort(@httpPort) # will trigger a "obs_networkChanged" event
      null

    fnError = (error, restartNetwork)=>
      Hy.Trace.debug "ConsoleApp (NETWORK ERROR /#{error}/)"
      if restartNetwork
        Hy.Trace.debug "ConsoleApp (NETWORK ERROR - RESTARTING)"
        new Hy.Utils.ErrorMessage("fatal", "Player Network", error) #will display popup dialog
#        this.restartPlayerNetwork()
      null

    fnMessageReceived = (connection, op, data)=>this.remotePlayerMessage(connection, op, data)
    fnAddPlayer = (connection, label, majorVersion, minorVersion)=>this.remotePlayerAdded(connection, label, majorVersion, minorVersion)
    fnRemovePlayer = (connection)=>this.remotePlayerRemoved(connection)
    fnPlayerStatusChange = (connection, status)=>this.remotePlayerStatusChanged(connection, status)
    fnServiceStatusChange = (serviceStatus)=>this.serviceStatusChange(serviceStatus)

    if @singleUser
      Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (SINGLE USER)"
      fnReady(null)
    else
      @playerNetwork = Hy.Network.PlayerNetworkProxy.create(fnReady, fnError, fnMessageReceived, fnAddPlayer, fnRemovePlayer, fnPlayerStatusChange, fnServiceStatusChange)

    Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (EXIT)"

  # ----------------------------------------------------------------------------------------------------------------
  restartPlayerNetwork: ()->
    Hy.Trace.debug "ConsoleApp::restartPlayerNetwork"
    this.stopPlayerNetwork()
    this.initPlayerNetwork()

  # ----------------------------------------------------------------------------------------------------------------
  stopPlayerNetwork: ()->
    @playerNetwork?.stop()
    @playerNetwork = null

  # ----------------------------------------------------------------------------------------------------------------
  # Player network service status changed: forward to Bonjour publishing.
  serviceStatusChange: (serviceStatus)->
    Hy.Network.NetworkService.setBonjourPublishInfo(serviceStatus)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Shows the intro page; when the intro completes (or times out), moves to the start page.
  showIntroPage: ()->
    Hy.Trace.debug "ConsoleApp::ShowIntroPage (ENTER)"

    fn = ()=>this.showStartPage()
    this.showPage(Hy.Pages.PageState.Intro, (page)=>page.initialize(fn))

    # It seems that we'll sometimes hang on startup if there's no wifi... something about the iOS "Turn on Wifi" dialog,
    # and the resulting suspend/resume, messes up the Intro page... doesn't show. This is a hack to get around that.
    fnIntroPageTimeout = ()=>
      Hy.Trace.debug "ConsoleApp::showIntroPage (IntroPageTimeout)"
      if this.getPage()? and this.getPage().getState() is Hy.Pages.PageState.Intro
        Hy.Trace.debug "ConsoleApp::showIntroPage (IntroPageTimeout - forcing transition to Start Page)"
        fn()

    if not Hy.Network.NetworkService.isOnlineWifi()
      Hy.Utils.Deferral.create(7 * 1000, fnIntroPageTimeout)

    Hy.Trace.debug "ConsoleApp::ShowIntroPage (EXIT)"
    this

  # ----------------------------------------------------------------------------------------------------------------
  showAboutPage: ()->
    @playerNetwork?.sendAll("aboutPage", {})
    this.showPage(Hy.Pages.PageState.About, (page)=>page.initialize())

  # ----------------------------------------------------------------------------------------------------------------
  # Remotes get the generic "aboutPage" op here too — they render join-code info the same way.
  showJoinCodeInfoPage: ()->
    @playerNetwork?.sendAll("aboutPage", {})
    this.showPage(Hy.Pages.PageState.JoinCodeInfo, (page)=>page.initialize())

  # ----------------------------------------------------------------------------------------------------------------
  # Show the start page, and then execute the specified functions
  #
  showStartPage: (postFunctions = [])->
    Hy.Trace.debug "ConsoleApp::showStartPage"
    @questionChallengeInProgress = false
    this.showPage(Hy.Pages.PageState.Start, ((page)=>page.initialize()), postFunctions)
    @playerNetwork?.sendAll("prepForContest", {})
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Invoked from "StartPage" when the enabled state of the Start Button changes.
  #
  obs_startPageStartButtonStateChanged: (state, reason)->
    @playerNetwork?.sendAll("prepForContest", {startEnabled: state, reason: reason})
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Invoked from Page "StartPage" when "Start Game" button is touched.
  # Loads the questions and, if successful, kicks off the contest.
  #
  contestStart: ()->
    page = this.getPage()
    Hy.Media.SoundManager.get().playEvent("gameStart")
    page.contentPacksLoadingStart()

    if this.loadQuestions()
      @nQuestions = @contest.contestQuestions.length
      @iQuestion = 0
      @nAnswered = 0 # number of times at least one player responded
      page.contentPacksLoadingCompleted()
      Hy.Player.ConsolePlayer.findConsolePlayer().setHasAnswered(false)
      Hy.Network.NetworkService.get().setSuspended()
      @playerNetwork?.sendAll('startContest', {})
      this.showCurrentQuestion()
      @analytics?.logContestStart()
    else
      page.contentPacksLoadingCompleted()
      page.resetStartButtonClicked()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Builds @contest from the user's selected content packs and option settings.
  # Returns true on success; on any edge-case failure shows a fatal error dialog
  # and returns false.
  loadQuestions: ()->
    fnEdgeCaseError = (message)=>
      new Hy.Utils.ErrorMessage("fatal", "Console App Options", message) #will display popup dialog
      false

    contentManager = Hy.Content.ContentManager.get()
    this.getPage().contentPacksLoading("Loading topics...")
    @contentLoadTimer = new Hy.Utils.TimedOperation("INITIAL CONTENT LOAD")

    totalNumQuestions = 0
    for contentPack in (contentPacks = _.select(contentManager.getLatestContentPacksOKToDisplay(), (c)=>c.isSelected()))
      # Edge case: content pack isn't actually local...!
      if contentPack.isReadyForPlay()
        contentPack.load()
        totalNumQuestions += contentPack.getNumRecords()
      else
        return fnEdgeCaseError("Topic \"#{contentPack.getDisplayName()}\" not ready for play. Please unselect it")

    numSelectedContentPacks = _.size(contentPacks)
    @contentLoadTimer.mark("done")

    # Edge case: Shouldn't be here if no topics chosen...
    if numSelectedContentPacks is 0
      return fnEdgeCaseError("No topics chosen - Please choose one or more topics and try again")

    # Edge case: corruption in the option
    if not (numQuestionsNeeded = Hy.Options.numQuestions.getValue())? or not Hy.Options.numQuestions.isValidValue(numQuestionsNeeded)
      fnEdgeCaseError("Invalid \"Number of Questions\" option, resetting to 5 (#{numQuestionsNeeded})")
      numQuestionsNeeded = 5
      Hy.Options.numQuestions.setValue(numQuestionsNeeded)
      this.getPage().panelNumberOfQuestions.syncCurrentChoiceWithAppOption()

    # Special Case: numQuestions is -1, means "Play as many questions as possible, up to some limit"
    if numQuestionsNeeded is -1
      # Enforce max number
      numQuestionsNeeded = Math.min(totalNumQuestions, Hy.Config.Dynamics.maxNumQuestions)

    # Edge case: Shouldn't really be in this situation, either: not enough questions!
    # We should be able to set numQuestionsNeeded to a lower value to make it work, since
    # we know that the min number of questions in any contest is 5.
    if (shortfall = (numQuestionsNeeded - totalNumQuestions)) > 0
      for choice in Hy.Options.numQuestions.getChoices().slice(0).reverse()
        if choice isnt -1
          if (shortfall = (choice-totalNumQuestions)) <= 0
            numQuestionsNeeded = choice
            Hy.Options.numQuestions.setValue(numQuestionsNeeded)
            this.getPage().panelNumberOfQuestions.syncCurrentChoiceWithAppOption()
            this.getPage().contentPacksLoading("Number of questions reduced to accommodate selected topics...")
            break

      # Something's wrong: apparently have a contest with fewer than 5 questions
      if shortfall > 0
        return fnEdgeCaseError("Not enough questions - Please choose more topics and try again (requested=#{numQuestionsNeeded} shortfall=#{shortfall})")

    this.getPage().contentPacksLoading("Selecting questions...")

    # Edge case: if number of selected content packs > number of requested questions...
    numQuestionsPerPack = Math.max(1, Math.floor(numQuestionsNeeded / numSelectedContentPacks))

    @contest = new Hy.Contest.Contest()

    # This loop should always terminate because we know there are more than enough questions
    contentPacks = Hy.Utils.Array.shuffle(contentPacks)
    index = -1
    numQuestionsFound = 0
    while numQuestionsFound < numQuestionsNeeded
      if index < (numSelectedContentPacks - 1)
        index++
      else
        index = 0
        numQuestionsPerPack = 1 # To fill in the remainder
      contentPack = contentPacks[index]
      numQuestionsFound += (numQuestionsAdded = @contest.addQuestions(contentPack, numQuestionsPerPack))
      if numQuestionsAdded < numQuestionsPerPack
        Hy.Trace.debug "ConsoleApp::loadQuestions (NOT ENOUGH QUESTIONS FOUND pack=#{contentPack.getProductID()} #requested=#{numQuestionsPerPack} #found=#{numQuestionsAdded})"
#        return false # We should be ok, because we know there are enough questions in total...

    for contestQuestion in @contest.getQuestions()
      question = contestQuestion.getQuestion()
      Hy.Trace.debug "ConsoleApp::contestStart (question ##{question.id} #{question.topic})"

    true

  # ----------------------------------------------------------------------------------------------------------------
  # Tells remotes which page to show while the game is paused.
  contestPaused: (remotePage)->
    @playerNetwork?.sendAll('gamePaused', {page: remotePage})
    this

  # ----------------------------------------------------------------------------------------------------------------
  contestRestart: (completed = true)->
    # set this before showing the Start Page
    Hy.Network.NetworkService.get().setImmediate()
    this.showStartPage()
#    this.logContestEnd(completed, @nQuestions, @nAnswered, @contest)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Player requested that we skip to the final Scoreboard
  #
  contestForceFinish: ()->
    this.contestCompleted()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # End of contest: show the "completed" page, push the leaderboard to remotes,
  # log analytics, and clear per-contest state.
  contestCompleted: ()->
    Hy.Trace.debug("ConsoleApp::contestCompleted")

    fnNotify = ()=>
      for o in (leaderboard = this.getPage().getLeaderboard())
        for player in o.group
          Hy.Trace.debug("ConsoleApp::contestCompleted (score: #{o.score} player#: #{player})")
      @playerNetwork?.sendAll('contestCompleted', {leaderboard: leaderboard})
      null

    Hy.Network.NetworkService.get().setImmediate()
    this.showPage(Hy.Pages.PageState.Completed, (page)=>page.initialize(fnNotify))
    # Log before @iQuestion/@nAnswered are cleared below
    this.logContestEnd(true, @iQuestion, @nAnswered, @contest)
    @nQuestions = null
    @iQuestion = null
    @cq = null
    Hy.Utils.MemInfo.log "Contest Completed"
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Shows the "question challenge" page for the current question (@cq), wiring up
  # the notify/pause/completed callbacks and the per-question countdown.
  showQuestionChallengePage: (startingDelay)->
    someText = @cq.question.question
    someText = someText.substr(0, Math.min(30, someText.length))
    Hy.Trace.debug "ConsoleApp::showQuestionChallengePage (#=#{@iQuestion} question=#{@cq.question.id}/#{someText})"

    @currentPageHadResponses = false #set to true if at least one player responded to current question
    @questionChallengeInProgress = true

    # we copy these here to avoid possible issues with variable bindings, when the callbacks below are invoked
    cq = @cq
    iQuestion = @iQuestion
    nQuestions = @nQuestions

    fnNotify = ()=>@playerNetwork?.sendAll('showQuestion', {questionId: cq.question.id})
    fnPause = ()=>this.contestPaused("showQuestion")
    fnCompleted = ()=>this.challengeCompleted()

    nSeconds = Hy.Options.secondsPerQuestion.choices[Hy.Options.secondsPerQuestion.index]
    if not nSeconds? or not (nSeconds >= 10 and nSeconds <= 570) # this is brittle. HACK
      error = "INVALID nSeconds: #{nSeconds}"
      Hy.Trace.debug "ConsoleApp (#{error})"
      new Hy.Utils.ErrorMessage("fatal", "Console App Options", error) #will display popup dialog
      nSeconds = 10
      Hy.Options.secondsPerQuestion.setIndex(0)
      this.getPage().panelSecondsPerQuestion.syncCurrentChoiceWithAppOption()

    this.showPage(Hy.Pages.PageState.Question, (page)=>page.initializeForQuestion(fnNotify, fnPause, fnCompleted, nSeconds, startingDelay, cq, iQuestion, nQuestions))

  # ----------------------------------------------------------------------------------------------------------------
  # Ends the current question challenge (countdown expired, all players answered, or
  # forced). finishedEarly is currently unused but kept for callers that pass it.
  challengeCompleted: (finishedEarly=false)->
    if @questionChallengeInProgress
      Hy.Media.SoundManager.get().playEvent("challengeCompleted")
      this.getPage().animateCountdownQuestionCompleted()
      this.getPage().stop() #haltCountdown() #adding this here to ensure that countdown stops immediately, avoid overlapping countdowns
      @questionChallengeInProgress = false
      @cq.setUsed()
      @nAnswered++ if @currentPageHadResponses
      this.showQuestionAnswerPage()

  # ----------------------------------------------------------------------------------------------------------------
  # Shows the answer-reveal page; tells each remote whether its response arrived in time.
  showQuestionAnswerPage: ()->
#    Hy.Trace.debug "ConsoleApp::showQuestionAnswerPage(#=#{@iQuestion} question=#{@cq.question.id} Responses=#{@currentPageHadResponses} nAnswered=#{@nAnswered})"

    responseVector = []
    # Tell the remotes if we received their responses in time
    for response in Hy.Contest.ContestResponse.selectByQuestionID(@cq.question.id)
      responseVector.push {player: response.getPlayer().getIndex(), score: response.getScore()}

    fnNotify = ()=>@playerNetwork?.sendAll('revealAnswer', {questionId: @cq.question.id, indexCorrectAnswer: @cq.indexCorrectAnswer, responses:responseVector})
    fnPause = ()=>this.contestPaused("revealAnswer")
    fnCompleted = ()=>this.questionAnswerCompleted()

    this.showPage(Hy.Pages.PageState.Answer, (page)=>page.initializeForAnswers(fnNotify, fnPause, fnCompleted, Hy.Config.Dynamics.revealAnswerTime, 0))

  # ----------------------------------------------------------------------------------------------------------------
  # Advance to the next question, or finish the contest if this was the last one.
  questionAnswerCompleted: ()->
    Hy.Trace.debug "ConsoleApp::questionAnswerCompleted(#=#{@iQuestion} question=#{@cq.question.id})"
    @iQuestion++
    if @iQuestion >= @nQuestions
      this.contestCompleted()
    else
      this.showCurrentQuestion()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Shows the question at @iQuestion. Bounds check comes first so @cq is never set
  # to an out-of-range (undefined) entry when the contest is already over.
  showCurrentQuestion: ()->
    Hy.Trace.debug "ConsoleApp::showCurrentQuestion(#=#{@iQuestion})"
    if @iQuestion >= @nQuestions
      this.contestCompleted()
    else
      @cq = @contest.contestQuestions[@iQuestion]
      this.showQuestionChallengePage(500)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # A remote player connected (or reconnected). Creates/reactivates the player and
  # sends a "welcome" payload describing the current game state so the remote can sync.
  remotePlayerAdded: (connection, label, majorVersion, minorVersion)->
    Hy.Trace.debug "ConsoleApp::remotePlayerAdded (##{connection}/#{label})"

    s = "?"
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      player.reactivate()
      s = "EXISTING"
    else
      player = Hy.Player.RemotePlayer.create(connection, label, majorVersion, minorVersion)
      @analytics?.logNewPlayer(Hy.Player.Player.count() - 1 ) # Don't count the console player
      s = "NEW"
      Hy.Media.SoundManager.get().playEvent("remotePlayerJoined")

    remotePage = this.remotePlayerMapPage()

    # If a question is in flight, include the player's existing response (if any)
    currentResponse = null
    if @cq?
      currentResponse = Hy.Contest.ContestResponse.selectByQuestionIDAndPlayer @cq.question.id, player

    Hy.Trace.debug "ConsoleApp::remotePlayerAdded (#{s} #{player.dumpStr()} page=#{remotePage.op} currentAnswerIndex=#{if currentResponse? then currentResponse.answerIndex else -1})"

    op = "welcome"
    data = {}
    data.index = player.index
    data.page = remotePage
    data.questionId = (if @cq? then @cq.question.id else -1)
    data.answerIndex = (if currentResponse? then currentResponse.answerIndex else -1)
    data.score = player.score()
    data.addressEncoding = Hy.Network.NetworkService.getAddressEncoding()
    data.playerName = player.getName()

    avatar = player.getAvatar()
    data.avatar = {}
    data.avatar.name = avatar.getName()
    data.avatar.avatarSet = avatar.getAvatarSetName()
    data.avatar.avatarHeight = avatar.getHeight()
    data.avatar.avatarWidth = avatar.getWidth()

    @playerNetwork?.sendSingle(player.getConnection(), op, data)
    player

  # ----------------------------------------------------------------------------------------------------------------
  remotePlayerRemoved: (connection)->
    Hy.Trace.debug "ConsoleApp::remotePlayerRemoved (#{connection})"
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      Hy.Trace.debug "ConsoleApp::remotePlayerRemoved (#{player.dumpStr()})"
      player.destroy()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # status: truthy => player (re)activated; falsy => player deactivated.
  remotePlayerStatusChanged: (connection, status)->
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      Hy.Trace.debug "ConsoleApp::remotePlayerStatusChanged (status=#{status} #{player.dumpStr()})"
      if status
        player.reactivate()
      else
        player.deactivate()
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Dispatches an incoming message from a remote player. Messages arriving during a
  # page transition are deliberately dropped.
  remotePlayerMessage: (connection, op, data)->
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      if @pageState.isTransitioning()
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (IGNORING, in Page Transition)"
      else
        handled = if op is "playerNameChangeRequest"
          this.doPlayerNameChangeRequest(player, data)
        else
          this.doGameOp(player, op, data)
        if not handled
          Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (UNKNOWN OP #{op} #{connection})"
    else
      Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (UNKNOWN PLAYER #{connection})"
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Attempts the rename and echoes the outcome back to the requesting remote.
  # Always returns true (the op is always considered handled).
  doPlayerNameChangeRequest: (player, data)->
    result = player.setName(data.name)
    if result.errorMessage?
      data.errorMessage = result.errorMessage
    else
      data.givenName = result.givenName
    @playerNetwork?.sendSingle(player.getConnection(), "playerNameChangeRequestResponse", data)
    true

  # ----------------------------------------------------------------------------------------------------------------
  # Handles game-play ops from a remote. Returns false for unrecognized ops.
  doGameOp: (player, op, data)->
    handled = true
    switch op
      when "answer"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (Answered: question=#{data.questionId} answer=#{data.answerIndex} player=#{player.dumpStr()})"
        this.playerAnswered(player, data.questionId, data.answerIndex)
      when "pauseRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (pauseRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if not this.getPage().isPaused()
                this.getPage().fnPauseClick()
      when "continueRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (continueRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if this.getPage().isPaused()
                this.getPage().fnClickContinueGame()
      when "newGameRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (newGameRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Start
              this.getPage().fnClickStartGame()
            when Hy.Pages.PageState.Completed
              this.getPage().fnClickPlayAgain()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if this.getPage().isPaused()
                this.getPage().fnClickNewGame()
      else
        handled = false
    handled

  # ----------------------------------------------------------------------------------------------------------------
  # Maps the console's current page to the {op, data} a remote should display,
  # used to sync newly-joined or reconnecting remotes.
  remotePlayerMapPage: ()->
    page = this.getPage()
    remotePage = if page?
      switch page.constructor.name
        when "SplashPage", "IntroPage"
          {op: "introPage"}
        when "StartPage"
          [state, reason] = page.getStartEnabled()
          {op: "prepForContest", data: {startEnabled:state, reason: reason}}
        when "AboutPage", "UserCreatedContentInfoPage"
          {op: "aboutPage"}
        when "QuestionPage"
          if page.isPaused()
            {op: "gamePaused"}
          else
            # Only (re)show the question if enough countdown time remains to answer
            if @questionChallengeInProgress && page.getCountdownValue() > 5
              {op: "showQuestion", data: {questionId: (if @cq? then @cq.question.id else -1)}}
            else
              {op: "waitingForQuestion"}
        when "ContestCompletedPage"
          {op: "contestCompleted"}
        else
          {op: "prepForContest"}
    else
      {op: "prepForContest"}
    remotePage

  # ----------------------------------------------------------------------------------------------------------------
  # The console (host) player answered via the console UI.
  consolePlayerAnswered: (answerIndex)->
    Hy.Player.ConsolePlayer.findConsolePlayer().setHasAnswered(true)
    this.playerAnswered(Hy.Player.ConsolePlayer.findConsolePlayer(), @cq.question.id, answerIndex)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Records a player's answer for questionId, updates the page, and decides whether
  # the challenge is over (console answered, "first correct" hit, or all remotes in).
  # Duplicate and late answers are ignored.
  playerAnswered: (player, questionId, answerIndex)->
    if not this.answeringAllowed(questionId)
      return

    isConsole = player.isKind(Hy.Player.Player.kKindConsole)

    responses = Hy.Contest.ContestResponse.selectByQuestionID(questionId)
    if (r = this.playerAlreadyAnswered(player, responses))?
#      Hy.Trace.debug "ConsoleApp::playerAnswered(Player already answered! questionId=#{questionId}, answerIndex (last time)=#{r.answerIndex} answerIndex (this time)=#{answerIndex} player => #{player.index})"
      return

    cq = Hy.Contest.ContestQuestion.findByQuestionID(@contest.contestQuestions, questionId)
    isCorrectAnswer = cq.indexCorrectAnswer is answerIndex
#    Hy.Trace.debug "ConsoleApp::playerAnswered(#=#{@iQuestion} questionId=#{questionId} answerIndex=#{answerIndex} correct=#{cq.indexCorrectAnswer} #{if isCorrectAnswer then "CORRECT" else "INCORRECT"} player=#{player.index}/#{player.label})"

    response = player.buildResponse(cq, answerIndex, this.getPage().getCountdownStartValue(), this.getPage().getCountdownValue())
    this.getPage().playerAnswered(response)
    @currentPageHadResponses = true

    firstCorrectMode = Hy.Options.firstCorrect.getValue() is "yes"

    # if all remote players have answered OR if the console player answers, end this challenge
    done = if isConsole
      true
    else
      if firstCorrectMode and isCorrectAnswer
        true
      else
        activeRemotePlayers = Hy.Player.Player.getActivePlayersByKind(Hy.Player.Player.kKindRemote)
        # +1: this player's response hasn't been added to "responses" yet
        if (activeRemotePlayers.length is responses.length+1)
          true
        else
          false

    if done
      this.challengeCompleted(true)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Returns this player's existing response from responses, or undefined if none.
  playerAlreadyAnswered: (player, responses)->
    return _.detect(responses, (r)=>r.player.index is player.index)

  # ----------------------------------------------------------------------------------------------------------------
  # Answers count only while a challenge is in progress AND for the current question.
  answeringAllowed: (questionId)->
    (@questionChallengeInProgress is true) && (questionId is @cq.question.id)

  # ----------------------------------------------------------------------------------------------------------------
  # Logs end-of-contest analytics: only questions actually used, with user-created
  # (third-party) content counted but not named.
  logContestEnd: (completed, nQuestions, nAnswered, contest)->
    numUserCreatedQuestions = 0
    topics = []
    for contestQuestion in contest.getQuestions()
      if contestQuestion.wasUsed()
        # Find the contentPack via the topic, which is really a ProductID
        if (contentPack = Hy.Content.ContentPack.findLatestVersion(topic = contestQuestion.getQuestion().topic))?
          if contentPack.isThirdParty()
            numUserCreatedQuestions++
          else
            topics.push(topic)
    @analytics?.logContestEnd(completed, nQuestions, nAnswered, topics, numUserCreatedQuestions)
    this

  # ----------------------------------------------------------------------------------------------------------------
  # Dispatches a user-created-content action. When showStartPage is true, first
  # navigates to the start page and re-invokes itself as a post-show function.
  userCreatedContentAction: (action, context = null, showStartPage = false)->
    contentManager = Hy.Content.ContentManager.get()
    if showStartPage
      this.showStartPage([(page)=>this.userCreatedContentAction(action, context, false)])
    else
      switch action
        when "refresh"
          contentManager.userCreatedContentRefreshRequested(context)
        when "delete"
          contentManager.userCreatedContentDeleteRequested(context)
        when "upsell"
          contentManager.userCreatedContentUpsell()
        when "buy"
          contentManager.userCreatedContentBuyFeature()
        when "add"
          contentManager.userCreatedContentAddRequested()
        when "info"
          this.showUserCreatedContentInfoPage()
    this

  # ----------------------------------------------------------------------------------------------------------------
  showUserCreatedContentInfoPage: ()->
    @playerNetwork?.sendAll("aboutPage", {})
    this.showPage(Hy.Pages.PageState.UCCInfo, (page)=>page.initialize())

  # ----------------------------------------------------------------------------------------------------------------
  restoreAction: ()->
    this.showStartPage([(page)=>Hy.Content.ContentManager.get().restore()])
    this

# ==================================================================================================================
# assign to global namespace:
Hy.ConsoleApp = ConsoleApp
| 61027 | # ==================================================================================================================
#
# IT IS REALLY IMPORTANT THAT App-level event handlers return null.
# Ti.App.addEventListener("eventName", (event)=>null)
#
#
class ConsoleApp extends Hy.UI.Application
gInstance = null
# ----------------------------------------------------------------------------------------------------------------
@get: ()-> gInstance
# ----------------------------------------------------------------------------------------------------------------
constructor: (backgroundWindow, tempImage)->
gInstance = this
@singleUser = false
super backgroundWindow, tempImage
this.initSetup()
Hy.Pages.StartPage.addObserver this
Hy.Trace.debug "ConsoleApp::constructor (Clipboard=#{Ti.UI.Clipboard.getText()})" #2.6.2
this
# ----------------------------------------------------------------------------------------------------------------
initSetup: ()->
@initFnChain = [
{label: "Trace", init: ()=>Hy.Trace.init(this)}
{label: "Analytics", init: ()=>@analytics = Hy.Analytics.Analytics.init()}
# {label: "CommerceManager", init: ()=>Hy.Commerce.CommerceManager.init()} # As of 2.7
{label: "Page/Video", init: ()=>@pageState = Hy.Pages.PageState.init(this)}
{label: "Splash Page", init: ()=>this.showSplashPage()}
{label: "SoundManager", init: ()=>Hy.Media.SoundManager.init()}
{label: "Network Service", init: ()=>this.initNetwork()}
{label: "Player Network", init: ()=>this.initPlayerNetwork()}
{label: "Update Service", init: ()=>Hy.Update.UpdateService.init()}
{label: "ContentManager", init: ()=>Hy.Content.ContentManager.init(this.checkURLArg())}
{label: "AvatarSet", init: ()=>Hy.Avatars.AvatarSet.init()}
{label: "Console Player", init: ()=>Hy.Player.ConsolePlayer.init()}
# {label: "CommerceManagerInventory", init: ()=>Hy.Commerce.CommerceManager.inventoryManagedFeatures()} # As of 2.7
]
this
# ----------------------------------------------------------------------------------------------------------------
  # Drains the init chain built by initSetup, one step at a time, recording a
  # timing mark per step. Safe to call again after resume: completed steps are
  # shifted off @initFnChain, so only remaining steps run.
  init: ()->
    super
    Hy.Utils.MemInfo.init()
    Hy.Utils.MemInfo.log "INITIALIZING (init #=#{_.size(@initFnChain)})"
    @timedOperation = new Hy.Utils.TimedOperation("INITIALIZATION")
    fnExecute = ()=>
      while _.size(@initFnChain) > 0
        fnSpec = _.first(@initFnChain)
        fnSpec.init()
        @initFnChain.shift()
        @timedOperation.mark(fnSpec.label)
      # Returns null per the app-level event-handler convention (see file header)
      null
    fnExecute()
    this
# ----------------------------------------------------------------------------------------------------------------
  # Application start hook: defers to the base class, then logs the launch
  # to analytics (if analytics initialized successfully).
  start: ()->
    Hy.Trace.debug "ConsoleApp::start"
    super
    @analytics?.logApplicationLaunch()
    this
# ----------------------------------------------------------------------------------------------------------------
# Triggered when the app is backgrounded. Have to work quick here. Do the important stuff first
  # Triggered when the app is backgrounded. Have to work quick here. Do the
  # important stuff first: pause the current page, tell remotes to suspend,
  # quiesce the player network, then the network service, then the base class.
  pause: (evt)->
    Hy.Trace.debug "ConsoleApp::pause (ENTER)"
    this.getPage()?.pause()
    @playerNetwork?.sendAll('suspend', {})
    @playerNetwork?.pause()
    # Port is re-acquired via fnReady when the player network comes back up
    @httpPort = null
    Hy.Network.NetworkService.get().pause()
    super evt
    Hy.Trace.debug "ConsoleApp::pause (EXIT)"
# ----------------------------------------------------------------------------------------------------------------
# Triggered at the start of the process of being foregrounded. Nothing to do here.
  # Triggered at the start of foregrounding. Nothing app-specific to do here;
  # real work happens in resumed().
  resume: (evt)->
    Hy.Trace.debug "ConsoleApp::resume (ENTER page=#{this.getPage()?.constructor.name})"
    super evt
    Hy.Trace.debug "ConsoleApp::resume (EXIT page=#{this.getPage()?.constructor.name})"
# ----------------------------------------------------------------------------------------------------------------
# Triggered when app is fully foregrounded.
  # Triggered when the app is fully foregrounded: re-runs any remaining init
  # steps, wakes the network layers, and restores the page that was showing.
  resumed: (evt)->
    Hy.Trace.debug "ConsoleApp::resumed (ENTER page=#{this.getPage()?.constructor.name})"
    super
    this.init()
    Hy.Network.NetworkService.get().resumed()
    Hy.Network.NetworkService.get().setImmediate()
    @playerNetwork?.resumed()
    # Disabled branch: would react to a new launch URL here (see checkURLArg)
    if false #(newUrl = this.checkURLArg())?
      null # do something
    else
      this.resumedPage()
    Hy.Trace.debug "ConsoleApp::resumed (EXIT page=#{this.getPage()?.constructor.name})"
# ----------------------------------------------------------------------------------------------------------------
#
# To handle the edge case where we were backgrounded while transitioning to a new page.
# In the more complex cases, the tactic for handling this is to simply go back to the previous page.
# This approach seems to be needed when the page is based on a web view and requires images to load
#
  # Handles the edge case of being backgrounded mid-transition to a new page.
  # For simple pages we bounce back to the Start page; for gameplay pages
  # (About, UCCInfo, Join, Answer, Scoreboard, Completed) we let the pending
  # transition complete. Web-view-based pages need this because their images
  # may not have loaded before the suspend.
  resumedPage: ()->
    Hy.Trace.debug "ConsoleApp::resumedPage (ENTER (transitioning=#{@pageState.isTransitioning()?})"
    # Default action: let the page state machine resume normally
    fn = ()=>@pageState.resumed()
    if @pageState.isTransitioning()?
      stopTransitioning = true
      switch (oldPageState = @pageState.getOldPageState())
        when Hy.Pages.PageState.Intro, Hy.Pages.PageState.Start, null, Hy.Pages.PageState.Splash
          fn = ()=>this.showStartPage()
        when Hy.Pages.PageState.Any, Hy.Pages.PageState.Unknown
          new Hy.Utils.ErrorMessage("fatal", "Console App", "Unexpected state \"#{oldPageState}\" in resumedPage") #will display popup dialog
          fn = ()=>this.showStartPage()
        else # About, UCCInfo, Join, Answer, Scoreboard, Completed
          stopTransitioning = false
          null
      if stopTransitioning
        @pageState.stopTransitioning()
    else
      # No page at all (e.g. killed mid-startup): fall back to the Start page
      if not this.getPage()?
        fn = ()=>this.showStartPage()
    Hy.Trace.debug "ConsoleApp::resumedPage (EXIT: \"#{fn}\")"
    fn?()
    this
# ----------------------------------------------------------------------------------------------------------------
checkURLArg: ()->
args = Ti.App.getArguments()
hasChanged = false
if (url = args.url)?
hasChanged = @argURL? and (@argURL != url)
# HACK
url = "https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=<KEY>&exportFormat=csv" #HACK
hasChanged = true
@argURL = url
if hasChanged then @argURL else null
# ----------------------------------------------------------------------------------------------------------------
  # Central page-transition entry point: asks the page state machine to show
  # newPageState, initializing the page with fn_newPageInit and then running
  # postFunctions. The actual transition is deferred (Deferral with delay 0);
  # if a different transition is already in flight, this one is queued to run
  # after it completes.
  showPage: (newPageState, fn_newPageInit, postFunctions = [])->
    Hy.Trace.debug "ConsoleApp::showPage (ENTER #{newPageState} #{@pageState?.display()})"
    fn_showPage = ()=>
      Hy.Trace.debug "ConsoleApp::showPage (FIRING #{newPageState} #{@pageState.display()})"
      @pageState?.showPage(newPageState, fn_newPageInit, postFunctions)
    f = ()=> Hy.Utils.Deferral.create(0, ()=>fn_showPage())
    if (newPageState1 = @pageState.isTransitioning())?
      # NOTE(review): when already transitioning to the SAME state, the request
      # is silently dropped (no queue, no fire) — presumably dedup; confirm
      if newPageState1 isnt newPageState
        @pageState.addPostTransitionAction(f)
    else
      f()
    Hy.Trace.debug "ConsoleApp::showPage (EXIT #{newPageState} #{@pageState.display()})"
    this
# ----------------------------------------------------------------------------------------------------------------
showSplashPage: ()->
if not @splashShown?
this.showPage(Hy.Pages.PageState.Splash, (page)=>page.initialize())
@splashShown = true
null
# ----------------------------------------------------------------------------------------------------------------
initNetwork: ()->
Hy.Network.NetworkService.init().setImmediate()
Hy.Network.NetworkService.addObserver this
# ----------------------------------------------------------------------------------------------------------------
# Called by NetworkService when there's a change in the network scenery (since ConsoleApp is an "observer")
#
  # Called by NetworkService when the network scenery changes (ConsoleApp is
  # an observer). Forwards the event to the current page, but only when no
  # page transition is in flight.
  obs_networkChanged: (reason)->
    super
    if not @pageState.isTransitioning()?
      this.getPage()?.obs_networkChanged(reason)
    this
# ----------------------------------------------------------------------------------------------------------------
  # Creates the player-network proxy (idempotent: returns early if one already
  # exists) and wires up its lifecycle callbacks. In single-user mode no proxy
  # is created and the ready callback fires immediately with no HTTP port.
  initPlayerNetwork: ()->
    if @playerNetwork?
      return
    Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (ENTER)"
    # Network is up: record the port, move off the splash page (or resume the
    # current page and resync remotes), and publish the port to the service.
    fnReady = (httpPort)=>
      Hy.Trace.debug "ConsoleApp::Network Ready (port=#{httpPort})"
      Hy.Trace.debug "ConsoleApp::Network Ready (Clipboard=#{Ti.UI.Clipboard.getText()})" #2.6.2
      @timedOperation.mark("Network Ready")
      @httpPort = httpPort
      if this.getPage()?
        if this.getPage().getState() is Hy.Pages.PageState.Splash
          this.showIntroPage()
        else
          this.getPage().resumed()
          remotePage = this.remotePlayerMapPage()
          @playerNetwork?.sendAll(remotePage.op, remotePage.data)
      else
        if (newPageState = @pageState.isTransitioning())?
          if newPageState is Hy.Pages.PageState.Splash
            this.showIntroPage()
      Hy.Network.NetworkService.setConsoleHTTPPort(@httpPort) # will trigger a "obs_networkChanged" event
      null
    fnError = (error, restartNetwork)=>
      Hy.Trace.debug "ConsoleApp (NETWORK ERROR /#{error}/)"
      if restartNetwork
        Hy.Trace.debug "ConsoleApp (NETWORK ERROR - RESTARTING)"
        new Hy.Utils.ErrorMessage("fatal", "Player Network", error) #will display popup dialog
#        this.restartPlayerNetwork()
      null
    # Remaining callbacks delegate to the remotePlayer* handlers below
    fnMessageReceived = (connection, op, data)=>this.remotePlayerMessage(connection, op, data)
    fnAddPlayer = (connection, label, majorVersion, minorVersion)=>this.remotePlayerAdded(connection, label, majorVersion, minorVersion)
    fnRemovePlayer = (connection)=>this.remotePlayerRemoved(connection)
    fnPlayerStatusChange = (connection, status)=>this.remotePlayerStatusChanged(connection, status)
    fnServiceStatusChange = (serviceStatus)=>this.serviceStatusChange(serviceStatus)
    if @singleUser
      Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (SINGLE USER)"
      fnReady(null)
    else
      @playerNetwork = Hy.Network.PlayerNetworkProxy.create(fnReady, fnError, fnMessageReceived, fnAddPlayer, fnRemovePlayer, fnPlayerStatusChange, fnServiceStatusChange)
    Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (EXIT)"
# ----------------------------------------------------------------------------------------------------------------
restartPlayerNetwork: ()->
Hy.Trace.debug "ConsoleApp::restartPlayerNetwork"
this.stopPlayerNetwork()
this.initPlayerNetwork()
# ----------------------------------------------------------------------------------------------------------------
stopPlayerNetwork: ()->
@playerNetwork?.stop()
@playerNetwork = null
# ----------------------------------------------------------------------------------------------------------------
  # Player-network service status callback: forwards the Bonjour publish
  # status to the network service layer.
  serviceStatusChange: (serviceStatus)->
    Hy.Network.NetworkService.setBonjourPublishInfo(serviceStatus)
    this
# ----------------------------------------------------------------------------------------------------------------
  # Shows the Intro page, which transitions to the Start page when done.
  showIntroPage: ()->
    Hy.Trace.debug "ConsoleApp::ShowIntroPage (ENTER)"
    fn = ()=>this.showStartPage()
    this.showPage(Hy.Pages.PageState.Intro, (page)=>page.initialize(fn))
    # It seems that we'll sometimes hang on startup if there's no wifi... something about the iOS "Turn on Wifi" dialog,
    # and the resulting suspend/resume, messes up the Intro page... doesn't show. This is a hack to get around that:
    # after 7s without wifi, force the transition to the Start page.
    fnIntroPageTimeout = ()=>
      Hy.Trace.debug "ConsoleApp::showIntroPage (IntroPageTimeout)"
      if this.getPage()? and this.getPage().getState() is Hy.Pages.PageState.Intro
        Hy.Trace.debug "ConsoleApp::showIntroPage (IntroPageTimeout - forcing transition to Start Page)"
        fn()
    if not Hy.Network.NetworkService.isOnlineWifi()
      Hy.Utils.Deferral.create(7 * 1000, fnIntroPageTimeout)
    Hy.Trace.debug "ConsoleApp::ShowIntroPage (EXIT)"
    this
# ----------------------------------------------------------------------------------------------------------------
showAboutPage: ()->
@playerNetwork?.sendAll("aboutPage", {})
this.showPage(Hy.Pages.PageState.About, (page)=>page.initialize())
# ----------------------------------------------------------------------------------------------------------------
showJoinCodeInfoPage: ()->
@playerNetwork?.sendAll("aboutPage", {})
this.showPage(Hy.Pages.PageState.JoinCodeInfo, (page)=>page.initialize())
# ----------------------------------------------------------------------------------------------------------------
# Show the start page, and then execute the specified functions
#
  # Shows the Start page, then executes the specified post-transition
  # functions. Also clears any in-progress challenge flag and tells remotes
  # to show their pre-contest screen.
  showStartPage: (postFunctions = [])->
    Hy.Trace.debug "ConsoleApp::showStartPage"
    @questionChallengeInProgress = false
    this.showPage(Hy.Pages.PageState.Start, ((page)=>page.initialize()), postFunctions)
    @playerNetwork?.sendAll("prepForContest", {})
    this
# ----------------------------------------------------------------------------------------------------------------
# Invoked from "StartPage" when the enabled state of the Start Button changes.
#
  # Invoked from StartPage when the enabled state of the Start button changes;
  # mirrors the state to all remotes so they can enable/disable their UI.
  obs_startPageStartButtonStateChanged: (state, reason)->
    @playerNetwork?.sendAll("prepForContest", {startEnabled: state, reason: reason})
    this
# ----------------------------------------------------------------------------------------------------------------
# Invoked from Page "StartPage" when "Start Game" button is touched
#
  # Invoked from StartPage when "Start Game" is touched. Loads questions;
  # on success resets per-contest counters, notifies remotes, and shows the
  # first question. On failure re-enables the Start button.
  contestStart: ()->
    page = this.getPage()
    Hy.Media.SoundManager.get().playEvent("gameStart")
    page.contentPacksLoadingStart()
    if this.loadQuestions()
      @nQuestions = @contest.contestQuestions.length
      @iQuestion = 0
      @nAnswered = 0 # number of times at least one player responded
      page.contentPacksLoadingCompleted()
      Hy.Player.ConsolePlayer.findConsolePlayer().setHasAnswered(false)
      # Quiesce background network activity during gameplay
      Hy.Network.NetworkService.get().setSuspended()
      @playerNetwork?.sendAll('startContest', {})
      this.showCurrentQuestion()
      @analytics?.logContestStart()
    else
      page.contentPacksLoadingCompleted()
      page.resetStartButtonClicked()
    this
# ----------------------------------------------------------------------------------------------------------------
#
  # Loads the selected content packs and builds @contest with the requested
  # number of questions, distributing questions across packs. Returns true on
  # success; on any edge-case failure shows a fatal error dialog and returns
  # false (via fnEdgeCaseError).
  loadQuestions: ()->
    fnEdgeCaseError = (message)=>
      new Hy.Utils.ErrorMessage("fatal", "Console App Options", message) #will display popup dialog
      false
    contentManager = Hy.Content.ContentManager.get()
    this.getPage().contentPacksLoading("Loading topics...")
    @contentLoadTimer = new Hy.Utils.TimedOperation("INITIAL CONTENT LOAD")
    totalNumQuestions = 0
    for contentPack in (contentPacks = _.select(contentManager.getLatestContentPacksOKToDisplay(), (c)=>c.isSelected()))
      # Edge case: content pack isn't actually local...!
      if contentPack.isReadyForPlay()
        contentPack.load()
        totalNumQuestions += contentPack.getNumRecords()
      else
        return fnEdgeCaseError("Topic \"#{contentPack.getDisplayName()}\" not ready for play. Please unselect it")
    numSelectedContentPacks = _.size(contentPacks)
    @contentLoadTimer.mark("done")
    # Edge case: Shouldn't be here if no topics chosen...
    if numSelectedContentPacks is 0
      return fnEdgeCaseError("No topics chosen - Please choose one or more topics and try again")
    # Edge case: corruption in the option; reset to a sane default of 5
    if not (numQuestionsNeeded = Hy.Options.numQuestions.getValue())? or not Hy.Options.numQuestions.isValidValue(numQuestionsNeeded)
      fnEdgeCaseError("Invalid \"Number of Questions\" option, resetting to 5 (#{numQuestionsNeeded})")
      numQuestionsNeeded = 5
      Hy.Options.numQuestions.setValue(numQuestionsNeeded)
      this.getPage().panelNumberOfQuestions.syncCurrentChoiceWithAppOption()
    # Special Case: numQuestions is -1, means "Play as many questions as possible, up to some limit"
    if numQuestionsNeeded is -1
      # Enforce max number
      numQuestionsNeeded = Math.min(totalNumQuestions, Hy.Config.Dynamics.maxNumQuestions)
    # Edge case: Shouldn't really be in this situation, either: not enough questions!
    # We should be able to set numQuestionsNeeded to a lower value to make it work, since
    # we know that the min number of questions in any contest is 5.
    if (shortfall = (numQuestionsNeeded - totalNumQuestions)) > 0
      # Walk the choices from largest to smallest looking for one that fits
      for choice in Hy.Options.numQuestions.getChoices().slice(0).reverse()
        if choice isnt -1
          if (shortfall = (choice-totalNumQuestions)) <= 0
            numQuestionsNeeded = choice
            Hy.Options.numQuestions.setValue(numQuestionsNeeded)
            this.getPage().panelNumberOfQuestions.syncCurrentChoiceWithAppOption()
            this.getPage().contentPacksLoading("Number of questions reduced to accomodate selected topics...")
            break
    # Something's wrong: apparently have a contest with fewer than 5 questions
    if shortfall > 0
      return fnEdgeCaseError("Not enough questions - Please choose more topics and try again (requested=#{numQuestionsNeeded} shortfall=#{shortfall})")
    this.getPage().contentPacksLoading("Selecting questions...")
    # Edge case: if number of selected content packs > number of requested questions...
    numQuestionsPerPack = Math.max(1, Math.floor(numQuestionsNeeded / numSelectedContentPacks))
    @contest = new Hy.Contest.Contest()
    # This loop should always terminate because we know there are more than enough questions.
    # Round-robin over the (shuffled) packs; after one full pass, take one
    # question at a time to fill the remainder.
    contentPacks = Hy.Utils.Array.shuffle(contentPacks)
    index = -1
    numQuestionsFound = 0
    while numQuestionsFound < numQuestionsNeeded
      if index < (numSelectedContentPacks - 1)
        index++
      else
        index = 0
        numQuestionsPerPack = 1 # To fill in the remainder
      contentPack = contentPacks[index]
      numQuestionsFound += (numQuestionsAdded = @contest.addQuestions(contentPack, numQuestionsPerPack))
      if numQuestionsAdded < numQuestionsPerPack
        Hy.Trace.debug "ConsoleApp::loadQuestions (NOT ENOUGH QUESTIONS FOUND pack=#{contentPack.getProductID()} #requested=#{numQuestionsPerPack} #found=#{numQuestionsAdded})"
#        return false # We should be ok, because we know there are enough questions in total...
    for contestQuestion in @contest.getQuestions()
      question = contestQuestion.getQuestion()
      Hy.Trace.debug "ConsoleApp::contestStart (question ##{question.id} #{question.topic})"
    true
# ----------------------------------------------------------------------------------------------------------------
contestPaused: (remotePage)->
@playerNetwork?.sendAll('gamePaused', {page: remotePage})
this
# ----------------------------------------------------------------------------------------------------------------
  # Returns to the Start page after a contest ends or is abandoned.
  # "completed" indicates whether the contest ran to completion (currently
  # only used by the commented-out logging call).
  contestRestart: (completed = true)->
    # set this before showing the Start Page
    Hy.Network.NetworkService.get().setImmediate()
    this.showStartPage()
#    this.logContestEnd(completed, @nQuestions, @nAnswered, @contest)
    this
# ----------------------------------------------------------------------------------------------------------------
# Player requested that we skip to the final Scoreboard
#
contestForceFinish: ()->
this.contestCompleted()
this
# ----------------------------------------------------------------------------------------------------------------
contestCompleted: ()->
Hy.Trace.debug("ConsoleApp::contestCompleted")
fnNotify = ()=>
for o in (leaderboard = this.getPage().getLeaderboard())
for player in o.group
Hy.Trace.debug("ConsoleApp::contestCompleted (score: #{o.score} player#: #{player})")
@playerNetwork?.sendAll('contestCompleted', {leaderboard: leaderboard})
null
iQuestion = @iQuestion # By the time the init function below is called, @iQuestion will have been nulled out
Hy.Network.NetworkService.get().setImmediate()
this.showPage(Hy.Pages.PageState.Completed, (page)=>page.initialize(fnNotify))
Hy.Network.NetworkService.get().setImmediate()
this.logContestEnd(true, @iQuestion, @nAnswered, @contest)
@nQuestions = null
@iQuestion = null
@cq = null
Hy.Utils.MemInfo.log "Contest Completed"
this
# ----------------------------------------------------------------------------------------------------------------
  # Shows the question page for the current question (@cq), after
  # startingDelay ms, with a per-question countdown. Validates the
  # seconds-per-question option and resets it if corrupted.
  showQuestionChallengePage: (startingDelay)->
    someText = @cq.question.question
    someText = someText.substr(0, Math.min(30, someText.length))
    Hy.Trace.debug "ConsoleApp::showQuestionChallengePage (#=#{@iQuestion} question=#{@cq.question.id}/#{someText})"
    @currentPageHadResponses = false #set to true if at least one player responded to current question
    @questionChallengeInProgress = true
    # we copy these here to avoid possible issues with variable bindings, when the callbacks below are invoked
    cq = @cq
    iQuestion = @iQuestion
    nQuestions = @nQuestions
    fnNotify = ()=>@playerNetwork?.sendAll('showQuestion', {questionId: cq.question.id})
    fnPause = ()=>this.contestPaused("showQuestion")
    fnCompleted = ()=>this.challengeCompleted()
    nSeconds = Hy.Options.secondsPerQuestion.choices[Hy.Options.secondsPerQuestion.index]
    if not nSeconds? or not (nSeconds >= 10 and nSeconds <= 570) # this is brittle. HACK
      error = "INVALID nSeconds: #{nSeconds}"
      Hy.Trace.debug "ConsoleApp (#{error})"
      new Hy.Utils.ErrorMessage("fatal", "Console App Options", error) #will display popup dialog
      # Fall back to the first (shortest) choice
      nSeconds = 10
      Hy.Options.secondsPerQuestion.setIndex(0)
      this.getPage().panelSecondsPerQuestion.syncCurrentChoiceWithAppOption()
    this.showPage(Hy.Pages.PageState.Question, (page)=>page.initializeForQuestion(fnNotify, fnPause, fnCompleted, nSeconds, startingDelay, cq, iQuestion, nQuestions))
# ----------------------------------------------------------------------------------------------------------------
  # Ends the current question challenge (countdown expired or everyone
  # answered): plays the completion sound, stops the countdown, marks the
  # question used, and moves on to the answer-reveal page.
  # NOTE(review): the finishedEarly parameter is accepted but never read in
  # this body — confirm before removing it.
  challengeCompleted: (finishedEarly=false)->
    if @questionChallengeInProgress
      Hy.Media.SoundManager.get().playEvent("challengeCompleted")
      this.getPage().animateCountdownQuestionCompleted()
      this.getPage().stop() #haltCountdown() #adding this here to ensure that countdown stops immediately, avoid overlapping countdowns
      @questionChallengeInProgress = false
      @cq.setUsed()
      @nAnswered++ if @currentPageHadResponses
      this.showQuestionAnswerPage()
# ----------------------------------------------------------------------------------------------------------------
  # Shows the answer-reveal page for the current question and tells each
  # remote whether its response was received in time (with its score).
  showQuestionAnswerPage: ()->
#    Hy.Trace.debug "ConsoleApp::showQuestionAnswerPage(#=#{@iQuestion} question=#{@cq.question.id} Responses=#{@currentPageHadResponses} nAnswered=#{@nAnswered})"
    responseVector = []
    # Tell the remotes if we received their responses in time
    for response in Hy.Contest.ContestResponse.selectByQuestionID(@cq.question.id)
      responseVector.push {player: response.getPlayer().getIndex(), score: response.getScore()}
    fnNotify = ()=>@playerNetwork?.sendAll('revealAnswer', {questionId: @cq.question.id, indexCorrectAnswer: @cq.indexCorrectAnswer, responses:responseVector})
    fnPause = ()=>this.contestPaused("revealAnswer")
    fnCompleted = ()=>this.questionAnswerCompleted()
    this.showPage(Hy.Pages.PageState.Answer, (page)=>page.initializeForAnswers(fnNotify, fnPause, fnCompleted, Hy.Config.Dynamics.revealAnswerTime, 0))
# ----------------------------------------------------------------------------------------------------------------
  # Answer-reveal finished: advance to the next question, or finish the
  # contest when all questions have been played.
  questionAnswerCompleted: ()->
    Hy.Trace.debug "ConsoleApp::questionAnswerCompleted(#=#{@iQuestion} question=#{@cq.question.id})"
    @iQuestion++
    if @iQuestion >= @nQuestions
      this.contestCompleted()
    else
      this.showCurrentQuestion()
    this
# ----------------------------------------------------------------------------------------------------------------
showCurrentQuestion: ()->
Hy.Trace.debug "ConsoleApp::showCurrentQuestion(#=#{@iQuestion})"
@cq = @contest.contestQuestions[@iQuestion]
if @iQuestion >= @nQuestions
this.contestCompleted()
else
this.showQuestionChallengePage(500)
this
# ----------------------------------------------------------------------------------------------------------------
  # A remote player connected (or reconnected). Reactivates an existing player
  # for this connection or creates a new one, then sends a "welcome" payload
  # describing the current page, question, the player's prior answer (if any),
  # score, name, and avatar so the remote can sync its UI. Returns the player.
  remotePlayerAdded: (connection, label, majorVersion, minorVersion)->
    Hy.Trace.debug "ConsoleApp::remotePlayerAdded (##{connection}/#{label})"
    s = "?"
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      player.reactivate()
      s = "EXISTING"
    else
      player = Hy.Player.RemotePlayer.create(connection, label, majorVersion, minorVersion)
      @analytics?.logNewPlayer(Hy.Player.Player.count() - 1 ) # Don't count the console player
      s = "NEW"
    Hy.Media.SoundManager.get().playEvent("remotePlayerJoined")
    remotePage = this.remotePlayerMapPage()
    # If a question is active, include the player's existing response (if any)
    currentResponse = null
    if @cq?
      currentResponse = Hy.Contest.ContestResponse.selectByQuestionIDAndPlayer @cq.question.id, player
    Hy.Trace.debug "ConsoleApp::remotePlayerAdded (#{s} #{player.dumpStr()} page=#{remotePage.op} currentAnswerIndex=#{if currentResponse? then currentResponse.answerIndex else -1})"
    op = "welcome"
    data = {}
    data.index = player.index
    data.page = remotePage
    data.questionId = (if @cq? then @cq.question.id else -1)
    data.answerIndex = (if currentResponse? then currentResponse.answerIndex else -1)
    data.score = player.score()
    data.addressEncoding = Hy.Network.NetworkService.getAddressEncoding()
    data.playerName = player.getName()
    avatar = player.getAvatar()
    data.avatar = {}
    data.avatar.name = avatar.getName()
    data.avatar.avatarSet = avatar.getAvatarSetName()
    data.avatar.avatarHeight = avatar.getHeight()
    data.avatar.avatarWidth = avatar.getWidth()
    @playerNetwork?.sendSingle(player.getConnection(), op, data)
    player
# ----------------------------------------------------------------------------------------------------------------
remotePlayerRemoved: (connection)->
Hy.Trace.debug "ConsoleApp::remotePlayerRemoved (#{connection})"
player = Hy.Player.RemotePlayer.findByConnection(connection)
if player?
Hy.Trace.debug "ConsoleApp::remotePlayerRemoved (#{player.dumpStr()})"
player.destroy()
this
# ----------------------------------------------------------------------------------------------------------------
remotePlayerStatusChanged: (connection, status)->
player = Hy.Player.RemotePlayer.findByConnection(connection)
if player?
Hy.Trace.debug "ConsoleApp::remotePlayerStatusChanged (status=#{status} #{player.dumpStr()})"
if status
player.reactivate()
else
player.deactivate()
this
# ----------------------------------------------------------------------------------------------------------------
  # Dispatches an incoming message from a remote player. Messages from unknown
  # connections are ignored, as are messages arriving during a page transition.
  remotePlayerMessage: (connection, op, data)->
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      if @pageState.isTransitioning()
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (IGNORING, in Page Transition)"
      else
        # Name changes are handled specially; everything else is a game op
        handled = if op is "playerNameChangeRequest"
          this.doPlayerNameChangeRequest(player, data)
        else
          this.doGameOp(player, op, data)
        if not handled
          Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (UNKNOWN OP #{op} #{connection})"
    else
      Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (UNKNOWN PLAYER #{connection})"
    this
# ----------------------------------------------------------------------------------------------------------------
doPlayerNameChangeRequest: (player, data)->
result = player.setName(data.name)
if result.errorMessage?
data.errorMessage = result.errorMessage
else
data.givenName = result.givenName
@playerNetwork?.sendSingle(player.getConnection(), "playerNameChangeRequestResponse", data)
true
# ----------------------------------------------------------------------------------------------------------------
  # Handles a gameplay op from a remote player. Returns true when the op was
  # recognized. Pause/continue/new-game requests are only honored when the
  # current page is in a state where they make sense.
  doGameOp: (player, op, data)->
    handled = true
    switch op
      when "answer"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (Answered: question=#{data.questionId} answer=#{data.answerIndex} player=#{player.dumpStr()})"
        this.playerAnswered(player, data.questionId, data.answerIndex)
      when "pauseRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (pauseRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if not this.getPage().isPaused()
                this.getPage().fnPauseClick()
      when "continueRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (continueRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if this.getPage().isPaused()
                this.getPage().fnClickContinueGame()
      when "newGameRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (newGameRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Start
              this.getPage().fnClickStartGame()
            when Hy.Pages.PageState.Completed
              this.getPage().fnClickPlayAgain()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              # Mid-game new-game only allowed from the pause screen
              if this.getPage().isPaused()
                this.getPage().fnClickNewGame()
      else
        handled = false
    handled
# ----------------------------------------------------------------------------------------------------------------
  # Maps the console's current page to the {op, data} payload a remote should
  # display, used when (re)syncing a remote's UI. Falls back to the
  # pre-contest screen for unknown or missing pages.
  remotePlayerMapPage: ()->
    page = this.getPage()
    remotePage = if page?
      switch page.constructor.name
        when "SplashPage", "IntroPage"
          {op: "introPage"}
        when "StartPage"
          [state, reason] = page.getStartEnabled()
          {op: "prepForContest", data: {startEnabled:state, reason: reason}}
        when "AboutPage", "UserCreatedContentInfoPage"
          {op: "aboutPage"}
        when "QuestionPage"
          if page.isPaused()
            {op: "gamePaused"}
          else
            # Only show the live question if enough countdown remains to answer
            if @questionChallengeInProgress && page.getCountdownValue() > 5
              {op: "showQuestion", data: {questionId: (if @cq? then @cq.question.id else -1)}}
            else
              {op: "waitingForQuestion"}
        when "ContestCompletedPage"
          {op: "contestCompleted"}
        else
          {op: "prepForContest"}
    else
      {op: "prepForContest"}
    remotePage
# ----------------------------------------------------------------------------------------------------------------
consolePlayerAnswered: (answerIndex)->
Hy.Player.ConsolePlayer.findConsolePlayer().setHasAnswered(true)
this.playerAnswered(Hy.Player.ConsolePlayer.findConsolePlayer(), @cq.question.id, answerIndex)
this
# ----------------------------------------------------------------------------------------------------------------
  # Records a player's answer to the given question and decides whether the
  # challenge is over. Ignores answers when no challenge is active, for a
  # stale question, or when the player already answered. The challenge ends
  # when the console player answers, when the first correct answer arrives
  # (in first-correct mode), or when every active remote has answered.
  playerAnswered: (player, questionId, answerIndex)->
    if not this.answeringAllowed(questionId)
      return
    isConsole = player.isKind(Hy.Player.Player.kKindConsole)
    responses = Hy.Contest.ContestResponse.selectByQuestionID(questionId)
    if (r = this.playerAlreadyAnswered(player, responses))?
#      Hy.Trace.debug "ConsoleApp::playerAnswered(Player already answered! questionId=#{questionId}, answerIndex (last time)=#{r.answerIndex} answerIndex (this time)=#{answerIndex} player => #{player.index})"
      return
    cq = Hy.Contest.ContestQuestion.findByQuestionID(@contest.contestQuestions, questionId)
    isCorrectAnswer = cq.indexCorrectAnswer is answerIndex
#    Hy.Trace.debug "ConsoleApp::playerAnswered(#=#{@iQuestion} questionId=#{questionId} answerIndex=#{answerIndex} correct=#{cq.indexCorrectAnswer} #{if isCorrectAnswer then "CORRECT" else "INCORRECT"} player=#{player.index}/#{player.label})"
    response = player.buildResponse(cq, answerIndex, this.getPage().getCountdownStartValue(), this.getPage().getCountdownValue())
    this.getPage().playerAnswered(response)
    @currentPageHadResponses = true
    firstCorrectMode = Hy.Options.firstCorrect.getValue() is "yes"
    # if all remote players have answered OR if the console player answers, end this challenge
    done = if isConsole
      true
    else
      if firstCorrectMode and isCorrectAnswer
        true
      else
        # +1 accounts for the response just built, which isn't in "responses"
        activeRemotePlayers = Hy.Player.Player.getActivePlayersByKind(Hy.Player.Player.kKindRemote)
        if (activeRemotePlayers.length is responses.length+1)
          true
        else
          false
    if done
      this.challengeCompleted(true)
    this
# ----------------------------------------------------------------------------------------------------------------
playerAlreadyAnswered: (player, responses)->
return _.detect(responses, (r)=>r.player.index is player.index)
# ----------------------------------------------------------------------------------------------------------------
answeringAllowed: (questionId)->
(@questionChallengeInProgress is true) && (questionId is @cq.question.id)
# ----------------------------------------------------------------------------------------------------------------
  # Logs end-of-contest analytics: which topics were actually played and how
  # many questions came from user-created (third-party) packs. Only questions
  # marked as used are counted.
  logContestEnd: (completed, nQuestions, nAnswered, contest)->
    numUserCreatedQuestions = 0
    topics = []
    for contestQuestion in contest.getQuestions()
      if contestQuestion.wasUsed()
        # Find the contentPack via the topic, which is really a ProductID
        if (contentPack = Hy.Content.ContentPack.findLatestVersion(topic = contestQuestion.getQuestion().topic))?
          if contentPack.isThirdParty()
            numUserCreatedQuestions++
          else
            topics.push(topic)
    @analytics?.logContestEnd(completed, nQuestions, nAnswered, topics, numUserCreatedQuestions)
    this
# ----------------------------------------------------------------------------------------------------------------
  # Dispatches a user-created-content action (refresh/delete/upsell/buy/add/
  # info) to the ContentManager. When showStartPage is true, first navigates
  # to the Start page and re-invokes itself as a post-transition function.
  userCreatedContentAction: (action, context = null, showStartPage = false)->
    contentManager = Hy.Content.ContentManager.get()
    if showStartPage
      this.showStartPage([(page)=>this.userCreatedContentAction(action, context, false)])
    else
      switch action
        when "refresh"
          contentManager.userCreatedContentRefreshRequested(context)
        when "delete"
          contentManager.userCreatedContentDeleteRequested(context)
        when "upsell"
          contentManager.userCreatedContentUpsell()
        when "buy"
          contentManager.userCreatedContentBuyFeature()
        when "add"
          contentManager.userCreatedContentAddRequested()
        when "info"
          this.showUserCreatedContentInfoPage()
    this
# ----------------------------------------------------------------------------------------------------------------
showUserCreatedContentInfoPage: ()->
@playerNetwork?.sendAll("aboutPage", {})
this.showPage(Hy.Pages.PageState.UCCInfo, (page)=>page.initialize())
# ----------------------------------------------------------------------------------------------------------------
restoreAction: ()->
this.showStartPage([(page)=>Hy.Content.ContentManager.get().restore()])
this
# ==================================================================================================================
# Export: publish the console application class into the Hy namespace.
Hy.ConsoleApp = ConsoleApp
# ==================================================================================================================
#
# IT IS REALLY IMPORTANT THAT App-level event handlers return null.
# Ti.App.addEventListener("eventName", (event)=>null)
#
#
class ConsoleApp extends Hy.UI.Application
  # Module-scoped singleton reference; assigned in the constructor.
  gInstance = null
  # ----------------------------------------------------------------------------------------------------------------
  # Returns the singleton ConsoleApp instance (null until constructed).
  @get: ()-> gInstance
  # ----------------------------------------------------------------------------------------------------------------
  # Registers the singleton, queues the staged initialization chain, and
  # subscribes to StartPage notifications. backgroundWindow/tempImage are
  # forwarded to Hy.UI.Application's constructor.
  constructor: (backgroundWindow, tempImage)->
    gInstance = this
    @singleUser = false
    super backgroundWindow, tempImage
    this.initSetup()
    Hy.Pages.StartPage.addObserver this
    Hy.Trace.debug "ConsoleApp::constructor (Clipboard=#{Ti.UI.Clipboard.getText()})" #2.6.2
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Builds @initFnChain: an ordered list of {label, init} steps drained by
  # init(). Labels are only used for timing marks in the trace output.
  initSetup: ()->
    @initFnChain = [
      {label: "Trace", init: ()=>Hy.Trace.init(this)}
      {label: "Analytics", init: ()=>@analytics = Hy.Analytics.Analytics.init()}
#      {label: "CommerceManager", init: ()=>Hy.Commerce.CommerceManager.init()} # As of 2.7
      {label: "Page/Video", init: ()=>@pageState = Hy.Pages.PageState.init(this)}
      {label: "Splash Page", init: ()=>this.showSplashPage()}
      {label: "SoundManager", init: ()=>Hy.Media.SoundManager.init()}
      {label: "Network Service", init: ()=>this.initNetwork()}
      {label: "Player Network", init: ()=>this.initPlayerNetwork()}
      {label: "Update Service", init: ()=>Hy.Update.UpdateService.init()}
      {label: "ContentManager", init: ()=>Hy.Content.ContentManager.init(this.checkURLArg())}
      {label: "AvatarSet", init: ()=>Hy.Avatars.AvatarSet.init()}
      {label: "Console Player", init: ()=>Hy.Player.ConsolePlayer.init()}
#      {label: "CommerceManagerInventory", init: ()=>Hy.Commerce.CommerceManager.inventoryManagedFeatures()} # As of 2.7
    ]
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Drains @initFnChain synchronously (also re-entered on resume; the chain is
  # consumed destructively so completed steps never run twice), marking the
  # time taken by each step.
  init: ()->
    super
    Hy.Utils.MemInfo.init()
    Hy.Utils.MemInfo.log "INITIALIZING (init #=#{_.size(@initFnChain)})"
    @timedOperation = new Hy.Utils.TimedOperation("INITIALIZATION")
    fnExecute = ()=>
      while _.size(@initFnChain) > 0
        fnSpec = _.first(@initFnChain)
        fnSpec.init()
        @initFnChain.shift()
        @timedOperation.mark(fnSpec.label)
      null
    fnExecute()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # App entry point after construction; logs an application-launch analytics event.
  start: ()->
    Hy.Trace.debug "ConsoleApp::start"
    super
    @analytics?.logApplicationLaunch()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Triggered when the app is backgrounded. Have to work quick here. Do the important stuff first
  # (tell remotes to suspend, then quiesce the local network service).
  pause: (evt)->
    Hy.Trace.debug "ConsoleApp::pause (ENTER)"
    this.getPage()?.pause()
    @playerNetwork?.sendAll('suspend', {})
    @playerNetwork?.pause()
    @httpPort = null
    Hy.Network.NetworkService.get().pause()
    super evt
    Hy.Trace.debug "ConsoleApp::pause (EXIT)"
  # ----------------------------------------------------------------------------------------------------------------
  # Triggered at the start of the process of being foregrounded. Nothing to do here.
  resume: (evt)->
    Hy.Trace.debug "ConsoleApp::resume (ENTER page=#{this.getPage()?.constructor.name})"
    super evt
    Hy.Trace.debug "ConsoleApp::resume (EXIT page=#{this.getPage()?.constructor.name})"
  # ----------------------------------------------------------------------------------------------------------------
  # Triggered when app is fully foregrounded.
  # Re-runs init() (safe: the init chain is already drained), kicks the
  # network service, and restores the current page.
  resumed: (evt)->
    Hy.Trace.debug "ConsoleApp::resumed (ENTER page=#{this.getPage()?.constructor.name})"
    super
    this.init()
    Hy.Network.NetworkService.get().resumed()
    Hy.Network.NetworkService.get().setImmediate()
    @playerNetwork?.resumed()
    if false #(newUrl = this.checkURLArg())?
      null # do something
    else
      this.resumedPage()
    Hy.Trace.debug "ConsoleApp::resumed (EXIT page=#{this.getPage()?.constructor.name})"
  # ----------------------------------------------------------------------------------------------------------------
  #
  # To handle the edge case where we were backgrounded while transitioning to a new page.
  # In the more complex cases, the tactic for handling this is to simply go back to the previous page.
  # This approach seems to be needed when the page is based on a web view and requires images to load
  #
  resumedPage: ()->
    Hy.Trace.debug "ConsoleApp::resumedPage (ENTER (transitioning=#{@pageState.isTransitioning()?})"
    fn = ()=>@pageState.resumed()
    if @pageState.isTransitioning()?
      stopTransitioning = true
      switch (oldPageState = @pageState.getOldPageState())
        when Hy.Pages.PageState.Intro, Hy.Pages.PageState.Start, null, Hy.Pages.PageState.Splash
          fn = ()=>this.showStartPage()
        when Hy.Pages.PageState.Any, Hy.Pages.PageState.Unknown
          new Hy.Utils.ErrorMessage("fatal", "Console App", "Unexpected state \"#{oldPageState}\" in resumedPage") #will display popup dialog
          fn = ()=>this.showStartPage()
        else # About, UCCInfo, Join, Answer, Scoreboard, Completed
          stopTransitioning = false
          null
      if stopTransitioning
        @pageState.stopTransitioning()
    else
      if not this.getPage()?
        fn = ()=>this.showStartPage()
    Hy.Trace.debug "ConsoleApp::resumedPage (EXIT: \"#{fn}\")"
    fn?()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Returns the launch URL argument, but only when it differs from the value
  # seen on the previous call; otherwise null.
  # NOTE(review): the hard-coded spreadsheet URL below overwrites the real
  # argument and forces hasChanged — looks like leftover test code; confirm
  # before shipping.
  checkURLArg: ()->
    args = Ti.App.getArguments()
    hasChanged = false
    if (url = args.url)?
      hasChanged = @argURL? and (@argURL != url)
      # HACK
      url = "https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=PI:KEY:<KEY>END_PI&exportFormat=csv" #HACK
      hasChanged = true
      @argURL = url
    if hasChanged then @argURL else null
  # ----------------------------------------------------------------------------------------------------------------
  # Central page-transition helper: defers the actual transition via a
  # zero-delay Deferral, and if a transition to a *different* page state is
  # already in flight, queues this one to run after it completes.
  showPage: (newPageState, fn_newPageInit, postFunctions = [])->
    Hy.Trace.debug "ConsoleApp::showPage (ENTER #{newPageState} #{@pageState?.display()})"
    fn_showPage = ()=>
      Hy.Trace.debug "ConsoleApp::showPage (FIRING #{newPageState} #{@pageState.display()})"
      @pageState?.showPage(newPageState, fn_newPageInit, postFunctions)
    f = ()=> Hy.Utils.Deferral.create(0, ()=>fn_showPage())
    if (newPageState1 = @pageState.isTransitioning())?
      if newPageState1 isnt newPageState
        @pageState.addPostTransitionAction(f)
    else
      f()
    Hy.Trace.debug "ConsoleApp::showPage (EXIT #{newPageState} #{@pageState.display()})"
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the splash page at most once per app lifetime (guarded by @splashShown).
  showSplashPage: ()->
    if not @splashShown?
      this.showPage(Hy.Pages.PageState.Splash, (page)=>page.initialize())
      @splashShown = true
    null
  # ----------------------------------------------------------------------------------------------------------------
  # Starts the network service and registers this app as an observer of it.
  initNetwork: ()->
    Hy.Network.NetworkService.init().setImmediate()
    Hy.Network.NetworkService.addObserver this
  # ----------------------------------------------------------------------------------------------------------------
  # Called by NetworkService when there's a change in the network scenery (since ConsoleApp is an "observer")
  # Forwards to the current page unless a page transition is in flight.
  obs_networkChanged: (reason)->
    super
    if not @pageState.isTransitioning()?
      this.getPage()?.obs_networkChanged(reason)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Creates the player-network proxy (no-op if already created; in single-user
  # mode the ready callback fires immediately with no network) and wires up
  # its lifecycle/message callbacks.
  initPlayerNetwork: ()->
    if @playerNetwork?
      return
    Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (ENTER)"
    # Invoked once the network is listening; syncs the current page state out
    # to any connected remotes and publishes the HTTP port.
    fnReady = (httpPort)=>
      Hy.Trace.debug "ConsoleApp::Network Ready (port=#{httpPort})"
      Hy.Trace.debug "ConsoleApp::Network Ready (Clipboard=#{Ti.UI.Clipboard.getText()})" #2.6.2
      @timedOperation.mark("Network Ready")
      @httpPort = httpPort
      if this.getPage()?
        if this.getPage().getState() is Hy.Pages.PageState.Splash
          this.showIntroPage()
        else
          this.getPage().resumed()
          remotePage = this.remotePlayerMapPage()
          @playerNetwork?.sendAll(remotePage.op, remotePage.data)
      else
        if (newPageState = @pageState.isTransitioning())?
          if newPageState is Hy.Pages.PageState.Splash
            this.showIntroPage()
      Hy.Network.NetworkService.setConsoleHTTPPort(@httpPort) # will trigger a "obs_networkChanged" event
      null
    # Network failure handler; restart path is currently disabled and only a
    # fatal dialog is shown.
    fnError = (error, restartNetwork)=>
      Hy.Trace.debug "ConsoleApp (NETWORK ERROR /#{error}/)"
      if restartNetwork
        Hy.Trace.debug "ConsoleApp (NETWORK ERROR - RESTARTING)"
        new Hy.Utils.ErrorMessage("fatal", "Player Network", error) #will display popup dialog
#        this.restartPlayerNetwork()
      null
    fnMessageReceived = (connection, op, data)=>this.remotePlayerMessage(connection, op, data)
    fnAddPlayer = (connection, label, majorVersion, minorVersion)=>this.remotePlayerAdded(connection, label, majorVersion, minorVersion)
    fnRemovePlayer = (connection)=>this.remotePlayerRemoved(connection)
    fnPlayerStatusChange = (connection, status)=>this.remotePlayerStatusChanged(connection, status)
    fnServiceStatusChange = (serviceStatus)=>this.serviceStatusChange(serviceStatus)
    if @singleUser
      Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (SINGLE USER)"
      fnReady(null)
    else
      @playerNetwork = Hy.Network.PlayerNetworkProxy.create(fnReady, fnError, fnMessageReceived, fnAddPlayer, fnRemovePlayer, fnPlayerStatusChange, fnServiceStatusChange)
    Hy.Trace.debugM "ConsoleApp::initPlayerNetwork (EXIT)"
  # ----------------------------------------------------------------------------------------------------------------
  # Tears the player network down and brings it back up.
  restartPlayerNetwork: ()->
    Hy.Trace.debug "ConsoleApp::restartPlayerNetwork"
    this.stopPlayerNetwork()
    this.initPlayerNetwork()
  # ----------------------------------------------------------------------------------------------------------------
  # Stops and discards the player-network proxy.
  stopPlayerNetwork: ()->
    @playerNetwork?.stop()
    @playerNetwork = null
  # ----------------------------------------------------------------------------------------------------------------
  # Player-network service status callback: republishes via Bonjour.
  serviceStatusChange: (serviceStatus)->
    Hy.Network.NetworkService.setBonjourPublishInfo(serviceStatus)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the intro page, which advances to the start page via fn.
  showIntroPage: ()->
    Hy.Trace.debug "ConsoleApp::ShowIntroPage (ENTER)"
    fn = ()=>this.showStartPage()
    this.showPage(Hy.Pages.PageState.Intro, (page)=>page.initialize(fn))
    # It seems that we'll sometimes hang on startup if there's no wifi... something about the iOS "Turn on Wifi" dialog,
    # and the resulting suspend/resume, messes up the Intro page... doesn't show. This is a hack to get around that.
    fnIntroPageTimeout = ()=>
      Hy.Trace.debug "ConsoleApp::showIntroPage (IntroPageTimeout)"
      if this.getPage()? and this.getPage().getState() is Hy.Pages.PageState.Intro
        Hy.Trace.debug "ConsoleApp::showIntroPage (IntroPageTimeout - forcing transition to Start Page)"
        fn()
    if not Hy.Network.NetworkService.isOnlineWifi()
      Hy.Utils.Deferral.create(7 * 1000, fnIntroPageTimeout)
    Hy.Trace.debug "ConsoleApp::ShowIntroPage (EXIT)"
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the About page and tells remotes to show their "about" screen.
  showAboutPage: ()->
    @playerNetwork?.sendAll("aboutPage", {})
    this.showPage(Hy.Pages.PageState.About, (page)=>page.initialize())
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the join-code info page (remotes get the generic "aboutPage" op).
  showJoinCodeInfoPage: ()->
    @playerNetwork?.sendAll("aboutPage", {})
    this.showPage(Hy.Pages.PageState.JoinCodeInfo, (page)=>page.initialize())
  # ----------------------------------------------------------------------------------------------------------------
  # Show the start page, and then execute the specified functions
  #
  showStartPage: (postFunctions = [])->
    Hy.Trace.debug "ConsoleApp::showStartPage"
    @questionChallengeInProgress = false
    this.showPage(Hy.Pages.PageState.Start, ((page)=>page.initialize()), postFunctions)
    @playerNetwork?.sendAll("prepForContest", {})
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Invoked from "StartPage" when the enabled state of the Start Button changes.
  # Mirrors the state to the remotes so their "start" affordance tracks ours.
  obs_startPageStartButtonStateChanged: (state, reason)->
    @playerNetwork?.sendAll("prepForContest", {startEnabled: state, reason: reason})
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Invoked from Page "StartPage" when "Start Game" button is touched
  # Loads questions, resets per-contest counters, and shows the first question.
  contestStart: ()->
    page = this.getPage()
    Hy.Media.SoundManager.get().playEvent("gameStart")
    page.contentPacksLoadingStart()
    if this.loadQuestions()
      @nQuestions = @contest.contestQuestions.length
      @iQuestion = 0
      @nAnswered = 0 # number of times at least one player responded
      page.contentPacksLoadingCompleted()
      Hy.Player.ConsolePlayer.findConsolePlayer().setHasAnswered(false)
      Hy.Network.NetworkService.get().setSuspended()
      @playerNetwork?.sendAll('startContest', {})
      this.showCurrentQuestion()
      @analytics?.logContestStart()
    else
      page.contentPacksLoadingCompleted()
      page.resetStartButtonClicked()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Builds @contest from the selected content packs. Returns true on success,
  # false (after showing a fatal dialog) on any edge case: no topics selected,
  # a pack not ready for play, corrupt numQuestions option, or too few
  # questions overall. Questions are drawn round-robin from shuffled packs.
  loadQuestions: ()->
    fnEdgeCaseError = (message)=>
      new Hy.Utils.ErrorMessage("fatal", "Console App Options", message) #will display popup dialog
      false
    contentManager = Hy.Content.ContentManager.get()
    this.getPage().contentPacksLoading("Loading topics...")
    @contentLoadTimer = new Hy.Utils.TimedOperation("INITIAL CONTENT LOAD")
    totalNumQuestions = 0
    for contentPack in (contentPacks = _.select(contentManager.getLatestContentPacksOKToDisplay(), (c)=>c.isSelected()))
      # Edge case: content pack isn't actually local...!
      if contentPack.isReadyForPlay()
        contentPack.load()
        totalNumQuestions += contentPack.getNumRecords()
      else
        return fnEdgeCaseError("Topic \"#{contentPack.getDisplayName()}\" not ready for play. Please unselect it")
    numSelectedContentPacks = _.size(contentPacks)
    @contentLoadTimer.mark("done")
    # Edge case: Shouldn't be here if no topics chosen...
    if numSelectedContentPacks is 0
      return fnEdgeCaseError("No topics chosen - Please choose one or more topics and try again")
    # Edge case: corruption in the option
    if not (numQuestionsNeeded = Hy.Options.numQuestions.getValue())? or not Hy.Options.numQuestions.isValidValue(numQuestionsNeeded)
      fnEdgeCaseError("Invalid \"Number of Questions\" option, resetting to 5 (#{numQuestionsNeeded})")
      numQuestionsNeeded = 5
      Hy.Options.numQuestions.setValue(numQuestionsNeeded)
      this.getPage().panelNumberOfQuestions.syncCurrentChoiceWithAppOption()
    # Special Case: numQuestions is -1, means "Play as many questions as possible, up to some limit"
    if numQuestionsNeeded is -1
      # Enforce max number
      numQuestionsNeeded = Math.min(totalNumQuestions, Hy.Config.Dynamics.maxNumQuestions)
    # Edge case: Shouldn't really be in this situation, either: not enough questions!
    # We should be able to set numQuestionsNeeded to a lower value to make it work, since
    # we know that the min number of questions in any contest is 5.
    if (shortfall = (numQuestionsNeeded - totalNumQuestions)) > 0
      for choice in Hy.Options.numQuestions.getChoices().slice(0).reverse()
        if choice isnt -1
          if (shortfall = (choice-totalNumQuestions)) <= 0
            numQuestionsNeeded = choice
            Hy.Options.numQuestions.setValue(numQuestionsNeeded)
            this.getPage().panelNumberOfQuestions.syncCurrentChoiceWithAppOption()
            this.getPage().contentPacksLoading("Number of questions reduced to accomodate selected topics...")
            break
      # Something's wrong: apparently have a contest with fewer than 5 questions
      if shortfall > 0
        return fnEdgeCaseError("Not enough questions - Please choose more topics and try again (requested=#{numQuestionsNeeded} shortfall=#{shortfall})")
    this.getPage().contentPacksLoading("Selecting questions...")
    # Edge case: if number of selected content packs > number of requested questions...
    numQuestionsPerPack = Math.max(1, Math.floor(numQuestionsNeeded / numSelectedContentPacks))
    @contest = new Hy.Contest.Contest()
    # This loop should always terminate because we know there are more than enough questions
    contentPacks = Hy.Utils.Array.shuffle(contentPacks)
    index = -1
    numQuestionsFound = 0
    while numQuestionsFound < numQuestionsNeeded
      if index < (numSelectedContentPacks - 1)
        index++
      else
        index = 0
        numQuestionsPerPack = 1 # To fill in the remainder
      contentPack = contentPacks[index]
      numQuestionsFound += (numQuestionsAdded = @contest.addQuestions(contentPack, numQuestionsPerPack))
      if numQuestionsAdded < numQuestionsPerPack
        Hy.Trace.debug "ConsoleApp::loadQuestions (NOT ENOUGH QUESTIONS FOUND pack=#{contentPack.getProductID()} #requested=#{numQuestionsPerPack} #found=#{numQuestionsAdded})"
        # return false # We should be ok, because we know there are enough questions in total...
    for contestQuestion in @contest.getQuestions()
      question = contestQuestion.getQuestion()
      Hy.Trace.debug "ConsoleApp::contestStart (question ##{question.id} #{question.topic})"
    true
  # ----------------------------------------------------------------------------------------------------------------
  # Tells remotes the game is paused, along with which remote page to show.
  contestPaused: (remotePage)->
    @playerNetwork?.sendAll('gamePaused', {page: remotePage})
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Abandons/finishes the contest and returns to the start page.
  contestRestart: (completed = true)->
    # set this before showing the Start Page
    Hy.Network.NetworkService.get().setImmediate()
    this.showStartPage()
    # this.logContestEnd(completed, @nQuestions, @nAnswered, @contest)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Player requested that we skip to the final Scoreboard
  #
  contestForceFinish: ()->
    this.contestCompleted()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Ends the contest: shows the Completed page, pushes the leaderboard to the
  # remotes, logs analytics, and clears per-contest state.
  # NOTE(review): the local `iQuestion` captured below is never used —
  # logContestEnd is called with @iQuestion directly; confirm intent.
  contestCompleted: ()->
    Hy.Trace.debug("ConsoleApp::contestCompleted")
    fnNotify = ()=>
      for o in (leaderboard = this.getPage().getLeaderboard())
        for player in o.group
          Hy.Trace.debug("ConsoleApp::contestCompleted (score: #{o.score} player#: #{player})")
      @playerNetwork?.sendAll('contestCompleted', {leaderboard: leaderboard})
      null
    iQuestion = @iQuestion # By the time the init function below is called, @iQuestion will have been nulled out
    Hy.Network.NetworkService.get().setImmediate()
    this.showPage(Hy.Pages.PageState.Completed, (page)=>page.initialize(fnNotify))
    Hy.Network.NetworkService.get().setImmediate()
    this.logContestEnd(true, @iQuestion, @nAnswered, @contest)
    @nQuestions = null
    @iQuestion = null
    @cq = null
    Hy.Utils.MemInfo.log "Contest Completed"
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the question page for the current question (@cq), wiring up the
  # notify/pause/completed callbacks and a validated per-question countdown.
  showQuestionChallengePage: (startingDelay)->
    someText = @cq.question.question
    someText = someText.substr(0, Math.min(30, someText.length))
    Hy.Trace.debug "ConsoleApp::showQuestionChallengePage (#=#{@iQuestion} question=#{@cq.question.id}/#{someText})"
    @currentPageHadResponses = false #set to true if at least one player responded to current question
    @questionChallengeInProgress = true
    # we copy these here to avoid possible issues with variable bindings, when the callbacks below are invoked
    cq = @cq
    iQuestion = @iQuestion
    nQuestions = @nQuestions
    fnNotify = ()=>@playerNetwork?.sendAll('showQuestion', {questionId: cq.question.id})
    fnPause = ()=>this.contestPaused("showQuestion")
    fnCompleted = ()=>this.challengeCompleted()
    nSeconds = Hy.Options.secondsPerQuestion.choices[Hy.Options.secondsPerQuestion.index]
    if not nSeconds? or not (nSeconds >= 10 and nSeconds <= 570) # this is brittle. HACK
      error = "INVALID nSeconds: #{nSeconds}"
      Hy.Trace.debug "ConsoleApp (#{error})"
      new Hy.Utils.ErrorMessage("fatal", "Console App Options", error) #will display popup dialog
      nSeconds = 10
      Hy.Options.secondsPerQuestion.setIndex(0)
      this.getPage().panelSecondsPerQuestion.syncCurrentChoiceWithAppOption()
    this.showPage(Hy.Pages.PageState.Question, (page)=>page.initializeForQuestion(fnNotify, fnPause, fnCompleted, nSeconds, startingDelay, cq, iQuestion, nQuestions))
  # ----------------------------------------------------------------------------------------------------------------
  # Ends the current question challenge (idempotent via
  # @questionChallengeInProgress) and moves on to the answer-reveal page.
  challengeCompleted: (finishedEarly=false)->
    if @questionChallengeInProgress
      Hy.Media.SoundManager.get().playEvent("challengeCompleted")
      this.getPage().animateCountdownQuestionCompleted()
      this.getPage().stop() #haltCountdown() #adding this here to ensure that countdown stops immediately, avoid overlapping countdowns
      @questionChallengeInProgress = false
      @cq.setUsed()
      @nAnswered++ if @currentPageHadResponses
      this.showQuestionAnswerPage()
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the answer-reveal page and sends each remote its scored response.
  showQuestionAnswerPage: ()->
#    Hy.Trace.debug "ConsoleApp::showQuestionAnswerPage(#=#{@iQuestion} question=#{@cq.question.id} Responses=#{@currentPageHadResponses} nAnswered=#{@nAnswered})"
    responseVector = []
    # Tell the remotes if we received their responses in time
    for response in Hy.Contest.ContestResponse.selectByQuestionID(@cq.question.id)
      responseVector.push {player: response.getPlayer().getIndex(), score: response.getScore()}
    fnNotify = ()=>@playerNetwork?.sendAll('revealAnswer', {questionId: @cq.question.id, indexCorrectAnswer: @cq.indexCorrectAnswer, responses:responseVector})
    fnPause = ()=>this.contestPaused("revealAnswer")
    fnCompleted = ()=>this.questionAnswerCompleted()
    this.showPage(Hy.Pages.PageState.Answer, (page)=>page.initializeForAnswers(fnNotify, fnPause, fnCompleted, Hy.Config.Dynamics.revealAnswerTime, 0))
  # ----------------------------------------------------------------------------------------------------------------
  # Advances to the next question, or finishes the contest after the last one.
  questionAnswerCompleted: ()->
    Hy.Trace.debug "ConsoleApp::questionAnswerCompleted(#=#{@iQuestion} question=#{@cq.question.id})"
    @iQuestion++
    if @iQuestion >= @nQuestions
      this.contestCompleted()
    else
      this.showCurrentQuestion()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Loads @cq from the contest at @iQuestion and shows it (or completes the
  # contest if we've run past the end).
  showCurrentQuestion: ()->
    Hy.Trace.debug "ConsoleApp::showCurrentQuestion(#=#{@iQuestion})"
    @cq = @contest.contestQuestions[@iQuestion]
    if @iQuestion >= @nQuestions
      this.contestCompleted()
    else
      this.showQuestionChallengePage(500)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Player-network callback: a remote joined (or rejoined). Creates/reactivates
  # the RemotePlayer and sends it a "welcome" payload describing current state
  # (page, question, score, avatar) so it can sync up mid-game.
  remotePlayerAdded: (connection, label, majorVersion, minorVersion)->
    Hy.Trace.debug "ConsoleApp::remotePlayerAdded (##{connection}/#{label})"
    s = "?"
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      player.reactivate()
      s = "EXISTING"
    else
      player = Hy.Player.RemotePlayer.create(connection, label, majorVersion, minorVersion)
      @analytics?.logNewPlayer(Hy.Player.Player.count() - 1 ) # Don't count the console player
      s = "NEW"
    Hy.Media.SoundManager.get().playEvent("remotePlayerJoined")
    remotePage = this.remotePlayerMapPage()
    currentResponse = null
    if @cq?
      currentResponse = Hy.Contest.ContestResponse.selectByQuestionIDAndPlayer @cq.question.id, player
    Hy.Trace.debug "ConsoleApp::remotePlayerAdded (#{s} #{player.dumpStr()} page=#{remotePage.op} currentAnswerIndex=#{if currentResponse? then currentResponse.answerIndex else -1})"
    op = "welcome"
    data = {}
    data.index = player.index
    data.page = remotePage
    data.questionId = (if @cq? then @cq.question.id else -1)
    data.answerIndex = (if currentResponse? then currentResponse.answerIndex else -1)
    data.score = player.score()
    data.addressEncoding = Hy.Network.NetworkService.getAddressEncoding()
    data.playerName = player.getName()
    avatar = player.getAvatar()
    data.avatar = {}
    data.avatar.name = avatar.getName()
    data.avatar.avatarSet = avatar.getAvatarSetName()
    data.avatar.avatarHeight = avatar.getHeight()
    data.avatar.avatarWidth = avatar.getWidth()
    @playerNetwork?.sendSingle(player.getConnection(), op, data)
    player
  # ----------------------------------------------------------------------------------------------------------------
  # Player-network callback: a remote's connection went away.
  remotePlayerRemoved: (connection)->
    Hy.Trace.debug "ConsoleApp::remotePlayerRemoved (#{connection})"
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      Hy.Trace.debug "ConsoleApp::remotePlayerRemoved (#{player.dumpStr()})"
      player.destroy()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Player-network callback: a remote went active (truthy) or inactive.
  remotePlayerStatusChanged: (connection, status)->
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      Hy.Trace.debug "ConsoleApp::remotePlayerStatusChanged (status=#{status} #{player.dumpStr()})"
      if status
        player.reactivate()
      else
        player.deactivate()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Player-network callback: dispatches an op from a known remote. Messages
  # arriving during a page transition are deliberately dropped.
  remotePlayerMessage: (connection, op, data)->
    player = Hy.Player.RemotePlayer.findByConnection(connection)
    if player?
      if @pageState.isTransitioning()
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (IGNORING, in Page Transition)"
      else
        handled = if op is "playerNameChangeRequest"
          this.doPlayerNameChangeRequest(player, data)
        else
          this.doGameOp(player, op, data)
        if not handled
          Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (UNKNOWN OP #{op} #{connection})"
    else
      Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (UNKNOWN PLAYER #{connection})"
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Applies a remote's name-change request and replies with either the
  # accepted name or an error message. Always reports handled (true).
  doPlayerNameChangeRequest: (player, data)->
    result = player.setName(data.name)
    if result.errorMessage?
      data.errorMessage = result.errorMessage
    else
      data.givenName = result.givenName
    @playerNetwork?.sendSingle(player.getConnection(), "playerNameChangeRequestResponse", data)
    true
  # ----------------------------------------------------------------------------------------------------------------
  # Handles game-flow ops from a remote (answer/pause/continue/new game),
  # gated by the current page's state. Returns false for unknown ops.
  doGameOp: (player, op, data)->
    handled = true
    switch op
      when "answer"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (Answered: question=#{data.questionId} answer=#{data.answerIndex} player=#{player.dumpStr()})"
        this.playerAnswered(player, data.questionId, data.answerIndex)
      when "pauseRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (pauseRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if not this.getPage().isPaused()
                this.getPage().fnPauseClick()
      when "continueRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (continueRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if this.getPage().isPaused()
                this.getPage().fnClickContinueGame()
      when "newGameRequested"
        Hy.Trace.debug "ConsoleApp::messageReceivedFromPlayer (newGameRequested: player=#{player.dumpStr()})"
        if this.getPage()?
          switch this.getPage().getState()
            when Hy.Pages.PageState.Start
              this.getPage().fnClickStartGame()
            when Hy.Pages.PageState.Completed
              this.getPage().fnClickPlayAgain()
            when Hy.Pages.PageState.Question, Hy.Pages.PageState.Answer
              if this.getPage().isPaused()
                this.getPage().fnClickNewGame()
      else
        handled = false
    handled
  # ----------------------------------------------------------------------------------------------------------------
  # Maps the console's current page to the {op, data} a remote should render.
  remotePlayerMapPage: ()->
    page = this.getPage()
    remotePage = if page?
      switch page.constructor.name
        when "SplashPage", "IntroPage"
          {op: "introPage"}
        when "StartPage"
          [state, reason] = page.getStartEnabled()
          {op: "prepForContest", data: {startEnabled:state, reason: reason}}
        when "AboutPage", "UserCreatedContentInfoPage"
          {op: "aboutPage"}
        when "QuestionPage"
          if page.isPaused()
            {op: "gamePaused"}
          else
            if @questionChallengeInProgress && page.getCountdownValue() > 5
              {op: "showQuestion", data: {questionId: (if @cq? then @cq.question.id else -1)}}
            else
              {op: "waitingForQuestion"}
        when "ContestCompletedPage"
          {op: "contestCompleted"}
        else
          {op: "prepForContest"}
    else
      {op: "prepForContest"}
    remotePage
  # ----------------------------------------------------------------------------------------------------------------
  # The console player answered via the local UI; funnels into playerAnswered.
  consolePlayerAnswered: (answerIndex)->
    Hy.Player.ConsolePlayer.findConsolePlayer().setHasAnswered(true)
    this.playerAnswered(Hy.Player.ConsolePlayer.findConsolePlayer(), @cq.question.id, answerIndex)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Records one player's answer (ignoring late/duplicate answers) and decides
  # whether the challenge is done: console player answering always ends it;
  # otherwise first-correct mode or all active remotes having answered.
  playerAnswered: (player, questionId, answerIndex)->
    if not this.answeringAllowed(questionId)
      return
    isConsole = player.isKind(Hy.Player.Player.kKindConsole)
    responses = Hy.Contest.ContestResponse.selectByQuestionID(questionId)
    if (r = this.playerAlreadyAnswered(player, responses))?
#      Hy.Trace.debug "ConsoleApp::playerAnswered(Player already answered! questionId=#{questionId}, answerIndex (last time)=#{r.answerIndex} answerIndex (this time)=#{answerIndex} player => #{player.index})"
      return
    cq = Hy.Contest.ContestQuestion.findByQuestionID(@contest.contestQuestions, questionId)
    isCorrectAnswer = cq.indexCorrectAnswer is answerIndex
#    Hy.Trace.debug "ConsoleApp::playerAnswered(#=#{@iQuestion} questionId=#{questionId} answerIndex=#{answerIndex} correct=#{cq.indexCorrectAnswer} #{if isCorrectAnswer then "CORRECT" else "INCORRECT"} player=#{player.index}/#{player.label})"
    response = player.buildResponse(cq, answerIndex, this.getPage().getCountdownStartValue(), this.getPage().getCountdownValue())
    this.getPage().playerAnswered(response)
    @currentPageHadResponses = true
    firstCorrectMode = Hy.Options.firstCorrect.getValue() is "yes"
    # if all remote players have answered OR if the console player answers, end this challenge
    done = if isConsole
      true
    else
      if firstCorrectMode and isCorrectAnswer
        true
      else
        activeRemotePlayers = Hy.Player.Player.getActivePlayersByKind(Hy.Player.Player.kKindRemote)
        if (activeRemotePlayers.length is responses.length+1)
          true
        else
          false
    if done
      this.challengeCompleted(true)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Returns this player's existing response from the list, or undefined.
  playerAlreadyAnswered: (player, responses)->
    return _.detect(responses, (r)=>r.player.index is player.index)
  # ----------------------------------------------------------------------------------------------------------------
  # Answers count only while a challenge is live and for the current question.
  answeringAllowed: (questionId)->
    (@questionChallengeInProgress is true) && (questionId is @cq.question.id)
  # ----------------------------------------------------------------------------------------------------------------
  # Logs contest-end analytics: which built-in topics were used and how many
  # questions came from user-created (third-party) packs.
  logContestEnd: (completed, nQuestions, nAnswered, contest)->
    numUserCreatedQuestions = 0
    topics = []
    for contestQuestion in contest.getQuestions()
      if contestQuestion.wasUsed()
        # Find the contentPack via the topic, which is really a ProductID
        if (contentPack = Hy.Content.ContentPack.findLatestVersion(topic = contestQuestion.getQuestion().topic))?
          if contentPack.isThirdParty()
            numUserCreatedQuestions++
          else
            topics.push(topic)
    @analytics?.logContestEnd(completed, nQuestions, nAnswered, topics, numUserCreatedQuestions)
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Dispatches a user-created-content action; when showStartPage is true, first
  # navigates to the start page and re-invokes itself from there.
  userCreatedContentAction: (action, context = null, showStartPage = false)->
    contentManager = Hy.Content.ContentManager.get()
    if showStartPage
      this.showStartPage([(page)=>this.userCreatedContentAction(action, context, false)])
    else
      switch action
        when "refresh"
          contentManager.userCreatedContentRefreshRequested(context)
        when "delete"
          contentManager.userCreatedContentDeleteRequested(context)
        when "upsell"
          contentManager.userCreatedContentUpsell()
        when "buy"
          contentManager.userCreatedContentBuyFeature()
        when "add"
          contentManager.userCreatedContentAddRequested()
        when "info"
          this.showUserCreatedContentInfoPage()
    this
  # ----------------------------------------------------------------------------------------------------------------
  # Shows the user-created-content info page (remotes see "aboutPage").
  showUserCreatedContentInfoPage: ()->
    @playerNetwork?.sendAll("aboutPage", {})
    this.showPage(Hy.Pages.PageState.UCCInfo, (page)=>page.initialize())
  # ----------------------------------------------------------------------------------------------------------------
  # Restores purchases after returning to the start page.
  restoreAction: ()->
    this.showStartPage([(page)=>Hy.Content.ContentManager.get().restore()])
    this
# ==================================================================================================================
# assign to global namespace:
Hy.ConsoleApp = ConsoleApp
|
[
{
"context": " identity:\n user: 'root'\n effective: 0\n }\n ",
"end": 318,
"score": 0.5382782816886902,
"start": 314,
"tag": "USERNAME",
"value": "root"
},
{
"context": ".parse(data)\n if data.username == 'root' and data.password == '123'\n r",
"end": 612,
"score": 0.9801880121231079,
"start": 608,
"tag": "USERNAME",
"value": "root"
},
{
"context": " if data.username == 'root' and data.password == '123'\n return [200, {success: true}",
"end": 639,
"score": 0.9993229508399963,
"start": 636,
"tag": "PASSWORD",
"value": "123"
}
] | tests-karma/tests/core/identity.service.coffee | ajenti/ajen | 3,777 | describe 'identity service', () ->
    # Shared $httpBackend fixtures for every spec in this suite:
    #  - GET /api/core/identity -> machine info plus identity user 'root', effective uid 0
    #  - GET /api/core/logout   -> empty success body
    #  - POST /api/core/auth    -> success only for credentials root/123
    beforeEach () ->
        inject ($httpBackend) ->
            $httpBackend.when('GET', '/api/core/identity').respond {
                machine:
                    hostname: "test-box"
                    name: 'test box'
                identity:
                    user: 'root'
                    effective: 0
            }
            $httpBackend.when('GET', '/api/core/logout').respond {}
            $httpBackend.when('POST', '/api/core/auth').respond (method, url, data, headers) ->
                data = JSON.parse(data)
                # Dynamic responder: echo success/failure based on the posted credentials
                if data.username == 'root' and data.password == '123'
                    return [200, {success: true}]
                return [200, {success: false, error: 'error'}]
    # init() should GET /api/core/identity and resolve identity.promise with
    # the user/effective fields from the mocked response.
    it 'init()', (done) ->
        inject ($httpBackend, identity) ->
            identity.init()
            $httpBackend.expectGET('/api/core/identity')
            identity.promise.then () ->
                expect(identity.user).to.equal('root')
                expect(identity.effective).to.equal(0)
                done()
            assert.isFulfilled(identity.promise)
            # flush() fires the queued mock response, driving the .then above
            $httpBackend.flush()
    # elevate() should log out, then redirect the browser to the sudo login
    # view carrying the current path as the return location.
    it 'elevate()', (done) ->
        inject ($httpBackend, $window, $location, $timeout, urlPrefix, identity) ->
            $httpBackend.expectGET('/api/core/logout')
            # Stub navigation so the redirect can be observed instead of executed
            @sinon.stub($window.location, 'assign')
            @sinon.stub($location, 'path').returns('/test')
            p = identity.elevate()
            p.then () ->
                $window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/sudo:undefined//test")
                setTimeout () ->
                    done()
            assert.isFulfilled(p)
            # Drive the internal delay and the mocked logout request to completion
            $timeout.flush(1000)
            $httpBackend.flush()
it 'auth() succeeds', (done) ->
inject ($httpBackend, identity) ->
$httpBackend.expectPOST('/api/core/auth')
p = identity.auth('root', '123', 'normal')
p.then () ->
setTimeout () ->
done()
assert.isFulfilled(p)
$httpBackend.flush()
it 'auth() fails', (done) ->
inject ($httpBackend, identity) ->
$httpBackend.expectPOST('/api/core/auth')
p = identity.auth('root', 'boo', 'normal')
p.catch () ->
setTimeout () ->
done()
assert.isRejected(p)
$httpBackend.flush()
it 'login()', () ->
inject ($httpBackend, $location, $window, urlPrefix, identity) ->
@sinon.stub($location, 'path').returns('/test')
@sinon.stub($window.location, 'assign')
identity.login()
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/normal//test")
it 'logout()', (done) ->
inject ($httpBackend, $window, $location, $timeout, urlPrefix, identity) ->
$httpBackend.expectGET('/api/core/logout')
@sinon.stub($window.location, 'assign')
@sinon.stub($location, 'path').returns('/test')
p = identity.logout()
p.then () ->
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/normal//test")
setTimeout () ->
done()
assert.isFulfilled(p)
$timeout.flush(1000)
$httpBackend.flush()
| 43518 | describe 'identity service', () ->
beforeEach () ->
inject ($httpBackend) ->
$httpBackend.when('GET', '/api/core/identity').respond {
machine:
hostname: "test-box"
name: 'test box'
identity:
user: 'root'
effective: 0
}
$httpBackend.when('GET', '/api/core/logout').respond {}
$httpBackend.when('POST', '/api/core/auth').respond (method, url, data, headers) ->
data = JSON.parse(data)
if data.username == 'root' and data.password == '<PASSWORD>'
return [200, {success: true}]
return [200, {success: false, error: 'error'}]
it 'init()', (done) ->
inject ($httpBackend, identity) ->
identity.init()
$httpBackend.expectGET('/api/core/identity')
identity.promise.then () ->
expect(identity.user).to.equal('root')
expect(identity.effective).to.equal(0)
done()
assert.isFulfilled(identity.promise)
$httpBackend.flush()
it 'elevate()', (done) ->
inject ($httpBackend, $window, $location, $timeout, urlPrefix, identity) ->
$httpBackend.expectGET('/api/core/logout')
@sinon.stub($window.location, 'assign')
@sinon.stub($location, 'path').returns('/test')
p = identity.elevate()
p.then () ->
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/sudo:undefined//test")
setTimeout () ->
done()
assert.isFulfilled(p)
$timeout.flush(1000)
$httpBackend.flush()
it 'auth() succeeds', (done) ->
inject ($httpBackend, identity) ->
$httpBackend.expectPOST('/api/core/auth')
p = identity.auth('root', '123', 'normal')
p.then () ->
setTimeout () ->
done()
assert.isFulfilled(p)
$httpBackend.flush()
it 'auth() fails', (done) ->
inject ($httpBackend, identity) ->
$httpBackend.expectPOST('/api/core/auth')
p = identity.auth('root', 'boo', 'normal')
p.catch () ->
setTimeout () ->
done()
assert.isRejected(p)
$httpBackend.flush()
it 'login()', () ->
inject ($httpBackend, $location, $window, urlPrefix, identity) ->
@sinon.stub($location, 'path').returns('/test')
@sinon.stub($window.location, 'assign')
identity.login()
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/normal//test")
it 'logout()', (done) ->
inject ($httpBackend, $window, $location, $timeout, urlPrefix, identity) ->
$httpBackend.expectGET('/api/core/logout')
@sinon.stub($window.location, 'assign')
@sinon.stub($location, 'path').returns('/test')
p = identity.logout()
p.then () ->
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/normal//test")
setTimeout () ->
done()
assert.isFulfilled(p)
$timeout.flush(1000)
$httpBackend.flush()
| true | describe 'identity service', () ->
beforeEach () ->
inject ($httpBackend) ->
$httpBackend.when('GET', '/api/core/identity').respond {
machine:
hostname: "test-box"
name: 'test box'
identity:
user: 'root'
effective: 0
}
$httpBackend.when('GET', '/api/core/logout').respond {}
$httpBackend.when('POST', '/api/core/auth').respond (method, url, data, headers) ->
data = JSON.parse(data)
if data.username == 'root' and data.password == 'PI:PASSWORD:<PASSWORD>END_PI'
return [200, {success: true}]
return [200, {success: false, error: 'error'}]
it 'init()', (done) ->
inject ($httpBackend, identity) ->
identity.init()
$httpBackend.expectGET('/api/core/identity')
identity.promise.then () ->
expect(identity.user).to.equal('root')
expect(identity.effective).to.equal(0)
done()
assert.isFulfilled(identity.promise)
$httpBackend.flush()
it 'elevate()', (done) ->
inject ($httpBackend, $window, $location, $timeout, urlPrefix, identity) ->
$httpBackend.expectGET('/api/core/logout')
@sinon.stub($window.location, 'assign')
@sinon.stub($location, 'path').returns('/test')
p = identity.elevate()
p.then () ->
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/sudo:undefined//test")
setTimeout () ->
done()
assert.isFulfilled(p)
$timeout.flush(1000)
$httpBackend.flush()
it 'auth() succeeds', (done) ->
inject ($httpBackend, identity) ->
$httpBackend.expectPOST('/api/core/auth')
p = identity.auth('root', '123', 'normal')
p.then () ->
setTimeout () ->
done()
assert.isFulfilled(p)
$httpBackend.flush()
it 'auth() fails', (done) ->
inject ($httpBackend, identity) ->
$httpBackend.expectPOST('/api/core/auth')
p = identity.auth('root', 'boo', 'normal')
p.catch () ->
setTimeout () ->
done()
assert.isRejected(p)
$httpBackend.flush()
it 'login()', () ->
inject ($httpBackend, $location, $window, urlPrefix, identity) ->
@sinon.stub($location, 'path').returns('/test')
@sinon.stub($window.location, 'assign')
identity.login()
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/normal//test")
it 'logout()', (done) ->
inject ($httpBackend, $window, $location, $timeout, urlPrefix, identity) ->
$httpBackend.expectGET('/api/core/logout')
@sinon.stub($window.location, 'assign')
@sinon.stub($location, 'path').returns('/test')
p = identity.logout()
p.then () ->
$window.location.assign.should.have.been.calledWith("#{urlPrefix}/view/login/normal//test")
setTimeout () ->
done()
assert.isFulfilled(p)
$timeout.flush(1000)
$httpBackend.flush()
|
[
{
"context": "n\n login: ->\n payload = \n username: @username\n password: @password\n @$dialog.show '",
"end": 504,
"score": 0.9987956285476685,
"start": 495,
"tag": "USERNAME",
"value": "@username"
},
{
"context": "d = \n username: @username\n password: @password\n @$dialog.show 'Loging in...'\n @$http.p",
"end": 532,
"score": 0.9654160737991333,
"start": 523,
"tag": "PASSWORD",
"value": "@password"
}
] | src/script/main.coffee | carrasquel/tulipan-example | 0 | `import html from "../views/main.html";`
`var backend_url = process.env.BACKEND_URL || "http://localhost:5000/"`
app = new Tulipan(
template:
html: html
async: false
route: '/'
data:
username: ''
password: ''
methods:
after: ->
apiKey = @$store.get('apiKey')
if apiKey
@$router.navigate '/application'
return
clear: ->
@$set 'username', ''
@$set 'password', ''
return
login: ->
payload =
username: @username
password: @password
@$dialog.show 'Loging in...'
@$http.post(backend_url + 'api/auth/login', payload).then ((res) ->
@$store.set 'apiKey', res.data.api_key
@clear()
@$dialog.hide()
@$router.navigate '/application'
return
), (err) ->
@$dialog.hide()
console.log err
return
return
) | 128113 | `import html from "../views/main.html";`
`var backend_url = process.env.BACKEND_URL || "http://localhost:5000/"`
app = new Tulipan(
template:
html: html
async: false
route: '/'
data:
username: ''
password: ''
methods:
after: ->
apiKey = @$store.get('apiKey')
if apiKey
@$router.navigate '/application'
return
clear: ->
@$set 'username', ''
@$set 'password', ''
return
login: ->
payload =
username: @username
password: <PASSWORD>
@$dialog.show 'Loging in...'
@$http.post(backend_url + 'api/auth/login', payload).then ((res) ->
@$store.set 'apiKey', res.data.api_key
@clear()
@$dialog.hide()
@$router.navigate '/application'
return
), (err) ->
@$dialog.hide()
console.log err
return
return
) | true | `import html from "../views/main.html";`
`var backend_url = process.env.BACKEND_URL || "http://localhost:5000/"`
app = new Tulipan(
template:
html: html
async: false
route: '/'
data:
username: ''
password: ''
methods:
after: ->
apiKey = @$store.get('apiKey')
if apiKey
@$router.navigate '/application'
return
clear: ->
@$set 'username', ''
@$set 'password', ''
return
login: ->
payload =
username: @username
password: PI:PASSWORD:<PASSWORD>END_PI
@$dialog.show 'Loging in...'
@$http.post(backend_url + 'api/auth/login', payload).then ((res) ->
@$store.set 'apiKey', res.data.api_key
@clear()
@$dialog.hide()
@$router.navigate '/application'
return
), (err) ->
@$dialog.hide()
console.log err
return
return
) |
[
{
"context": "son.version}\n license: #{pjson.license}\n author: Roman Pushkin\n #{pjson.homepage}\n*/\n\n\"\"\"\n\ndest = 'dist/'\nsourc",
"end": 349,
"score": 0.99971604347229,
"start": 336,
"tag": "NAME",
"value": "Roman Pushkin"
}
] | assets/jquery.ns-autogrow-1.1.6/gulpfile.coffee | velioo/animesite | 6 | gulp = require 'gulp'
del = require 'del'
coffee = require 'gulp-coffee'
coffeelint = require 'gulp-coffeelint'
header = require 'gulp-header'
uglify = require 'gulp-uglify'
rename = require 'gulp-rename'
pjson = require './package.json'
copyright = """
/*!
Non-Sucking Autogrow #{pjson.version}
license: #{pjson.license}
author: Roman Pushkin
#{pjson.homepage}
*/
"""
dest = 'dist/'
source = 'src/'
code =
in: "#{source}*.coffee"
out: "#{dest}"
gulp.task 'clean', ->
del [dest + '*']
gulp.task 'build', ->
gulp
.src code.in
.pipe coffeelint()
.pipe coffeelint.reporter() # Show coffeelint errors
.pipe coffeelint.reporter('fail') # Make sure it fails in case of error
.pipe coffee()
.pipe header copyright
.pipe gulp.dest(code.out)
gulp
.src code.in
.pipe coffeelint()
.pipe coffeelint.reporter() # Show coffeelint errors
.pipe coffeelint.reporter('fail') # Make sure it fails in case of error
.pipe coffee()
.pipe uglify()
.pipe rename({ suffix: '.min' })
.pipe header copyright
.pipe gulp.dest(code.out)
gulp.task 'watch', ->
gulp
.watch code.in, ['build']
gulp.task 'default', ['clean', 'build', 'watch'], ->
| 11487 | gulp = require 'gulp'
del = require 'del'
coffee = require 'gulp-coffee'
coffeelint = require 'gulp-coffeelint'
header = require 'gulp-header'
uglify = require 'gulp-uglify'
rename = require 'gulp-rename'
pjson = require './package.json'
copyright = """
/*!
Non-Sucking Autogrow #{pjson.version}
license: #{pjson.license}
author: <NAME>
#{pjson.homepage}
*/
"""
dest = 'dist/'
source = 'src/'
code =
in: "#{source}*.coffee"
out: "#{dest}"
gulp.task 'clean', ->
del [dest + '*']
gulp.task 'build', ->
gulp
.src code.in
.pipe coffeelint()
.pipe coffeelint.reporter() # Show coffeelint errors
.pipe coffeelint.reporter('fail') # Make sure it fails in case of error
.pipe coffee()
.pipe header copyright
.pipe gulp.dest(code.out)
gulp
.src code.in
.pipe coffeelint()
.pipe coffeelint.reporter() # Show coffeelint errors
.pipe coffeelint.reporter('fail') # Make sure it fails in case of error
.pipe coffee()
.pipe uglify()
.pipe rename({ suffix: '.min' })
.pipe header copyright
.pipe gulp.dest(code.out)
gulp.task 'watch', ->
gulp
.watch code.in, ['build']
gulp.task 'default', ['clean', 'build', 'watch'], ->
| true | gulp = require 'gulp'
del = require 'del'
coffee = require 'gulp-coffee'
coffeelint = require 'gulp-coffeelint'
header = require 'gulp-header'
uglify = require 'gulp-uglify'
rename = require 'gulp-rename'
pjson = require './package.json'
copyright = """
/*!
Non-Sucking Autogrow #{pjson.version}
license: #{pjson.license}
author: PI:NAME:<NAME>END_PI
#{pjson.homepage}
*/
"""
dest = 'dist/'
source = 'src/'
code =
in: "#{source}*.coffee"
out: "#{dest}"
gulp.task 'clean', ->
del [dest + '*']
gulp.task 'build', ->
gulp
.src code.in
.pipe coffeelint()
.pipe coffeelint.reporter() # Show coffeelint errors
.pipe coffeelint.reporter('fail') # Make sure it fails in case of error
.pipe coffee()
.pipe header copyright
.pipe gulp.dest(code.out)
gulp
.src code.in
.pipe coffeelint()
.pipe coffeelint.reporter() # Show coffeelint errors
.pipe coffeelint.reporter('fail') # Make sure it fails in case of error
.pipe coffee()
.pipe uglify()
.pipe rename({ suffix: '.min' })
.pipe header copyright
.pipe gulp.dest(code.out)
gulp.task 'watch', ->
gulp
.watch code.in, ['build']
gulp.task 'default', ['clean', 'build', 'watch'], ->
|
[
{
"context": "'\nSpecHelper = require './helper'\n\nCHANNEL_KEY = 'OrderXmlFileExport'\nCHANNEL_ROLE = 'OrderExport'\nCONTAINER_PAYMENT =",
"end": 200,
"score": 0.988409161567688,
"start": 182,
"tag": "KEY",
"value": "OrderXmlFileExport"
}
] | src/spec/helper-import.coffee | celeste-horgan/sphere-order-export | 3 | _ = require 'underscore'
_.mixin require('underscore-mixins')
{SphereClient} = require 'sphere-node-sdk'
Config = require '../config'
SpecHelper = require './helper'
CHANNEL_KEY = 'OrderXmlFileExport'
CHANNEL_ROLE = 'OrderExport'
CONTAINER_PAYMENT = 'checkoutInfo'
client = new SphereClient Config
client.channels.ensure(CHANNEL_KEY, CHANNEL_ROLE)
.then ->
# get a tax category required for setting up shippingInfo
# (simply returning first found)
client.taxCategories.save SpecHelper.taxCategoryMock()
.then (result) =>
@taxCategory = result.body
client.zones.save SpecHelper.zoneMock()
.then (result) =>
zone = result.body
client.shippingMethods.save SpecHelper.shippingMethodMock(zone, @taxCategory)
.then (result) =>
@shippingMethod = result.body
client.productTypes.save SpecHelper.productTypeMock()
.then (result) ->
productType = result.body
client.products.save SpecHelper.productMock(productType)
.then (result) =>
@product = result.body
client.customers.save SpecHelper.customerMock()
.then (result) =>
@customer = result.body.customer
client.orders.import SpecHelper.orderMock(@shippingMethod, @product, @taxCategory, @customer)
.then (result) =>
@order = result.body
client.customObjects.save SpecHelper.orderPaymentInfo(CONTAINER_PAYMENT, @order.id)
.then -> console.log 'Order imported'
.catch (err) -> console.error _.prettify err
| 108301 | _ = require 'underscore'
_.mixin require('underscore-mixins')
{SphereClient} = require 'sphere-node-sdk'
Config = require '../config'
SpecHelper = require './helper'
CHANNEL_KEY = '<KEY>'
CHANNEL_ROLE = 'OrderExport'
CONTAINER_PAYMENT = 'checkoutInfo'
client = new SphereClient Config
client.channels.ensure(CHANNEL_KEY, CHANNEL_ROLE)
.then ->
# get a tax category required for setting up shippingInfo
# (simply returning first found)
client.taxCategories.save SpecHelper.taxCategoryMock()
.then (result) =>
@taxCategory = result.body
client.zones.save SpecHelper.zoneMock()
.then (result) =>
zone = result.body
client.shippingMethods.save SpecHelper.shippingMethodMock(zone, @taxCategory)
.then (result) =>
@shippingMethod = result.body
client.productTypes.save SpecHelper.productTypeMock()
.then (result) ->
productType = result.body
client.products.save SpecHelper.productMock(productType)
.then (result) =>
@product = result.body
client.customers.save SpecHelper.customerMock()
.then (result) =>
@customer = result.body.customer
client.orders.import SpecHelper.orderMock(@shippingMethod, @product, @taxCategory, @customer)
.then (result) =>
@order = result.body
client.customObjects.save SpecHelper.orderPaymentInfo(CONTAINER_PAYMENT, @order.id)
.then -> console.log 'Order imported'
.catch (err) -> console.error _.prettify err
| true | _ = require 'underscore'
_.mixin require('underscore-mixins')
{SphereClient} = require 'sphere-node-sdk'
Config = require '../config'
SpecHelper = require './helper'
CHANNEL_KEY = 'PI:KEY:<KEY>END_PI'
CHANNEL_ROLE = 'OrderExport'
CONTAINER_PAYMENT = 'checkoutInfo'
client = new SphereClient Config
client.channels.ensure(CHANNEL_KEY, CHANNEL_ROLE)
.then ->
# get a tax category required for setting up shippingInfo
# (simply returning first found)
client.taxCategories.save SpecHelper.taxCategoryMock()
.then (result) =>
@taxCategory = result.body
client.zones.save SpecHelper.zoneMock()
.then (result) =>
zone = result.body
client.shippingMethods.save SpecHelper.shippingMethodMock(zone, @taxCategory)
.then (result) =>
@shippingMethod = result.body
client.productTypes.save SpecHelper.productTypeMock()
.then (result) ->
productType = result.body
client.products.save SpecHelper.productMock(productType)
.then (result) =>
@product = result.body
client.customers.save SpecHelper.customerMock()
.then (result) =>
@customer = result.body.customer
client.orders.import SpecHelper.orderMock(@shippingMethod, @product, @taxCategory, @customer)
.then (result) =>
@order = result.body
client.customObjects.save SpecHelper.orderPaymentInfo(CONTAINER_PAYMENT, @order.id)
.then -> console.log 'Order imported'
.catch (err) -> console.error _.prettify err
|
[
{
"context": "rocess.env =\n HUBOT_BACKLOG_ASSIGN_API_KEY: 'xxx'\n HUBOT_BACKLOG_ASSIGN_SPACE_ID: 'space'\n ",
"end": 249,
"score": 0.9817888736724854,
"start": 246,
"tag": "KEY",
"value": "xxx"
},
{
"context": "'space'\n HUBOT_BACKLOG_ASSIGN_USER_NAMES: '{\"bouzuya\": \"bouzuya\", \"hoge\": \"fuga\"}'\n @sinon = sinon.",
"end": 345,
"score": 0.9990012049674988,
"start": 338,
"tag": "USERNAME",
"value": "bouzuya"
},
{
"context": " HUBOT_BACKLOG_ASSIGN_USER_NAMES: '{\"bouzuya\": \"bouzuya\", \"hoge\": \"fuga\"}'\n @sinon = sinon.sandbox.cre",
"end": 356,
"score": 0.9525359869003296,
"start": 349,
"tag": "USERNAME",
"value": "bouzuya"
},
{
"context": "CKLOG_ASSIGN_USER_NAMES: '{\"bouzuya\": \"bouzuya\", \"hoge\": \"fuga\"}'\n @sinon = sinon.sandbox.create()\n ",
"end": 364,
"score": 0.8459376096725464,
"start": 360,
"tag": "USERNAME",
"value": "hoge"
},
{
"context": "SIGN_USER_NAMES: '{\"bouzuya\": \"bouzuya\", \"hoge\": \"fuga\"}'\n @sinon = sinon.sandbox.create()\n # f",
"end": 369,
"score": 0.5890339016914368,
"start": 368,
"tag": "PASSWORD",
"value": "f"
},
{
"context": ".callback = callback\n sender = new User 'bouzuya', room: 'hitoridokusho'\n @robot.adapter.",
"end": 1523,
"score": 0.9994677305221558,
"start": 1516,
"tag": "USERNAME",
"value": "bouzuya"
}
] | test/scripts/backlog-assign.coffee | bouzuya/hubot-backlog-assign | 0 | {Robot, User, TextMessage} = require 'hubot'
assert = require 'power-assert'
path = require 'path'
sinon = require 'sinon'
describe 'hello', ->
beforeEach (done) ->
@env = process.env
process.env =
HUBOT_BACKLOG_ASSIGN_API_KEY: 'xxx'
HUBOT_BACKLOG_ASSIGN_SPACE_ID: 'space'
HUBOT_BACKLOG_ASSIGN_USER_NAMES: '{"bouzuya": "bouzuya", "hoge": "fuga"}'
@sinon = sinon.sandbox.create()
# for warning: possible EventEmitter memory leak detected.
# process.on 'uncaughtException'
@sinon.stub process, 'on', -> null
@robot = new Robot(path.resolve(__dirname, '..'), 'shell', false, 'hubot')
@robot.adapter.on 'connected', =>
@robot.load path.resolve(__dirname, '../../src/scripts')
setTimeout done, 10 # wait for parseHelp()
@robot.run()
afterEach (done) ->
@robot.brain.on 'close', =>
@sinon.restore()
process.env = @env
done()
@robot.shutdown()
describe 'listeners[0].regex', ->
describe 'valid patterns', ->
beforeEach ->
@tests = [
message: '@bouzuya: review BOUZUYA-123'
matches: ['@bouzuya: review BOUZUYA-123', 'bouzuya', 'BOUZUYA', '123']
,
message: '@bouzuya review BOUZUYA-123'
matches: ['@bouzuya review BOUZUYA-123', 'bouzuya', 'BOUZUYA', '123']
]
it 'should match', ->
@tests.forEach ({ message, matches }) =>
callback = @sinon.spy()
@robot.listeners[0].callback = callback
sender = new User 'bouzuya', room: 'hitoridokusho'
@robot.adapter.receive new TextMessage(sender, message)
actualMatches = callback.firstCall.args[0].match.map((i) -> i)
assert callback.callCount is 1
assert.deepEqual actualMatches, matches
describe 'robot.helpCommands()', ->
it 'should be ["<user> review <issueKey> - assigns the issue to user"]', ->
assert.deepEqual @robot.helpCommands(), [
'<user> review <issueKey> - assigns the issue to user'
]
| 190213 | {Robot, User, TextMessage} = require 'hubot'
assert = require 'power-assert'
path = require 'path'
sinon = require 'sinon'
describe 'hello', ->
beforeEach (done) ->
@env = process.env
process.env =
HUBOT_BACKLOG_ASSIGN_API_KEY: '<KEY>'
HUBOT_BACKLOG_ASSIGN_SPACE_ID: 'space'
HUBOT_BACKLOG_ASSIGN_USER_NAMES: '{"bouzuya": "bouzuya", "hoge": "<PASSWORD>uga"}'
@sinon = sinon.sandbox.create()
# for warning: possible EventEmitter memory leak detected.
# process.on 'uncaughtException'
@sinon.stub process, 'on', -> null
@robot = new Robot(path.resolve(__dirname, '..'), 'shell', false, 'hubot')
@robot.adapter.on 'connected', =>
@robot.load path.resolve(__dirname, '../../src/scripts')
setTimeout done, 10 # wait for parseHelp()
@robot.run()
afterEach (done) ->
@robot.brain.on 'close', =>
@sinon.restore()
process.env = @env
done()
@robot.shutdown()
describe 'listeners[0].regex', ->
describe 'valid patterns', ->
beforeEach ->
@tests = [
message: '@bouzuya: review BOUZUYA-123'
matches: ['@bouzuya: review BOUZUYA-123', 'bouzuya', 'BOUZUYA', '123']
,
message: '@bouzuya review BOUZUYA-123'
matches: ['@bouzuya review BOUZUYA-123', 'bouzuya', 'BOUZUYA', '123']
]
it 'should match', ->
@tests.forEach ({ message, matches }) =>
callback = @sinon.spy()
@robot.listeners[0].callback = callback
sender = new User 'bouzuya', room: 'hitoridokusho'
@robot.adapter.receive new TextMessage(sender, message)
actualMatches = callback.firstCall.args[0].match.map((i) -> i)
assert callback.callCount is 1
assert.deepEqual actualMatches, matches
describe 'robot.helpCommands()', ->
it 'should be ["<user> review <issueKey> - assigns the issue to user"]', ->
assert.deepEqual @robot.helpCommands(), [
'<user> review <issueKey> - assigns the issue to user'
]
| true | {Robot, User, TextMessage} = require 'hubot'
assert = require 'power-assert'
path = require 'path'
sinon = require 'sinon'
describe 'hello', ->
beforeEach (done) ->
@env = process.env
process.env =
HUBOT_BACKLOG_ASSIGN_API_KEY: 'PI:KEY:<KEY>END_PI'
HUBOT_BACKLOG_ASSIGN_SPACE_ID: 'space'
HUBOT_BACKLOG_ASSIGN_USER_NAMES: '{"bouzuya": "bouzuya", "hoge": "PI:PASSWORD:<PASSWORD>END_PIuga"}'
@sinon = sinon.sandbox.create()
# for warning: possible EventEmitter memory leak detected.
# process.on 'uncaughtException'
@sinon.stub process, 'on', -> null
@robot = new Robot(path.resolve(__dirname, '..'), 'shell', false, 'hubot')
@robot.adapter.on 'connected', =>
@robot.load path.resolve(__dirname, '../../src/scripts')
setTimeout done, 10 # wait for parseHelp()
@robot.run()
afterEach (done) ->
@robot.brain.on 'close', =>
@sinon.restore()
process.env = @env
done()
@robot.shutdown()
describe 'listeners[0].regex', ->
describe 'valid patterns', ->
beforeEach ->
@tests = [
message: '@bouzuya: review BOUZUYA-123'
matches: ['@bouzuya: review BOUZUYA-123', 'bouzuya', 'BOUZUYA', '123']
,
message: '@bouzuya review BOUZUYA-123'
matches: ['@bouzuya review BOUZUYA-123', 'bouzuya', 'BOUZUYA', '123']
]
it 'should match', ->
@tests.forEach ({ message, matches }) =>
callback = @sinon.spy()
@robot.listeners[0].callback = callback
sender = new User 'bouzuya', room: 'hitoridokusho'
@robot.adapter.receive new TextMessage(sender, message)
actualMatches = callback.firstCall.args[0].match.map((i) -> i)
assert callback.callCount is 1
assert.deepEqual actualMatches, matches
describe 'robot.helpCommands()', ->
it 'should be ["<user> review <issueKey> - assigns the issue to user"]', ->
assert.deepEqual @robot.helpCommands(), [
'<user> review <issueKey> - assigns the issue to user'
]
|
[
{
"context": "\n\nfixtureTrackId = 41948537\nfixtureAccessToken = \"1-4928-9174539-e8ed8a9e7bed36a43\"\nfixturePrivateTrackId = 41948548\nfixturePrivateT",
"end": 139,
"score": 0.9403107762336731,
"start": 107,
"tag": "PASSWORD",
"value": "1-4928-9174539-e8ed8a9e7bed36a43"
},
{
"context": "ateTrackId = 41948548\nfixturePrivateTrackToken = \"s-2gxGq\"\n\n# Can be used to update accessToken\n#asyncTest ",
"end": 209,
"score": 0.9330342411994934,
"start": 202,
"tag": "PASSWORD",
"value": "s-2gxGq"
},
{
"context": "CRET\"\n# grant_type: \"password\"\n# username: \"js-sdk-test\"\n# password: \"js-sdk-test-pw\"\n# scope: \"non",
"end": 476,
"score": 0.9956254959106445,
"start": 465,
"tag": "USERNAME",
"value": "js-sdk-test"
},
{
"context": "ord\"\n# username: \"js-sdk-test\"\n# password: \"js-sdk-test-pw\"\n# scope: \"non-expiring\"\n# , (response) ->\n#\n",
"end": 508,
"score": 0.9993311762809753,
"start": 494,
"tag": "PASSWORD",
"value": "js-sdk-test-pw"
}
] | components/soundcloud/test/integration.coffee | mizukai/sample | 91 | module "Full Integration Test against api.soundcloud.com"
fixtureTrackId = 41948537
fixtureAccessToken = "1-4928-9174539-e8ed8a9e7bed36a43"
fixturePrivateTrackId = 41948548
fixturePrivateTrackToken = "s-2gxGq"
# Can be used to update accessToken
#asyncTest "Retrieve token using OAuth2", 1, ->
# SC.accessToken null
# SC.post "/oauth2/token",
# client_id: "YOUR_CLIENT_ID"
# client_secret: "YOUR_CLIENT_SECRET"
# grant_type: "password"
# username: "js-sdk-test"
# password: "js-sdk-test-pw"
# scope: "non-expiring"
# , (response) ->
#
# SC.accessToken accessToken
# ok response.access_token
# start()
asyncTest "Audio Recording and Uploading", 2, ->
SC.accessToken(fixtureAccessToken)
trackTitle = "JS SDK Test Recording"
uploaded = false
SC.record
start: ->
ok(1, "start event fired")
progress: ->
SC.recordStop()
if !uploaded
uploaded = true
SC.recordUpload
track:
title: trackTitle
sharing: "private"
, (track) ->
equal track.title, trackTitle, "Track response matches track request"
start()
asyncTest "Receive latest tracks", 1, ->
SC.get "/tracks",
limit: 2
, (tracks) ->
equal tracks.length, 2
start()
asyncTest "Update a user description", 1, ->
SC.accessToken(fixtureAccessToken)
randomDescription = "ABC: " + Math.random()
SC.put "/me",
user:
description: randomDescription
, (updatedMe) ->
equal updatedMe.description, randomDescription
start()
asyncTest "Create a comment", 1, ->
SC.accessToken(fixtureAccessToken)
commentBody = "Great Track"
SC.post "/tracks/" + fixtureTrackId + "/comments",
comment:
body: commentBody
, (comment) ->
equal comment.body, commentBody
start()
asyncTest "Handle a 404 error", 1, ->
SC.get "/tracks/0", (track, error) ->
equal error.message, "404 - Not Found"
start()
asyncTest "Use private _request to create an attachment", 1, ->
SC.accessToken(fixtureAccessToken)
boundary = "SOMERANDOMBOUNDARY"
contentType = "multipart/mixed; boundary=" + boundary
body = ""
body += "--" + boundary + "\r\n"
body += "Content-Disposition: form-data; name=\"oauth_token\"\r\n"
body += "\r\n"
body += SC.accessToken() + "\r\n"
body += "--" + boundary + "\r\n"
body += "Content-Disposition: form-data; name=\"attachment[asset_data]\"; filename=\"attachment\"\r\n"
body += "Content-Type: application/octet-stream\r\n"
body += "\r\n"
body += "JSONPCALLBACK({a:1})\r\n"
body += "--" + boundary + "--\r\n"
url = "https://" + SC.hostname("api") + "/tracks/" + fixtureTrackId + "/attachments.json"
SC._request "POST", url, contentType, body, (responseText, xhr) ->
response = SC.Helper.responseHandler(responseText, xhr)
equal response.json.size, 20
start()
asyncTest "Handle a 302 redirect", 1, ->
SC.accessToken null
permalink_url = "http://" + SC.hostname() + "/js-sdk-test/fixture-track"
SC.get "/resolve",
url: permalink_url
, (track, error) ->
equal track.permalink_url, permalink_url
start()
module "SC.stream"
asyncTest "Playback of a track by id within whenStreamingReady with an ontimedcomment", 2, ->
SC.whenStreamingReady ->
sound = SC.stream fixtureTrackId,
autoPlay: true
volume: 0
ontimedcomments: (comments) ->
equal(2, comments.length, "A timedcomments event was fired")
start()
onplay: () ->
ok(true, "Playback started")
asyncTest "Playback of a private track using a secret token", 1, ->
SC.stream "/tracks/" + fixturePrivateTrackId + "?secret_token=" + fixturePrivateTrackToken,
autoPlay: true
volume: 0
onload: () ->
this.stop()
ok(true, "Track Stream Loaded")
start()
| 188609 | module "Full Integration Test against api.soundcloud.com"
fixtureTrackId = 41948537
fixtureAccessToken = "<PASSWORD>"
fixturePrivateTrackId = 41948548
fixturePrivateTrackToken = "<PASSWORD>"
# Can be used to update accessToken
#asyncTest "Retrieve token using OAuth2", 1, ->
# SC.accessToken null
# SC.post "/oauth2/token",
# client_id: "YOUR_CLIENT_ID"
# client_secret: "YOUR_CLIENT_SECRET"
# grant_type: "password"
# username: "js-sdk-test"
# password: "<PASSWORD>"
# scope: "non-expiring"
# , (response) ->
#
# SC.accessToken accessToken
# ok response.access_token
# start()
asyncTest "Audio Recording and Uploading", 2, ->
SC.accessToken(fixtureAccessToken)
trackTitle = "JS SDK Test Recording"
uploaded = false
SC.record
start: ->
ok(1, "start event fired")
progress: ->
SC.recordStop()
if !uploaded
uploaded = true
SC.recordUpload
track:
title: trackTitle
sharing: "private"
, (track) ->
equal track.title, trackTitle, "Track response matches track request"
start()
asyncTest "Receive latest tracks", 1, ->
SC.get "/tracks",
limit: 2
, (tracks) ->
equal tracks.length, 2
start()
asyncTest "Update a user description", 1, ->
SC.accessToken(fixtureAccessToken)
randomDescription = "ABC: " + Math.random()
SC.put "/me",
user:
description: randomDescription
, (updatedMe) ->
equal updatedMe.description, randomDescription
start()
asyncTest "Create a comment", 1, ->
SC.accessToken(fixtureAccessToken)
commentBody = "Great Track"
SC.post "/tracks/" + fixtureTrackId + "/comments",
comment:
body: commentBody
, (comment) ->
equal comment.body, commentBody
start()
asyncTest "Handle a 404 error", 1, ->
SC.get "/tracks/0", (track, error) ->
equal error.message, "404 - Not Found"
start()
asyncTest "Use private _request to create an attachment", 1, ->
SC.accessToken(fixtureAccessToken)
boundary = "SOMERANDOMBOUNDARY"
contentType = "multipart/mixed; boundary=" + boundary
body = ""
body += "--" + boundary + "\r\n"
body += "Content-Disposition: form-data; name=\"oauth_token\"\r\n"
body += "\r\n"
body += SC.accessToken() + "\r\n"
body += "--" + boundary + "\r\n"
body += "Content-Disposition: form-data; name=\"attachment[asset_data]\"; filename=\"attachment\"\r\n"
body += "Content-Type: application/octet-stream\r\n"
body += "\r\n"
body += "JSONPCALLBACK({a:1})\r\n"
body += "--" + boundary + "--\r\n"
url = "https://" + SC.hostname("api") + "/tracks/" + fixtureTrackId + "/attachments.json"
SC._request "POST", url, contentType, body, (responseText, xhr) ->
response = SC.Helper.responseHandler(responseText, xhr)
equal response.json.size, 20
start()
asyncTest "Handle a 302 redirect", 1, ->
SC.accessToken null
permalink_url = "http://" + SC.hostname() + "/js-sdk-test/fixture-track"
SC.get "/resolve",
url: permalink_url
, (track, error) ->
equal track.permalink_url, permalink_url
start()
module "SC.stream"
asyncTest "Playback of a track by id within whenStreamingReady with an ontimedcomment", 2, ->
SC.whenStreamingReady ->
sound = SC.stream fixtureTrackId,
autoPlay: true
volume: 0
ontimedcomments: (comments) ->
equal(2, comments.length, "A timedcomments event was fired")
start()
onplay: () ->
ok(true, "Playback started")
asyncTest "Playback of a private track using a secret token", 1, ->
SC.stream "/tracks/" + fixturePrivateTrackId + "?secret_token=" + fixturePrivateTrackToken,
autoPlay: true
volume: 0
onload: () ->
this.stop()
ok(true, "Track Stream Loaded")
start()
| true | module "Full Integration Test against api.soundcloud.com"
fixtureTrackId = 41948537
fixtureAccessToken = "PI:PASSWORD:<PASSWORD>END_PI"
fixturePrivateTrackId = 41948548
fixturePrivateTrackToken = "PI:PASSWORD:<PASSWORD>END_PI"
# Can be used to update accessToken
#asyncTest "Retrieve token using OAuth2", 1, ->
# SC.accessToken null
# SC.post "/oauth2/token",
# client_id: "YOUR_CLIENT_ID"
# client_secret: "YOUR_CLIENT_SECRET"
# grant_type: "password"
# username: "js-sdk-test"
# password: "PI:PASSWORD:<PASSWORD>END_PI"
# scope: "non-expiring"
# , (response) ->
#
# SC.accessToken accessToken
# ok response.access_token
# start()
asyncTest "Audio Recording and Uploading", 2, ->
SC.accessToken(fixtureAccessToken)
trackTitle = "JS SDK Test Recording"
uploaded = false
SC.record
start: ->
ok(1, "start event fired")
progress: ->
SC.recordStop()
if !uploaded
uploaded = true
SC.recordUpload
track:
title: trackTitle
sharing: "private"
, (track) ->
equal track.title, trackTitle, "Track response matches track request"
start()
asyncTest "Receive latest tracks", 1, ->
SC.get "/tracks",
limit: 2
, (tracks) ->
equal tracks.length, 2
start()
asyncTest "Update a user description", 1, ->
SC.accessToken(fixtureAccessToken)
randomDescription = "ABC: " + Math.random()
SC.put "/me",
user:
description: randomDescription
, (updatedMe) ->
equal updatedMe.description, randomDescription
start()
asyncTest "Create a comment", 1, ->
SC.accessToken(fixtureAccessToken)
commentBody = "Great Track"
SC.post "/tracks/" + fixtureTrackId + "/comments",
comment:
body: commentBody
, (comment) ->
equal comment.body, commentBody
start()
asyncTest "Handle a 404 error", 1, ->
SC.get "/tracks/0", (track, error) ->
equal error.message, "404 - Not Found"
start()
asyncTest "Use private _request to create an attachment", 1, ->
SC.accessToken(fixtureAccessToken)
boundary = "SOMERANDOMBOUNDARY"
contentType = "multipart/mixed; boundary=" + boundary
body = ""
body += "--" + boundary + "\r\n"
body += "Content-Disposition: form-data; name=\"oauth_token\"\r\n"
body += "\r\n"
body += SC.accessToken() + "\r\n"
body += "--" + boundary + "\r\n"
body += "Content-Disposition: form-data; name=\"attachment[asset_data]\"; filename=\"attachment\"\r\n"
body += "Content-Type: application/octet-stream\r\n"
body += "\r\n"
body += "JSONPCALLBACK({a:1})\r\n"
body += "--" + boundary + "--\r\n"
url = "https://" + SC.hostname("api") + "/tracks/" + fixtureTrackId + "/attachments.json"
SC._request "POST", url, contentType, body, (responseText, xhr) ->
response = SC.Helper.responseHandler(responseText, xhr)
equal response.json.size, 20
start()
asyncTest "Handle a 302 redirect", 1, ->
SC.accessToken null
permalink_url = "http://" + SC.hostname() + "/js-sdk-test/fixture-track"
SC.get "/resolve",
url: permalink_url
, (track, error) ->
equal track.permalink_url, permalink_url
start()
module "SC.stream"
asyncTest "Playback of a track by id within whenStreamingReady with an ontimedcomment", 2, ->
SC.whenStreamingReady ->
sound = SC.stream fixtureTrackId,
autoPlay: true
volume: 0
ontimedcomments: (comments) ->
equal(2, comments.length, "A timedcomments event was fired")
start()
onplay: () ->
ok(true, "Playback started")
asyncTest "Playback of a private track using a secret token", 1, ->
SC.stream "/tracks/" + fixturePrivateTrackId + "?secret_token=" + fixturePrivateTrackToken,
autoPlay: true
volume: 0
onload: () ->
this.stop()
ok(true, "Track Stream Loaded")
start()
|
[
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 441,
"score": 0.5304310321807861,
"start": 435,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 462,
"score": 0.8230554461479187,
"start": 457,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 819,
"score": 0.5277971029281616,
"start": 813,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 840,
"score": 0.9061892032623291,
"start": 835,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 1338,
"score": 0.6810275316238403,
"start": 1332,
"tag": "NAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 1359,
"score": 0.6678268909454346,
"start": 1354,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 1713,
"score": 0.6834884881973267,
"start": 1707,
"tag": "NAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 1734,
"score": 0.7139644026756287,
"start": 1729,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 2109,
"score": 0.6647541522979736,
"start": 2103,
"tag": "NAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 2130,
"score": 0.5597003102302551,
"start": 2125,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 2604,
"score": 0.9713389873504639,
"start": 2598,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 2625,
"score": 0.9684246182441711,
"start": 2620,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 3250,
"score": 0.9515185356140137,
"start": 3244,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 3271,
"score": 0.9636498689651489,
"start": 3266,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 4184,
"score": 0.9785568714141846,
"start": 4178,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 4205,
"score": 0.9511806964874268,
"start": 4200,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 7266,
"score": 0.9348443150520325,
"start": 7260,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 7287,
"score": 0.7346085906028748,
"start": 7282,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 7792,
"score": 0.9911788702011108,
"start": 7786,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 7813,
"score": 0.9759599566459656,
"start": 7808,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 8484,
"score": 0.9894945025444031,
"start": 8478,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 8505,
"score": 0.9687473177909851,
"start": 8500,
"tag": "USERNAME",
"value": "staff"
},
{
"context": " \"ustar\\u0000\"\n ustarver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmi",
"end": 9424,
"score": 0.9837377071380615,
"start": 9418,
"tag": "USERNAME",
"value": "isaacs"
},
{
"context": "starver: \"00\"\n uname: \"isaacs\"\n gname: \"staff\"\n devmaj: 0\n devmin: 0\n fill: \"\"\n ",
"end": 9445,
"score": 0.9735438823699951,
"start": 9440,
"tag": "USERNAME",
"value": "staff"
}
] | deps/npm/node_modules/tar/test/parse.coffee | lxe/io.coffee | 0 | tap = require("tap")
tar = require("../tar.js")
fs = require("fs")
path = require("path")
file = path.resolve(__dirname, "fixtures/c.tar")
index = 0
expect = [
[
"entry"
{
path: "c.txt"
mode: 420
uid: 24561
gid: 20
size: 513
mtime: new Date("Wed, 26 Oct 2011 01:10:58 GMT")
cksum: 5422
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "cc.txt"
mode: 420
uid: 24561
gid: 20
size: 513
mtime: new Date("Wed, 26 Oct 2011 01:11:02 GMT")
cksum: 5525
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 100
mtime: new Date("Thu, 27 Oct 2011 03:43:23 GMT")
cksum: 18124
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 2
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 5695
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 120
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 6702
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "Ω.txt"
ctime: 1319737909
atime: 1319739061
dev: 234881026
ino: 51693379
nlink: 1
}
]
[
"entry"
{
path: "Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 2
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 5695
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
atime: new Date("Thu, 27 Oct 2011 18:11:01 GMT")
dev: 234881026
ino: 51693379
nlink: 1
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 353
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14488
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ctime: 1319686868
atime: 1319741254
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 1
}
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 200
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14570
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
atime: new Date("Thu, 27 Oct 2011 18:47:34 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 1
}
`undefined`
]
[
"longPath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4976
type: "L"
linkpath: ""
ustar: false
}
"200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 1000
gid: 1000
size: 201
mtime: new Date("Thu, 27 Oct 2011 22:21:50 GMT")
cksum: 14086
type: "0"
linkpath: ""
ustar: false
}
`undefined`
]
[
"longLinkpath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4975
type: "K"
linkpath: ""
ustar: false
}
"200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
]
[
"longPath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4976
type: "L"
linkpath: ""
ustar: false
}
"200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL"
]
[
"entry"
{
path: "200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL"
mode: 511
uid: 1000
gid: 1000
size: 0
mtime: new Date("Fri, 28 Oct 2011 23:05:17 GMT")
cksum: 21603
type: "2"
linkpath: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ustar: false
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200-hard"
mode: 420
uid: 24561
gid: 20
size: 143
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 6533
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
ctime: 1320617144
atime: 1320617232
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
]
[
"entry"
{
path: "200-hard"
mode: 420
uid: 24561
gid: 20
size: 200
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 5526
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Sun, 06 Nov 2011 22:05:44 GMT")
atime: new Date("Sun, 06 Nov 2011 22:07:12 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 353
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14488
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ctime: 1320617144
atime: 1320617406
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 0
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 15173
type: "1"
linkpath: "200-hard"
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Sun, 06 Nov 2011 22:05:44 GMT")
atime: new Date("Sun, 06 Nov 2011 22:10:06 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
`undefined`
]
]
tap.test "parser test", (t) ->
parser = tar.Parse()
parser.on "end", ->
t.equal index, expect.length, "saw all expected events"
t.end()
return
fs.createReadStream(file).pipe(parser).on "*", (ev, entry) ->
wanted = expect[index]
return t.fail("Unexpected event: " + ev) unless wanted
result = [
ev
entry.props
]
entry.on "end", ->
result.push entry.fields or entry.body
t.equal ev, wanted[0], index + " event type"
t.equivalent entry.props, wanted[1], wanted[1].path + " entry properties"
t.equivalent result[2], wanted[2], "metadata values" if wanted[2]
index++
return
return
return
| 72787 | tap = require("tap")
tar = require("../tar.js")
fs = require("fs")
path = require("path")
file = path.resolve(__dirname, "fixtures/c.tar")
index = 0
expect = [
[
"entry"
{
path: "c.txt"
mode: 420
uid: 24561
gid: 20
size: 513
mtime: new Date("Wed, 26 Oct 2011 01:10:58 GMT")
cksum: 5422
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "cc.txt"
mode: 420
uid: 24561
gid: 20
size: 513
mtime: new Date("Wed, 26 Oct 2011 01:11:02 GMT")
cksum: 5525
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 100
mtime: new Date("Thu, 27 Oct 2011 03:43:23 GMT")
cksum: 18124
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "<NAME>"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 2
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 5695
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "<NAME>"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 120
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 6702
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "<NAME>"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "Ω.txt"
ctime: 1319737909
atime: 1319739061
dev: 234881026
ino: 51693379
nlink: 1
}
]
[
"entry"
{
path: "Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 2
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 5695
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
atime: new Date("Thu, 27 Oct 2011 18:11:01 GMT")
dev: 234881026
ino: 51693379
nlink: 1
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 353
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14488
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ctime: 1319686868
atime: 1319741254
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 1
}
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 200
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14570
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
atime: new Date("Thu, 27 Oct 2011 18:47:34 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 1
}
`undefined`
]
[
"longPath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4976
type: "L"
linkpath: ""
ustar: false
}
"200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 1000
gid: 1000
size: 201
mtime: new Date("Thu, 27 Oct 2011 22:21:50 GMT")
cksum: 14086
type: "0"
linkpath: ""
ustar: false
}
`undefined`
]
[
"longLinkpath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4975
type: "K"
linkpath: ""
ustar: false
}
"200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
]
[
"longPath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4976
type: "L"
linkpath: ""
ustar: false
}
"200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL"
]
[
"entry"
{
path: "200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL"
mode: 511
uid: 1000
gid: 1000
size: 0
mtime: new Date("Fri, 28 Oct 2011 23:05:17 GMT")
cksum: 21603
type: "2"
linkpath: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ustar: false
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200-hard"
mode: 420
uid: 24561
gid: 20
size: 143
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 6533
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
ctime: 1320617144
atime: 1320617232
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
]
[
"entry"
{
path: "200-hard"
mode: 420
uid: 24561
gid: 20
size: 200
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 5526
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Sun, 06 Nov 2011 22:05:44 GMT")
atime: new Date("Sun, 06 Nov 2011 22:07:12 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 353
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14488
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ctime: 1320617144
atime: 1320617406
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 0
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 15173
type: "1"
linkpath: "200-hard"
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Sun, 06 Nov 2011 22:05:44 GMT")
atime: new Date("Sun, 06 Nov 2011 22:10:06 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
`undefined`
]
]
tap.test "parser test", (t) ->
parser = tar.Parse()
parser.on "end", ->
t.equal index, expect.length, "saw all expected events"
t.end()
return
fs.createReadStream(file).pipe(parser).on "*", (ev, entry) ->
wanted = expect[index]
return t.fail("Unexpected event: " + ev) unless wanted
result = [
ev
entry.props
]
entry.on "end", ->
result.push entry.fields or entry.body
t.equal ev, wanted[0], index + " event type"
t.equivalent entry.props, wanted[1], wanted[1].path + " entry properties"
t.equivalent result[2], wanted[2], "metadata values" if wanted[2]
index++
return
return
return
| true | tap = require("tap")
tar = require("../tar.js")
fs = require("fs")
path = require("path")
file = path.resolve(__dirname, "fixtures/c.tar")
index = 0
expect = [
[
"entry"
{
path: "c.txt"
mode: 420
uid: 24561
gid: 20
size: 513
mtime: new Date("Wed, 26 Oct 2011 01:10:58 GMT")
cksum: 5422
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "cc.txt"
mode: 420
uid: 24561
gid: 20
size: 513
mtime: new Date("Wed, 26 Oct 2011 01:11:02 GMT")
cksum: 5525
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 100
mtime: new Date("Thu, 27 Oct 2011 03:43:23 GMT")
cksum: 18124
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "PI:NAME:<NAME>END_PI"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"entry"
{
path: "Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 2
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 5695
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "PI:NAME:<NAME>END_PI"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 120
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 6702
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "PI:NAME:<NAME>END_PI"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "Ω.txt"
ctime: 1319737909
atime: 1319739061
dev: 234881026
ino: 51693379
nlink: 1
}
]
[
"entry"
{
path: "Ω.txt"
mode: 420
uid: 24561
gid: 20
size: 2
mtime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
cksum: 5695
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Thu, 27 Oct 2011 17:51:49 GMT")
atime: new Date("Thu, 27 Oct 2011 18:11:01 GMT")
dev: 234881026
ino: 51693379
nlink: 1
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 353
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14488
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ctime: 1319686868
atime: 1319741254
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 1
}
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 200
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14570
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
atime: new Date("Thu, 27 Oct 2011 18:47:34 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 1
}
`undefined`
]
[
"longPath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4976
type: "L"
linkpath: ""
ustar: false
}
"200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 1000
gid: 1000
size: 201
mtime: new Date("Thu, 27 Oct 2011 22:21:50 GMT")
cksum: 14086
type: "0"
linkpath: ""
ustar: false
}
`undefined`
]
[
"longLinkpath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4975
type: "K"
linkpath: ""
ustar: false
}
"200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
]
[
"longPath"
{
path: "././@LongLink"
mode: 0
uid: 0
gid: 0
size: 201
mtime: new Date("Thu, 01 Jan 1970 00:00:00 GMT")
cksum: 4976
type: "L"
linkpath: ""
ustar: false
}
"200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL"
]
[
"entry"
{
path: "200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL"
mode: 511
uid: 1000
gid: 1000
size: 0
mtime: new Date("Fri, 28 Oct 2011 23:05:17 GMT")
cksum: 21603
type: "2"
linkpath: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ustar: false
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200-hard"
mode: 420
uid: 24561
gid: 20
size: 143
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 6533
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
ctime: 1320617144
atime: 1320617232
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
]
[
"entry"
{
path: "200-hard"
mode: 420
uid: 24561
gid: 20
size: 200
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 5526
type: "0"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Sun, 06 Nov 2011 22:05:44 GMT")
atime: new Date("Sun, 06 Nov 2011 22:07:12 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
`undefined`
]
[
"extendedHeader"
{
path: "PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 353
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 14488
type: "x"
linkpath: ""
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
}
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
ctime: 1320617144
atime: 1320617406
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
]
[
"entry"
{
path: "200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc"
mode: 420
uid: 24561
gid: 20
size: 0
mtime: new Date("Thu, 27 Oct 2011 03:41:08 GMT")
cksum: 15173
type: "1"
linkpath: "200-hard"
ustar: "ustar\u0000"
ustarver: "00"
uname: "isaacs"
gname: "staff"
devmaj: 0
devmin: 0
fill: ""
ctime: new Date("Sun, 06 Nov 2011 22:05:44 GMT")
atime: new Date("Sun, 06 Nov 2011 22:10:06 GMT")
"LIBARCHIVE.creationtime": "1319686852"
dev: 234881026
ino: 51681874
nlink: 2
}
`undefined`
]
]
tap.test "parser test", (t) ->
parser = tar.Parse()
parser.on "end", ->
t.equal index, expect.length, "saw all expected events"
t.end()
return
fs.createReadStream(file).pipe(parser).on "*", (ev, entry) ->
wanted = expect[index]
return t.fail("Unexpected event: " + ev) unless wanted
result = [
ev
entry.props
]
entry.on "end", ->
result.push entry.fields or entry.body
t.equal ev, wanted[0], index + " event type"
t.equivalent entry.props, wanted[1], wanted[1].path + " entry properties"
t.equivalent result[2], wanted[2], "metadata values" if wanted[2]
index++
return
return
return
|
[
{
"context": "mfabrik GmbH\n * MIT Licence\n * https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org\n#",
"end": 166,
"score": 0.9909250140190125,
"start": 152,
"tag": "USERNAME",
"value": "programmfabrik"
},
{
"context": "st text. Very very short. 1\").DOM\n\t\t\t,\n\t\t\t\ttext: \"Detroit\"\n\t\t\t\tcontent: new CUI.Label( text: \"1 Very short ",
"end": 3265,
"score": 0.8534364700317383,
"start": 3258,
"tag": "NAME",
"value": "Detroit"
},
{
"context": "st text. Very very short. 1\").DOM\n\t\t\t,\n\t\t\t\ttext: \"Detroit\"\n\t\t\t\tcontent: new CUI.Label( text: \"1 Very short ",
"end": 4054,
"score": 0.920914351940155,
"start": 4047,
"tag": "NAME",
"value": "Detroit"
}
] | demo/src/demos/TabsDemo.coffee | programmfabrik/coffeescript-ui | 10 | ###
* coffeescript-ui - Coffeescript User Interface System (CUI)
* Copyright (c) 2013 - 2016 Programmfabrik GmbH
* MIT Licence
* https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org
###
class Demo.TabsDemo extends Demo
display: ->
_tabs = []
for i, idx in ["Atlanta", "New York", "Chicago", "Minneapolis & St. Paul"]
_tabs.push
text: i
content: i+": "+@getBlindText(idx*2+1)
createButtonbar = (tabs) =>
city_idx = 0
new CUI.Buttonbar buttons: [
new CUI.Button
icon: new CUI.Icon(class: "fa-plus")
group: "plus-minus"
onClick: (ev, btn) =>
tabs.addTab(
new CUI.Tab
text: Demo.TabsDemo.cities[city_idx]
onActivate: (tab) ->
minusButton.enable()
onDeactivate: (tab) ->
minusButton.disable()
content: @getBlindText(Math.ceil(Math.random()*5))
).activate()
if not Demo.TabsDemo.cities[city_idx++]
btn.disable()
minusButton = new CUI.Button
icon: new CUI.Icon(class: "fa-minus")
group: "plus-minus"
disabled: true
onClick: (ev, btn) =>
tabs.getActiveTab().destroy()
]
tabs = new CUI.Tabs
footer_right: "Right"
footer_left: "Left"
tabs: _tabs
tabs.setFooterRight(createButtonbar(tabs))
# --------- tab 2
_tabs2 = []
for i, idx in Demo.TabsDemo.cities
_tabs2.push
text: i
content: i+": "+@getBlindText(idx*2+1)
tabs2 = new CUI.Tabs
maximize: true
tabs: _tabs2
footer_right: "Right"
footer_left: "Left"
tabs2.setFooterRight(createButtonbar(tabs2))
tabs3 = new CUI.Tabs
footer_right: "Right"
footer_left: "Left from not maximized"
tabs: [
text: "testTab1"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "testTab2"
content: new CUI.Label(
multiline: true
text: """1 Very short test text.
Very very short. 2
Very very short. 2
Very very short. 2
Very very short. 2
"""
).DOM
]
maximize: false
tabs3.setFooterRight(createButtonbar(tabs))
# Small tab buttons
tabsSmall = new CUI.Tabs
class: "cui-demo-tabs-small"
tabs: [
text: "Atlanta"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
tabsSmall.setFooterRight(createButtonbar(tabs))
# Vertical tabs
tabsVertical = new CUI.Tabs
class: "cui-demo-tabs-vertical"
tabs: [
text: "Atlanta"
# content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
content: @getBlindText(idx*2+1)
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "San Fransisco"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Detroit"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
# Vertical tabs
tabsVerticalSmall = new CUI.Tabs
class: "cui-demo-tabs-vertical-small"
tabs: [
text: "Atlanta"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "San Fransisco"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Detroit"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
@demo_elements = [
new CUI.Label
text: "Tabs with height by content"
tabs
new CUI.Label
text: "Small Tab Buttons"
tabsSmall
new CUI.Label
text: "Tabs with static height"
tabs2
new CUI.Label
text: "Tabs not maximized"
tabs3
new CUI.Label
text: "Vertical Tabs"
tabsVertical
new CUI.Label
text: "Vertical Tabs"
tabsVerticalSmall
]
return @demo_elements
undisplay: ->
for element in @demo_elements
element.destroy()
Demo.TabsDemo.cities = [
"Bladensburg"
"Blackman"
"Blackmont"
"Blacksburg"
"Blackshear"
"Blackstock"
"Blackstone"
"Blacksville"
"Blackton"
"Blackwater"
"Blackwell"
"Blackwells"
"Blackwood"
"Bladen"
"Blades"
"Blain"
"Blaine"
"Blaine Hill"
"Blair"
"Blairs"
"BlairsMills"
"Blairsburg"
"Blairsden"
"Blairstown"
"Blairsville"
"Blairville"
"Blaisdell"
"Blakeley"
"Blakely"
]
Demo.register(new Demo.TabsDemo())
| 20756 | ###
* coffeescript-ui - Coffeescript User Interface System (CUI)
* Copyright (c) 2013 - 2016 Programmfabrik GmbH
* MIT Licence
* https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org
###
class Demo.TabsDemo extends Demo
display: ->
_tabs = []
for i, idx in ["Atlanta", "New York", "Chicago", "Minneapolis & St. Paul"]
_tabs.push
text: i
content: i+": "+@getBlindText(idx*2+1)
createButtonbar = (tabs) =>
city_idx = 0
new CUI.Buttonbar buttons: [
new CUI.Button
icon: new CUI.Icon(class: "fa-plus")
group: "plus-minus"
onClick: (ev, btn) =>
tabs.addTab(
new CUI.Tab
text: Demo.TabsDemo.cities[city_idx]
onActivate: (tab) ->
minusButton.enable()
onDeactivate: (tab) ->
minusButton.disable()
content: @getBlindText(Math.ceil(Math.random()*5))
).activate()
if not Demo.TabsDemo.cities[city_idx++]
btn.disable()
minusButton = new CUI.Button
icon: new CUI.Icon(class: "fa-minus")
group: "plus-minus"
disabled: true
onClick: (ev, btn) =>
tabs.getActiveTab().destroy()
]
tabs = new CUI.Tabs
footer_right: "Right"
footer_left: "Left"
tabs: _tabs
tabs.setFooterRight(createButtonbar(tabs))
# --------- tab 2
_tabs2 = []
for i, idx in Demo.TabsDemo.cities
_tabs2.push
text: i
content: i+": "+@getBlindText(idx*2+1)
tabs2 = new CUI.Tabs
maximize: true
tabs: _tabs2
footer_right: "Right"
footer_left: "Left"
tabs2.setFooterRight(createButtonbar(tabs2))
tabs3 = new CUI.Tabs
footer_right: "Right"
footer_left: "Left from not maximized"
tabs: [
text: "testTab1"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "testTab2"
content: new CUI.Label(
multiline: true
text: """1 Very short test text.
Very very short. 2
Very very short. 2
Very very short. 2
Very very short. 2
"""
).DOM
]
maximize: false
tabs3.setFooterRight(createButtonbar(tabs))
# Small tab buttons
tabsSmall = new CUI.Tabs
class: "cui-demo-tabs-small"
tabs: [
text: "Atlanta"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
tabsSmall.setFooterRight(createButtonbar(tabs))
# Vertical tabs
tabsVertical = new CUI.Tabs
class: "cui-demo-tabs-vertical"
tabs: [
text: "Atlanta"
# content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
content: @getBlindText(idx*2+1)
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "San Fransisco"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "<NAME>"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
# Vertical tabs
tabsVerticalSmall = new CUI.Tabs
class: "cui-demo-tabs-vertical-small"
tabs: [
text: "Atlanta"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "San Fransisco"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "<NAME>"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
@demo_elements = [
new CUI.Label
text: "Tabs with height by content"
tabs
new CUI.Label
text: "Small Tab Buttons"
tabsSmall
new CUI.Label
text: "Tabs with static height"
tabs2
new CUI.Label
text: "Tabs not maximized"
tabs3
new CUI.Label
text: "Vertical Tabs"
tabsVertical
new CUI.Label
text: "Vertical Tabs"
tabsVerticalSmall
]
return @demo_elements
undisplay: ->
for element in @demo_elements
element.destroy()
Demo.TabsDemo.cities = [
"Bladensburg"
"Blackman"
"Blackmont"
"Blacksburg"
"Blackshear"
"Blackstock"
"Blackstone"
"Blacksville"
"Blackton"
"Blackwater"
"Blackwell"
"Blackwells"
"Blackwood"
"Bladen"
"Blades"
"Blain"
"Blaine"
"Blaine Hill"
"Blair"
"Blairs"
"BlairsMills"
"Blairsburg"
"Blairsden"
"Blairstown"
"Blairsville"
"Blairville"
"Blaisdell"
"Blakeley"
"Blakely"
]
Demo.register(new Demo.TabsDemo())
| true | ###
* coffeescript-ui - Coffeescript User Interface System (CUI)
* Copyright (c) 2013 - 2016 Programmfabrik GmbH
* MIT Licence
* https://github.com/programmfabrik/coffeescript-ui, http://www.coffeescript-ui.org
###
class Demo.TabsDemo extends Demo
display: ->
_tabs = []
for i, idx in ["Atlanta", "New York", "Chicago", "Minneapolis & St. Paul"]
_tabs.push
text: i
content: i+": "+@getBlindText(idx*2+1)
createButtonbar = (tabs) =>
city_idx = 0
new CUI.Buttonbar buttons: [
new CUI.Button
icon: new CUI.Icon(class: "fa-plus")
group: "plus-minus"
onClick: (ev, btn) =>
tabs.addTab(
new CUI.Tab
text: Demo.TabsDemo.cities[city_idx]
onActivate: (tab) ->
minusButton.enable()
onDeactivate: (tab) ->
minusButton.disable()
content: @getBlindText(Math.ceil(Math.random()*5))
).activate()
if not Demo.TabsDemo.cities[city_idx++]
btn.disable()
minusButton = new CUI.Button
icon: new CUI.Icon(class: "fa-minus")
group: "plus-minus"
disabled: true
onClick: (ev, btn) =>
tabs.getActiveTab().destroy()
]
tabs = new CUI.Tabs
footer_right: "Right"
footer_left: "Left"
tabs: _tabs
tabs.setFooterRight(createButtonbar(tabs))
# --------- tab 2
_tabs2 = []
for i, idx in Demo.TabsDemo.cities
_tabs2.push
text: i
content: i+": "+@getBlindText(idx*2+1)
tabs2 = new CUI.Tabs
maximize: true
tabs: _tabs2
footer_right: "Right"
footer_left: "Left"
tabs2.setFooterRight(createButtonbar(tabs2))
tabs3 = new CUI.Tabs
footer_right: "Right"
footer_left: "Left from not maximized"
tabs: [
text: "testTab1"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "testTab2"
content: new CUI.Label(
multiline: true
text: """1 Very short test text.
Very very short. 2
Very very short. 2
Very very short. 2
Very very short. 2
"""
).DOM
]
maximize: false
tabs3.setFooterRight(createButtonbar(tabs))
# Small tab buttons
tabsSmall = new CUI.Tabs
class: "cui-demo-tabs-small"
tabs: [
text: "Atlanta"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
tabsSmall.setFooterRight(createButtonbar(tabs))
# Vertical tabs
tabsVertical = new CUI.Tabs
class: "cui-demo-tabs-vertical"
tabs: [
text: "Atlanta"
# content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
content: @getBlindText(idx*2+1)
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "San Fransisco"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "PI:NAME:<NAME>END_PI"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
# Vertical tabs
tabsVerticalSmall = new CUI.Tabs
class: "cui-demo-tabs-vertical-small"
tabs: [
text: "Atlanta"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "New York"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Chicago"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "San Fransisco"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "PI:NAME:<NAME>END_PI"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
,
text: "Minneapolis"
content: new CUI.Label( text: "1 Very short test text. Very very short. 1").DOM
]
maximize: true
@demo_elements = [
new CUI.Label
text: "Tabs with height by content"
tabs
new CUI.Label
text: "Small Tab Buttons"
tabsSmall
new CUI.Label
text: "Tabs with static height"
tabs2
new CUI.Label
text: "Tabs not maximized"
tabs3
new CUI.Label
text: "Vertical Tabs"
tabsVertical
new CUI.Label
text: "Vertical Tabs"
tabsVerticalSmall
]
return @demo_elements
undisplay: ->
for element in @demo_elements
element.destroy()
Demo.TabsDemo.cities = [
"Bladensburg"
"Blackman"
"Blackmont"
"Blacksburg"
"Blackshear"
"Blackstock"
"Blackstone"
"Blacksville"
"Blackton"
"Blackwater"
"Blackwell"
"Blackwells"
"Blackwood"
"Bladen"
"Blades"
"Blain"
"Blaine"
"Blaine Hill"
"Blair"
"Blairs"
"BlairsMills"
"Blairsburg"
"Blairsden"
"Blairstown"
"Blairsville"
"Blairville"
"Blaisdell"
"Blakeley"
"Blakely"
]
Demo.register(new Demo.TabsDemo())
|
[
{
"context": "assword: ->\n else\n throw error\n\ntokenName = 'Soldat.tv API Token'\n\nmodule.exports =\n # Get the Soldat.t",
"end": 262,
"score": 0.6360637545585632,
"start": 254,
"tag": "PASSWORD",
"value": "oldat.tv"
},
{
"context": "en: (token) ->\n keytar.setPassword(tokenName, 'soldat.tv', token)\n",
"end": 1013,
"score": 0.9799531102180481,
"start": 1004,
"tag": "PASSWORD",
"value": "soldat.tv"
}
] | src/auth.coffee | getsoldat/recrue | 0 | try
keytar = require 'keytar'
catch error
# Gracefully handle keytar failing to load due to missing library on Linux
if process.platform is 'linux'
keytar =
findPassword: ->
replacePassword: ->
else
throw error
tokenName = 'Soldat.tv API Token'
module.exports =
# Get the Soldat.tv API token from the keychain.
#
# callback - A function to call with an error as the first argument and a
# string token as the second argument.
getToken: (callback) ->
keytar.findPassword(tokenName).then (token) ->
if token
callback(null, token)
return
if token = process.env.SOLDAT_ACCESS_TOKEN
callback(null, token)
return
callback """
No Soldat.tv API token in keychain
Run `recrue login` or set the `SOLDAT_ACCESS_TOKEN` environment variable.
"""
# Save the given token to the keychain.
#
# token - A string token to save.
saveToken: (token) ->
keytar.setPassword(tokenName, 'soldat.tv', token)
| 65227 | try
keytar = require 'keytar'
catch error
# Gracefully handle keytar failing to load due to missing library on Linux
if process.platform is 'linux'
keytar =
findPassword: ->
replacePassword: ->
else
throw error
tokenName = 'S<PASSWORD> API Token'
module.exports =
# Get the Soldat.tv API token from the keychain.
#
# callback - A function to call with an error as the first argument and a
# string token as the second argument.
getToken: (callback) ->
keytar.findPassword(tokenName).then (token) ->
if token
callback(null, token)
return
if token = process.env.SOLDAT_ACCESS_TOKEN
callback(null, token)
return
callback """
No Soldat.tv API token in keychain
Run `recrue login` or set the `SOLDAT_ACCESS_TOKEN` environment variable.
"""
# Save the given token to the keychain.
#
# token - A string token to save.
saveToken: (token) ->
keytar.setPassword(tokenName, '<PASSWORD>', token)
| true | try
keytar = require 'keytar'
catch error
# Gracefully handle keytar failing to load due to missing library on Linux
if process.platform is 'linux'
keytar =
findPassword: ->
replacePassword: ->
else
throw error
tokenName = 'SPI:PASSWORD:<PASSWORD>END_PI API Token'
module.exports =
# Get the Soldat.tv API token from the keychain.
#
# callback - A function to call with an error as the first argument and a
# string token as the second argument.
getToken: (callback) ->
keytar.findPassword(tokenName).then (token) ->
if token
callback(null, token)
return
if token = process.env.SOLDAT_ACCESS_TOKEN
callback(null, token)
return
callback """
No Soldat.tv API token in keychain
Run `recrue login` or set the `SOLDAT_ACCESS_TOKEN` environment variable.
"""
# Save the given token to the keychain.
#
# token - A string token to save.
saveToken: (token) ->
keytar.setPassword(tokenName, 'PI:PASSWORD:<PASSWORD>END_PI', token)
|
[
{
"context": "umbvalue = ''\noptions = {\nurl: '',\nauth: {\n'user': jenkins_user,\n'pass': jenkins_api\n},\nmethod: 'post',\nheaders:{",
"end": 2126,
"score": 0.9813955426216125,
"start": 2114,
"tag": "USERNAME",
"value": "jenkins_user"
},
{
"context": "= {\nurl: '',\nauth: {\n'user': jenkins_user,\n'pass': jenkins_api\n},\nmethod: 'post',\nheaders:{}};\n\nif jenkins_versi",
"end": 2147,
"score": 0.9972897171974182,
"start": 2136,
"tag": "PASSWORD",
"value": "jenkins_api"
},
{
"context": "umb\"]=crumbvalue\n\t\t\t\telse\n\t\t\t\t\toptions.auth.pass = jenkins_pass\n\t\t\t\trequest.post options, (error, response, body)",
"end": 4679,
"score": 0.995215654373169,
"start": 4667,
"tag": "PASSWORD",
"value": "jenkins_pass"
},
{
"context": "er=process.env.HUBOT_JENKINS_USER\n\t\t\tjenkins_pass=process.env.HUBOT_JENKINS_PASSWORD\n\t\t\turl=jenkins_url+\"/job/\"+",
"end": 6120,
"score": 0.9822860956192017,
"start": 6109,
"tag": "PASSWORD",
"value": "process.env"
},
{
"context": "nv.HUBOT_JENKINS_USER\n\t\t\tjenkins_pass=process.env.HUBOT_JENKINS_PASSWORD\n\t\t\turl=jenkins_url+\"/job/\"+jobname+\"/buildWithPar",
"end": 6143,
"score": 0.9542917609214783,
"start": 6121,
"tag": "PASSWORD",
"value": "HUBOT_JENKINS_PASSWORD"
}
] | scripts/jenkins/scripts-msteams/buildwithparam.coffee | CognizantOneDevOps/OnBot | 4 | #-------------------------------------------------------------------------------
# copyright 2018 cognizant technology solutions
#
# licensed under the apache license, version 2.0 (the "license"); you may not
# use this file except in compliance with the license. you may obtain a copy
# of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis, without
# warranties or conditions of any kind, either express or implied. see the
# license for the specific language governing permissions and limitations under
# the license.
#-------------------------------------------------------------------------------
#description:
# builds a given jenkins job with given parameters
# and notifies the user once build is finished
#
#configuration:
# hubot_name
# hubot_jenkins_url
# hubot_jenkins_user
# hubot_jenkins_password
# hubot_jenkins_api_token
# hubot_jenkins_version
#
#commands:
# start <jobname> build with params <paramname1>=<paramvalue1> <paramname2>=<paramvalue2>... -> start a build for
# the given jenkins job with the given parameters and notify the user once build is finished
# example~
# start job01 build with params paramnew=123 paramold=456
#
#dependencies:
# "elasticsearch": "^0.9.2"
# "request": "2.81.0"
#
#note:
# this file is for building parameterized projects only. if the given job is not parameterized then
# hubot will respond with error. for non-parameterized jobs user jenkins build <jobaname>.
jenkins_url=process.env.HUBOT_JENKINS_URL
jenkins_user=process.env.HUBOT_JENKINS_USER
jenkins_pass=process.env.HUBOT_JENKINS_PASSWORD
jenkins_api=process.env.HUBOT_JENKINS_API_TOKEN
jenkins_version=process.env.HUBOT_JENKINS_VERSION
request = require('request')
readjson = require './readjson.js'
finaljson=" ";
index = require('./index')
statuscheck = require('./statuscheck.coffee')
generate_id = require('./mongoConnt')
crumb = require('./jenkinscrumb.js')
crumbvalue = ''
options = {
url: '',
auth: {
'user': jenkins_user,
'pass': jenkins_api
},
method: 'post',
headers:{}};
if jenkins_version >= 2.0
crumb.crumb (stderr, stdout) ->
if(stdout)
crumbvalue=stdout
post = (recipient, data) ->
optons = {method: "POST", url: recipient, json: data}
request.post optons, (error, response, body) ->
console.log body
module.exports = (robot) ->
robot.respond /start (.*) build with params (.+)/i, (msg) ->
readjson.readworkflow_coffee (error,stdout,stderr) ->
finaljson=stdout;
jobname=msg.match[1]
paramstring=[]
paramstring=msg.match[2].split(' ')
if stdout.start_build.workflowflag
generate_id.getNextSequence (err,id) ->
tckid=id
console.log(tckid);
payload={botname:process.env.HUBOT_NAME,username:msg.message.user.name,userid:process.env.CURRENT_CHANNEL,approver:stdout.start_build.admin,podIp:process.env.MY_POD_IP,jobname:jobname,paramstring:paramstring,"callback_id":"jenkinsbuildwithparam",msg:msg.toString()}
data = {"type": "MessageCard","context": "http://schema.org/extensions","summary": "Requested to requested a build","themeColor": "81CAF7","sections":[{"startGroup": true,"title": "**Approval Required!**","activityTitle": 'slack user '+payload.username+' requested a build '+payload.jobname+' with following params:\n'+payload.paramstring,"facts": []},{"potentialAction": [{"@type": "HttpPOST","name": "Approve","target": process.env.APPROVAL_APP_URL+"/Approved","body": "{\"tckid\": \""+tckid+"\" }", "bodyContentType":"application/x-www-form-urlencoded"},{"@type": "HttpPOST","name": "Deny","target": process.env.APPROVAL_APP_URL+"/Rejected","body": "{\"tckid\": \""+tckid+"\" }","bodyContentType":"application/x-www-form-urlencoded"}]}]}
#Post attachment to ms teams
post stdout.start_build.adminid, data
msg.send 'Your request is waiting for approval by '+stdout.start_build.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
else
#handles regular flow of the command without approval flow
jobname=msg.match[1]
paramstring=[]
paramstring=msg.match[2].split(' ')
url=jenkins_url+"/job/"+jobname+"/buildWithParameters?"
i=0
for i in [0...paramstring.length]
if(i==paramstring.length-1)
url=url+paramstring[i]
else
url=url+paramstring[i]+'&'
options.url = url
if jenkins_version >= 2.0
console.log(options)
options.headers["jenkins-crumb"]=crumbvalue
else
options.auth.pass = jenkins_pass
request.post options, (error, response, body) ->
console.log(body)
console.log response.statusCode
if(response.statusCode!=201)
dt="could not initiate build. make sure the jobname and the param key(s) you have given are correct. for new params, try adding it to your jenkins project first."
res.send(dt)
setTimeout (->index.passdata dt),1000
else
dt="build initiated with given parameters\n"+jenkins_url+"/job/"+jobname
msg.send(dt)
setTimeout (->index.passdata dt),1000
message = msg.match[0]
actionmsg = "jenkins build started"
statusmsg = "success"
index.walldata process.env.hubot_name, message, actionmsg, statusmsg;
if jenkins_version >= 2.0
statuscheck.checkbuildstatus res.message.user.room,jobname,crumbvalue
else
statuscheck.checkbuildstatus res.message.user.room,jobname,''
#the following code handles the approval flow of the command
robot.router.post '/jenkinsbuildwithparam', (req, response) ->
recipientid=req.body.userid
dt = {"text":"","title":""}
if(req.body.action=='Approved')
dt.title=req.body.approver+" approved jenkins build for job "+req.body.jobname+", requested by "+req.body.username+"\n";
jobname=req.body.jobname
paramstring=[]
paramstring=req.body.paramstring
jenkins_url=process.env.HUBOT_JENKINS_URL
jenkins_user=process.env.HUBOT_JENKINS_USER
jenkins_pass=process.env.HUBOT_JENKINS_PASSWORD
url=jenkins_url+"/job/"+jobname+"/buildWithParameters?"
i=0
for i in [0...paramstring.length]
if(i==paramstring.length-1)
url=url+paramstring[i]
else
url=url+paramstring[i]+'&'
options.url = url
if jenkins_version >= 2.0
console.log(options)
options.headers["jenkins-crumb"]=crumbvalue
else
options.auth.pass = jenkins_pass
request.post options, (error, response, body) ->
console.log(options)
console.log response.statusCode
if(response.statusCode!=201)
dt.text="could not initiate build. make sure the jobname and the param key(s) you have given are correct. for new params, try adding it to your jenkins project first."
post recipientid, dt
setTimeout (->index.passdata dt),1000
else
dt.text="build initiated with given parameters\n"+jenkins_url+"/job/"+jobname
post recipientid, dt
setTimeout (->index.passdata dt),1000
message = "start "+jobname+"build with params "+paramstring
actionmsg = "jenkins build started"
statusmsg = "success"
index.walldata process.env.hubot_name, message, actionmsg, statusmsg;
if jenkins_version >= 2.0
statuscheck.checkbuildstatus recipientid,jobname,crumbvalue
else
statuscheck.checkbuildstatus recipientid,jobname,''
else
dt.title = "the build request from "+req.body.username+" was rejected by "+req.body.approver
post recipientid, dt
setTimeout (->index.passdata dt),1000
| 166226 | #-------------------------------------------------------------------------------
# copyright 2018 cognizant technology solutions
#
# licensed under the apache license, version 2.0 (the "license"); you may not
# use this file except in compliance with the license. you may obtain a copy
# of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis, without
# warranties or conditions of any kind, either express or implied. see the
# license for the specific language governing permissions and limitations under
# the license.
#-------------------------------------------------------------------------------
#description:
# builds a given jenkins job with given parameters
# and notifies the user once build is finished
#
#configuration:
# hubot_name
# hubot_jenkins_url
# hubot_jenkins_user
# hubot_jenkins_password
# hubot_jenkins_api_token
# hubot_jenkins_version
#
#commands:
# start <jobname> build with params <paramname1>=<paramvalue1> <paramname2>=<paramvalue2>... -> start a build for
# the given jenkins job with the given parameters and notify the user once build is finished
# example~
# start job01 build with params paramnew=123 paramold=456
#
#dependencies:
# "elasticsearch": "^0.9.2"
# "request": "2.81.0"
#
#note:
# this file is for building parameterized projects only. if the given job is not parameterized then
# hubot will respond with error. for non-parameterized jobs user jenkins build <jobaname>.
jenkins_url=process.env.HUBOT_JENKINS_URL
jenkins_user=process.env.HUBOT_JENKINS_USER
jenkins_pass=process.env.HUBOT_JENKINS_PASSWORD
jenkins_api=process.env.HUBOT_JENKINS_API_TOKEN
jenkins_version=process.env.HUBOT_JENKINS_VERSION
request = require('request')
readjson = require './readjson.js'
finaljson=" ";
index = require('./index')
statuscheck = require('./statuscheck.coffee')
generate_id = require('./mongoConnt')
crumb = require('./jenkinscrumb.js')
crumbvalue = ''
options = {
url: '',
auth: {
'user': jenkins_user,
'pass': <PASSWORD>
},
method: 'post',
headers:{}};
if jenkins_version >= 2.0
crumb.crumb (stderr, stdout) ->
if(stdout)
crumbvalue=stdout
post = (recipient, data) ->
optons = {method: "POST", url: recipient, json: data}
request.post optons, (error, response, body) ->
console.log body
module.exports = (robot) ->
robot.respond /start (.*) build with params (.+)/i, (msg) ->
readjson.readworkflow_coffee (error,stdout,stderr) ->
finaljson=stdout;
jobname=msg.match[1]
paramstring=[]
paramstring=msg.match[2].split(' ')
if stdout.start_build.workflowflag
generate_id.getNextSequence (err,id) ->
tckid=id
console.log(tckid);
payload={botname:process.env.HUBOT_NAME,username:msg.message.user.name,userid:process.env.CURRENT_CHANNEL,approver:stdout.start_build.admin,podIp:process.env.MY_POD_IP,jobname:jobname,paramstring:paramstring,"callback_id":"jenkinsbuildwithparam",msg:msg.toString()}
data = {"type": "MessageCard","context": "http://schema.org/extensions","summary": "Requested to requested a build","themeColor": "81CAF7","sections":[{"startGroup": true,"title": "**Approval Required!**","activityTitle": 'slack user '+payload.username+' requested a build '+payload.jobname+' with following params:\n'+payload.paramstring,"facts": []},{"potentialAction": [{"@type": "HttpPOST","name": "Approve","target": process.env.APPROVAL_APP_URL+"/Approved","body": "{\"tckid\": \""+tckid+"\" }", "bodyContentType":"application/x-www-form-urlencoded"},{"@type": "HttpPOST","name": "Deny","target": process.env.APPROVAL_APP_URL+"/Rejected","body": "{\"tckid\": \""+tckid+"\" }","bodyContentType":"application/x-www-form-urlencoded"}]}]}
#Post attachment to ms teams
post stdout.start_build.adminid, data
msg.send 'Your request is waiting for approval by '+stdout.start_build.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
else
#handles regular flow of the command without approval flow
jobname=msg.match[1]
paramstring=[]
paramstring=msg.match[2].split(' ')
url=jenkins_url+"/job/"+jobname+"/buildWithParameters?"
i=0
for i in [0...paramstring.length]
if(i==paramstring.length-1)
url=url+paramstring[i]
else
url=url+paramstring[i]+'&'
options.url = url
if jenkins_version >= 2.0
console.log(options)
options.headers["jenkins-crumb"]=crumbvalue
else
options.auth.pass = <PASSWORD>
request.post options, (error, response, body) ->
console.log(body)
console.log response.statusCode
if(response.statusCode!=201)
dt="could not initiate build. make sure the jobname and the param key(s) you have given are correct. for new params, try adding it to your jenkins project first."
res.send(dt)
setTimeout (->index.passdata dt),1000
else
dt="build initiated with given parameters\n"+jenkins_url+"/job/"+jobname
msg.send(dt)
setTimeout (->index.passdata dt),1000
message = msg.match[0]
actionmsg = "jenkins build started"
statusmsg = "success"
index.walldata process.env.hubot_name, message, actionmsg, statusmsg;
if jenkins_version >= 2.0
statuscheck.checkbuildstatus res.message.user.room,jobname,crumbvalue
else
statuscheck.checkbuildstatus res.message.user.room,jobname,''
#the following code handles the approval flow of the command
robot.router.post '/jenkinsbuildwithparam', (req, response) ->
recipientid=req.body.userid
dt = {"text":"","title":""}
if(req.body.action=='Approved')
dt.title=req.body.approver+" approved jenkins build for job "+req.body.jobname+", requested by "+req.body.username+"\n";
jobname=req.body.jobname
paramstring=[]
paramstring=req.body.paramstring
jenkins_url=process.env.HUBOT_JENKINS_URL
jenkins_user=process.env.HUBOT_JENKINS_USER
jenkins_pass=<PASSWORD>.<PASSWORD>
url=jenkins_url+"/job/"+jobname+"/buildWithParameters?"
i=0
for i in [0...paramstring.length]
if(i==paramstring.length-1)
url=url+paramstring[i]
else
url=url+paramstring[i]+'&'
options.url = url
if jenkins_version >= 2.0
console.log(options)
options.headers["jenkins-crumb"]=crumbvalue
else
options.auth.pass = jenkins_pass
request.post options, (error, response, body) ->
console.log(options)
console.log response.statusCode
if(response.statusCode!=201)
dt.text="could not initiate build. make sure the jobname and the param key(s) you have given are correct. for new params, try adding it to your jenkins project first."
post recipientid, dt
setTimeout (->index.passdata dt),1000
else
dt.text="build initiated with given parameters\n"+jenkins_url+"/job/"+jobname
post recipientid, dt
setTimeout (->index.passdata dt),1000
message = "start "+jobname+"build with params "+paramstring
actionmsg = "jenkins build started"
statusmsg = "success"
index.walldata process.env.hubot_name, message, actionmsg, statusmsg;
if jenkins_version >= 2.0
statuscheck.checkbuildstatus recipientid,jobname,crumbvalue
else
statuscheck.checkbuildstatus recipientid,jobname,''
else
dt.title = "the build request from "+req.body.username+" was rejected by "+req.body.approver
post recipientid, dt
setTimeout (->index.passdata dt),1000
| true | #-------------------------------------------------------------------------------
# copyright 2018 cognizant technology solutions
#
# licensed under the apache license, version 2.0 (the "license"); you may not
# use this file except in compliance with the license. you may obtain a copy
# of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis, without
# warranties or conditions of any kind, either express or implied. see the
# license for the specific language governing permissions and limitations under
# the license.
#-------------------------------------------------------------------------------
#description:
# builds a given jenkins job with given parameters
# and notifies the user once build is finished
#
#configuration:
# hubot_name
# hubot_jenkins_url
# hubot_jenkins_user
# hubot_jenkins_password
# hubot_jenkins_api_token
# hubot_jenkins_version
#
#commands:
# start <jobname> build with params <paramname1>=<paramvalue1> <paramname2>=<paramvalue2>... -> start a build for
# the given jenkins job with the given parameters and notify the user once build is finished
# example~
# start job01 build with params paramnew=123 paramold=456
#
#dependencies:
# "elasticsearch": "^0.9.2"
# "request": "2.81.0"
#
#note:
# this file is for building parameterized projects only. if the given job is not parameterized then
# hubot will respond with error. for non-parameterized jobs user jenkins build <jobaname>.
jenkins_url=process.env.HUBOT_JENKINS_URL
jenkins_user=process.env.HUBOT_JENKINS_USER
jenkins_pass=process.env.HUBOT_JENKINS_PASSWORD
jenkins_api=process.env.HUBOT_JENKINS_API_TOKEN
jenkins_version=process.env.HUBOT_JENKINS_VERSION
request = require('request')
readjson = require './readjson.js'
finaljson=" ";
index = require('./index')
statuscheck = require('./statuscheck.coffee')
generate_id = require('./mongoConnt')
crumb = require('./jenkinscrumb.js')
crumbvalue = ''
options = {
url: '',
auth: {
'user': jenkins_user,
'pass': PI:PASSWORD:<PASSWORD>END_PI
},
method: 'post',
headers:{}};
if jenkins_version >= 2.0
crumb.crumb (stderr, stdout) ->
if(stdout)
crumbvalue=stdout
post = (recipient, data) ->
optons = {method: "POST", url: recipient, json: data}
request.post optons, (error, response, body) ->
console.log body
module.exports = (robot) ->
robot.respond /start (.*) build with params (.+)/i, (msg) ->
readjson.readworkflow_coffee (error,stdout,stderr) ->
finaljson=stdout;
jobname=msg.match[1]
paramstring=[]
paramstring=msg.match[2].split(' ')
if stdout.start_build.workflowflag
generate_id.getNextSequence (err,id) ->
tckid=id
console.log(tckid);
payload={botname:process.env.HUBOT_NAME,username:msg.message.user.name,userid:process.env.CURRENT_CHANNEL,approver:stdout.start_build.admin,podIp:process.env.MY_POD_IP,jobname:jobname,paramstring:paramstring,"callback_id":"jenkinsbuildwithparam",msg:msg.toString()}
data = {"type": "MessageCard","context": "http://schema.org/extensions","summary": "Requested to requested a build","themeColor": "81CAF7","sections":[{"startGroup": true,"title": "**Approval Required!**","activityTitle": 'slack user '+payload.username+' requested a build '+payload.jobname+' with following params:\n'+payload.paramstring,"facts": []},{"potentialAction": [{"@type": "HttpPOST","name": "Approve","target": process.env.APPROVAL_APP_URL+"/Approved","body": "{\"tckid\": \""+tckid+"\" }", "bodyContentType":"application/x-www-form-urlencoded"},{"@type": "HttpPOST","name": "Deny","target": process.env.APPROVAL_APP_URL+"/Rejected","body": "{\"tckid\": \""+tckid+"\" }","bodyContentType":"application/x-www-form-urlencoded"}]}]}
#Post attachment to ms teams
post stdout.start_build.adminid, data
msg.send 'Your request is waiting for approval by '+stdout.start_build.admin
dataToInsert = {ticketid: tckid, payload: payload, "status":"","approvedby":""}
#Insert into Mongo with Payload
generate_id.add_in_mongo dataToInsert
else
#handles regular flow of the command without approval flow
jobname=msg.match[1]
paramstring=[]
paramstring=msg.match[2].split(' ')
url=jenkins_url+"/job/"+jobname+"/buildWithParameters?"
i=0
for i in [0...paramstring.length]
if(i==paramstring.length-1)
url=url+paramstring[i]
else
url=url+paramstring[i]+'&'
options.url = url
if jenkins_version >= 2.0
console.log(options)
options.headers["jenkins-crumb"]=crumbvalue
else
options.auth.pass = PI:PASSWORD:<PASSWORD>END_PI
request.post options, (error, response, body) ->
console.log(body)
console.log response.statusCode
if(response.statusCode!=201)
dt="could not initiate build. make sure the jobname and the param key(s) you have given are correct. for new params, try adding it to your jenkins project first."
res.send(dt)
setTimeout (->index.passdata dt),1000
else
dt="build initiated with given parameters\n"+jenkins_url+"/job/"+jobname
msg.send(dt)
setTimeout (->index.passdata dt),1000
message = msg.match[0]
actionmsg = "jenkins build started"
statusmsg = "success"
index.walldata process.env.hubot_name, message, actionmsg, statusmsg;
if jenkins_version >= 2.0
statuscheck.checkbuildstatus res.message.user.room,jobname,crumbvalue
else
statuscheck.checkbuildstatus res.message.user.room,jobname,''
#the following code handles the approval flow of the command
robot.router.post '/jenkinsbuildwithparam', (req, response) ->
recipientid=req.body.userid
dt = {"text":"","title":""}
if(req.body.action=='Approved')
dt.title=req.body.approver+" approved jenkins build for job "+req.body.jobname+", requested by "+req.body.username+"\n";
jobname=req.body.jobname
paramstring=[]
paramstring=req.body.paramstring
jenkins_url=process.env.HUBOT_JENKINS_URL
jenkins_user=process.env.HUBOT_JENKINS_USER
jenkins_pass=PI:PASSWORD:<PASSWORD>END_PI.PI:PASSWORD:<PASSWORD>END_PI
url=jenkins_url+"/job/"+jobname+"/buildWithParameters?"
i=0
for i in [0...paramstring.length]
if(i==paramstring.length-1)
url=url+paramstring[i]
else
url=url+paramstring[i]+'&'
options.url = url
if jenkins_version >= 2.0
console.log(options)
options.headers["jenkins-crumb"]=crumbvalue
else
options.auth.pass = jenkins_pass
request.post options, (error, response, body) ->
console.log(options)
console.log response.statusCode
if(response.statusCode!=201)
dt.text="could not initiate build. make sure the jobname and the param key(s) you have given are correct. for new params, try adding it to your jenkins project first."
post recipientid, dt
setTimeout (->index.passdata dt),1000
else
dt.text="build initiated with given parameters\n"+jenkins_url+"/job/"+jobname
post recipientid, dt
setTimeout (->index.passdata dt),1000
message = "start "+jobname+"build with params "+paramstring
actionmsg = "jenkins build started"
statusmsg = "success"
index.walldata process.env.hubot_name, message, actionmsg, statusmsg;
if jenkins_version >= 2.0
statuscheck.checkbuildstatus recipientid,jobname,crumbvalue
else
statuscheck.checkbuildstatus recipientid,jobname,''
else
dt.title = "the build request from "+req.body.username+" was rejected by "+req.body.approver
post recipientid, dt
setTimeout (->index.passdata dt),1000
|
[
{
"context": "nal notes required for the script>\n#\n# Author:\n# Richard Hunt <>\n\nmodule.exports = (robot) ->\n robot.router.po",
"end": 324,
"score": 0.9998793601989746,
"start": 312,
"tag": "NAME",
"value": "Richard Hunt"
}
] | src/gretchen.coffee | arogozin/hubot-frank | 0 | # Description
# Creates a HTTP endpoint to tell hubot secrets. Like Gretchen Weiners.
#
# Configuration:
# LIST_OF_ENV_VARS_TO_SET
#
# Commands:
# hubot hello - <what the respond trigger does>
# orly - <what the hear trigger does>
#
# Notes:
# <optional notes required for the script>
#
# Author:
# Richard Hunt <>
module.exports = (robot) ->
robot.router.post '/hubot/tellfrank/:room', (req, res) ->
console.log req.params.room, "#{req.body && req.body.secret}"
robot.messageRoom req.params.room, "#{req.body && req.body.secret}"
res.send('OK')
# try
# res.writeHead 200, 'OK'
# catch error
# res.writeHead 500, 'Internal Server Error'
# res.write error.stack
| 80970 | # Description
# Creates a HTTP endpoint to tell hubot secrets. Like Gretchen Weiners.
#
# Configuration:
# LIST_OF_ENV_VARS_TO_SET
#
# Commands:
# hubot hello - <what the respond trigger does>
# orly - <what the hear trigger does>
#
# Notes:
# <optional notes required for the script>
#
# Author:
# <NAME> <>
module.exports = (robot) ->
robot.router.post '/hubot/tellfrank/:room', (req, res) ->
console.log req.params.room, "#{req.body && req.body.secret}"
robot.messageRoom req.params.room, "#{req.body && req.body.secret}"
res.send('OK')
# try
# res.writeHead 200, 'OK'
# catch error
# res.writeHead 500, 'Internal Server Error'
# res.write error.stack
| true | # Description
# Creates a HTTP endpoint to tell hubot secrets. Like Gretchen Weiners.
#
# Configuration:
# LIST_OF_ENV_VARS_TO_SET
#
# Commands:
# hubot hello - <what the respond trigger does>
# orly - <what the hear trigger does>
#
# Notes:
# <optional notes required for the script>
#
# Author:
# PI:NAME:<NAME>END_PI <>
module.exports = (robot) ->
robot.router.post '/hubot/tellfrank/:room', (req, res) ->
console.log req.params.room, "#{req.body && req.body.secret}"
robot.messageRoom req.params.room, "#{req.body && req.body.secret}"
res.send('OK')
# try
# res.writeHead 200, 'OK'
# catch error
# res.writeHead 500, 'Internal Server Error'
# res.write error.stack
|
[
{
"context": "le {\n title\n href\n author {\n name\n }\n image: thumbnail_image {\n th",
"end": 133,
"score": 0.9715570211410522,
"start": 129,
"tag": "NAME",
"value": "name"
}
] | src/mobile/apps/artwork/components/highlights/query.coffee | kanaabe/force | 1 | module.exports = """
highlights {
__typename
... on HighlightedArticle {
title
href
author {
name
}
image: thumbnail_image {
thumb: cropped(width: 200, height: 200) {
url
width
height
}
}
}
... on HighlightedShow {
name
href
partner {
href
name
}
location {
city
}
status
start_at(format: "MMM D")
end_at(format: "MMM D")
images(size: 2) {
thumb: cropped(width: 200, height: 200) {
width
height
url
}
}
}
}
"""
| 161598 | module.exports = """
highlights {
__typename
... on HighlightedArticle {
title
href
author {
<NAME>
}
image: thumbnail_image {
thumb: cropped(width: 200, height: 200) {
url
width
height
}
}
}
... on HighlightedShow {
name
href
partner {
href
name
}
location {
city
}
status
start_at(format: "MMM D")
end_at(format: "MMM D")
images(size: 2) {
thumb: cropped(width: 200, height: 200) {
width
height
url
}
}
}
}
"""
| true | module.exports = """
highlights {
__typename
... on HighlightedArticle {
title
href
author {
PI:NAME:<NAME>END_PI
}
image: thumbnail_image {
thumb: cropped(width: 200, height: 200) {
url
width
height
}
}
}
... on HighlightedShow {
name
href
partner {
href
name
}
location {
city
}
status
start_at(format: "MMM D")
end_at(format: "MMM D")
images(size: 2) {
thumb: cropped(width: 200, height: 200) {
width
height
url
}
}
}
}
"""
|
[
{
"context": "s file is part of the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyrig",
"end": 74,
"score": 0.9998871684074402,
"start": 61,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "f the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyright and license informa",
"end": 96,
"score": 0.9999347925186157,
"start": 76,
"tag": "EMAIL",
"value": "jessym@konsserto.com"
},
{
"context": "/ListCommand')\n\n#\n# ConsoleApplication\n#\n# @author Jessym Reziga <jessym@konsserto.com>\n#\nclass ConsoleApplication",
"end": 868,
"score": 0.9998932480812073,
"start": 855,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "#\n# ConsoleApplication\n#\n# @author Jessym Reziga <jessym@konsserto.com>\n#\nclass ConsoleApplication\n\n\n\tconstructor:(@name",
"end": 890,
"score": 0.9999348521232605,
"start": 870,
"tag": "EMAIL",
"value": "jessym@konsserto.com"
}
] | node_modules/konsserto/lib/src/Konsserto/Component/Console/ConsoleApplication.coffee | konsserto/konsserto | 2 | ###
* This file is part of the Konsserto package.
*
* (c) Jessym Reziga <jessym@konsserto.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
cc = use('cli-color')
wait = use('wait.for')
ArgvInput = use('@Konsserto/Component/Console/Input/ArgvInput')
ArrayInput = use('@Konsserto/Component/Console/Input/ArrayInput')
Command = use('@Konsserto/Component/Console/Command')
HelpCommand = use('@Konsserto/Component/Console/Command/HelpCommand')
InputArgument = use('@Konsserto/Component/Console/Input/InputArgument')
InputDefinition = use('@Konsserto/Component/Console/Input/InputDefinition')
InputOption = use('@Konsserto/Component/Console/Input/InputOption')
ListCommand = use('@Konsserto/Component/Console/Command/ListCommand')
#
# ConsoleApplication
#
# @author Jessym Reziga <jessym@konsserto.com>
#
class ConsoleApplication
constructor:(@name,@version) ->
@verbose = 0
@commands = {}
@running = ''
@needHelp = false
@definition = @getDefaultInputDefinition()
for command in @getDefaultCommands()
@add(command)
getDefinition:() ->
return @definition
getDefaultInputDefinition:() ->
return new InputDefinition([
new InputArgument('command', InputArgument.REQUIRED, 'The command to execute'),
new InputOption('--help', '-h', InputOption.VALUE_NONE, 'Display this help message.'),
new InputOption('--verbose', '-v', InputOption.VALUE_NONE, 'Increase the verbosity of messages'),
new InputOption('--version', '-V', InputOption.VALUE_NONE, 'Display this application version.'),
new InputOption('--no-interaction', '-n', InputOption.VALUE_NONE, 'Do not ask any interactive question.'),
])
getDefaultCommands:() ->
return [new ListCommand(),new HelpCommand()];
getCommands:() ->
return @commands
add:(command) ->
command.setApplication(this)
if !command.isEnabled()
command.setApplication(null)
return
if command.getDefinition() == undefined
throw new Error('Command class '+command.constructor.name+' is not correctly initialized. You probably forgot to call the parent constructor.')
@commands[command.getName()] = command
return command
addCommands:(commands) ->
for command in commands
@add(command)
has:(name) ->
return @commands[name]
setVerbose:(level) ->
@verbose = level
getCommandName:(input) ->
return input.getFirstArgument()
getName:() ->
return @name
getVersion:() ->
return @version
setName:(@name) ->
return this
setVersion:(@version) ->
return this
getLongVersion:() ->
versionMessage = 'Konsserto'
if @getName() != undefined
versionMessage = @getName()
if @getVersion() != undefined
versionMessage +=' version '+@getVersion()
return versionMessage
run:(input) ->
if input == undefined
input = new ArgvInput()
@configureIO(input)
ecode = -1
try
ecode = @doRun(input)
catch e
console.info('\n '+cc.white.bgRed(e))
console.info('\n '+@running.getSynopsis())
ecode = 255
if @verbose > 0 then throw e else process.exit(ecode)
return ecode
configureIO:(input) ->
if input.hasParameterOption(['--no-interaction','-n'])
input.setInteractive(false)
if input.hasParameterOption(['--verbose','-v'])
@setVerbose(1)
doRun:(input) ->
if input.hasParameterOption(['--version','-V'])
console.info(@getLongVersion())
return 0
name = @getCommandName(input)
if input.hasParameterOption(['--help','-h'])
if name == undefined
name = 'help'
input = new ArrayInput({command:'help'})
else
@needHelp = true
if name == undefined
name = 'list'
input = new ArrayInput({command:name})
command = @find(name)
@running = command
exitCode = @doRunCommand(command,input)
@running = null
return exitCode
doRunCommand:(command,input) ->
return command.run(input)
find:(name) ->
@get(name)
get:(name) ->
if @commands[name] == undefined
throw new Error('The command \''+name+'\' does not exist.')
command = @commands[name]
if @needHelp
@needHelp = false
helpCommand = @get('help')
helpCommand.setCommand(command)
return helpCommand
return command
module.exports = ConsoleApplication
| 181546 | ###
* This file is part of the Konsserto package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
cc = use('cli-color')
wait = use('wait.for')
ArgvInput = use('@Konsserto/Component/Console/Input/ArgvInput')
ArrayInput = use('@Konsserto/Component/Console/Input/ArrayInput')
Command = use('@Konsserto/Component/Console/Command')
HelpCommand = use('@Konsserto/Component/Console/Command/HelpCommand')
InputArgument = use('@Konsserto/Component/Console/Input/InputArgument')
InputDefinition = use('@Konsserto/Component/Console/Input/InputDefinition')
InputOption = use('@Konsserto/Component/Console/Input/InputOption')
ListCommand = use('@Konsserto/Component/Console/Command/ListCommand')
#
# ConsoleApplication
#
# @author <NAME> <<EMAIL>>
#
class ConsoleApplication
constructor:(@name,@version) ->
@verbose = 0
@commands = {}
@running = ''
@needHelp = false
@definition = @getDefaultInputDefinition()
for command in @getDefaultCommands()
@add(command)
getDefinition:() ->
return @definition
getDefaultInputDefinition:() ->
return new InputDefinition([
new InputArgument('command', InputArgument.REQUIRED, 'The command to execute'),
new InputOption('--help', '-h', InputOption.VALUE_NONE, 'Display this help message.'),
new InputOption('--verbose', '-v', InputOption.VALUE_NONE, 'Increase the verbosity of messages'),
new InputOption('--version', '-V', InputOption.VALUE_NONE, 'Display this application version.'),
new InputOption('--no-interaction', '-n', InputOption.VALUE_NONE, 'Do not ask any interactive question.'),
])
getDefaultCommands:() ->
return [new ListCommand(),new HelpCommand()];
getCommands:() ->
return @commands
add:(command) ->
command.setApplication(this)
if !command.isEnabled()
command.setApplication(null)
return
if command.getDefinition() == undefined
throw new Error('Command class '+command.constructor.name+' is not correctly initialized. You probably forgot to call the parent constructor.')
@commands[command.getName()] = command
return command
addCommands:(commands) ->
for command in commands
@add(command)
has:(name) ->
return @commands[name]
setVerbose:(level) ->
@verbose = level
getCommandName:(input) ->
return input.getFirstArgument()
getName:() ->
return @name
getVersion:() ->
return @version
setName:(@name) ->
return this
setVersion:(@version) ->
return this
getLongVersion:() ->
versionMessage = 'Konsserto'
if @getName() != undefined
versionMessage = @getName()
if @getVersion() != undefined
versionMessage +=' version '+@getVersion()
return versionMessage
run:(input) ->
if input == undefined
input = new ArgvInput()
@configureIO(input)
ecode = -1
try
ecode = @doRun(input)
catch e
console.info('\n '+cc.white.bgRed(e))
console.info('\n '+@running.getSynopsis())
ecode = 255
if @verbose > 0 then throw e else process.exit(ecode)
return ecode
configureIO:(input) ->
if input.hasParameterOption(['--no-interaction','-n'])
input.setInteractive(false)
if input.hasParameterOption(['--verbose','-v'])
@setVerbose(1)
doRun:(input) ->
if input.hasParameterOption(['--version','-V'])
console.info(@getLongVersion())
return 0
name = @getCommandName(input)
if input.hasParameterOption(['--help','-h'])
if name == undefined
name = 'help'
input = new ArrayInput({command:'help'})
else
@needHelp = true
if name == undefined
name = 'list'
input = new ArrayInput({command:name})
command = @find(name)
@running = command
exitCode = @doRunCommand(command,input)
@running = null
return exitCode
doRunCommand:(command,input) ->
return command.run(input)
find:(name) ->
@get(name)
get:(name) ->
if @commands[name] == undefined
throw new Error('The command \''+name+'\' does not exist.')
command = @commands[name]
if @needHelp
@needHelp = false
helpCommand = @get('help')
helpCommand.setCommand(command)
return helpCommand
return command
module.exports = ConsoleApplication
| true | ###
* This file is part of the Konsserto package.
*
* (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
cc = use('cli-color')
wait = use('wait.for')
ArgvInput = use('@Konsserto/Component/Console/Input/ArgvInput')
ArrayInput = use('@Konsserto/Component/Console/Input/ArrayInput')
Command = use('@Konsserto/Component/Console/Command')
HelpCommand = use('@Konsserto/Component/Console/Command/HelpCommand')
InputArgument = use('@Konsserto/Component/Console/Input/InputArgument')
InputDefinition = use('@Konsserto/Component/Console/Input/InputDefinition')
InputOption = use('@Konsserto/Component/Console/Input/InputOption')
ListCommand = use('@Konsserto/Component/Console/Command/ListCommand')
#
# ConsoleApplication
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
class ConsoleApplication
constructor:(@name,@version) ->
@verbose = 0
@commands = {}
@running = ''
@needHelp = false
@definition = @getDefaultInputDefinition()
for command in @getDefaultCommands()
@add(command)
getDefinition:() ->
return @definition
getDefaultInputDefinition:() ->
return new InputDefinition([
new InputArgument('command', InputArgument.REQUIRED, 'The command to execute'),
new InputOption('--help', '-h', InputOption.VALUE_NONE, 'Display this help message.'),
new InputOption('--verbose', '-v', InputOption.VALUE_NONE, 'Increase the verbosity of messages'),
new InputOption('--version', '-V', InputOption.VALUE_NONE, 'Display this application version.'),
new InputOption('--no-interaction', '-n', InputOption.VALUE_NONE, 'Do not ask any interactive question.'),
])
getDefaultCommands:() ->
return [new ListCommand(),new HelpCommand()];
getCommands:() ->
return @commands
add:(command) ->
command.setApplication(this)
if !command.isEnabled()
command.setApplication(null)
return
if command.getDefinition() == undefined
throw new Error('Command class '+command.constructor.name+' is not correctly initialized. You probably forgot to call the parent constructor.')
@commands[command.getName()] = command
return command
addCommands:(commands) ->
for command in commands
@add(command)
has:(name) ->
return @commands[name]
setVerbose:(level) ->
@verbose = level
getCommandName:(input) ->
return input.getFirstArgument()
getName:() ->
return @name
getVersion:() ->
return @version
setName:(@name) ->
return this
setVersion:(@version) ->
return this
getLongVersion:() ->
versionMessage = 'Konsserto'
if @getName() != undefined
versionMessage = @getName()
if @getVersion() != undefined
versionMessage +=' version '+@getVersion()
return versionMessage
run:(input) ->
if input == undefined
input = new ArgvInput()
@configureIO(input)
ecode = -1
try
ecode = @doRun(input)
catch e
console.info('\n '+cc.white.bgRed(e))
console.info('\n '+@running.getSynopsis())
ecode = 255
if @verbose > 0 then throw e else process.exit(ecode)
return ecode
configureIO:(input) ->
if input.hasParameterOption(['--no-interaction','-n'])
input.setInteractive(false)
if input.hasParameterOption(['--verbose','-v'])
@setVerbose(1)
doRun:(input) ->
if input.hasParameterOption(['--version','-V'])
console.info(@getLongVersion())
return 0
name = @getCommandName(input)
if input.hasParameterOption(['--help','-h'])
if name == undefined
name = 'help'
input = new ArrayInput({command:'help'})
else
@needHelp = true
if name == undefined
name = 'list'
input = new ArrayInput({command:name})
command = @find(name)
@running = command
exitCode = @doRunCommand(command,input)
@running = null
return exitCode
doRunCommand:(command,input) ->
return command.run(input)
find:(name) ->
@get(name)
get:(name) ->
if @commands[name] == undefined
throw new Error('The command \''+name+'\' does not exist.')
command = @commands[name]
if @needHelp
@needHelp = false
helpCommand = @get('help')
helpCommand.setCommand(command)
return helpCommand
return command
module.exports = ConsoleApplication
|
[
{
"context": "Minimum: 10\t\t# Minimum beacon value\n\t\tonHTTPKey: '0acc7d0fd7ac9ef4133950d3949b81a7' # Hash of http secret key\n\t\tinRangeCheckinTime: ",
"end": 465,
"score": 0.9997662901878357,
"start": 433,
"tag": "KEY",
"value": "0acc7d0fd7ac9ef4133950d3949b81a7"
}
] | config.common.coffee | VincentSmit/Ontwerpproject | 0 | # General config values (used by client and server)
exports.getConfig = ->
return {
beaconPointsTime: 3600000 # Milliseconds between scoring points for a beacon
beaconHoldScore: 1 # Number of points scored per <pointsTime> by holding a beacon
beaconValueInitial: 50 # Initial capture value of a beacon
beaconValueDecrease: 5 # Value decrease after capture
beaconValueMinimum: 10 # Minimum beacon value
onHTTPKey: '0acc7d0fd7ac9ef4133950d3949b81a7' # Hash of http secret key
inRangeCheckinTime: 30 # Time between client checkins while inrange of a beacon (seconds)
inRangeKickTime: 60 # Time after no checkin that the server will remove the client from inrange (seconds)
afkCheckinLocation: 30 # Times that location can be checked in while AFK
} | 35446 | # General config values (used by client and server)
exports.getConfig = ->
return {
beaconPointsTime: 3600000 # Milliseconds between scoring points for a beacon
beaconHoldScore: 1 # Number of points scored per <pointsTime> by holding a beacon
beaconValueInitial: 50 # Initial capture value of a beacon
beaconValueDecrease: 5 # Value decrease after capture
beaconValueMinimum: 10 # Minimum beacon value
onHTTPKey: '<KEY>' # Hash of http secret key
inRangeCheckinTime: 30 # Time between client checkins while inrange of a beacon (seconds)
inRangeKickTime: 60 # Time after no checkin that the server will remove the client from inrange (seconds)
afkCheckinLocation: 30 # Times that location can be checked in while AFK
} | true | # General config values (used by client and server)
exports.getConfig = ->
return {
beaconPointsTime: 3600000 # Milliseconds between scoring points for a beacon
beaconHoldScore: 1 # Number of points scored per <pointsTime> by holding a beacon
beaconValueInitial: 50 # Initial capture value of a beacon
beaconValueDecrease: 5 # Value decrease after capture
beaconValueMinimum: 10 # Minimum beacon value
onHTTPKey: 'PI:KEY:<KEY>END_PI' # Hash of http secret key
inRangeCheckinTime: 30 # Time between client checkins while inrange of a beacon (seconds)
inRangeKickTime: 60 # Time after no checkin that the server will remove the client from inrange (seconds)
afkCheckinLocation: 30 # Times that location can be checked in while AFK
} |
[
{
"context": "\tplease check the README here: https://github.com/resin-io/resin-cli .\n\n\t\tUse this command to preload an app",
"end": 3478,
"score": 0.9834510087966919,
"start": 3470,
"tag": "USERNAME",
"value": "resin-io"
},
{
"context": "\n\t\t $ resin preload resin.img --app 1234 --commit e1f2592fc6ee949e68756d4f4a48e49bff8d72a0 --splash-image some-image.png\n\t\t $ resin preload",
"end": 3724,
"score": 0.776468813419342,
"start": 3684,
"tag": "PASSWORD",
"value": "e1f2592fc6ee949e68756d4f4a48e49bff8d72a0"
}
] | lib/actions/preload.coffee | vtmf/resion | 0 | ###
Copyright 2016-2017 Resin.io
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
dockerUtils = require('../utils/docker')
LATEST = 'latest'
getApplicationsWithSuccessfulBuilds = (deviceType) ->
preload = require('resin-preload')
resin = require('resin-sdk').fromSharedOptions()
resin.pine.get
resource: 'my_application'
options:
$filter:
device_type: deviceType
owns__release:
$any:
$alias: 'r'
$expr:
r:
status: 'success'
$expand: preload.applicationExpandOptions
$select: [ 'id', 'app_name', 'device_type', 'commit', 'should_track_latest_release' ]
$orderby: 'app_name asc'
selectApplication = (deviceType) ->
visuals = require('resin-cli-visuals')
form = require('resin-cli-form')
{ exitWithExpectedError } = require('../utils/patterns')
applicationInfoSpinner = new visuals.Spinner('Downloading list of applications and releases.')
applicationInfoSpinner.start()
getApplicationsWithSuccessfulBuilds(deviceType)
.then (applications) ->
applicationInfoSpinner.stop()
if applications.length == 0
exitWithExpectedError("You have no apps with successful releases for a '#{deviceType}' device type.")
form.ask
message: 'Select an application'
type: 'list'
choices: applications.map (app) ->
name: app.app_name
value: app
selectApplicationCommit = (releases) ->
form = require('resin-cli-form')
{ exitWithExpectedError } = require('../utils/patterns')
if releases.length == 0
exitWithExpectedError('This application has no successful releases.')
DEFAULT_CHOICE = { 'name': LATEST, 'value': LATEST }
choices = [ DEFAULT_CHOICE ].concat releases.map (release) ->
name: "#{release.end_timestamp} - #{release.commit}"
value: release.commit
return form.ask
message: 'Select a release'
type: 'list'
default: LATEST
choices: choices
offerToDisableAutomaticUpdates = (application, commit) ->
Promise = require('bluebird')
resin = require('resin-sdk').fromSharedOptions()
form = require('resin-cli-form')
if commit == LATEST or not application.should_track_latest_release
return Promise.resolve()
message = '''
This application is set to automatically update all devices to the latest available version.
This might be unexpected behaviour: with this enabled, the preloaded device will still
download and install the latest release once it is online.
Do you want to disable automatic updates for this application?
'''
form.ask
message: message,
type: 'confirm'
.then (update) ->
if not update
return
resin.pine.patch
resource: 'application'
id: application.id
body:
should_track_latest_release: false
module.exports =
signature: 'preload <image>'
description: '(beta) preload an app on a disk image (or Edison zip archive)'
help: '''
Warning: "resin preload" requires Docker to be correctly installed in
your shell environment. For more information (including Windows support)
please check the README here: https://github.com/resin-io/resin-cli .
Use this command to preload an application to a local disk image (or
Edison zip archive) with a built release from Resin.io.
Examples:
$ resin preload resin.img --app 1234 --commit e1f2592fc6ee949e68756d4f4a48e49bff8d72a0 --splash-image some-image.png
$ resin preload resin.img
'''
permission: 'user'
primary: true
options: dockerUtils.appendConnectionOptions [
{
signature: 'app'
parameter: 'appId'
description: 'id of the application to preload'
alias: 'a'
}
{
signature: 'commit'
parameter: 'hash'
description: '''
the commit hash for a specific application release to preload, use "latest" to specify the latest release
(ignored if no appId is given)
'''
alias: 'c'
}
{
signature: 'splash-image'
parameter: 'splashImage.png'
description: 'path to a png image to replace the splash screen'
alias: 's'
}
{
signature: 'dont-check-device-type'
boolean: true
description: 'Disables check for matching device types in image and application'
}
{
signature: 'pin-device-to-release'
boolean: true
description: 'Pin the preloaded device to the preloaded release on provision'
alias: 'p'
}
]
action: (params, options, done) ->
_ = require('lodash')
Promise = require('bluebird')
resin = require('resin-sdk').fromSharedOptions()
preload = require('resin-preload')
visuals = require('resin-cli-visuals')
nodeCleanup = require('node-cleanup')
{ exitWithExpectedError } = require('../utils/patterns')
progressBars = {}
progressHandler = (event) ->
progressBar = progressBars[event.name]
if not progressBar
progressBar = progressBars[event.name] = new visuals.Progress(event.name)
progressBar.update(percentage: event.percentage)
spinners = {}
spinnerHandler = (event) ->
spinner = spinners[event.name]
if not spinner
spinner = spinners[event.name] = new visuals.Spinner(event.name)
if event.action == 'start'
spinner.start()
else
console.log()
spinner.stop()
options.image = params.image
options.appId = options.app
delete options.app
options.splashImage = options['splash-image']
delete options['splash-image']
options.dontCheckDeviceType = options['dont-check-device-type']
delete options['dont-check-device-type']
if options.dontCheckDeviceType and not options.appId
exitWithExpectedError('You need to specify an app id if you disable the device type check.')
options.pinDevice = options['pin-device-to-release']
delete options['pin-device-to-release']
# Get a configured dockerode instance
dockerUtils.getDocker(options)
.then (docker) ->
preloader = new preload.Preloader(
resin
docker
options.appId
options.commit
options.image
options.splashImage
options.proxy
options.dontCheckDeviceType
options.pinDevice
)
gotSignal = false
nodeCleanup (exitCode, signal) ->
if signal
gotSignal = true
nodeCleanup.uninstall() # don't call cleanup handler again
preloader.cleanup()
.then ->
# calling process.exit() won't inform parent process of signal
process.kill(process.pid, signal)
return false
if process.env.DEBUG
preloader.stderr.pipe(process.stderr)
preloader.on('progress', progressHandler)
preloader.on('spinner', spinnerHandler)
return new Promise (resolve, reject) ->
preloader.on('error', reject)
preloader.prepare()
.then ->
# If no appId was provided, show a list of matching apps
Promise.try ->
if not preloader.appId
selectApplication(preloader.config.deviceType)
.then (application) ->
preloader.setApplication(application)
.then ->
# Use the commit given as --commit or show an interactive commit selection menu
Promise.try ->
if options.commit
if options.commit == LATEST and preloader.application.commit
# handle `--commit latest`
return LATEST
release = _.find preloader.application.owns__release, (release) ->
release.commit.startsWith(options.commit)
if not release
exitWithExpectedError('There is no release matching this commit')
return release.commit
selectApplicationCommit(preloader.application.owns__release)
.then (commit) ->
if commit == LATEST
preloader.commit = preloader.application.commit
else
preloader.commit = commit
# Propose to disable automatic app updates if the commit is not the latest
offerToDisableAutomaticUpdates(preloader.application, commit)
.then ->
# All options are ready: preload the image.
preloader.preload()
.catch(resin.errors.ResinError, exitWithExpectedError)
.then(resolve)
.catch(reject)
.then(done)
.finally ->
if not gotSignal
preloader.cleanup()
| 135876 | ###
Copyright 2016-2017 Resin.io
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
dockerUtils = require('../utils/docker')
LATEST = 'latest'
getApplicationsWithSuccessfulBuilds = (deviceType) ->
preload = require('resin-preload')
resin = require('resin-sdk').fromSharedOptions()
resin.pine.get
resource: 'my_application'
options:
$filter:
device_type: deviceType
owns__release:
$any:
$alias: 'r'
$expr:
r:
status: 'success'
$expand: preload.applicationExpandOptions
$select: [ 'id', 'app_name', 'device_type', 'commit', 'should_track_latest_release' ]
$orderby: 'app_name asc'
selectApplication = (deviceType) ->
visuals = require('resin-cli-visuals')
form = require('resin-cli-form')
{ exitWithExpectedError } = require('../utils/patterns')
applicationInfoSpinner = new visuals.Spinner('Downloading list of applications and releases.')
applicationInfoSpinner.start()
getApplicationsWithSuccessfulBuilds(deviceType)
.then (applications) ->
applicationInfoSpinner.stop()
if applications.length == 0
exitWithExpectedError("You have no apps with successful releases for a '#{deviceType}' device type.")
form.ask
message: 'Select an application'
type: 'list'
choices: applications.map (app) ->
name: app.app_name
value: app
selectApplicationCommit = (releases) ->
form = require('resin-cli-form')
{ exitWithExpectedError } = require('../utils/patterns')
if releases.length == 0
exitWithExpectedError('This application has no successful releases.')
DEFAULT_CHOICE = { 'name': LATEST, 'value': LATEST }
choices = [ DEFAULT_CHOICE ].concat releases.map (release) ->
name: "#{release.end_timestamp} - #{release.commit}"
value: release.commit
return form.ask
message: 'Select a release'
type: 'list'
default: LATEST
choices: choices
offerToDisableAutomaticUpdates = (application, commit) ->
Promise = require('bluebird')
resin = require('resin-sdk').fromSharedOptions()
form = require('resin-cli-form')
if commit == LATEST or not application.should_track_latest_release
return Promise.resolve()
message = '''
This application is set to automatically update all devices to the latest available version.
This might be unexpected behaviour: with this enabled, the preloaded device will still
download and install the latest release once it is online.
Do you want to disable automatic updates for this application?
'''
form.ask
message: message,
type: 'confirm'
.then (update) ->
if not update
return
resin.pine.patch
resource: 'application'
id: application.id
body:
should_track_latest_release: false
module.exports =
signature: 'preload <image>'
description: '(beta) preload an app on a disk image (or Edison zip archive)'
help: '''
Warning: "resin preload" requires Docker to be correctly installed in
your shell environment. For more information (including Windows support)
please check the README here: https://github.com/resin-io/resin-cli .
Use this command to preload an application to a local disk image (or
Edison zip archive) with a built release from Resin.io.
Examples:
$ resin preload resin.img --app 1234 --commit <PASSWORD> --splash-image some-image.png
$ resin preload resin.img
'''
permission: 'user'
primary: true
options: dockerUtils.appendConnectionOptions [
{
signature: 'app'
parameter: 'appId'
description: 'id of the application to preload'
alias: 'a'
}
{
signature: 'commit'
parameter: 'hash'
description: '''
the commit hash for a specific application release to preload, use "latest" to specify the latest release
(ignored if no appId is given)
'''
alias: 'c'
}
{
signature: 'splash-image'
parameter: 'splashImage.png'
description: 'path to a png image to replace the splash screen'
alias: 's'
}
{
signature: 'dont-check-device-type'
boolean: true
description: 'Disables check for matching device types in image and application'
}
{
signature: 'pin-device-to-release'
boolean: true
description: 'Pin the preloaded device to the preloaded release on provision'
alias: 'p'
}
]
action: (params, options, done) ->
_ = require('lodash')
Promise = require('bluebird')
resin = require('resin-sdk').fromSharedOptions()
preload = require('resin-preload')
visuals = require('resin-cli-visuals')
nodeCleanup = require('node-cleanup')
{ exitWithExpectedError } = require('../utils/patterns')
progressBars = {}
progressHandler = (event) ->
progressBar = progressBars[event.name]
if not progressBar
progressBar = progressBars[event.name] = new visuals.Progress(event.name)
progressBar.update(percentage: event.percentage)
spinners = {}
spinnerHandler = (event) ->
spinner = spinners[event.name]
if not spinner
spinner = spinners[event.name] = new visuals.Spinner(event.name)
if event.action == 'start'
spinner.start()
else
console.log()
spinner.stop()
options.image = params.image
options.appId = options.app
delete options.app
options.splashImage = options['splash-image']
delete options['splash-image']
options.dontCheckDeviceType = options['dont-check-device-type']
delete options['dont-check-device-type']
if options.dontCheckDeviceType and not options.appId
exitWithExpectedError('You need to specify an app id if you disable the device type check.')
options.pinDevice = options['pin-device-to-release']
delete options['pin-device-to-release']
# Get a configured dockerode instance
dockerUtils.getDocker(options)
.then (docker) ->
preloader = new preload.Preloader(
resin
docker
options.appId
options.commit
options.image
options.splashImage
options.proxy
options.dontCheckDeviceType
options.pinDevice
)
gotSignal = false
nodeCleanup (exitCode, signal) ->
if signal
gotSignal = true
nodeCleanup.uninstall() # don't call cleanup handler again
preloader.cleanup()
.then ->
# calling process.exit() won't inform parent process of signal
process.kill(process.pid, signal)
return false
if process.env.DEBUG
preloader.stderr.pipe(process.stderr)
preloader.on('progress', progressHandler)
preloader.on('spinner', spinnerHandler)
return new Promise (resolve, reject) ->
preloader.on('error', reject)
preloader.prepare()
.then ->
# If no appId was provided, show a list of matching apps
Promise.try ->
if not preloader.appId
selectApplication(preloader.config.deviceType)
.then (application) ->
preloader.setApplication(application)
.then ->
# Use the commit given as --commit or show an interactive commit selection menu
Promise.try ->
if options.commit
if options.commit == LATEST and preloader.application.commit
# handle `--commit latest`
return LATEST
release = _.find preloader.application.owns__release, (release) ->
release.commit.startsWith(options.commit)
if not release
exitWithExpectedError('There is no release matching this commit')
return release.commit
selectApplicationCommit(preloader.application.owns__release)
.then (commit) ->
if commit == LATEST
preloader.commit = preloader.application.commit
else
preloader.commit = commit
# Propose to disable automatic app updates if the commit is not the latest
offerToDisableAutomaticUpdates(preloader.application, commit)
.then ->
# All options are ready: preload the image.
preloader.preload()
.catch(resin.errors.ResinError, exitWithExpectedError)
.then(resolve)
.catch(reject)
.then(done)
.finally ->
if not gotSignal
preloader.cleanup()
| true | ###
Copyright 2016-2017 Resin.io
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
dockerUtils = require('../utils/docker')
LATEST = 'latest'
getApplicationsWithSuccessfulBuilds = (deviceType) ->
preload = require('resin-preload')
resin = require('resin-sdk').fromSharedOptions()
resin.pine.get
resource: 'my_application'
options:
$filter:
device_type: deviceType
owns__release:
$any:
$alias: 'r'
$expr:
r:
status: 'success'
$expand: preload.applicationExpandOptions
$select: [ 'id', 'app_name', 'device_type', 'commit', 'should_track_latest_release' ]
$orderby: 'app_name asc'
selectApplication = (deviceType) ->
visuals = require('resin-cli-visuals')
form = require('resin-cli-form')
{ exitWithExpectedError } = require('../utils/patterns')
applicationInfoSpinner = new visuals.Spinner('Downloading list of applications and releases.')
applicationInfoSpinner.start()
getApplicationsWithSuccessfulBuilds(deviceType)
.then (applications) ->
applicationInfoSpinner.stop()
if applications.length == 0
exitWithExpectedError("You have no apps with successful releases for a '#{deviceType}' device type.")
form.ask
message: 'Select an application'
type: 'list'
choices: applications.map (app) ->
name: app.app_name
value: app
selectApplicationCommit = (releases) ->
form = require('resin-cli-form')
{ exitWithExpectedError } = require('../utils/patterns')
if releases.length == 0
exitWithExpectedError('This application has no successful releases.')
DEFAULT_CHOICE = { 'name': LATEST, 'value': LATEST }
choices = [ DEFAULT_CHOICE ].concat releases.map (release) ->
name: "#{release.end_timestamp} - #{release.commit}"
value: release.commit
return form.ask
message: 'Select a release'
type: 'list'
default: LATEST
choices: choices
offerToDisableAutomaticUpdates = (application, commit) ->
Promise = require('bluebird')
resin = require('resin-sdk').fromSharedOptions()
form = require('resin-cli-form')
if commit == LATEST or not application.should_track_latest_release
return Promise.resolve()
message = '''
This application is set to automatically update all devices to the latest available version.
This might be unexpected behaviour: with this enabled, the preloaded device will still
download and install the latest release once it is online.
Do you want to disable automatic updates for this application?
'''
form.ask
message: message,
type: 'confirm'
.then (update) ->
if not update
return
resin.pine.patch
resource: 'application'
id: application.id
body:
should_track_latest_release: false
module.exports =
signature: 'preload <image>'
description: '(beta) preload an app on a disk image (or Edison zip archive)'
help: '''
Warning: "resin preload" requires Docker to be correctly installed in
your shell environment. For more information (including Windows support)
please check the README here: https://github.com/resin-io/resin-cli .
Use this command to preload an application to a local disk image (or
Edison zip archive) with a built release from Resin.io.
Examples:
$ resin preload resin.img --app 1234 --commit PI:PASSWORD:<PASSWORD>END_PI --splash-image some-image.png
$ resin preload resin.img
'''
permission: 'user'
primary: true
options: dockerUtils.appendConnectionOptions [
{
signature: 'app'
parameter: 'appId'
description: 'id of the application to preload'
alias: 'a'
}
{
signature: 'commit'
parameter: 'hash'
description: '''
the commit hash for a specific application release to preload, use "latest" to specify the latest release
(ignored if no appId is given)
'''
alias: 'c'
}
{
signature: 'splash-image'
parameter: 'splashImage.png'
description: 'path to a png image to replace the splash screen'
alias: 's'
}
{
signature: 'dont-check-device-type'
boolean: true
description: 'Disables check for matching device types in image and application'
}
{
signature: 'pin-device-to-release'
boolean: true
description: 'Pin the preloaded device to the preloaded release on provision'
alias: 'p'
}
]
action: (params, options, done) ->
_ = require('lodash')
Promise = require('bluebird')
resin = require('resin-sdk').fromSharedOptions()
preload = require('resin-preload')
visuals = require('resin-cli-visuals')
nodeCleanup = require('node-cleanup')
{ exitWithExpectedError } = require('../utils/patterns')
progressBars = {}
progressHandler = (event) ->
progressBar = progressBars[event.name]
if not progressBar
progressBar = progressBars[event.name] = new visuals.Progress(event.name)
progressBar.update(percentage: event.percentage)
spinners = {}
spinnerHandler = (event) ->
spinner = spinners[event.name]
if not spinner
spinner = spinners[event.name] = new visuals.Spinner(event.name)
if event.action == 'start'
spinner.start()
else
console.log()
spinner.stop()
options.image = params.image
options.appId = options.app
delete options.app
options.splashImage = options['splash-image']
delete options['splash-image']
options.dontCheckDeviceType = options['dont-check-device-type']
delete options['dont-check-device-type']
if options.dontCheckDeviceType and not options.appId
exitWithExpectedError('You need to specify an app id if you disable the device type check.')
options.pinDevice = options['pin-device-to-release']
delete options['pin-device-to-release']
# Get a configured dockerode instance
dockerUtils.getDocker(options)
.then (docker) ->
preloader = new preload.Preloader(
resin
docker
options.appId
options.commit
options.image
options.splashImage
options.proxy
options.dontCheckDeviceType
options.pinDevice
)
gotSignal = false
nodeCleanup (exitCode, signal) ->
if signal
gotSignal = true
nodeCleanup.uninstall() # don't call cleanup handler again
preloader.cleanup()
.then ->
# calling process.exit() won't inform parent process of signal
process.kill(process.pid, signal)
return false
if process.env.DEBUG
preloader.stderr.pipe(process.stderr)
preloader.on('progress', progressHandler)
preloader.on('spinner', spinnerHandler)
return new Promise (resolve, reject) ->
preloader.on('error', reject)
preloader.prepare()
.then ->
# If no appId was provided, show a list of matching apps
Promise.try ->
if not preloader.appId
selectApplication(preloader.config.deviceType)
.then (application) ->
preloader.setApplication(application)
.then ->
# Use the commit given as --commit or show an interactive commit selection menu
Promise.try ->
if options.commit
if options.commit == LATEST and preloader.application.commit
# handle `--commit latest`
return LATEST
release = _.find preloader.application.owns__release, (release) ->
release.commit.startsWith(options.commit)
if not release
exitWithExpectedError('There is no release matching this commit')
return release.commit
selectApplicationCommit(preloader.application.owns__release)
.then (commit) ->
if commit == LATEST
preloader.commit = preloader.application.commit
else
preloader.commit = commit
# Propose to disable automatic app updates if the commit is not the latest
offerToDisableAutomaticUpdates(preloader.application, commit)
.then ->
# All options are ready: preload the image.
preloader.preload()
.catch(resin.errors.ResinError, exitWithExpectedError)
.then(resolve)
.catch(reject)
.then(done)
.finally ->
if not gotSignal
preloader.cleanup()
|
[
{
"context": "On Rails UJS support module\n#\n# Copyright (C) 2011 Nikolay Nemshilov\n#\n\n$ = require('dom')\ncore = require('core')\nA",
"end": 77,
"score": 0.9998863935470581,
"start": 60,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/rails/src/ujs.coffee | lovely-io/lovely.io-stl | 2 | #
# Ruby On Rails UJS support module
#
# Copyright (C) 2011 Nikolay Nemshilov
#
$ = require('dom')
core = require('core')
Ajax = require('ajax')
# tries to cancel the event via confirmation
user_cancels = (event, element)->
message = element.attr('data-confirm')
if message && !confirm(message)
event.stop()
return true
# sends an ajax request
send_ajax = (element, url, options)->
Ajax.load url, core.ext(
create: -> element.emit 'ajax:loading', ajax: @
complete: -> element.emit 'ajax:complete', ajax: @
cuccess: -> element.emit 'ajax:success', ajax: @
cailure: -> element.emit 'ajax:failure', ajax: @
, options)
# global events listeners
$(global.document).on
# handles clicks on the remote links
click: (event)->
return unless link = event.find('a')
url = link.attr('href')
method = link.attr('data-method')
remote = link.attr('data-remote')
return if user_cancels(event, link)
event.stop() if remote || method
if remote
send_ajax link, url,
method: method || 'get'
spinner: link.attr('data-spinner')
else if method
param = $('meta[name=csrf-param]').attr('content')
token = $('meta[name=csrf-token]').attr('content')
form = "<form method='post'>"
form += "<input type='hidden' name='#{param}' value='#{token}' />" if param && token
form += "<input type='hidden' name='_method' value='#{method}' />"
form += "</form>"
$(form)[0].attr('action', url).insertTo(global.document.body).submit()
return # nothing
# handles remote forms submissions
submit: (event)->
form = event.target
if form.attr('data-remote') && !user_cancels(event, form)
event.stop()
send_ajax form, form.attr('action') || global.document.location.href,
method: form.attr('method') || 'get'
params: form.serialize()
spinner: form.attr('data-spinner') || form.first('.spinner')
return # nothing
| 115932 | #
# Ruby On Rails UJS support module
#
# Copyright (C) 2011 <NAME>
#
$ = require('dom')
core = require('core')
Ajax = require('ajax')
# tries to cancel the event via confirmation
user_cancels = (event, element)->
message = element.attr('data-confirm')
if message && !confirm(message)
event.stop()
return true
# sends an ajax request
send_ajax = (element, url, options)->
Ajax.load url, core.ext(
create: -> element.emit 'ajax:loading', ajax: @
complete: -> element.emit 'ajax:complete', ajax: @
cuccess: -> element.emit 'ajax:success', ajax: @
cailure: -> element.emit 'ajax:failure', ajax: @
, options)
# global events listeners
$(global.document).on
# handles clicks on the remote links
click: (event)->
return unless link = event.find('a')
url = link.attr('href')
method = link.attr('data-method')
remote = link.attr('data-remote')
return if user_cancels(event, link)
event.stop() if remote || method
if remote
send_ajax link, url,
method: method || 'get'
spinner: link.attr('data-spinner')
else if method
param = $('meta[name=csrf-param]').attr('content')
token = $('meta[name=csrf-token]').attr('content')
form = "<form method='post'>"
form += "<input type='hidden' name='#{param}' value='#{token}' />" if param && token
form += "<input type='hidden' name='_method' value='#{method}' />"
form += "</form>"
$(form)[0].attr('action', url).insertTo(global.document.body).submit()
return # nothing
# handles remote forms submissions
submit: (event)->
form = event.target
if form.attr('data-remote') && !user_cancels(event, form)
event.stop()
send_ajax form, form.attr('action') || global.document.location.href,
method: form.attr('method') || 'get'
params: form.serialize()
spinner: form.attr('data-spinner') || form.first('.spinner')
return # nothing
| true | #
# Ruby On Rails UJS support module
#
# Copyright (C) 2011 PI:NAME:<NAME>END_PI
#
$ = require('dom')
core = require('core')
Ajax = require('ajax')
# tries to cancel the event via confirmation
user_cancels = (event, element)->
message = element.attr('data-confirm')
if message && !confirm(message)
event.stop()
return true
# sends an ajax request
send_ajax = (element, url, options)->
Ajax.load url, core.ext(
create: -> element.emit 'ajax:loading', ajax: @
complete: -> element.emit 'ajax:complete', ajax: @
cuccess: -> element.emit 'ajax:success', ajax: @
cailure: -> element.emit 'ajax:failure', ajax: @
, options)
# global events listeners
$(global.document).on
# handles clicks on the remote links
click: (event)->
return unless link = event.find('a')
url = link.attr('href')
method = link.attr('data-method')
remote = link.attr('data-remote')
return if user_cancels(event, link)
event.stop() if remote || method
if remote
send_ajax link, url,
method: method || 'get'
spinner: link.attr('data-spinner')
else if method
param = $('meta[name=csrf-param]').attr('content')
token = $('meta[name=csrf-token]').attr('content')
form = "<form method='post'>"
form += "<input type='hidden' name='#{param}' value='#{token}' />" if param && token
form += "<input type='hidden' name='_method' value='#{method}' />"
form += "</form>"
$(form)[0].attr('action', url).insertTo(global.document.body).submit()
return # nothing
# handles remote forms submissions
submit: (event)->
form = event.target
if form.attr('data-remote') && !user_cancels(event, form)
event.stop()
send_ajax form, form.attr('action') || global.document.location.href,
method: form.attr('method') || 'get'
params: form.serialize()
spinner: form.attr('data-spinner') || form.first('.spinner')
return # nothing
|
[
{
"context": "('input').focus()\n\t$.get 'https://ipinfo.io?token=83129ddfd413c4', ((response) ->\n\n\t\tif response.country = 'GB'\n\t\t",
"end": 105,
"score": 0.9938249588012695,
"start": 91,
"tag": "PASSWORD",
"value": "83129ddfd413c4"
}
] | js/main.coffee | dieppon/google | 2 | ---
---
jQuery(document).ready ($) ->
$('input').focus()
$.get 'https://ipinfo.io?token=83129ddfd413c4', ((response) ->
if response.country = 'GB'
$('.country').html ' UK'
$('form').attr 'action', 'https://www.google.co.uk/search'
else if response.country = 'FR'
$('.country').html ' France'
$('form').attr 'action', 'https://www.google.fr/search'
$('label').html 'Tapez et appuyez sur la touche ENTRER pour rechercher'
else if response.country = 'ES'
$('.country').html ' España'
$('form').attr 'action', 'https://www.google.es/search'
$('label').html 'Escribe y presiona intro para buscar'
else if response.country = 'DE'
$('.country').html ' Deutschland'
$('form').attr 'action', 'https://www.google.de/search'
$('label').html 'Geben Sie und drücken Sie die ENTER-Taste zur Suche'
else if response.country = 'IT'
$('.country').html ' Italia'
$('form').attr 'action', 'https://www.google.it/search'
$('label').html 'Tipo e premere il tasto INVIO per la ricerca'
else if response.country = 'PT'
$('.country').html ' Portugal'
$('form').attr 'action', 'https://www.google.pt/search'
$('label').html 'Escreva e pressione a tecla ENTER para pesquisa'
else if response.country = 'US'
else
$('.country').html ' ' + response.country
$('form').attr 'action', 'https://www.google.com/search'
return
), 'jsonp'
return | 16712 | ---
---
jQuery(document).ready ($) ->
$('input').focus()
$.get 'https://ipinfo.io?token=<PASSWORD>', ((response) ->
if response.country = 'GB'
$('.country').html ' UK'
$('form').attr 'action', 'https://www.google.co.uk/search'
else if response.country = 'FR'
$('.country').html ' France'
$('form').attr 'action', 'https://www.google.fr/search'
$('label').html 'Tapez et appuyez sur la touche ENTRER pour rechercher'
else if response.country = 'ES'
$('.country').html ' España'
$('form').attr 'action', 'https://www.google.es/search'
$('label').html 'Escribe y presiona intro para buscar'
else if response.country = 'DE'
$('.country').html ' Deutschland'
$('form').attr 'action', 'https://www.google.de/search'
$('label').html 'Geben Sie und drücken Sie die ENTER-Taste zur Suche'
else if response.country = 'IT'
$('.country').html ' Italia'
$('form').attr 'action', 'https://www.google.it/search'
$('label').html 'Tipo e premere il tasto INVIO per la ricerca'
else if response.country = 'PT'
$('.country').html ' Portugal'
$('form').attr 'action', 'https://www.google.pt/search'
$('label').html 'Escreva e pressione a tecla ENTER para pesquisa'
else if response.country = 'US'
else
$('.country').html ' ' + response.country
$('form').attr 'action', 'https://www.google.com/search'
return
), 'jsonp'
return | true | ---
---
jQuery(document).ready ($) ->
$('input').focus()
$.get 'https://ipinfo.io?token=PI:PASSWORD:<PASSWORD>END_PI', ((response) ->
if response.country = 'GB'
$('.country').html ' UK'
$('form').attr 'action', 'https://www.google.co.uk/search'
else if response.country = 'FR'
$('.country').html ' France'
$('form').attr 'action', 'https://www.google.fr/search'
$('label').html 'Tapez et appuyez sur la touche ENTRER pour rechercher'
else if response.country = 'ES'
$('.country').html ' España'
$('form').attr 'action', 'https://www.google.es/search'
$('label').html 'Escribe y presiona intro para buscar'
else if response.country = 'DE'
$('.country').html ' Deutschland'
$('form').attr 'action', 'https://www.google.de/search'
$('label').html 'Geben Sie und drücken Sie die ENTER-Taste zur Suche'
else if response.country = 'IT'
$('.country').html ' Italia'
$('form').attr 'action', 'https://www.google.it/search'
$('label').html 'Tipo e premere il tasto INVIO per la ricerca'
else if response.country = 'PT'
$('.country').html ' Portugal'
$('form').attr 'action', 'https://www.google.pt/search'
$('label').html 'Escreva e pressione a tecla ENTER para pesquisa'
else if response.country = 'US'
else
$('.country').html ' ' + response.country
$('form').attr 'action', 'https://www.google.com/search'
return
), 'jsonp'
return |
[
{
"context": "l = cc.login(\n login:'l'\n password:'p'\n host:'h'\n port:10\n vhost:'",
"end": 418,
"score": 0.9995197057723999,
"start": 417,
"tag": "PASSWORD",
"value": "p"
},
{
"context": "login, 'l'\n assert.equal amqpdsl._password, 'p'\n assert.equal amqpdsl._host, 'h'\n asse",
"end": 559,
"score": 0.9992807507514954,
"start": 558,
"tag": "PASSWORD",
"value": "p"
}
] | test/AmqpDsl-test.coffee | FGRibreau/node-amqp-dsl | 2 | #!/usr/bin/env node
vows = require("vows")
assert = require("assert")
AmqpDsl = require("../")
suite = vows.describe("AmqpDsl test")
suite.options.error = true
suite.addBatch
".login":
topic: -> AmqpDsl
"runs without issue": (cc) ->
assert.isObject cc.login()
assert.instanceOf cc.login(), AmqpDsl
"save options": (cc) ->
amqpdsl = cc.login(
login:'l'
password:'p'
host:'h'
port:10
vhost:'v'
)
assert.equal amqpdsl._login, 'l'
assert.equal amqpdsl._password, 'p'
assert.equal amqpdsl._host, 'h'
assert.equal amqpdsl._port, 10
assert.equal amqpdsl._vhost, 'v'
".on":
topic: -> AmqpDsl.login()
"No events by default": (cc) ->
assert.equal Object.keys(cc._events).length, 0
"can add listener": (cc) ->
a = () -> "ok"
cc.on('ready', a)
assert.equal cc._events['ready'].length, 1
assert.equal cc._events['ready'][0], a
"can't add listen to unsupported event":(cc) ->
assert.throws () ->
cc.on('unsupported', () ->)
"allow multiple listener to the same event":(cc) ->
cc.on('ready', () -> "ok2")
assert.equal cc._events['ready'].length, 2
".exchange":
topic: -> AmqpDsl.login()
"No exchange by default": (cc)->
assert.equal cc._exchanges.length(), 0
"Accept (name, options)":(cc)->
cc.exchange("exchg1", a:true)
assert.equal cc._exchanges.last().name, "exchg1"
assert.equal cc._exchanges.last().options.a, true
"Accept (name, callback)":(cc)->
a = () -> false
cc.exchange("exchg2", a)
assert.equal cc._exchanges.length(), 2
assert.equal cc._exchanges.last().name, "exchg2"
assert.equal cc._exchanges.last().openCallback, a
"Accept (name, options, callback)":(cc)->
a = () -> false
cc.exchange "exchg3", {b:false}, a
assert.equal cc._exchanges.length(), 3
assert.equal cc._exchanges.last().name, "exchg3"
assert.equal cc._exchanges.last().options.b, false
assert.equal cc._exchanges.last().openCallback, a
"queue":
topic: -> AmqpDsl.login()
"No queue by default": (cc)->
assert.equal cc._queues.length(), 0
"Accept (name, options)":(cc)->
cc.queue("queue1", a:true)
assert.equal cc._queues.last().name, "queue1"
assert.equal cc._queues.last().options.a, true
"Accept (name, callback)":(cc)->
a = () -> false
cc.queue("queue2", a)
assert.equal cc._queues.length(), 2
assert.equal cc._queues.last().name, "queue2"
assert.equal cc._queues.last().openCallback, a
"Accept (name, options, callback)":(cc)->
a = () -> false
cc.queue "queue3", {b:false}, a
assert.equal cc._queues.length(), 3
assert.equal cc._queues.last().name, "queue3"
assert.equal cc._queues.last().options.b, false
assert.equal cc._queues.last().openCallback, a
"subscribe":
topic: -> AmqpDsl.login()
"Throw an error if no queue were declared":(cc) ->
assert.equal cc._queues.length(), 0
assert.throws () ->
cc.subscribe(() ->)
"Accept subscribe( callback )":(cc) ->
fn = () -> throw new Error("ok")
cc
.queue("queue")
.subscribe(fn)
queue = cc._queues.last()
assert.deepEqual queue.listenTo[0], [{}, fn]
"Accept subscribe( option, callback )":(cc) ->
fn = () -> throw new Error("ok")
cc
.queue("queue2")
.subscribe(ack:true, fn)
queue = cc._queues.last()
assert.deepEqual queue.listenTo[0], [{ack:true}, fn]
"bind":
topic: -> AmqpDsl.login()
"Accept (name, routingKey)":(cc)->
cc
.queue("queue")
.bind("exch1", "#")
.bind("exch2", "#rk2")
queue = cc._queues.last()
assert.deepEqual queue.bindTo[0], ["exch1", "#"]
assert.deepEqual queue.bindTo[1], ["exch2", "#rk2"]
"connect":
topic: -> AmqpDsl.login()
"Accept ()": (cc)->
cc._connect = (amqp) ->
assert.equal amqp, require('amqp')
cc.connect()
"Accept ( callback )": (cc)->
cc._connect = (amqp) ->
assert.equal amqp, require('amqp')
cc.connect(->)
"Accept ( amqp, callback )": (cc)->
cc._connect = (amqp) -> assert.isTrue amqp
cc.connect(true, ->)
"(private) _connect":
topic: -> AmqpDsl.login()
"(private) _getListenerFor":
topic: -> AmqpDsl.login()
"get listener for a single listener":(cc) ->
a = () -> throw new Error("ok1")
cc.on('error', a)
assert.equal cc._getListenerFor('error'), a
"get listener for multiple listener":(cc) ->
b = () -> assert.ok true
cc.on('error', b)
assert.equal cc._events['error'].length, 2
assert.notEqual cc._getListenerFor('error'), b
assert.throws () ->
cc._getListenerFor('error')()
, "ok1"
"(private) _connectExchanges":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectExchange":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectQueues":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectQueue":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _done":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
if process.argv[1] == __filename
suite.run()
else
suite.export module
| 134761 | #!/usr/bin/env node
vows = require("vows")
assert = require("assert")
AmqpDsl = require("../")
suite = vows.describe("AmqpDsl test")
suite.options.error = true
suite.addBatch
".login":
topic: -> AmqpDsl
"runs without issue": (cc) ->
assert.isObject cc.login()
assert.instanceOf cc.login(), AmqpDsl
"save options": (cc) ->
amqpdsl = cc.login(
login:'l'
password:'<PASSWORD>'
host:'h'
port:10
vhost:'v'
)
assert.equal amqpdsl._login, 'l'
assert.equal amqpdsl._password, '<PASSWORD>'
assert.equal amqpdsl._host, 'h'
assert.equal amqpdsl._port, 10
assert.equal amqpdsl._vhost, 'v'
".on":
topic: -> AmqpDsl.login()
"No events by default": (cc) ->
assert.equal Object.keys(cc._events).length, 0
"can add listener": (cc) ->
a = () -> "ok"
cc.on('ready', a)
assert.equal cc._events['ready'].length, 1
assert.equal cc._events['ready'][0], a
"can't add listen to unsupported event":(cc) ->
assert.throws () ->
cc.on('unsupported', () ->)
"allow multiple listener to the same event":(cc) ->
cc.on('ready', () -> "ok2")
assert.equal cc._events['ready'].length, 2
".exchange":
topic: -> AmqpDsl.login()
"No exchange by default": (cc)->
assert.equal cc._exchanges.length(), 0
"Accept (name, options)":(cc)->
cc.exchange("exchg1", a:true)
assert.equal cc._exchanges.last().name, "exchg1"
assert.equal cc._exchanges.last().options.a, true
"Accept (name, callback)":(cc)->
a = () -> false
cc.exchange("exchg2", a)
assert.equal cc._exchanges.length(), 2
assert.equal cc._exchanges.last().name, "exchg2"
assert.equal cc._exchanges.last().openCallback, a
"Accept (name, options, callback)":(cc)->
a = () -> false
cc.exchange "exchg3", {b:false}, a
assert.equal cc._exchanges.length(), 3
assert.equal cc._exchanges.last().name, "exchg3"
assert.equal cc._exchanges.last().options.b, false
assert.equal cc._exchanges.last().openCallback, a
"queue":
topic: -> AmqpDsl.login()
"No queue by default": (cc)->
assert.equal cc._queues.length(), 0
"Accept (name, options)":(cc)->
cc.queue("queue1", a:true)
assert.equal cc._queues.last().name, "queue1"
assert.equal cc._queues.last().options.a, true
"Accept (name, callback)":(cc)->
a = () -> false
cc.queue("queue2", a)
assert.equal cc._queues.length(), 2
assert.equal cc._queues.last().name, "queue2"
assert.equal cc._queues.last().openCallback, a
"Accept (name, options, callback)":(cc)->
a = () -> false
cc.queue "queue3", {b:false}, a
assert.equal cc._queues.length(), 3
assert.equal cc._queues.last().name, "queue3"
assert.equal cc._queues.last().options.b, false
assert.equal cc._queues.last().openCallback, a
"subscribe":
topic: -> AmqpDsl.login()
"Throw an error if no queue were declared":(cc) ->
assert.equal cc._queues.length(), 0
assert.throws () ->
cc.subscribe(() ->)
"Accept subscribe( callback )":(cc) ->
fn = () -> throw new Error("ok")
cc
.queue("queue")
.subscribe(fn)
queue = cc._queues.last()
assert.deepEqual queue.listenTo[0], [{}, fn]
"Accept subscribe( option, callback )":(cc) ->
fn = () -> throw new Error("ok")
cc
.queue("queue2")
.subscribe(ack:true, fn)
queue = cc._queues.last()
assert.deepEqual queue.listenTo[0], [{ack:true}, fn]
"bind":
topic: -> AmqpDsl.login()
"Accept (name, routingKey)":(cc)->
cc
.queue("queue")
.bind("exch1", "#")
.bind("exch2", "#rk2")
queue = cc._queues.last()
assert.deepEqual queue.bindTo[0], ["exch1", "#"]
assert.deepEqual queue.bindTo[1], ["exch2", "#rk2"]
"connect":
topic: -> AmqpDsl.login()
"Accept ()": (cc)->
cc._connect = (amqp) ->
assert.equal amqp, require('amqp')
cc.connect()
"Accept ( callback )": (cc)->
cc._connect = (amqp) ->
assert.equal amqp, require('amqp')
cc.connect(->)
"Accept ( amqp, callback )": (cc)->
cc._connect = (amqp) -> assert.isTrue amqp
cc.connect(true, ->)
"(private) _connect":
topic: -> AmqpDsl.login()
"(private) _getListenerFor":
topic: -> AmqpDsl.login()
"get listener for a single listener":(cc) ->
a = () -> throw new Error("ok1")
cc.on('error', a)
assert.equal cc._getListenerFor('error'), a
"get listener for multiple listener":(cc) ->
b = () -> assert.ok true
cc.on('error', b)
assert.equal cc._events['error'].length, 2
assert.notEqual cc._getListenerFor('error'), b
assert.throws () ->
cc._getListenerFor('error')()
, "ok1"
"(private) _connectExchanges":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectExchange":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectQueues":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectQueue":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _done":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
if process.argv[1] == __filename
suite.run()
else
suite.export module
| true | #!/usr/bin/env node
vows = require("vows")
assert = require("assert")
AmqpDsl = require("../")
suite = vows.describe("AmqpDsl test")
suite.options.error = true
suite.addBatch
".login":
topic: -> AmqpDsl
"runs without issue": (cc) ->
assert.isObject cc.login()
assert.instanceOf cc.login(), AmqpDsl
"save options": (cc) ->
amqpdsl = cc.login(
login:'l'
password:'PI:PASSWORD:<PASSWORD>END_PI'
host:'h'
port:10
vhost:'v'
)
assert.equal amqpdsl._login, 'l'
assert.equal amqpdsl._password, 'PI:PASSWORD:<PASSWORD>END_PI'
assert.equal amqpdsl._host, 'h'
assert.equal amqpdsl._port, 10
assert.equal amqpdsl._vhost, 'v'
".on":
topic: -> AmqpDsl.login()
"No events by default": (cc) ->
assert.equal Object.keys(cc._events).length, 0
"can add listener": (cc) ->
a = () -> "ok"
cc.on('ready', a)
assert.equal cc._events['ready'].length, 1
assert.equal cc._events['ready'][0], a
"can't add listen to unsupported event":(cc) ->
assert.throws () ->
cc.on('unsupported', () ->)
"allow multiple listener to the same event":(cc) ->
cc.on('ready', () -> "ok2")
assert.equal cc._events['ready'].length, 2
".exchange":
topic: -> AmqpDsl.login()
"No exchange by default": (cc)->
assert.equal cc._exchanges.length(), 0
"Accept (name, options)":(cc)->
cc.exchange("exchg1", a:true)
assert.equal cc._exchanges.last().name, "exchg1"
assert.equal cc._exchanges.last().options.a, true
"Accept (name, callback)":(cc)->
a = () -> false
cc.exchange("exchg2", a)
assert.equal cc._exchanges.length(), 2
assert.equal cc._exchanges.last().name, "exchg2"
assert.equal cc._exchanges.last().openCallback, a
"Accept (name, options, callback)":(cc)->
a = () -> false
cc.exchange "exchg3", {b:false}, a
assert.equal cc._exchanges.length(), 3
assert.equal cc._exchanges.last().name, "exchg3"
assert.equal cc._exchanges.last().options.b, false
assert.equal cc._exchanges.last().openCallback, a
"queue":
topic: -> AmqpDsl.login()
"No queue by default": (cc)->
assert.equal cc._queues.length(), 0
"Accept (name, options)":(cc)->
cc.queue("queue1", a:true)
assert.equal cc._queues.last().name, "queue1"
assert.equal cc._queues.last().options.a, true
"Accept (name, callback)":(cc)->
a = () -> false
cc.queue("queue2", a)
assert.equal cc._queues.length(), 2
assert.equal cc._queues.last().name, "queue2"
assert.equal cc._queues.last().openCallback, a
"Accept (name, options, callback)":(cc)->
a = () -> false
cc.queue "queue3", {b:false}, a
assert.equal cc._queues.length(), 3
assert.equal cc._queues.last().name, "queue3"
assert.equal cc._queues.last().options.b, false
assert.equal cc._queues.last().openCallback, a
"subscribe":
topic: -> AmqpDsl.login()
"Throw an error if no queue were declared":(cc) ->
assert.equal cc._queues.length(), 0
assert.throws () ->
cc.subscribe(() ->)
"Accept subscribe( callback )":(cc) ->
fn = () -> throw new Error("ok")
cc
.queue("queue")
.subscribe(fn)
queue = cc._queues.last()
assert.deepEqual queue.listenTo[0], [{}, fn]
"Accept subscribe( option, callback )":(cc) ->
fn = () -> throw new Error("ok")
cc
.queue("queue2")
.subscribe(ack:true, fn)
queue = cc._queues.last()
assert.deepEqual queue.listenTo[0], [{ack:true}, fn]
"bind":
topic: -> AmqpDsl.login()
"Accept (name, routingKey)":(cc)->
cc
.queue("queue")
.bind("exch1", "#")
.bind("exch2", "#rk2")
queue = cc._queues.last()
assert.deepEqual queue.bindTo[0], ["exch1", "#"]
assert.deepEqual queue.bindTo[1], ["exch2", "#rk2"]
"connect":
topic: -> AmqpDsl.login()
"Accept ()": (cc)->
cc._connect = (amqp) ->
assert.equal amqp, require('amqp')
cc.connect()
"Accept ( callback )": (cc)->
cc._connect = (amqp) ->
assert.equal amqp, require('amqp')
cc.connect(->)
"Accept ( amqp, callback )": (cc)->
cc._connect = (amqp) -> assert.isTrue amqp
cc.connect(true, ->)
"(private) _connect":
topic: -> AmqpDsl.login()
"(private) _getListenerFor":
topic: -> AmqpDsl.login()
"get listener for a single listener":(cc) ->
a = () -> throw new Error("ok1")
cc.on('error', a)
assert.equal cc._getListenerFor('error'), a
"get listener for multiple listener":(cc) ->
b = () -> assert.ok true
cc.on('error', b)
assert.equal cc._events['error'].length, 2
assert.notEqual cc._getListenerFor('error'), b
assert.throws () ->
cc._getListenerFor('error')()
, "ok1"
"(private) _connectExchanges":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectExchange":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectQueues":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _connectQueue":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
"(private) _done":
topic: -> AmqpDsl.login()
"#todo":(cc) ->
if process.argv[1] == __filename
suite.run()
else
suite.export module
|
[
{
"context": "# Copyright (c) 2012, Chi-En Wu\n# All rights reserved.\n#\n# Redistribution and use",
"end": 31,
"score": 0.9996604919433594,
"start": 22,
"tag": "NAME",
"value": "Chi-En Wu"
}
] | static/coffee/graph.coffee | israkir/Visualize-ConceptNet | 3 | # Copyright (c) 2012, Chi-En Wu
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
String.format = (str) ->
if arguments.length == 0
return null
args = Array.prototype.slice.call(arguments, 1)
str.replace(/\{(\d+)\}/g, (m, i) -> args[i])
calculateDistance = (d, maxScore) ->
80 + 50 * Math.log(if d.target.score? then (maxScore - d.target.score) / maxScore + 1 else 1)
class Graph
constructor: (canvas) ->
@canvas = d3.select(canvas)
@force = d3.layout.force()
.charge(-2000)
.size([@width(), @height()])
d3.select(window).on("resize", =>
@force.size([@width(), @height()])
return
)
width: ->
parseInt(@canvas.style("width"), 10)
height: ->
parseInt(@canvas.style("height"), 10)
load: (lang, term) ->
url = String.format("/c/{0}/{1}", lang, term)
d3.json(url, (json) =>
@root = json
@update()
)
return
update: ->
root = @root
root.fixed = true
root.x = @width() / 2
root.y = @height() / 2
# push nodes and links
nodes = [root]
links = []
root.children.forEach (rel) ->
nodes.push(rel)
links.push
source: root
target: rel
rel.children.forEach (target) ->
nodes.push(target)
links.push
source: rel
target: target
# restart the force layout
@force
.nodes(nodes)
.links(links)
.linkDistance((d) -> calculateDistance(d, root.maxScore))
.start()
# update the links
svgLinks = @canvas.selectAll("line.link").data(links)
# create new links
svgLinks.enter().insert("line", ".node")
.attr("class", "link")
# remove unnecessary links
svgLinks.exit().remove()
# update the nodes
svgNodes = @canvas.selectAll("g.node").data(nodes)
# create new nodes
svgNodes.enter().append("g")
.attr("class", (d) -> "node " + d.type)
.call(@force.drag)
svgNodes.append("circle")
.attr("r", 30)
svgNodes.append("text")
.attr("dy", ".31em")
.attr("text-anchor", "middle")
.text((d) -> if d.name.length > 8 then d.name.substr(0, 6) + "..." else d.name)
svgNodes.append("title")
.text((d) -> d.name)
# remove unnecessary nodes
svgNodes.exit().remove()
# register the tick event handler
@force.on("tick", ->
svgLinks
.attr("x1", (d) -> d.source.x)
.attr("y1", (d) -> d.source.y)
.attr("x2", (d) -> d.target.x)
.attr("y2", (d) -> d.target.y)
svgNodes
.attr("transform", (d) -> "translate(" + d.x + "," + d.y + ")")
return
)
return
# export functions
window.Graph = Graph
| 107264 | # Copyright (c) 2012, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
String.format = (str) ->
if arguments.length == 0
return null
args = Array.prototype.slice.call(arguments, 1)
str.replace(/\{(\d+)\}/g, (m, i) -> args[i])
calculateDistance = (d, maxScore) ->
80 + 50 * Math.log(if d.target.score? then (maxScore - d.target.score) / maxScore + 1 else 1)
class Graph
constructor: (canvas) ->
@canvas = d3.select(canvas)
@force = d3.layout.force()
.charge(-2000)
.size([@width(), @height()])
d3.select(window).on("resize", =>
@force.size([@width(), @height()])
return
)
width: ->
parseInt(@canvas.style("width"), 10)
height: ->
parseInt(@canvas.style("height"), 10)
load: (lang, term) ->
url = String.format("/c/{0}/{1}", lang, term)
d3.json(url, (json) =>
@root = json
@update()
)
return
update: ->
root = @root
root.fixed = true
root.x = @width() / 2
root.y = @height() / 2
# push nodes and links
nodes = [root]
links = []
root.children.forEach (rel) ->
nodes.push(rel)
links.push
source: root
target: rel
rel.children.forEach (target) ->
nodes.push(target)
links.push
source: rel
target: target
# restart the force layout
@force
.nodes(nodes)
.links(links)
.linkDistance((d) -> calculateDistance(d, root.maxScore))
.start()
# update the links
svgLinks = @canvas.selectAll("line.link").data(links)
# create new links
svgLinks.enter().insert("line", ".node")
.attr("class", "link")
# remove unnecessary links
svgLinks.exit().remove()
# update the nodes
svgNodes = @canvas.selectAll("g.node").data(nodes)
# create new nodes
svgNodes.enter().append("g")
.attr("class", (d) -> "node " + d.type)
.call(@force.drag)
svgNodes.append("circle")
.attr("r", 30)
svgNodes.append("text")
.attr("dy", ".31em")
.attr("text-anchor", "middle")
.text((d) -> if d.name.length > 8 then d.name.substr(0, 6) + "..." else d.name)
svgNodes.append("title")
.text((d) -> d.name)
# remove unnecessary nodes
svgNodes.exit().remove()
# register the tick event handler
@force.on("tick", ->
svgLinks
.attr("x1", (d) -> d.source.x)
.attr("y1", (d) -> d.source.y)
.attr("x2", (d) -> d.target.x)
.attr("y2", (d) -> d.target.y)
svgNodes
.attr("transform", (d) -> "translate(" + d.x + "," + d.y + ")")
return
)
return
# export functions
window.Graph = Graph
| true | # Copyright (c) 2012, PI:NAME:<NAME>END_PI
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
String.format = (str) ->
if arguments.length == 0
return null
args = Array.prototype.slice.call(arguments, 1)
str.replace(/\{(\d+)\}/g, (m, i) -> args[i])
calculateDistance = (d, maxScore) ->
80 + 50 * Math.log(if d.target.score? then (maxScore - d.target.score) / maxScore + 1 else 1)
class Graph
constructor: (canvas) ->
@canvas = d3.select(canvas)
@force = d3.layout.force()
.charge(-2000)
.size([@width(), @height()])
d3.select(window).on("resize", =>
@force.size([@width(), @height()])
return
)
width: ->
parseInt(@canvas.style("width"), 10)
height: ->
parseInt(@canvas.style("height"), 10)
load: (lang, term) ->
url = String.format("/c/{0}/{1}", lang, term)
d3.json(url, (json) =>
@root = json
@update()
)
return
update: ->
root = @root
root.fixed = true
root.x = @width() / 2
root.y = @height() / 2
# push nodes and links
nodes = [root]
links = []
root.children.forEach (rel) ->
nodes.push(rel)
links.push
source: root
target: rel
rel.children.forEach (target) ->
nodes.push(target)
links.push
source: rel
target: target
# restart the force layout
@force
.nodes(nodes)
.links(links)
.linkDistance((d) -> calculateDistance(d, root.maxScore))
.start()
# update the links
svgLinks = @canvas.selectAll("line.link").data(links)
# create new links
svgLinks.enter().insert("line", ".node")
.attr("class", "link")
# remove unnecessary links
svgLinks.exit().remove()
# update the nodes
svgNodes = @canvas.selectAll("g.node").data(nodes)
# create new nodes
svgNodes.enter().append("g")
.attr("class", (d) -> "node " + d.type)
.call(@force.drag)
svgNodes.append("circle")
.attr("r", 30)
svgNodes.append("text")
.attr("dy", ".31em")
.attr("text-anchor", "middle")
.text((d) -> if d.name.length > 8 then d.name.substr(0, 6) + "..." else d.name)
svgNodes.append("title")
.text((d) -> d.name)
# remove unnecessary nodes
svgNodes.exit().remove()
# register the tick event handler
@force.on("tick", ->
svgLinks
.attr("x1", (d) -> d.source.x)
.attr("y1", (d) -> d.source.y)
.attr("x2", (d) -> d.target.x)
.attr("y2", (d) -> d.target.y)
svgNodes
.attr("transform", (d) -> "translate(" + d.x + "," + d.y + ")")
return
)
return
# export functions
window.Graph = Graph
|
[
{
"context": "./lib/intercom')\n if level.get('slug') is 'wakka-maul'\n yield teacher.update({ $set: { \"studentMiles",
"end": 6403,
"score": 0.5684596300125122,
"start": 6399,
"tag": "NAME",
"value": "maul"
}
] | server/middleware/levels.coffee | Danielv123/codecombat | 0 | mongoose = require 'mongoose'
wrap = require 'co-express'
co = require 'co'
errors = require '../commons/errors'
Level = require '../models/Level'
LevelSession = require '../models/LevelSession'
Prepaid = require '../models/Prepaid'
CourseInstance = require '../models/CourseInstance'
Classroom = require '../models/Classroom'
Campaign = require '../models/Campaign'
Course = require '../models/Course'
User = require '../models/User'
database = require '../commons/database'
codePlay = require '../../app/lib/code-play'
log = require 'winston'
module.exports =
upsertSession: wrap (req, res) ->
level = yield database.getDocFromHandle(req, Level)
if not level
throw new errors.NotFound('Level not found.')
levelOriginal = level.get('original')
sessionQuery =
level:
original: level.get('original').toString()
majorVersion: level.get('version').major
creator: req.user.id
if req.query.team?
sessionQuery.team = req.query.team
if req.query.courseInstance
unless mongoose.Types.ObjectId.isValid(req.query.courseInstance)
throw new errors.UnprocessableEntity('Invalid course instance id')
courseInstance = yield CourseInstance.findById(req.query.courseInstance)
if not courseInstance
throw new errors.NotFound('Course Instance not found.')
if not _.find(courseInstance.get('members'), (memberID) -> memberID.equals(req.user._id))
throw new errors.Forbidden('You must be a member of the Course Instance.')
classroom = yield Classroom.findById(courseInstance.get('classroomID'))
if not classroom
throw new errors.NotFound('Classroom not found.')
courseID = courseInstance.get('courseID')
classroomCourse = _.find(classroom.get('courses'), (c) -> c._id.equals(courseID))
targetLevel = null
for courseLevel in classroomCourse.levels
if courseLevel.original.equals(levelOriginal)
targetLevel = courseLevel
break
if not targetLevel
throw new errors.NotFound('Level not found in classroom courses')
language = targetLevel.primerLanguage or classroom.get('aceConfig.language')
if language
sessionQuery.codeLanguage = language
session = yield LevelSession.findOne(sessionQuery)
if session
return res.send(session.toObject({req: req}))
attrs = sessionQuery
_.extend(attrs, {
state:
complete: false
scripts:
currentScript: null # will not save empty objects
permissions: [
{target: req.user.id, access: 'owner'}
{target: 'public', access: 'write'}
]
codeLanguage: req.user.get('aceConfig')?.language ? 'python'
})
if level.get('type') in ['course', 'course-ladder'] or req.query.course?
# Find the course and classroom that has assigned this level, verify access
# Handle either being given the courseInstance, or having to deduce it
if courseInstance and classroom
courseInstances = [courseInstance]
classrooms = [classroom]
else
courseInstances = yield CourseInstance.find({members: req.user._id})
classroomIDs = (courseInstance.get('classroomID') for courseInstance in courseInstances)
classroomIDs = _.filter _.uniq classroomIDs, false, (objectID='') -> objectID.toString()
classrooms = yield Classroom.find({ _id: { $in: classroomIDs }})
classroomWithLevel = null
targetLevel = null
courseID = null
classroomMap = {}
classroomMap[classroom.id] = classroom for classroom in classrooms
for courseInstance in courseInstances
classroomID = courseInstance.get('classroomID')
continue unless classroomID
classroom = classroomMap[classroomID.toString()]
continue unless classroom
courseID = courseInstance.get('courseID')
classroomCourse = _.find(classroom.get('courses'), (c) -> c._id.equals(courseID))
for courseLevel in classroomCourse.levels
if courseLevel.original.equals(levelOriginal)
targetLevel = courseLevel
classroomWithLevel = classroom
break
break if classroomWithLevel
if course?.id
prepaidIncludesCourse = req.user.prepaidIncludesCourse(course?.id)
else
prepaidIncludesCourse = true
unless classroomWithLevel and prepaidIncludesCourse
throw new errors.PaymentRequired('You must be in a course which includes this level to play it')
course = yield Course.findById(courseID).select('free')
unless course.get('free') or req.user.isEnrolled()
throw new errors.PaymentRequired('You must be enrolled to access this content')
lang = targetLevel.primerLanguage or classroomWithLevel.get('aceConfig')?.language
attrs.codeLanguage = lang if lang
else
requiresSubscription = level.get('requiresSubscription') or (req.user.isOnPremiumServer() and level.get('campaign') and not (level.slug in ['dungeons-of-kithgard', 'gems-in-the-deep', 'shadow-guard', 'forgetful-gemsmith', 'signs-and-portents', 'true-names']))
canPlayAnyway = _.any([
req.user.isPremium(),
level.get('adventurer'),
req.features.codePlay and codePlay.canPlay(level.get('slug'))
])
if req.query.campaign and not canPlayAnyway
# check if the campaign requesting this is game dev hoc, if so then let it work
query = {
_id: mongoose.Types.ObjectId(req.query.campaign),
type: 'hoc'
"levels.#{level.get('original')}": {$exists: true}
}
campaign = yield Campaign.count(query)
if campaign
canPlayAnyway = true
if requiresSubscription and not canPlayAnyway
throw new errors.PaymentRequired('This level requires a subscription to play')
attrs.isForClassroom = course?
session = new LevelSession(attrs)
if classroom # Potentially set intercom trigger flag on teacher
teacher = yield User.findOne({ _id: classroom.get('ownerID') })
reportLevelStarted({teacher, level})
yield session.save()
res.status(201).send(session.toObject({req: req}))
# Notes on the teacher object that the relevant intercom trigger should be activated.
reportLevelStarted = co.wrap ({teacher, level}) ->
intercom = require('../lib/intercom')
if level.get('slug') is 'wakka-maul'
yield teacher.update({ $set: { "studentMilestones.studentStartedWakkaMaul": true } })
update = {
user_id: teacher.get('_id') + '',
email: teacher.get('email'),
custom_attributes:
studentStartedWakkaMaul: true
}
if level.get('slug') is 'a-mayhem-of-munchkins'
yield teacher.update({ $set: { "studentMilestones.studentStartedMayhemOfMunchkins": true } })
update = {
user_id: teacher.get('_id') + '',
email: teacher.get('email'),
custom_attributes:
studentStartedMayhemOfMunchkins: true
}
if update
tries = 0
while tries < 100
tries += 1
try
yield intercom.users.create update
return
catch e
yield new Promise (accept, reject) -> setTimeout(accept, 1000)
log.error "Couldn't update intercom for user #{teacher.get('email')} in 100 tries"
| 170844 | mongoose = require 'mongoose'
wrap = require 'co-express'
co = require 'co'
errors = require '../commons/errors'
Level = require '../models/Level'
LevelSession = require '../models/LevelSession'
Prepaid = require '../models/Prepaid'
CourseInstance = require '../models/CourseInstance'
Classroom = require '../models/Classroom'
Campaign = require '../models/Campaign'
Course = require '../models/Course'
User = require '../models/User'
database = require '../commons/database'
codePlay = require '../../app/lib/code-play'
log = require 'winston'
module.exports =
upsertSession: wrap (req, res) ->
level = yield database.getDocFromHandle(req, Level)
if not level
throw new errors.NotFound('Level not found.')
levelOriginal = level.get('original')
sessionQuery =
level:
original: level.get('original').toString()
majorVersion: level.get('version').major
creator: req.user.id
if req.query.team?
sessionQuery.team = req.query.team
if req.query.courseInstance
unless mongoose.Types.ObjectId.isValid(req.query.courseInstance)
throw new errors.UnprocessableEntity('Invalid course instance id')
courseInstance = yield CourseInstance.findById(req.query.courseInstance)
if not courseInstance
throw new errors.NotFound('Course Instance not found.')
if not _.find(courseInstance.get('members'), (memberID) -> memberID.equals(req.user._id))
throw new errors.Forbidden('You must be a member of the Course Instance.')
classroom = yield Classroom.findById(courseInstance.get('classroomID'))
if not classroom
throw new errors.NotFound('Classroom not found.')
courseID = courseInstance.get('courseID')
classroomCourse = _.find(classroom.get('courses'), (c) -> c._id.equals(courseID))
targetLevel = null
for courseLevel in classroomCourse.levels
if courseLevel.original.equals(levelOriginal)
targetLevel = courseLevel
break
if not targetLevel
throw new errors.NotFound('Level not found in classroom courses')
language = targetLevel.primerLanguage or classroom.get('aceConfig.language')
if language
sessionQuery.codeLanguage = language
session = yield LevelSession.findOne(sessionQuery)
if session
return res.send(session.toObject({req: req}))
attrs = sessionQuery
_.extend(attrs, {
state:
complete: false
scripts:
currentScript: null # will not save empty objects
permissions: [
{target: req.user.id, access: 'owner'}
{target: 'public', access: 'write'}
]
codeLanguage: req.user.get('aceConfig')?.language ? 'python'
})
if level.get('type') in ['course', 'course-ladder'] or req.query.course?
# Find the course and classroom that has assigned this level, verify access
# Handle either being given the courseInstance, or having to deduce it
if courseInstance and classroom
courseInstances = [courseInstance]
classrooms = [classroom]
else
courseInstances = yield CourseInstance.find({members: req.user._id})
classroomIDs = (courseInstance.get('classroomID') for courseInstance in courseInstances)
classroomIDs = _.filter _.uniq classroomIDs, false, (objectID='') -> objectID.toString()
classrooms = yield Classroom.find({ _id: { $in: classroomIDs }})
classroomWithLevel = null
targetLevel = null
courseID = null
classroomMap = {}
classroomMap[classroom.id] = classroom for classroom in classrooms
for courseInstance in courseInstances
classroomID = courseInstance.get('classroomID')
continue unless classroomID
classroom = classroomMap[classroomID.toString()]
continue unless classroom
courseID = courseInstance.get('courseID')
classroomCourse = _.find(classroom.get('courses'), (c) -> c._id.equals(courseID))
for courseLevel in classroomCourse.levels
if courseLevel.original.equals(levelOriginal)
targetLevel = courseLevel
classroomWithLevel = classroom
break
break if classroomWithLevel
if course?.id
prepaidIncludesCourse = req.user.prepaidIncludesCourse(course?.id)
else
prepaidIncludesCourse = true
unless classroomWithLevel and prepaidIncludesCourse
throw new errors.PaymentRequired('You must be in a course which includes this level to play it')
course = yield Course.findById(courseID).select('free')
unless course.get('free') or req.user.isEnrolled()
throw new errors.PaymentRequired('You must be enrolled to access this content')
lang = targetLevel.primerLanguage or classroomWithLevel.get('aceConfig')?.language
attrs.codeLanguage = lang if lang
else
requiresSubscription = level.get('requiresSubscription') or (req.user.isOnPremiumServer() and level.get('campaign') and not (level.slug in ['dungeons-of-kithgard', 'gems-in-the-deep', 'shadow-guard', 'forgetful-gemsmith', 'signs-and-portents', 'true-names']))
canPlayAnyway = _.any([
req.user.isPremium(),
level.get('adventurer'),
req.features.codePlay and codePlay.canPlay(level.get('slug'))
])
if req.query.campaign and not canPlayAnyway
# check if the campaign requesting this is game dev hoc, if so then let it work
query = {
_id: mongoose.Types.ObjectId(req.query.campaign),
type: 'hoc'
"levels.#{level.get('original')}": {$exists: true}
}
campaign = yield Campaign.count(query)
if campaign
canPlayAnyway = true
if requiresSubscription and not canPlayAnyway
throw new errors.PaymentRequired('This level requires a subscription to play')
attrs.isForClassroom = course?
session = new LevelSession(attrs)
if classroom # Potentially set intercom trigger flag on teacher
teacher = yield User.findOne({ _id: classroom.get('ownerID') })
reportLevelStarted({teacher, level})
yield session.save()
res.status(201).send(session.toObject({req: req}))
# Notes on the teacher object that the relevant intercom trigger should be
# activated: when a student starts a milestone level, set the matching
# studentMilestones flag on the teacher and push a custom attribute to
# intercom, retrying up to 100 times (one second apart) on failure.
reportLevelStarted = co.wrap ({teacher, level}) ->
  intercom = require('../lib/intercom')
  update = null
  # FIX(review): the slug literal here was corrupted ('wakka-<NAME>');
  # restored to 'wakka-maul' to match the studentStartedWakkaMaul milestone
  # key set below — confirm against the level catalog.
  if level.get('slug') is 'wakka-maul'
    yield teacher.update({ $set: { "studentMilestones.studentStartedWakkaMaul": true } })
    update = {
      user_id: teacher.get('_id') + '',
      email: teacher.get('email'),
      custom_attributes:
        studentStartedWakkaMaul: true
    }
  if level.get('slug') is 'a-mayhem-of-munchkins'
    yield teacher.update({ $set: { "studentMilestones.studentStartedMayhemOfMunchkins": true } })
    update = {
      user_id: teacher.get('_id') + '',
      email: teacher.get('email'),
      custom_attributes:
        studentStartedMayhemOfMunchkins: true
    }
  if update
    tries = 0
    while tries < 100
      tries += 1
      try
        yield intercom.users.create update
        return
      catch e
        # Intercom hiccup: wait one second, then retry.
        yield new Promise (accept, reject) -> setTimeout(accept, 1000)
    log.error "Couldn't update intercom for user #{teacher.get('email')} in 100 tries"
| true | mongoose = require 'mongoose'
wrap = require 'co-express'
co = require 'co'
errors = require '../commons/errors'
Level = require '../models/Level'
LevelSession = require '../models/LevelSession'
Prepaid = require '../models/Prepaid'
CourseInstance = require '../models/CourseInstance'
Classroom = require '../models/Classroom'
Campaign = require '../models/Campaign'
Course = require '../models/Course'
User = require '../models/User'
database = require '../commons/database'
codePlay = require '../../app/lib/code-play'
log = require 'winston'
module.exports =
  # Find the requesting user's existing LevelSession for this level (and
  # optionally team / course instance), or create one with the appropriate
  # code language, permissions, and access checks. Responds with the session.
  upsertSession: wrap (req, res) ->
    level = yield database.getDocFromHandle(req, Level)
    if not level
      throw new errors.NotFound('Level not found.')
    levelOriginal = level.get('original')
    # Sessions are keyed by (level original + major version, creator).
    sessionQuery =
      level:
        original: level.get('original').toString()
        majorVersion: level.get('version').major
      creator: req.user.id
    if req.query.team?
      sessionQuery.team = req.query.team
    if req.query.courseInstance
      # Classroom play: verify membership, find this level in the classroom's
      # course, and pin the session's code language to the classroom's.
      unless mongoose.Types.ObjectId.isValid(req.query.courseInstance)
        throw new errors.UnprocessableEntity('Invalid course instance id')
      courseInstance = yield CourseInstance.findById(req.query.courseInstance)
      if not courseInstance
        throw new errors.NotFound('Course Instance not found.')
      if not _.find(courseInstance.get('members'), (memberID) -> memberID.equals(req.user._id))
        throw new errors.Forbidden('You must be a member of the Course Instance.')
      classroom = yield Classroom.findById(courseInstance.get('classroomID'))
      if not classroom
        throw new errors.NotFound('Classroom not found.')
      courseID = courseInstance.get('courseID')
      classroomCourse = _.find(classroom.get('courses'), (c) -> c._id.equals(courseID))
      targetLevel = null
      for courseLevel in classroomCourse.levels
        if courseLevel.original.equals(levelOriginal)
          targetLevel = courseLevel
          break
      if not targetLevel
        throw new errors.NotFound('Level not found in classroom courses')
      # A per-level primerLanguage overrides the classroom-wide language.
      language = targetLevel.primerLanguage or classroom.get('aceConfig.language')
      if language
        sessionQuery.codeLanguage = language
    session = yield LevelSession.findOne(sessionQuery)
    if session
      return res.send(session.toObject({req: req}))
    # No existing session: build attributes for a new one from the query.
    attrs = sessionQuery
    _.extend(attrs, {
      state:
        complete: false
        scripts:
          currentScript: null # will not save empty objects
      permissions: [
        {target: req.user.id, access: 'owner'}
        {target: 'public', access: 'write'}
      ]
      codeLanguage: req.user.get('aceConfig')?.language ? 'python'
    })
    if level.get('type') in ['course', 'course-ladder'] or req.query.course?
      # Find the course and classroom that has assigned this level, verify access
      # Handle either being given the courseInstance, or having to deduce it
      if courseInstance and classroom
        courseInstances = [courseInstance]
        classrooms = [classroom]
      else
        courseInstances = yield CourseInstance.find({members: req.user._id})
        classroomIDs = (courseInstance.get('classroomID') for courseInstance in courseInstances)
        classroomIDs = _.filter _.uniq classroomIDs, false, (objectID='') -> objectID.toString()
        classrooms = yield Classroom.find({ _id: { $in: classroomIDs }})
      classroomWithLevel = null
      targetLevel = null
      courseID = null
      classroomMap = {}
      classroomMap[classroom.id] = classroom for classroom in classrooms
      for courseInstance in courseInstances
        classroomID = courseInstance.get('classroomID')
        continue unless classroomID
        classroom = classroomMap[classroomID.toString()]
        continue unless classroom
        courseID = courseInstance.get('courseID')
        classroomCourse = _.find(classroom.get('courses'), (c) -> c._id.equals(courseID))
        for courseLevel in classroomCourse.levels
          if courseLevel.original.equals(levelOriginal)
            targetLevel = courseLevel
            classroomWithLevel = classroom
            break
        break if classroomWithLevel
      # NOTE(review): `course` is only assigned further down (in this same
      # branch), so course?.id is undefined here and prepaidIncludesCourse
      # defaults to true on this path — confirm this is intentional.
      if course?.id
        prepaidIncludesCourse = req.user.prepaidIncludesCourse(course?.id)
      else
        prepaidIncludesCourse = true
      unless classroomWithLevel and prepaidIncludesCourse
        throw new errors.PaymentRequired('You must be in a course which includes this level to play it')
      course = yield Course.findById(courseID).select('free')
      unless course.get('free') or req.user.isEnrolled()
        throw new errors.PaymentRequired('You must be enrolled to access this content')
      lang = targetLevel.primerLanguage or classroomWithLevel.get('aceConfig')?.language
      attrs.codeLanguage = lang if lang
    else
      # Campaign play: subscription gating, with several escape hatches
      # (premium, adventurer levels, CodePlay, HoC game-dev campaigns).
      requiresSubscription = level.get('requiresSubscription') or (req.user.isOnPremiumServer() and level.get('campaign') and not (level.slug in ['dungeons-of-kithgard', 'gems-in-the-deep', 'shadow-guard', 'forgetful-gemsmith', 'signs-and-portents', 'true-names']))
      canPlayAnyway = _.any([
        req.user.isPremium(),
        level.get('adventurer'),
        req.features.codePlay and codePlay.canPlay(level.get('slug'))
      ])
      if req.query.campaign and not canPlayAnyway
        # check if the campaign requesting this is game dev hoc, if so then let it work
        query = {
          _id: mongoose.Types.ObjectId(req.query.campaign),
          type: 'hoc'
          "levels.#{level.get('original')}": {$exists: true}
        }
        campaign = yield Campaign.count(query)
        if campaign
          canPlayAnyway = true
      if requiresSubscription and not canPlayAnyway
        throw new errors.PaymentRequired('This level requires a subscription to play')
    attrs.isForClassroom = course?
    session = new LevelSession(attrs)
    if classroom # Potentially set intercom trigger flag on teacher
      teacher = yield User.findOne({ _id: classroom.get('ownerID') })
      reportLevelStarted({teacher, level})
    yield session.save()
    res.status(201).send(session.toObject({req: req}))
# Notes on the teacher object that the relevant intercom trigger should be
# activated: when a student starts a milestone level, set the matching
# studentMilestones flag on the teacher and push a custom attribute to
# intercom, retrying up to 100 times (one second apart) on failure.
reportLevelStarted = co.wrap ({teacher, level}) ->
  intercom = require('../lib/intercom')
  update = null
  # FIX(review): the slug literal here was corrupted by a redaction artifact;
  # restored to 'wakka-maul' to match the studentStartedWakkaMaul milestone
  # key set below — confirm against the level catalog.
  if level.get('slug') is 'wakka-maul'
    yield teacher.update({ $set: { "studentMilestones.studentStartedWakkaMaul": true } })
    update = {
      user_id: teacher.get('_id') + '',
      email: teacher.get('email'),
      custom_attributes:
        studentStartedWakkaMaul: true
    }
  if level.get('slug') is 'a-mayhem-of-munchkins'
    yield teacher.update({ $set: { "studentMilestones.studentStartedMayhemOfMunchkins": true } })
    update = {
      user_id: teacher.get('_id') + '',
      email: teacher.get('email'),
      custom_attributes:
        studentStartedMayhemOfMunchkins: true
    }
  if update
    tries = 0
    while tries < 100
      tries += 1
      try
        yield intercom.users.create update
        return
      catch e
        # Intercom hiccup: wait one second, then retry.
        yield new Promise (accept, reject) -> setTimeout(accept, 1000)
    log.error "Couldn't update intercom for user #{teacher.get('email')} in 100 tries"
|
[
{
"context": " value\n return\n\nCTypeParser::lstypes = ->\n key = undefined\n ret = {}\n for key of @types\n continue if k",
"end": 18235,
"score": 0.9323746562004089,
"start": 18226,
"tag": "KEY",
"value": "undefined"
}
] | deps/npm/node_modules/request/node_modules/http-signature/node_modules/ctype/ctype.coffee | lxe/io.coffee | 0 | #
# * rm - Feb 2011
# * ctype.js
# *
# * This module provides a simple abstraction towards reading and writing
# * different types of binary data. It is designed to use ctio.js and provide a
# * richer and more expressive API on top of it.
# *
# * By default we support the following as built in basic types:
# * int8_t
# * int16_t
# * int32_t
# * uint8_t
# * uint16_t
# * uint32_t
# * uint64_t
# * float
# * double
# * char
# * char[]
# *
# * Each type is returned as a Number, with the exception of char and char[]
# * which are returned as Node Buffers. A char is considered a uint8_t.
# *
# * Requests to read and write data are specified as an array of JSON objects.
# * This is also the same way that one declares structs. Even if just a single
# * value is requested, it must be done as a struct. The array order determines
# * the order that we try and read values. Each entry has the following format
# * with values marked with a * being optional.
# *
# * { key: { type: /type/, value*: /value/, offset*: /offset/ }
# *
# * If offset is defined, we lseek(offset, SEEK_SET) before reading the next
# * value. Value is defined when we're writing out data, otherwise it's ignored.
# *
#
#
# * This is the set of basic types that we support.
# *
# * read The function to call to read in a value from a buffer
# *
# * write The function to call to write a value to a buffer
# *
#
#
# * The following are wrappers around the CType IO low level API. They encode
# * knowledge about the size and return something in the expected format.
#
# Fixed-width read wrappers around ctio: each decodes one scalar from
# `buffer` at `offset` and returns {value, size} where size is the number
# of bytes consumed.
ctReadUint8 = (endian, buffer, offset) ->
  {value: mod_ctio.ruint8(buffer, endian, offset), size: 1}
ctReadUint16 = (endian, buffer, offset) ->
  {value: mod_ctio.ruint16(buffer, endian, offset), size: 2}
ctReadUint32 = (endian, buffer, offset) ->
  {value: mod_ctio.ruint32(buffer, endian, offset), size: 4}
ctReadUint64 = (endian, buffer, offset) ->
  {value: mod_ctio.ruint64(buffer, endian, offset), size: 8}
ctReadSint8 = (endian, buffer, offset) ->
  {value: mod_ctio.rsint8(buffer, endian, offset), size: 1}
ctReadSint16 = (endian, buffer, offset) ->
  {value: mod_ctio.rsint16(buffer, endian, offset), size: 2}
ctReadSint32 = (endian, buffer, offset) ->
  {value: mod_ctio.rsint32(buffer, endian, offset), size: 4}
ctReadSint64 = (endian, buffer, offset) ->
  {value: mod_ctio.rsint64(buffer, endian, offset), size: 8}
ctReadFloat = (endian, buffer, offset) ->
  {value: mod_ctio.rfloat(buffer, endian, offset), size: 4}
ctReadDouble = (endian, buffer, offset) ->
  {value: mod_ctio.rdouble(buffer, endian, offset), size: 8}
#
# * Reads a single character into a node buffer
#
ctReadChar = (endian, buffer, offset) ->
  # A char is first-class: it comes back as a one-byte Buffer, not a Number.
  out = new Buffer(1)
  out[0] = mod_ctio.ruint8(buffer, endian, offset)
  {value: out, size: 1}
ctReadCharArray = (length, endian, buffer, offset) ->
  # Read `length` bytes into a fresh Buffer, one uint8 at a time.
  out = new Buffer(length)
  for idx in [0...length]
    out[idx] = mod_ctio.ruint8(buffer, endian, offset + idx)
  {value: out, size: length}
# Fixed-width write wrappers around ctio: each encodes `value` into `buffer`
# at `offset` and returns the number of bytes written.
ctWriteUint8 = (value, endian, buffer, offset) ->
  mod_ctio.wuint8(value, endian, buffer, offset)
  return 1
ctWriteUint16 = (value, endian, buffer, offset) ->
  mod_ctio.wuint16(value, endian, buffer, offset)
  return 2
ctWriteUint32 = (value, endian, buffer, offset) ->
  mod_ctio.wuint32(value, endian, buffer, offset)
  return 4
ctWriteUint64 = (value, endian, buffer, offset) ->
  mod_ctio.wuint64(value, endian, buffer, offset)
  return 8
ctWriteSint8 = (value, endian, buffer, offset) ->
  mod_ctio.wsint8(value, endian, buffer, offset)
  return 1
ctWriteSint16 = (value, endian, buffer, offset) ->
  mod_ctio.wsint16(value, endian, buffer, offset)
  return 2
ctWriteSint32 = (value, endian, buffer, offset) ->
  mod_ctio.wsint32(value, endian, buffer, offset)
  return 4
ctWriteSint64 = (value, endian, buffer, offset) ->
  mod_ctio.wsint64(value, endian, buffer, offset)
  return 8
ctWriteFloat = (value, endian, buffer, offset) ->
  mod_ctio.wfloat(value, endian, buffer, offset)
  return 4
ctWriteDouble = (value, endian, buffer, offset) ->
  mod_ctio.wdouble(value, endian, buffer, offset)
  return 8
#
# * Writes a single character into a node buffer
#
ctWriteChar = (value, endian, buffer, offset) ->
  # Writes the first byte of `value` (a Buffer) into `buffer` at `offset`.
  throw (new Error("Input must be a buffer")) unless value instanceof Buffer
  # BUG FIX: this called mod_ctio.ruint8 (a *read*, whose signature is
  # (buffer, endian, offset)) with write-style arguments; a write must use
  # wuint8(value, endian, buffer, offset), matching upstream node-ctype.
  mod_ctio.wuint8 value[0], endian, buffer, offset
  1
#
# * We're going to write 0s into the buffer if the string is shorter than the
# * length of the array.
#
ctWriteCharArray = (value, length, endian, buffer, offset) ->
  # Writes `value` (a Buffer) into `buffer` at `offset`, zero-padding out to
  # `length` bytes when the input is shorter. Returns bytes written.
  ii = undefined
  throw (new Error("Input must be a buffer")) unless value instanceof Buffer
  throw (new Error("value length greater than array length")) if value.length > length
  ii = 0
  while ii < value.length and ii < length
    mod_ctio.wuint8 value[ii], endian, buffer, offset + ii
    ii++
  while ii < length
    # BUG FIX: the zero-padding call was missing the `buffer` argument
    # (was: mod_ctio.wuint8 0, endian, offset + ii).
    mod_ctio.wuint8 0, endian, buffer, offset + ii
    ii++
  length
#
# * Each parser has their own set of types. We want to make sure that they each
# * get their own copy as they may need to modify it.
#
ctGetBasicTypes = ->
  # Hand each parser its own shallow copy of the default type table so one
  # parser's modifications never leak into another's.
  copy = {}
  copy[name] = deftypes[name] for name of deftypes
  copy
#
# * Given a string in the form of type[length] we want to split this into an
# * object that extracts that information. We want to note that we could possibly
# * have nested arrays so this should only check the furthest one. It may also be
# * the case that we have no [] pieces, in which case we just return the current
# * type.
#
ctParseType = (str) ->
  # Split a declaration like "type[len]" on its OUTERMOST (rightmost) array
  # suffix, returning {type, len}; a plain type returns {type} only. Nested
  # arrays are left inside `type` for the caller to peel off iteratively.
  throw (new Error("type must be a Javascript string")) unless typeof (str) is "string"
  close = str.lastIndexOf("]")
  if close is -1
    throw (new Error("found invalid type with '[' but " + "no corresponding ']'")) unless str.lastIndexOf("[") is -1
    return (type: str)
  open = str.lastIndexOf("[")
  throw (new Error("found invalid type with ']' but " + "no corresponding '['")) if open is -1
  throw (new Error("malformed type, ']' appears before '['")) if open >= close
  type: str.substring(0, open)
  len: str.substring(open + 1, close)
#
# * Given a request validate that all of the fields for it are valid and make
# * sense. This includes verifying the following notions:
# * - Each type requested is present in types
# * - Only allow a name for a field to be specified once
# * - If an array is specified, validate that the requested field exists and
# * comes before it.
# * - If fields is defined, check that each entry has the occurrence of field
#
# Validate a read/write definition `def` against the known `types`:
# - def is a non-empty array of single-key objects, each with a "type"
# - no field name appears twice
# - non-numeric array lengths must refer to an earlier (already seen) field
# - if `fields` is given, every entry must also carry each of those keys
# Throws on the first violation; returns nothing on success.
ctCheckReq = (def, types, fields) ->
  ii = undefined
  jj = undefined
  req = undefined
  keys = undefined
  key = undefined
  found = {}
  throw (new Error("definition is not an array")) unless def instanceof Array
  throw (new Error("definition must have at least one element")) if def.length is 0
  ii = 0
  while ii < def.length
    req = def[ii]
    throw (new Error("definition must be an array of" + "objects")) unless req instanceof Object
    keys = Object.keys(req)
    throw (new Error("definition entry must only have " + "one key")) unless keys.length is 1
    throw (new Error("Specified name already " + "specified: " + keys[0])) if keys[0] of found
    throw (new Error("missing required type definition")) unless "type" of req[keys[0]]
    key = ctParseType(req[keys[0]]["type"])
    #
    # * We may have nested arrays, we need to check the validity of
    # * the types until the len field is undefined in key. However,
    # * each time len is defined we need to verify it is either an
    # * integer or corresponds to an already seen key.
    #
    while key["len"] isnt `undefined`
      throw (new Error("Given an array " + "length without a matching type")) unless key["len"] of found if isNaN(parseInt(key["len"], 10))
      key = ctParseType(key["type"])
    # Now we can validate if the type is valid
    throw (new Error("type not found or typdefed: " + key["type"])) unless key["type"] of types
    # Check for any required fields
    if fields isnt `undefined`
      jj = 0
      while jj < fields.length
        throw (new Error("Missing required " + "field: " + fields[jj])) unless fields[jj] of req[keys[0]]
        jj++
    found[keys[0]] = true
    ii++
  return
#
# * Create a new instance of the parser. Each parser has its own store of
# * typedefs and endianness. Conf is an object with the following required
# * values:
# *
# * endian Either 'big' or 'little' do determine the endianness we
# * want to read from or write to.
# *
# * And the following optional values:
# *
# * char-type Valid options here are uint8 and int8. If uint8 is
# * specified this changes the default behavior of a single
# * char from being a buffer of a single character to being
# * a uint8_t. If int8, it becomes an int8_t instead.
#
# Constructor. `conf` requires endian: 'big'|'little'; the optional
# 'char-type' of 'uint8'|'int8' makes single chars decode as numbers
# instead of one-byte Buffers. Each instance gets its own type table.
CTypeParser = (conf) ->
  throw (new Error("missing required argument")) unless conf
  throw (new Error("missing required endian value")) unless "endian" of conf
  throw (new Error("Invalid endian type")) if conf["endian"] isnt "big" and conf["endian"] isnt "little"
  throw (new Error("invalid option for char-type: " + conf["char-type"])) if "char-type" of conf and (conf["char-type"] isnt "uint8" and conf["char-type"] isnt "int8")
  @endian = conf["endian"]
  @types = ctGetBasicTypes()
  #
  # * There may be a more graceful way to do this, but this will have to
  # * serve.
  #
  @types["char"] = @types["uint8_t"] if "char-type" of conf and conf["char-type"] is "uint8"
  @types["char"] = @types["int8_t"] if "char-type" of conf and conf["char-type"] is "int8"
  return
#
# * Sets the current endian value for the Parser. If the value is not valid,
# * throws an Error.
# *
# * endian Either 'big' or 'little' do determine the endianness we
# * want to read from or write to.
# *
#
#
# * Returns the current value of the endian value for the parser.
#
#
# * A user has requested to add a type, let us honor their request. Yet, if their
# * request doth spurn us, send them unto the Hells which Dante describes.
# *
# * name The string for the type definition we're adding
# *
# * value Either a string that is a type/array name or an object
# * that describes a struct.
#
# We have a struct, validate it
#
# * Include all of the typedefs, but none of the built in types. This should be
# * treated as read-only.
#
#
# * Given a type string that may have array types that aren't numbers, try and
# * fill them in from the values object. The object should be of the format where
# * indexing into it should return a number for that type.
# *
# * str The type string
# *
# * values An object that can be used to fulfill type information
#
# Rebuild a type string, substituting every symbolic array length with the
# corresponding number from `values` (an object of previously read fields).
# e.g. "uint8_t[n]" with values {n: 3} becomes "uint8_t[3]".
ctResolveArray = (str, values) ->
  ret = ""
  type = ctParseType(str)
  while type["len"] isnt `undefined`
    if isNaN(parseInt(type["len"], 10))
      # Symbolic length: it must resolve to a number in `values`.
      # FIX: error message typo corrected ("sawp" -> "swap").
      throw (new Error("cannot swap in non-number " + "for array value")) unless typeof (values[type["len"]]) is "number"
      ret = "[" + values[type["len"]] + "]" + ret
    else
      ret = "[" + type["len"] + "]" + ret
    type = ctParseType(type["type"])
  ret = type["type"] + ret
  ret
#
# * [private] Either the typedef resolves to another type string or to a struct.
# * If it resolves to a struct, we just pass it off to read struct. If not, we
# * can just pass it off to read entry.
#
#
# * [private] Try and read in the specific entry.
#
#
# * Because we want to special case char[]s this is unfortunately
# * a bit uglier than it really should be. We want to special
# * case char[]s so that we return a node buffer, thus they are a
# * first class type where as all other arrays just call into a
# * generic array routine which calls their data-specific routine
# * the specified number of times.
# *
# * The valid dispatch options we have are:
# * - Array and char => char[] handler
# * - Generic array handler
# * - Generic typedef handler
# * - Basic type handler
#
#
# * [private] Read an array of data
#
#
# * [private] Read a single struct in.
#
# Walk it and handle doing what's necessary
# Resolve all array values
#
# * This is what we were born to do. We read the data from a buffer and return it
# * in an object whose keys match the values from the object.
# *
# * def The array definition of the data to read in
# *
# * buffer The buffer to read data from
# *
# * offset The offset to start writing to
# *
# * Returns an object where each key corresponds to an entry in def and the value
# * is the read value.
#
# Sanity check for arguments
# Sanity check the object definition
#
# * [private] Write out an array of data
#
#
# * [private] Write the specific entry
#
#
# * [private] Write a single struct out.
#
# Now that we've written it out, we can use it for arrays
#
# * Unfortunately, we're stuck with the sins of an initial poor design. Because
# * of that, we are going to have to support the old way of writing data via
# * writeData. There we insert the values that you want to write into the
# * definition. A little baroque. Internally, we use the new model. So we need to
# * just get those values out of there. But to maintain the principle of least
# * surprise, we're not going to modify the input data.
#
getValues = (def) ->
  # Pull the legacy in-definition "value" entries out of `def`, in order,
  # without mutating the caller's definition array.
  for entry in def
    fieldName = Object.keys(entry)[0]
    mod_assert.ok "value" of entry[fieldName]
    entry[fieldName]["value"]
#
# * This is the second half of what we were born to do, write out the data
# * itself. Historically this function required you to put your values in the
# * definition section. This was not the smartest thing to do and a bit of an
# * oversight to be honest. As such, this function now takes a values argument.
# * If values is non-null and non-undefined, it will be used to determine the
# * values. This means that the old method is still supported, but is no longer
# * acceptable.
# *
# * def The array definition of the data to write out with
# * values
# *
# * buffer The buffer to write to
# *
# * offset The offset in the buffer to write to
# *
# * values An array of values to write.
#
#
# * Functions to go to and from 64 bit numbers in a way that is compatible with
# * Javascript limitations. There are two sets. One where the user is okay with
# * an approximation and one where they are definitely not okay with an
# * approximation.
#
#
# * Attempts to convert an array of two integers returned from rsint64 / ruint64
# * into an absolute 64 bit number. If however the value would exceed 2^52 this
# * will instead throw an error. The mantissa in a double is a 52 bit number and
# * rather than potentially give you a value that is an approximation this will
# * error. If you would rather an approximation, please see toApprox64.
# *
# * val An array of two 32-bit integers
#
toAbs64 = (val) ->
  # Combine [hi32, lo32] into one exact JS number; refuses inputs whose high
  # word would exceed a double's 52-bit mantissa (see toApprox64 for that).
  throw (new Error("missing required arg: value")) if val is `undefined`
  throw (new Error("value must be an array")) unless Array.isArray(val)
  throw (new Error("value must be an array of length 2")) unless val.length is 2
  # We have 20 bits worth of precision in this range
  throw (new Error("value would become approximated")) if val[0] >= 0x100000
  (val[0] * 0x100000000) + val[1]
#
# * Will return the 64 bit value as returned in an array from rsint64 / ruint64
# * to a value as close as it can. Note that Javascript stores all numbers as a
# * double and the mantissa only has 52 bits. Thus this version may approximate
# * the value.
# *
# * val An array of two 32-bit integers
#
toApprox64 = (val) ->
  # Like toAbs64, but never throws for large high words: the result may be
  # approximated since a double's mantissa only has 52 bits.
  throw (new Error("missing required arg: value")) if val is `undefined`
  throw (new Error("value must be an array")) unless Array.isArray(val)
  throw (new Error("value must be an array of length 2")) unless val.length is 2
  (val[0] * 0x100000000) + val[1]
parseCTF = (json, conf) ->
  # Build a parser from `conf`, load the CTF JSON type data into it, and
  # hand it back ready for use.
  parser = new CTypeParser(conf)
  mod_ctf.ctfParseJson json, parser
  parser
mod_ctf = require("./ctf.js")
mod_ctio = require("./ctio.js")
mod_assert = require("assert")
# Table of built-in basic types: each maps a type name to its read/write
# wrapper pair. "char[]" is special-cased so char arrays round-trip as
# node Buffers rather than arrays of numbers.
deftypes =
  uint8_t:
    read: ctReadUint8
    write: ctWriteUint8
  uint16_t:
    read: ctReadUint16
    write: ctWriteUint16
  uint32_t:
    read: ctReadUint32
    write: ctWriteUint32
  uint64_t:
    read: ctReadUint64
    write: ctWriteUint64
  int8_t:
    read: ctReadSint8
    write: ctWriteSint8
  int16_t:
    read: ctReadSint16
    write: ctWriteSint16
  int32_t:
    read: ctReadSint32
    write: ctWriteSint32
  int64_t:
    read: ctReadSint64
    write: ctWriteSint64
  float:
    read: ctReadFloat
    write: ctWriteFloat
  double:
    read: ctReadDouble
    write: ctWriteDouble
  char:
    read: ctReadChar
    write: ctWriteChar
  "char[]":
    read: ctReadCharArray
    write: ctWriteCharArray
CTypeParser::setEndian = (endian) ->
  # Switch this parser's endianness; only 'big' and 'little' are accepted.
  unless endian is "big" or endian is "little"
    throw (new Error("invalid endian type, must be big or " + "little"))
  @endian = endian
  return
# Returns the parser's current endianness string ('big' or 'little').
CTypeParser::getEndian = ->
  @endian
# Register a user typedef. `name` must be a plain (non-array) string not
# already in the type table; `value` is either another type string (string
# form may only use fixed-size array lengths) or a struct definition array
# validated via ctCheckReq. Throws on any invalid input.
CTypeParser::typedef = (name, value) ->
  type = undefined
  throw (new (Error("missing required typedef argument: name"))) if name is `undefined`
  throw (new (Error("missing required typedef argument: value"))) if value is `undefined`
  throw (new (Error("the name of a type must be a string"))) unless typeof (name) is "string"
  type = ctParseType(name)
  throw (new Error("Cannot have an array in the typedef name")) if type["len"] isnt `undefined`
  throw (new Error("typedef name already present: " + name)) if name of @types
  throw (new Error("typedef value must either be a string or " + "struct")) if typeof (value) isnt "string" and (value not instanceof Array)
  if typeof (value) is "string"
    type = ctParseType(value)
    # Outside a struct there is no field to resolve a symbolic length from.
    throw (new (Error("typedef value must use " + "fixed size array when outside of a " + "struct"))) if isNaN(parseInt(type["len"], 10)) if type["len"] isnt `undefined`
    @types[name] = value
  else
    ctCheckReq value, @types
    @types[name] = value
  return
CTypeParser::lstypes = ->
  # List only user-registered typedefs, hiding the built-in defaults.
  out = {}
  for name of @types when name not of deftypes
    out[name] = @types[name]
  out
# [private] Resolve a typedef during a read or write. A typedef maps either
# to another type string (delegate to read/writeEntry) or to a struct
# definition (delegate to read/writeStruct); the delegate's result is
# returned to the caller.
CTypeParser::resolveTypedef = (type, dispatch, buffer, offset, value) ->
  pt = undefined
  mod_assert.ok type of @types
  # BUG FIX: a trailing bare `return` previously discarded the delegate's
  # result, so typedef reads always produced `undefined`. The if/else chain
  # is now the function's return value, matching upstream ctype.js.
  if typeof (@types[type]) is "string"
    pt = ctParseType(@types[type])
    if dispatch is "read"
      @readEntry pt, buffer, offset
    else if dispatch is "write"
      @writeEntry value, pt, buffer, offset
    else
      throw (new Error("invalid dispatch type to " + "resolveTypedef"))
  else
    if dispatch is "read"
      @readStruct @types[type], buffer, offset
    else if dispatch is "write"
      @writeStruct value, @types[type], buffer, offset
    else
      throw (new Error("invalid dispatch type to " + "resolveTypedef"))
# [private] Read one entry described by a parsed type object ({type, len?}).
# Dispatch: char[] special case -> generic array -> basic type -> typedef.
# Returns {value, size}.
CTypeParser::readEntry = (type, buffer, offset) ->
  parse = undefined
  len = undefined
  if type["len"] isnt `undefined`
    len = parseInt(type["len"], 10)
    throw (new Error("somehow got a non-numeric length")) if isNaN(len)
    if type["type"] is "char"
      # char[] is first-class: it comes back as a single node Buffer.
      parse = @types["char[]"]["read"](len, @endian, buffer, offset)
    else
      parse = @readArray(type["type"], len, buffer, offset)
  else
    if type["type"] of deftypes
      parse = @types[type["type"]]["read"](@endian, buffer, offset)
    else
      parse = @resolveTypedef(type["type"], "read", buffer, offset)
  parse
# [private] Read `length` consecutive entries of `type` starting at
# `offset`; returns {value: Array, size: total bytes consumed}.
CTypeParser::readArray = (type, length, buffer, offset) ->
  ii = undefined
  ent = undefined
  pt = undefined
  baseOffset = offset
  ret = new Array(length)
  pt = ctParseType(type)
  ii = 0
  while ii < length
    ent = @readEntry(pt, buffer, offset)
    offset += ent["size"]
    ret[ii] = ent["value"]
    ii++
  value: ret
  size: offset - baseOffset
# [private] Read a whole struct definition. Later fields may use earlier
# fields' values as symbolic array lengths (resolved via ctResolveArray);
# an explicit "offset" in an entry seeks relative to the struct's base.
# Returns {value: {field: value, ...}, size: bytes consumed}.
CTypeParser::readStruct = (def, buffer, offset) ->
  parse = undefined
  ii = undefined
  type = undefined
  entry = undefined
  key = undefined
  baseOffset = offset
  ret = {}
  ii = 0
  while ii < def.length
    key = Object.keys(def[ii])[0]
    entry = def[ii][key]
    type = ctParseType(ctResolveArray(entry["type"], ret))
    offset = baseOffset + entry["offset"] if "offset" of entry
    parse = @readEntry(type, buffer, offset)
    offset += parse["size"]
    ret[key] = parse["value"]
    ii++
  value: ret
  size: (offset - baseOffset)
# Public read entry point: validates arguments and the definition, then
# reads a struct and returns an object keyed by the definition's fields.
CTypeParser::readData = (def, buffer, offset) ->
  throw (new Error("missing definition for what we should be" + "parsing")) if def is `undefined`
  throw (new Error("missing buffer for what we should be " + "parsing")) if buffer is `undefined`
  throw (new Error("missing offset for what we should be " + "parsing")) if offset is `undefined`
  ctCheckReq def, @types
  @readStruct(def, buffer, offset)["value"]
# [private] Write `value` (an Array of exactly `length` elements) as
# consecutive entries of `type`; returns total bytes written.
CTypeParser::writeArray = (value, type, length, buffer, offset) ->
  ii = undefined
  pt = undefined
  baseOffset = offset
  throw (new Error("asked to write an array, but value is not " + "an array")) unless value instanceof Array
  throw (new Error("asked to write array of length " + length + " but that does not match value length: " + value.length)) unless value.length is length
  pt = ctParseType(type)
  ii = 0
  while ii < length
    offset += @writeEntry(value[ii], pt, buffer, offset)
    ii++
  offset - baseOffset
# [private] Write one entry described by a parsed type object ({type, len?}).
# Dispatch mirrors readEntry: char[] -> generic array -> basic -> typedef.
# Returns the number of bytes written.
CTypeParser::writeEntry = (value, type, buffer, offset) ->
  len = undefined
  ret = undefined
  if type["len"] isnt `undefined`
    len = parseInt(type["len"], 10)
    throw (new Error("somehow got a non-numeric length")) if isNaN(len)
    if type["type"] is "char"
      ret = @types["char[]"]["write"](value, len, @endian, buffer, offset)
    else
      ret = @writeArray(value, type["type"], len, buffer, offset)
  else
    if type["type"] of deftypes
      ret = @types[type["type"]]["write"](value, @endian, buffer, offset)
    else
      ret = @resolveTypedef(type["type"], "write", buffer, offset, value)
  ret
# [private] Write a whole struct. Values written so far are collected in
# `vals` so later fields can use them as symbolic array lengths; an explicit
# "offset" in an entry seeks relative to the struct's base. Returns the
# final offset after the last write.
CTypeParser::writeStruct = (value, def, buffer, offset) ->
  ii = undefined
  entry = undefined
  type = undefined
  key = undefined
  baseOffset = offset
  vals = {}
  ii = 0
  while ii < def.length
    key = Object.keys(def[ii])[0]
    entry = def[ii][key]
    type = ctParseType(ctResolveArray(entry["type"], vals))
    offset = baseOffset + entry["offset"] if "offset" of entry
    offset += @writeEntry(value[ii], type, buffer, offset)
    # Now that we've written it out, we can use it for arrays
    vals[key] = value[ii]
    ii++
  offset
# Public write entry point. `values` is the modern calling convention; when
# it is null/undefined we fall back to the legacy style where each def
# entry embeds its own "value" (extracted via getValues).
CTypeParser::writeData = (def, buffer, offset, values) ->
  hv = undefined
  throw (new Error("missing definition for what we should be" + "parsing")) if def is `undefined`
  throw (new Error("missing buffer for what we should be " + "parsing")) if buffer is `undefined`
  throw (new Error("missing offset for what we should be " + "parsing")) if offset is `undefined`
  # FIX: was `(values? and values?)` — a duplicated operand; the existential
  # operator alone already means "not null and not undefined".
  hv = values?
  if hv
    throw (new Error("missing values for writing")) unless Array.isArray(values)
    ctCheckReq def, @types
  else
    ctCheckReq def, @types, ["value"]
  @writeStruct (if hv then values else getValues(def)), def, buffer, offset
  return
#
# * Export the few things we actually want to. Currently this is just the CType
# * Parser and ctio.
#
exports.Parser = CTypeParser
exports.toAbs64 = toAbs64
exports.toApprox64 = toApprox64
exports.parseCTF = parseCTF
# Unsigned integer read/write helpers re-exported from ctio.
exports.ruint8 = mod_ctio.ruint8
exports.ruint16 = mod_ctio.ruint16
exports.ruint32 = mod_ctio.ruint32
exports.ruint64 = mod_ctio.ruint64
exports.wuint8 = mod_ctio.wuint8
exports.wuint16 = mod_ctio.wuint16
exports.wuint32 = mod_ctio.wuint32
exports.wuint64 = mod_ctio.wuint64
# Signed integer read/write helpers re-exported from ctio.
exports.rsint8 = mod_ctio.rsint8
exports.rsint16 = mod_ctio.rsint16
exports.rsint32 = mod_ctio.rsint32
exports.rsint64 = mod_ctio.rsint64
exports.wsint8 = mod_ctio.wsint8
exports.wsint16 = mod_ctio.wsint16
exports.wsint32 = mod_ctio.wsint32
exports.wsint64 = mod_ctio.wsint64
# IEEE-754 float/double read/write helpers re-exported from ctio.
exports.rfloat = mod_ctio.rfloat
exports.rdouble = mod_ctio.rdouble
exports.wfloat = mod_ctio.wfloat
exports.wdouble = mod_ctio.wdouble
| 161008 | #
# * rm - Feb 2011
# * ctype.js
# *
# * This module provides a simple abstraction towards reading and writing
# * different types of binary data. It is designed to use ctio.js and provide a
# * richer and more expressive API on top of it.
# *
# * By default we support the following as built in basic types:
# * int8_t
# * int16_t
# * int32_t
# * uint8_t
# * uint16_t
# * uint32_t
# * uint64_t
# * float
# * double
# * char
# * char[]
# *
# * Each type is returned as a Number, with the exception of char and char[]
# * which are returned as Node Buffers. A char is considered a uint8_t.
# *
# * Requests to read and write data are specified as an array of JSON objects.
# * This is also the same way that one declares structs. Even if just a single
# * value is requested, it must be done as a struct. The array order determines
# * the order that we try and read values. Each entry has the following format
# * with values marked with a * being optional.
# *
# * { key: { type: /type/, value*: /value/, offset*: /offset/ }
# *
# * If offset is defined, we lseek(offset, SEEK_SET) before reading the next
# * value. Value is defined when we're writing out data, otherwise it's ignored.
# *
#
#
# * This is the set of basic types that we support.
# *
# * read The function to call to read in a value from a buffer
# *
# * write The function to call to write a value to a buffer
# *
#
#
# * The following are wrappers around the CType IO low level API. They encode
# * knowledge about the size and return something in the expected format.
#
ctReadUint8 = (endian, buffer, offset) ->
val = mod_ctio.ruint8(buffer, endian, offset)
value: val
size: 1
ctReadUint16 = (endian, buffer, offset) ->
val = mod_ctio.ruint16(buffer, endian, offset)
value: val
size: 2
ctReadUint32 = (endian, buffer, offset) ->
val = mod_ctio.ruint32(buffer, endian, offset)
value: val
size: 4
ctReadUint64 = (endian, buffer, offset) ->
val = mod_ctio.ruint64(buffer, endian, offset)
value: val
size: 8
ctReadSint8 = (endian, buffer, offset) ->
val = mod_ctio.rsint8(buffer, endian, offset)
value: val
size: 1
ctReadSint16 = (endian, buffer, offset) ->
val = mod_ctio.rsint16(buffer, endian, offset)
value: val
size: 2
ctReadSint32 = (endian, buffer, offset) ->
val = mod_ctio.rsint32(buffer, endian, offset)
value: val
size: 4
ctReadSint64 = (endian, buffer, offset) ->
val = mod_ctio.rsint64(buffer, endian, offset)
value: val
size: 8
ctReadFloat = (endian, buffer, offset) ->
val = mod_ctio.rfloat(buffer, endian, offset)
value: val
size: 4
ctReadDouble = (endian, buffer, offset) ->
val = mod_ctio.rdouble(buffer, endian, offset)
value: val
size: 8
#
# * Reads a single character into a node buffer
#
ctReadChar = (endian, buffer, offset) ->
res = new Buffer(1)
res[0] = mod_ctio.ruint8(buffer, endian, offset)
value: res
size: 1
ctReadCharArray = (length, endian, buffer, offset) ->
ii = undefined
res = new Buffer(length)
ii = 0
while ii < length
res[ii] = mod_ctio.ruint8(buffer, endian, offset + ii)
ii++
value: res
size: length
ctWriteUint8 = (value, endian, buffer, offset) ->
mod_ctio.wuint8 value, endian, buffer, offset
1
ctWriteUint16 = (value, endian, buffer, offset) ->
mod_ctio.wuint16 value, endian, buffer, offset
2
ctWriteUint32 = (value, endian, buffer, offset) ->
mod_ctio.wuint32 value, endian, buffer, offset
4
ctWriteUint64 = (value, endian, buffer, offset) ->
mod_ctio.wuint64 value, endian, buffer, offset
8
ctWriteSint8 = (value, endian, buffer, offset) ->
mod_ctio.wsint8 value, endian, buffer, offset
1
ctWriteSint16 = (value, endian, buffer, offset) ->
mod_ctio.wsint16 value, endian, buffer, offset
2
ctWriteSint32 = (value, endian, buffer, offset) ->
mod_ctio.wsint32 value, endian, buffer, offset
4
ctWriteSint64 = (value, endian, buffer, offset) ->
mod_ctio.wsint64 value, endian, buffer, offset
8
ctWriteFloat = (value, endian, buffer, offset) ->
mod_ctio.wfloat value, endian, buffer, offset
4
ctWriteDouble = (value, endian, buffer, offset) ->
mod_ctio.wdouble value, endian, buffer, offset
8
#
# * Writes a single character into a node buffer
#
ctWriteChar = (value, endian, buffer, offset) ->
throw (new Error("Input must be a buffer")) unless value instanceof Buffer
mod_ctio.ruint8 value[0], endian, buffer, offset
1
#
# * We're going to write 0s into the buffer if the string is shorter than the
# * length of the array.
#
ctWriteCharArray = (value, length, endian, buffer, offset) ->
ii = undefined
throw (new Error("Input must be a buffer")) unless value instanceof Buffer
throw (new Error("value length greater than array length")) if value.length > length
ii = 0
while ii < value.length and ii < length
mod_ctio.wuint8 value[ii], endian, buffer, offset + ii
ii++
while ii < length
mod_ctio.wuint8 0, endian, offset + ii
ii++
length
#
# * Each parser has their own set of types. We want to make sure that they each
# * get their own copy as they may need to modify it.
#
ctGetBasicTypes = ->
ret = {}
key = undefined
for key of deftypes
ret[key] = deftypes[key]
ret
#
# * Given a string in the form of type[length] we want to split this into an
# * object that extracts that information. We want to note that we could possibly
# * have nested arrays so this should only check the furthest one. It may also be
# * the case that we have no [] pieces, in which case we just return the current
# * type.
#
ctParseType = (str) ->
begInd = undefined
endInd = undefined
type = undefined
len = undefined
throw (new Error("type must be a Javascript string")) unless typeof (str) is "string"
endInd = str.lastIndexOf("]")
if endInd is -1
throw (new Error("found invalid type with '[' but " + "no corresponding ']'")) unless str.lastIndexOf("[") is -1
return (type: str)
begInd = str.lastIndexOf("[")
throw (new Error("found invalid type with ']' but " + "no corresponding '['")) if begInd is -1
throw (new Error("malformed type, ']' appears before '['")) if begInd >= endInd
type = str.substring(0, begInd)
len = str.substring(begInd + 1, endInd)
type: type
len: len
#
# * Given a request validate that all of the fields for it are valid and make
# * sense. This includes verifying the following notions:
# * - Each type requested is present in types
# * - Only allow a name for a field to be specified once
# * - If an array is specified, validate that the requested field exists and
# * comes before it.
# * - If fields is defined, check that each entry has the occurrence of field
#
ctCheckReq = (def, types, fields) ->
ii = undefined
jj = undefined
req = undefined
keys = undefined
key = undefined
found = {}
throw (new Error("definition is not an array")) unless def instanceof Array
throw (new Error("definition must have at least one element")) if def.length is 0
ii = 0
while ii < def.length
req = def[ii]
throw (new Error("definition must be an array of" + "objects")) unless req instanceof Object
keys = Object.keys(req)
throw (new Error("definition entry must only have " + "one key")) unless keys.length is 1
throw (new Error("Specified name already " + "specified: " + keys[0])) if keys[0] of found
throw (new Error("missing required type definition")) unless "type" of req[keys[0]]
key = ctParseType(req[keys[0]]["type"])
#
# * We may have nested arrays, we need to check the validity of
# * the types until the len field is undefined in key. However,
# * each time len is defined we need to verify it is either an
# * integer or corresponds to an already seen key.
#
while key["len"] isnt `undefined`
throw (new Error("Given an array " + "length without a matching type")) unless key["len"] of found if isNaN(parseInt(key["len"], 10))
key = ctParseType(key["type"])
# Now we can validate if the type is valid
throw (new Error("type not found or typdefed: " + key["type"])) unless key["type"] of types
# Check for any required fields
if fields isnt `undefined`
jj = 0
while jj < fields.length
throw (new Error("Missing required " + "field: " + fields[jj])) unless fields[jj] of req[keys[0]]
jj++
found[keys[0]] = true
ii++
return
#
# * Create a new instance of the parser. Each parser has its own store of
# * typedefs and endianness. Conf is an object with the following required
# * values:
# *
# * endian Either 'big' or 'little' do determine the endianness we
# * want to read from or write to.
# *
# * And the following optional values:
# *
# * char-type Valid options here are uint8 and int8. If uint8 is
# * specified this changes the default behavior of a single
# * char from being a buffer of a single character to being
# * a uint8_t. If int8, it becomes an int8_t instead.
#
CTypeParser = (conf) ->
throw (new Error("missing required argument")) unless conf
throw (new Error("missing required endian value")) unless "endian" of conf
throw (new Error("Invalid endian type")) if conf["endian"] isnt "big" and conf["endian"] isnt "little"
throw (new Error("invalid option for char-type: " + conf["char-type"])) if "char-type" of conf and (conf["char-type"] isnt "uint8" and conf["char-type"] isnt "int8")
@endian = conf["endian"]
@types = ctGetBasicTypes()
#
# * There may be a more graceful way to do this, but this will have to
# * serve.
#
@types["char"] = @types["uint8_t"] if "char-type" of conf and conf["char-type"] is "uint8"
@types["char"] = @types["int8_t"] if "char-type" of conf and conf["char-type"] is "int8"
return
#
# * Sets the current endian value for the Parser. If the value is not valid,
# * throws an Error.
# *
# * endian Either 'big' or 'little' do determine the endianness we
# * want to read from or write to.
# *
#
#
# * Returns the current value of the endian value for the parser.
#
#
# * A user has requested to add a type, let us honor their request. Yet, if their
# * request doth spurn us, send them unto the Hells which Dante describes.
# *
# * name The string for the type definition we're adding
# *
# * value Either a string that is a type/array name or an object
# * that describes a struct.
#
# We have a struct, validate it
#
# * Include all of the typedefs, but none of the built in types. This should be
# * treated as read-only.
#
#
# * Given a type string that may have array types that aren't numbers, try and
# * fill them in from the values object. The object should be of the format where
# * indexing into it should return a number for that type.
# *
# * str The type string
# *
# * values An object that can be used to fulfill type information
#
ctResolveArray = (str, values) ->
ret = ""
type = ctParseType(str)
while type["len"] isnt `undefined`
if isNaN(parseInt(type["len"], 10))
throw (new Error("cannot sawp in non-number " + "for array value")) unless typeof (values[type["len"]]) is "number"
ret = "[" + values[type["len"]] + "]" + ret
else
ret = "[" + type["len"] + "]" + ret
type = ctParseType(type["type"])
ret = type["type"] + ret
ret
#
# * [private] Either the typedef resolves to another type string or to a struct.
# * If it resolves to a struct, we just pass it off to read struct. If not, we
# * can just pass it off to read entry.
#
#
# * [private] Try and read in the specific entry.
#
#
# * Because we want to special case char[]s this is unfortunately
# * a bit uglier than it really should be. We want to special
# * case char[]s so that we return a node buffer, thus they are a
# * first class type where as all other arrays just call into a
# * generic array routine which calls their data-specific routine
# * the specified number of times.
# *
# * The valid dispatch options we have are:
# * - Array and char => char[] handler
# * - Generic array handler
# * - Generic typedef handler
# * - Basic type handler
#
#
# * [private] Read an array of data
#
#
# * [private] Read a single struct in.
#
# Walk it and handle doing what's necessary
# Resolve all array values
#
# * This is what we were born to do. We read the data from a buffer and return it
# * in an object whose keys match the values from the object.
# *
# * def The array definition of the data to read in
# *
# * buffer The buffer to read data from
# *
# * offset The offset to start writing to
# *
# * Returns an object where each key corresponds to an entry in def and the value
# * is the read value.
#
# Sanity check for arguments
# Sanity check the object definition
#
# * [private] Write out an array of data
#
#
# * [private] Write the specific entry
#
#
# * [private] Write a single struct out.
#
# Now that we've written it out, we can use it for arrays
#
# * Unfortunately, we're stuck with the sins of an initial poor design. Because
# * of that, we are going to have to support the old way of writing data via
# * writeData. There we insert the values that you want to write into the
# * definition. A little baroque. Internally, we use the new model. So we need to
# * just get those values out of there. But to maintain the principle of least
# * surprise, we're not going to modify the input data.
#
getValues = (def) ->
ii = undefined
out = undefined
key = undefined
out = []
ii = 0
while ii < def.length
key = Object.keys(def[ii])[0]
mod_assert.ok "value" of def[ii][key]
out.push def[ii][key]["value"]
ii++
out
#
# * This is the second half of what we were born to do, write out the data
# * itself. Historically this function required you to put your values in the
# * definition section. This was not the smartest thing to do and a bit of an
# * oversight to be honest. As such, this function now takes a values argument.
# * If values is non-null and non-undefined, it will be used to determine the
# * values. This means that the old method is still supported, but is no longer
# * acceptable.
# *
# * def The array definition of the data to write out with
# * values
# *
# * buffer The buffer to write to
# *
# * offset The offset in the buffer to write to
# *
# * values An array of values to write.
#
#
# * Functions to go to and from 64 bit numbers in a way that is compatible with
# * Javascript limitations. There are two sets. One where the user is okay with
# * an approximation and one where they are definitely not okay with an
# * approximation.
#
#
# * Attempts to convert an array of two integers returned from rsint64 / ruint64
# * into an absolute 64 bit number. If however the value would exceed 2^52 this
# * will instead throw an error. The mantissa in a double is a 52 bit number and
# * rather than potentially give you a value that is an approximation this will
# * error. If you would rather an approximation, please see toApprox64.
# *
# * val An array of two 32-bit integers
#
toAbs64 = (val) ->
throw (new Error("missing required arg: value")) if val is `undefined`
throw (new Error("value must be an array")) unless Array.isArray(val)
throw (new Error("value must be an array of length 2")) unless val.length is 2
# We have 20 bits worth of precision in this range
throw (new Error("value would become approximated")) if val[0] >= 0x100000
val[0] * Math.pow(2, 32) + val[1]
#
# * Will return the 64 bit value as returned in an array from rsint64 / ruint64
# * to a value as close as it can. Note that Javascript stores all numbers as a
# * double and the mantissa only has 52 bits. Thus this version may approximate
# * the value.
# *
# * val An array of two 32-bit integers
#
toApprox64 = (val) ->
throw (new Error("missing required arg: value")) if val is `undefined`
throw (new Error("value must be an array")) unless Array.isArray(val)
throw (new Error("value must be an array of length 2")) unless val.length is 2
Math.pow(2, 32) * val[0] + val[1]
parseCTF = (json, conf) ->
ctype = new CTypeParser(conf)
mod_ctf.ctfParseJson json, ctype
ctype
mod_ctf = require("./ctf.js")
mod_ctio = require("./ctio.js")
mod_assert = require("assert")
deftypes =
uint8_t:
read: ctReadUint8
write: ctWriteUint8
uint16_t:
read: ctReadUint16
write: ctWriteUint16
uint32_t:
read: ctReadUint32
write: ctWriteUint32
uint64_t:
read: ctReadUint64
write: ctWriteUint64
int8_t:
read: ctReadSint8
write: ctWriteSint8
int16_t:
read: ctReadSint16
write: ctWriteSint16
int32_t:
read: ctReadSint32
write: ctWriteSint32
int64_t:
read: ctReadSint64
write: ctWriteSint64
float:
read: ctReadFloat
write: ctWriteFloat
double:
read: ctReadDouble
write: ctWriteDouble
char:
read: ctReadChar
write: ctWriteChar
"char[]":
read: ctReadCharArray
write: ctWriteCharArray
CTypeParser::setEndian = (endian) ->
throw (new Error("invalid endian type, must be big or " + "little")) if endian isnt "big" and endian isnt "little"
@endian = endian
return
CTypeParser::getEndian = ->
@endian
CTypeParser::typedef = (name, value) ->
type = undefined
throw (new (Error("missing required typedef argument: name"))) if name is `undefined`
throw (new (Error("missing required typedef argument: value"))) if value is `undefined`
throw (new (Error("the name of a type must be a string"))) unless typeof (name) is "string"
type = ctParseType(name)
throw (new Error("Cannot have an array in the typedef name")) if type["len"] isnt `undefined`
throw (new Error("typedef name already present: " + name)) if name of @types
throw (new Error("typedef value must either be a string or " + "struct")) if typeof (value) isnt "string" and (value not instanceof Array)
if typeof (value) is "string"
type = ctParseType(value)
throw (new (Error("typedef value must use " + "fixed size array when outside of a " + "struct"))) if isNaN(parseInt(type["len"], 10)) if type["len"] isnt `undefined`
@types[name] = value
else
ctCheckReq value, @types
@types[name] = value
return
CTypeParser::lstypes = ->
key = <KEY>
ret = {}
for key of @types
continue if key of deftypes
ret[key] = @types[key]
ret
CTypeParser::resolveTypedef = (type, dispatch, buffer, offset, value) ->
pt = undefined
mod_assert.ok type of @types
if typeof (@types[type]) is "string"
pt = ctParseType(@types[type])
if dispatch is "read"
@readEntry pt, buffer, offset
else if dispatch is "write"
@writeEntry value, pt, buffer, offset
else
throw (new Error("invalid dispatch type to " + "resolveTypedef"))
else
if dispatch is "read"
@readStruct @types[type], buffer, offset
else if dispatch is "write"
@writeStruct value, @types[type], buffer, offset
else
throw (new Error("invalid dispatch type to " + "resolveTypedef"))
return
CTypeParser::readEntry = (type, buffer, offset) ->
parse = undefined
len = undefined
if type["len"] isnt `undefined`
len = parseInt(type["len"], 10)
throw (new Error("somehow got a non-numeric length")) if isNaN(len)
if type["type"] is "char"
parse = @types["char[]"]["read"](len, @endian, buffer, offset)
else
parse = @readArray(type["type"], len, buffer, offset)
else
if type["type"] of deftypes
parse = @types[type["type"]]["read"](@endian, buffer, offset)
else
parse = @resolveTypedef(type["type"], "read", buffer, offset)
parse
CTypeParser::readArray = (type, length, buffer, offset) ->
ii = undefined
ent = undefined
pt = undefined
baseOffset = offset
ret = new Array(length)
pt = ctParseType(type)
ii = 0
while ii < length
ent = @readEntry(pt, buffer, offset)
offset += ent["size"]
ret[ii] = ent["value"]
ii++
value: ret
size: offset - baseOffset
CTypeParser::readStruct = (def, buffer, offset) ->
parse = undefined
ii = undefined
type = undefined
entry = undefined
key = undefined
baseOffset = offset
ret = {}
ii = 0
while ii < def.length
key = Object.keys(def[ii])[0]
entry = def[ii][key]
type = ctParseType(ctResolveArray(entry["type"], ret))
offset = baseOffset + entry["offset"] if "offset" of entry
parse = @readEntry(type, buffer, offset)
offset += parse["size"]
ret[key] = parse["value"]
ii++
value: ret
size: (offset - baseOffset)
CTypeParser::readData = (def, buffer, offset) ->
throw (new Error("missing definition for what we should be" + "parsing")) if def is `undefined`
throw (new Error("missing buffer for what we should be " + "parsing")) if buffer is `undefined`
throw (new Error("missing offset for what we should be " + "parsing")) if offset is `undefined`
ctCheckReq def, @types
@readStruct(def, buffer, offset)["value"]
CTypeParser::writeArray = (value, type, length, buffer, offset) ->
ii = undefined
pt = undefined
baseOffset = offset
throw (new Error("asked to write an array, but value is not " + "an array")) unless value instanceof Array
throw (new Error("asked to write array of length " + length + " but that does not match value length: " + value.length)) unless value.length is length
pt = ctParseType(type)
ii = 0
while ii < length
offset += @writeEntry(value[ii], pt, buffer, offset)
ii++
offset - baseOffset
CTypeParser::writeEntry = (value, type, buffer, offset) ->
len = undefined
ret = undefined
if type["len"] isnt `undefined`
len = parseInt(type["len"], 10)
throw (new Error("somehow got a non-numeric length")) if isNaN(len)
if type["type"] is "char"
ret = @types["char[]"]["write"](value, len, @endian, buffer, offset)
else
ret = @writeArray(value, type["type"], len, buffer, offset)
else
if type["type"] of deftypes
ret = @types[type["type"]]["write"](value, @endian, buffer, offset)
else
ret = @resolveTypedef(type["type"], "write", buffer, offset, value)
ret
CTypeParser::writeStruct = (value, def, buffer, offset) ->
ii = undefined
entry = undefined
type = undefined
key = undefined
baseOffset = offset
vals = {}
ii = 0
while ii < def.length
key = Object.keys(def[ii])[0]
entry = def[ii][key]
type = ctParseType(ctResolveArray(entry["type"], vals))
offset = baseOffset + entry["offset"] if "offset" of entry
offset += @writeEntry(value[ii], type, buffer, offset)
vals[key] = value[ii]
ii++
offset
CTypeParser::writeData = (def, buffer, offset, values) ->
hv = undefined
throw (new Error("missing definition for what we should be" + "parsing")) if def is `undefined`
throw (new Error("missing buffer for what we should be " + "parsing")) if buffer is `undefined`
throw (new Error("missing offset for what we should be " + "parsing")) if offset is `undefined`
hv = (values? and values?)
if hv
throw (new Error("missing values for writing")) unless Array.isArray(values)
ctCheckReq def, @types
else
ctCheckReq def, @types, ["value"]
@writeStruct (if hv then values else getValues(def)), def, buffer, offset
return
#
# * Export the few things we actually want to. Currently this is just the CType
# * Parser and ctio.
#
exports.Parser = CTypeParser
exports.toAbs64 = toAbs64
exports.toApprox64 = toApprox64
exports.parseCTF = parseCTF
exports.ruint8 = mod_ctio.ruint8
exports.ruint16 = mod_ctio.ruint16
exports.ruint32 = mod_ctio.ruint32
exports.ruint64 = mod_ctio.ruint64
exports.wuint8 = mod_ctio.wuint8
exports.wuint16 = mod_ctio.wuint16
exports.wuint32 = mod_ctio.wuint32
exports.wuint64 = mod_ctio.wuint64
exports.rsint8 = mod_ctio.rsint8
exports.rsint16 = mod_ctio.rsint16
exports.rsint32 = mod_ctio.rsint32
exports.rsint64 = mod_ctio.rsint64
exports.wsint8 = mod_ctio.wsint8
exports.wsint16 = mod_ctio.wsint16
exports.wsint32 = mod_ctio.wsint32
exports.wsint64 = mod_ctio.wsint64
exports.rfloat = mod_ctio.rfloat
exports.rdouble = mod_ctio.rdouble
exports.wfloat = mod_ctio.wfloat
exports.wdouble = mod_ctio.wdouble
| true | #
# * rm - Feb 2011
# * ctype.js
# *
# * This module provides a simple abstraction towards reading and writing
# * different types of binary data. It is designed to use ctio.js and provide a
# * richer and more expressive API on top of it.
# *
# * By default we support the following as built in basic types:
# * int8_t
# * int16_t
# * int32_t
# * uint8_t
# * uint16_t
# * uint32_t
# * uint64_t
# * float
# * double
# * char
# * char[]
# *
# * Each type is returned as a Number, with the exception of char and char[]
# * which are returned as Node Buffers. A char is considered a uint8_t.
# *
# * Requests to read and write data are specified as an array of JSON objects.
# * This is also the same way that one declares structs. Even if just a single
# * value is requested, it must be done as a struct. The array order determines
# * the order that we try and read values. Each entry has the following format
# * with values marked with a * being optional.
# *
# * { key: { type: /type/, value*: /value/, offset*: /offset/ }
# *
# * If offset is defined, we lseek(offset, SEEK_SET) before reading the next
# * value. Value is defined when we're writing out data, otherwise it's ignored.
# *
#
#
# * This is the set of basic types that we support.
# *
# * read The function to call to read in a value from a buffer
# *
# * write The function to call to write a value to a buffer
# *
#
#
# * The following are wrappers around the CType IO low level API. They encode
# * knowledge about the size and return something in the expected format.
#
ctReadUint8 = (endian, buffer, offset) ->
val = mod_ctio.ruint8(buffer, endian, offset)
value: val
size: 1
ctReadUint16 = (endian, buffer, offset) ->
val = mod_ctio.ruint16(buffer, endian, offset)
value: val
size: 2
ctReadUint32 = (endian, buffer, offset) ->
val = mod_ctio.ruint32(buffer, endian, offset)
value: val
size: 4
ctReadUint64 = (endian, buffer, offset) ->
val = mod_ctio.ruint64(buffer, endian, offset)
value: val
size: 8
ctReadSint8 = (endian, buffer, offset) ->
val = mod_ctio.rsint8(buffer, endian, offset)
value: val
size: 1
ctReadSint16 = (endian, buffer, offset) ->
val = mod_ctio.rsint16(buffer, endian, offset)
value: val
size: 2
ctReadSint32 = (endian, buffer, offset) ->
val = mod_ctio.rsint32(buffer, endian, offset)
value: val
size: 4
ctReadSint64 = (endian, buffer, offset) ->
val = mod_ctio.rsint64(buffer, endian, offset)
value: val
size: 8
ctReadFloat = (endian, buffer, offset) ->
val = mod_ctio.rfloat(buffer, endian, offset)
value: val
size: 4
ctReadDouble = (endian, buffer, offset) ->
val = mod_ctio.rdouble(buffer, endian, offset)
value: val
size: 8
#
# * Reads a single character into a node buffer
#
ctReadChar = (endian, buffer, offset) ->
res = new Buffer(1)
res[0] = mod_ctio.ruint8(buffer, endian, offset)
value: res
size: 1
ctReadCharArray = (length, endian, buffer, offset) ->
ii = undefined
res = new Buffer(length)
ii = 0
while ii < length
res[ii] = mod_ctio.ruint8(buffer, endian, offset + ii)
ii++
value: res
size: length
ctWriteUint8 = (value, endian, buffer, offset) ->
mod_ctio.wuint8 value, endian, buffer, offset
1
ctWriteUint16 = (value, endian, buffer, offset) ->
mod_ctio.wuint16 value, endian, buffer, offset
2
ctWriteUint32 = (value, endian, buffer, offset) ->
mod_ctio.wuint32 value, endian, buffer, offset
4
ctWriteUint64 = (value, endian, buffer, offset) ->
mod_ctio.wuint64 value, endian, buffer, offset
8
ctWriteSint8 = (value, endian, buffer, offset) ->
mod_ctio.wsint8 value, endian, buffer, offset
1
ctWriteSint16 = (value, endian, buffer, offset) ->
mod_ctio.wsint16 value, endian, buffer, offset
2
ctWriteSint32 = (value, endian, buffer, offset) ->
mod_ctio.wsint32 value, endian, buffer, offset
4
ctWriteSint64 = (value, endian, buffer, offset) ->
mod_ctio.wsint64 value, endian, buffer, offset
8
ctWriteFloat = (value, endian, buffer, offset) ->
mod_ctio.wfloat value, endian, buffer, offset
4
ctWriteDouble = (value, endian, buffer, offset) ->
mod_ctio.wdouble value, endian, buffer, offset
8
#
# * Writes a single character into a node buffer
#
ctWriteChar = (value, endian, buffer, offset) ->
throw (new Error("Input must be a buffer")) unless value instanceof Buffer
mod_ctio.ruint8 value[0], endian, buffer, offset
1
#
# * We're going to write 0s into the buffer if the string is shorter than the
# * length of the array.
#
ctWriteCharArray = (value, length, endian, buffer, offset) ->
ii = undefined
throw (new Error("Input must be a buffer")) unless value instanceof Buffer
throw (new Error("value length greater than array length")) if value.length > length
ii = 0
while ii < value.length and ii < length
mod_ctio.wuint8 value[ii], endian, buffer, offset + ii
ii++
while ii < length
mod_ctio.wuint8 0, endian, offset + ii
ii++
length
#
# * Each parser has their own set of types. We want to make sure that they each
# * get their own copy as they may need to modify it.
#
ctGetBasicTypes = ->
ret = {}
key = undefined
for key of deftypes
ret[key] = deftypes[key]
ret
#
# * Given a string in the form of type[length] we want to split this into an
# * object that extracts that information. We want to note that we could possibly
# * have nested arrays so this should only check the furthest one. It may also be
# * the case that we have no [] pieces, in which case we just return the current
# * type.
#
ctParseType = (str) ->
begInd = undefined
endInd = undefined
type = undefined
len = undefined
throw (new Error("type must be a Javascript string")) unless typeof (str) is "string"
endInd = str.lastIndexOf("]")
if endInd is -1
throw (new Error("found invalid type with '[' but " + "no corresponding ']'")) unless str.lastIndexOf("[") is -1
return (type: str)
begInd = str.lastIndexOf("[")
throw (new Error("found invalid type with ']' but " + "no corresponding '['")) if begInd is -1
throw (new Error("malformed type, ']' appears before '['")) if begInd >= endInd
type = str.substring(0, begInd)
len = str.substring(begInd + 1, endInd)
type: type
len: len
#
# * Given a request validate that all of the fields for it are valid and make
# * sense. This includes verifying the following notions:
# * - Each type requested is present in types
# * - Only allow a name for a field to be specified once
# * - If an array is specified, validate that the requested field exists and
# * comes before it.
# * - If fields is defined, check that each entry has the occurrence of field
#
ctCheckReq = (def, types, fields) ->
ii = undefined
jj = undefined
req = undefined
keys = undefined
key = undefined
found = {}
throw (new Error("definition is not an array")) unless def instanceof Array
throw (new Error("definition must have at least one element")) if def.length is 0
ii = 0
while ii < def.length
req = def[ii]
throw (new Error("definition must be an array of" + "objects")) unless req instanceof Object
keys = Object.keys(req)
throw (new Error("definition entry must only have " + "one key")) unless keys.length is 1
throw (new Error("Specified name already " + "specified: " + keys[0])) if keys[0] of found
throw (new Error("missing required type definition")) unless "type" of req[keys[0]]
key = ctParseType(req[keys[0]]["type"])
#
# * We may have nested arrays, we need to check the validity of
# * the types until the len field is undefined in key. However,
# * each time len is defined we need to verify it is either an
# * integer or corresponds to an already seen key.
#
while key["len"] isnt `undefined`
throw (new Error("Given an array " + "length without a matching type")) unless key["len"] of found if isNaN(parseInt(key["len"], 10))
key = ctParseType(key["type"])
# Now we can validate if the type is valid
throw (new Error("type not found or typdefed: " + key["type"])) unless key["type"] of types
# Check for any required fields
if fields isnt `undefined`
jj = 0
while jj < fields.length
throw (new Error("Missing required " + "field: " + fields[jj])) unless fields[jj] of req[keys[0]]
jj++
found[keys[0]] = true
ii++
return
#
# * Create a new instance of the parser. Each parser has its own store of
# * typedefs and endianness. Conf is an object with the following required
# * values:
# *
# * endian Either 'big' or 'little' do determine the endianness we
# * want to read from or write to.
# *
# * And the following optional values:
# *
# * char-type Valid options here are uint8 and int8. If uint8 is
# * specified this changes the default behavior of a single
# * char from being a buffer of a single character to being
# * a uint8_t. If int8, it becomes an int8_t instead.
#
# Constructor for the parser. `conf` must supply "endian" ("big" or
# "little"); it may also supply "char-type" ("uint8" or "int8") to make a
# bare `char` decode as an unsigned or signed byte instead of a one-byte
# buffer. Throws an Error on any invalid configuration.
CTypeParser = (conf) ->
  unless conf
    throw new Error("missing required argument")
  unless "endian" of conf
    throw new Error("missing required endian value")
  if conf["endian"] isnt "big" and conf["endian"] isnt "little"
    throw new Error("Invalid endian type")
  if "char-type" of conf
    if conf["char-type"] isnt "uint8" and conf["char-type"] isnt "int8"
      throw new Error("invalid option for char-type: " + conf["char-type"])
  @endian = conf["endian"]
  @types = ctGetBasicTypes()
  # Remap the default "char" behavior when the caller asked for it.
  @types["char"] = @types["uint8_t"] if conf["char-type"] is "uint8"
  @types["char"] = @types["int8_t"] if conf["char-type"] is "int8"
  return
#
# * Sets the current endian value for the Parser. If the value is not valid,
# * throws an Error.
# *
# *	endian		Either 'big' or 'little' to determine the endianness we
# * want to read from or write to.
# *
#
#
# * Returns the current value of the endian value for the parser.
#
#
# * A user has requested to add a type, let us honor their request. Yet, if their
# * request doth spurn us, send them unto the Hells which Dante describes.
# *
# * name The string for the type definition we're adding
# *
# * value Either a string that is a type/array name or an object
# * that describes a struct.
#
# We have a struct, validate it
#
# * Include all of the typedefs, but none of the built in types. This should be
# * treated as read-only.
#
#
# * Given a type string that may have array types that aren't numbers, try and
# * fill them in from the values object. The object should be of the format where
# * indexing into it should return a number for that type.
# *
# * str The type string
# *
# * values An object that can be used to fulfill type information
#
ctResolveArray = (str, values) ->
  ret = ""
  type = ctParseType(str)
  # Walk the array suffixes innermost-first; a symbolic length must
  # resolve to a number via `values`, a numeric one passes through.
  while type["len"] isnt `undefined`
    if isNaN(parseInt(type["len"], 10))
      # Message typo fixed: "sawp" -> "swap".
      throw (new Error("cannot swap in non-number for array value")) unless typeof (values[type["len"]]) is "number"
      ret = "[" + values[type["len"]] + "]" + ret
    else
      ret = "[" + type["len"] + "]" + ret
    type = ctParseType(type["type"])
  # Prepend the resolved base type, e.g. "uint8_t" + "[4]".
  ret = type["type"] + ret
  ret
#
# * [private] Either the typedef resolves to another type string or to a struct.
# * If it resolves to a struct, we just pass it off to read struct. If not, we
# * can just pass it off to read entry.
#
#
# * [private] Try and read in the specific entry.
#
#
# * Because we want to special case char[]s this is unfortunately
# * a bit uglier than it really should be. We want to special
# * case char[]s so that we return a node buffer, thus they are a
# * first class type where as all other arrays just call into a
# * generic array routine which calls their data-specific routine
# * the specified number of times.
# *
# * The valid dispatch options we have are:
# * - Array and char => char[] handler
# * - Generic array handler
# * - Generic typedef handler
# * - Basic type handler
#
#
# * [private] Read an array of data
#
#
# * [private] Read a single struct in.
#
# Walk it and handle doing what's necessary
# Resolve all array values
#
# * This is what we were born to do. We read the data from a buffer and return it
# * in an object whose keys match the values from the object.
# *
# * def The array definition of the data to read in
# *
# * buffer The buffer to read data from
# *
# * offset The offset to start writing to
# *
# * Returns an object where each key corresponds to an entry in def and the value
# * is the read value.
#
# Sanity check for arguments
# Sanity check the object definition
#
# * [private] Write out an array of data
#
#
# * [private] Write the specific entry
#
#
# * [private] Write a single struct out.
#
# Now that we've written it out, we can use it for arrays
#
# * Unfortunately, we're stuck with the sins of an initial poor design. Because
# * of that, we are going to have to support the old way of writing data via
# * writeData. There we insert the values that you want to write into the
# * definition. A little baroque. Internally, we use the new model. So we need to
# * just get those values out of there. But to maintain the principle of least
# * surprise, we're not going to modify the input data.
#
# Pull the legacy in-definition "value" fields out of `def` without
# mutating it; returns them as an array, one per definition entry.
# Each entry is a single-key object and must carry a "value" field.
getValues = (def) ->
  out = []
  for entry in def
    key = Object.keys(entry)[0]
    mod_assert.ok "value" of entry[key]
    out.push entry[key]["value"]
  out
#
# * This is the second half of what we were born to do, write out the data
# * itself. Historically this function required you to put your values in the
# * definition section. This was not the smartest thing to do and a bit of an
# * oversight to be honest. As such, this function now takes a values argument.
# * If values is non-null and non-undefined, it will be used to determine the
# * values. This means that the old method is still supported, but is no longer
# * acceptable.
# *
# * def The array definition of the data to write out with
# * values
# *
# * buffer The buffer to write to
# *
# * offset The offset in the buffer to write to
# *
# * values An array of values to write.
#
#
# * Functions to go to and from 64 bit numbers in a way that is compatible with
# * Javascript limitations. There are two sets. One where the user is okay with
# * an approximation and one where they are definitely not okay with an
# * approximation.
#
#
# * Attempts to convert an array of two integers returned from rsint64 / ruint64
# * into an absolute 64 bit number. If however the value would exceed 2^52 this
# * will instead throw an error. The mantissa in a double is a 52 bit number and
# * rather than potentially give you a value that is an approximation this will
# * error. If you would rather an approximation, please see toApprox64.
# *
# * val An array of two 32-bit integers
#
# Combine a [high32, low32] pair into one exact JS number. Refuses
# values whose high word needs more than 20 bits (i.e. results >= 2^52),
# where a double can no longer represent the integer exactly; use
# toApprox64 when an approximation is acceptable.
toAbs64 = (val) ->
  if val is `undefined`
    throw new Error("missing required arg: value")
  unless Array.isArray(val)
    throw new Error("value must be an array")
  unless val.length is 2
    throw new Error("value must be an array of length 2")
  # A double's mantissa holds 52 bits, so the high word gets at most 20.
  if val[0] >= 0x100000
    throw new Error("value would become approximated")
  val[0] * Math.pow(2, 32) + val[1]
#
# * Will return the 64 bit value as returned in an array from rsint64 / ruint64
# * to a value as close as it can. Note that Javascript stores all numbers as a
# * double and the mantissa only has 52 bits. Thus this version may approximate
# * the value.
# *
# * val An array of two 32-bit integers
#
# Combine a [high32, low32] pair into a single JS number, accepting
# whatever precision loss the double's 52-bit mantissa implies for
# large values.
toApprox64 = (val) ->
  if val is `undefined`
    throw new Error("missing required arg: value")
  unless Array.isArray(val)
    throw new Error("value must be an array")
  unless val.length is 2
    throw new Error("value must be an array of length 2")
  4294967296 * val[0] + val[1]
# Build a CTypeParser from `conf`, load the CTF JSON definitions into
# it, and return the configured parser.
parseCTF = (json, conf) ->
  parser = new CTypeParser(conf)
  mod_ctf.ctfParseJson json, parser
  parser
mod_ctf = require("./ctf.js")
mod_ctio = require("./ctio.js")
mod_assert = require("assert")
#
# Table of built-in types. Each entry maps a C type name to the read and
# write routines (defined earlier in this file) that handle it. "char[]"
# is listed as its own entry so character arrays can be special-cased.
#
deftypes =
  uint8_t:
    read: ctReadUint8
    write: ctWriteUint8
  uint16_t:
    read: ctReadUint16
    write: ctWriteUint16
  uint32_t:
    read: ctReadUint32
    write: ctWriteUint32
  uint64_t:
    read: ctReadUint64
    write: ctWriteUint64
  int8_t:
    read: ctReadSint8
    write: ctWriteSint8
  int16_t:
    read: ctReadSint16
    write: ctWriteSint16
  int32_t:
    read: ctReadSint32
    write: ctWriteSint32
  int64_t:
    read: ctReadSint64
    write: ctWriteSint64
  float:
    read: ctReadFloat
    write: ctWriteFloat
  double:
    read: ctReadDouble
    write: ctWriteDouble
  char:
    read: ctReadChar
    write: ctWriteChar
  "char[]":
    read: ctReadCharArray
    write: ctWriteCharArray
# Change the parser's endianness; only "big" and "little" are accepted,
# anything else throws an Error.
CTypeParser::setEndian = (endian) ->
  unless endian is "big" or endian is "little"
    throw new Error("invalid endian type, must be big or little")
  @endian = endian
  return
# Return the parser's current endianness ("big" or "little").
CTypeParser::getEndian = ->
  @endian
# Register a user typedef under `name`.
#
#   name    A plain (non-array) type name string not already defined.
#
#   value   Either a type/array string (e.g. "uint8_t[4]") or an array
#           of single-key objects describing a struct.
#
# Throws an Error for a missing argument, a non-string or array-suffixed
# name, a duplicate name, or an invalid value.
CTypeParser::typedef = (name, value) ->
  type = undefined
  throw (new (Error("missing required typedef argument: name"))) if name is `undefined`
  throw (new (Error("missing required typedef argument: value"))) if value is `undefined`
  throw (new (Error("the name of a type must be a string"))) unless typeof (name) is "string"
  type = ctParseType(name)
  throw (new Error("Cannot have an array in the typedef name")) if type["len"] isnt `undefined`
  throw (new Error("typedef name already present: " + name)) if name of @types
  throw (new Error("typedef value must either be a string or " + "struct")) if typeof (value) isnt "string" and (value not instanceof Array)
  if typeof (value) is "string"
    # A string alias: any array suffix must have a fixed numeric length,
    # since there is no enclosing struct to resolve a symbolic one.
    type = ctParseType(value)
    throw (new (Error("typedef value must use " + "fixed size array when outside of a " + "struct"))) if isNaN(parseInt(type["len"], 10)) if type["len"] isnt `undefined`
    @types[name] = value
  else
    # A struct definition: validate it before storing.
    ctCheckReq value, @types
    @types[name] = value
  return
# Return a shallow copy of only the user-registered typedefs, omitting
# every built-in type; callers should treat the result as read-only.
# (The previous body contained a corrupted placeholder where the `key`
# declaration used to be; the declaration is unnecessary in CoffeeScript
# and has been removed.)
CTypeParser::lstypes = ->
  ret = {}
  for key of @types
    # Skip the built-ins; they live in deftypes.
    continue if key of deftypes
    ret[key] = @types[key]
  ret
# Resolve a typedef by name and dispatch the requested operation. A
# string typedef is re-parsed and handed to readEntry/writeEntry; a
# struct typedef goes to readStruct/writeStruct.
#
# Bug fix: the previous version ended with a bare `return`, so the
# method always returned `undefined` and its callers (readEntry L37456,
# writeEntry L37525) stored and propagated that instead of the parsed
# result. Each branch now returns the dispatched call's value.
CTypeParser::resolveTypedef = (type, dispatch, buffer, offset, value) ->
  mod_assert.ok type of @types
  if typeof (@types[type]) is "string"
    pt = ctParseType(@types[type])
    if dispatch is "read"
      return @readEntry(pt, buffer, offset)
    if dispatch is "write"
      return @writeEntry(value, pt, buffer, offset)
    throw (new Error("invalid dispatch type to " + "resolveTypedef"))
  else
    if dispatch is "read"
      return @readStruct(@types[type], buffer, offset)
    if dispatch is "write"
      return @writeStruct(value, @types[type], buffer, offset)
    throw (new Error("invalid dispatch type to " + "resolveTypedef"))
# Read one entry described by a parsed type object from `buffer` at
# `offset`. Dispatch order: char[] special case, generic arrays, basic
# types, then typedefs. Returns whatever the chosen handler returns
# (readArray returns { value, size }; basic-type readers and
# resolveTypedef are defined elsewhere in this file).
CTypeParser::readEntry = (type, buffer, offset) ->
  parse = undefined
  len = undefined
  if type["len"] isnt `undefined`
    # Array case: the length must already be a concrete number here.
    len = parseInt(type["len"], 10)
    throw (new Error("somehow got a non-numeric length")) if isNaN(len)
    if type["type"] is "char"
      # char[] is first-class so it can come back as a single buffer.
      parse = @types["char[]"]["read"](len, @endian, buffer, offset)
    else
      parse = @readArray(type["type"], len, buffer, offset)
  else
    if type["type"] of deftypes
      parse = @types[type["type"]]["read"](@endian, buffer, offset)
    else
      # Not a built-in: resolve it as a typedef.
      parse = @resolveTypedef(type["type"], "read", buffer, offset)
  parse
# Read `length` consecutive entries of `type` from `buffer` starting at
# `offset`; returns { value: <array of values>, size: <bytes consumed> }.
CTypeParser::readArray = (type, length, buffer, offset) ->
  start = offset
  parsed = ctParseType(type)
  values = new Array(length)
  idx = 0
  while idx < length
    entry = @readEntry(parsed, buffer, offset)
    offset += entry["size"]
    values[idx] = entry["value"]
    idx++
  value: values
  size: offset - start
# Read a struct definition `def` (an array of single-key entry objects)
# from `buffer` at `offset`. Fields already read are accumulated in
# `ret` so later entries can resolve symbolic array lengths against
# them; an entry's explicit "offset" field rebases the read relative to
# the struct start. Returns { value: {field: value, ...},
# size: <bytes consumed> }.
CTypeParser::readStruct = (def, buffer, offset) ->
  parse = undefined
  ii = undefined
  type = undefined
  entry = undefined
  key = undefined
  baseOffset = offset
  ret = {}
  ii = 0
  while ii < def.length
    key = Object.keys(def[ii])[0]
    entry = def[ii][key]
    # Resolve symbolic array lengths against the fields read so far.
    type = ctParseType(ctResolveArray(entry["type"], ret))
    offset = baseOffset + entry["offset"] if "offset" of entry
    parse = @readEntry(type, buffer, offset)
    offset += parse["size"]
    ret[key] = parse["value"]
    ii++
  value: ret
  size: (offset - baseOffset)
# Public entry point for reads: validate the arguments and the
# definition, then parse `def` out of `buffer` at `offset` and return
# the decoded object (keys match the definition entries).
CTypeParser::readData = (def, buffer, offset) ->
  # Message fix: the first concatenation was missing a space and
  # produced "...what we should beparsing".
  throw (new Error("missing definition for what we should be " + "parsing")) if def is `undefined`
  throw (new Error("missing buffer for what we should be " + "parsing")) if buffer is `undefined`
  throw (new Error("missing offset for what we should be " + "parsing")) if offset is `undefined`
  ctCheckReq def, @types
  @readStruct(def, buffer, offset)["value"]
# Write `length` entries of `type` from the array `value` into `buffer`
# at `offset`; returns the number of bytes written.
CTypeParser::writeArray = (value, type, length, buffer, offset) ->
  unless value instanceof Array
    throw new Error("asked to write an array, but value is not an array")
  unless value.length is length
    throw new Error("asked to write array of length " + length + " but that does not match value length: " + value.length)
  start = offset
  parsed = ctParseType(type)
  idx = 0
  while idx < length
    offset += @writeEntry(value[idx], parsed, buffer, offset)
    idx++
  offset - start
# Write one entry described by a parsed type object into `buffer` at
# `offset`. Mirrors readEntry's dispatch: char[] special case, generic
# arrays, basic types, then typedefs. Returns whatever the chosen
# handler returns (writeArray returns the byte count; the basic-type
# writers and resolveTypedef are defined elsewhere in this file).
CTypeParser::writeEntry = (value, type, buffer, offset) ->
  len = undefined
  ret = undefined
  if type["len"] isnt `undefined`
    # Array case: the length must already be a concrete number here.
    len = parseInt(type["len"], 10)
    throw (new Error("somehow got a non-numeric length")) if isNaN(len)
    if type["type"] is "char"
      # char[] is handled as a single first-class write.
      ret = @types["char[]"]["write"](value, len, @endian, buffer, offset)
    else
      ret = @writeArray(value, type["type"], len, buffer, offset)
  else
    if type["type"] of deftypes
      ret = @types[type["type"]]["write"](value, @endian, buffer, offset)
    else
      # Not a built-in: resolve it as a typedef.
      ret = @resolveTypedef(type["type"], "write", buffer, offset, value)
  ret
# Write a struct definition `def` using the parallel `value` array into
# `buffer` at `offset`. Values written earlier are remembered in `vals`
# so later entries can resolve symbolic array lengths against them; an
# entry's explicit "offset" field rebases the write relative to the
# struct start. Returns the absolute offset after the last write.
CTypeParser::writeStruct = (value, def, buffer, offset) ->
  ii = undefined
  entry = undefined
  type = undefined
  key = undefined
  baseOffset = offset
  vals = {}
  ii = 0
  while ii < def.length
    key = Object.keys(def[ii])[0]
    entry = def[ii][key]
    # Resolve symbolic array lengths against the values written so far.
    type = ctParseType(ctResolveArray(entry["type"], vals))
    offset = baseOffset + entry["offset"] if "offset" of entry
    offset += @writeEntry(value[ii], type, buffer, offset)
    vals[key] = value[ii]
    ii++
  offset
# Public entry point for writes. The `values` array (new style) is
# preferred; when it is null/undefined we fall back to the legacy
# in-definition "value" fields via getValues. Validates the arguments
# and definition, then writes the data via writeStruct.
CTypeParser::writeData = (def, buffer, offset, values) ->
  # Message fix: the first concatenation was missing a space and
  # produced "...what we should beparsing".
  throw (new Error("missing definition for what we should be " + "parsing")) if def is `undefined`
  throw (new Error("missing buffer for what we should be " + "parsing")) if buffer is `undefined`
  throw (new Error("missing offset for what we should be " + "parsing")) if offset is `undefined`
  # `values?` already covers both null and undefined; the previous
  # `(values? and values?)` double test was redundant.
  hv = values?
  if hv
    throw (new Error("missing values for writing")) unless Array.isArray(values)
    ctCheckReq def, @types
  else
    # Legacy path: every definition entry must carry a "value" field.
    ctCheckReq def, @types, ["value"]
  @writeStruct (if hv then values else getValues(def)), def, buffer, offset
  return
#
# * Export the few things we actually want to. Currently this is just the CType
# * Parser and ctio.
#
exports.Parser = CTypeParser
exports.toAbs64 = toAbs64
exports.toApprox64 = toApprox64
exports.parseCTF = parseCTF
# Raw unsigned integer read/write routines re-exported from ctio.
exports.ruint8 = mod_ctio.ruint8
exports.ruint16 = mod_ctio.ruint16
exports.ruint32 = mod_ctio.ruint32
exports.ruint64 = mod_ctio.ruint64
exports.wuint8 = mod_ctio.wuint8
exports.wuint16 = mod_ctio.wuint16
exports.wuint32 = mod_ctio.wuint32
exports.wuint64 = mod_ctio.wuint64
# Raw signed integer read/write routines re-exported from ctio.
exports.rsint8 = mod_ctio.rsint8
exports.rsint16 = mod_ctio.rsint16
exports.rsint32 = mod_ctio.rsint32
exports.rsint64 = mod_ctio.rsint64
exports.wsint8 = mod_ctio.wsint8
exports.wsint16 = mod_ctio.wsint16
exports.wsint32 = mod_ctio.wsint32
exports.wsint64 = mod_ctio.wsint64
# Floating point read/write routines re-exported from ctio.
exports.rfloat = mod_ctio.rfloat
exports.rdouble = mod_ctio.rdouble
exports.wfloat = mod_ctio.wfloat
exports.wdouble = mod_ctio.wdouble
|
[
{
"context": "cated identity\n#\n# Notes:\n# None\n#\n# Author:\n# Paul Robison <paul@nosybore.net>\n\nmodule.exports = (robot) ->\n",
"end": 241,
"score": 0.9998676180839539,
"start": 229,
"tag": "NAME",
"value": "Paul Robison"
},
{
"context": "#\n# Notes:\n# None\n#\n# Author:\n# Paul Robison <paul@nosybore.net>\n\nmodule.exports = (robot) ->\n robot.respond /(c",
"end": 260,
"score": 0.9999295473098755,
"start": 243,
"tag": "EMAIL",
"value": "paul@nosybore.net"
}
] | src/ingress-codebars.coffee | nosybore/hubot-ingress-codebars | 0 | # Description
# Encode player names that exploit sans-serif to obfuscate their identities.
#
# Configuration:
# None
#
# Commands:
# hubot cb <ABC> - translate ABC to obfuscated identity
#
# Notes:
# None
#
# Author:
# Paul Robison <paul@nosybore.net>
module.exports = (robot) ->
robot.respond /(codebar|cb) (.+)/i, (res) ->
codebar = res.match[2]
b0=(codebar.charCodeAt(0)-64).toString(2)
while b0.length < 5
b0="0"+b0
b1=(codebar.charCodeAt(1)-64).toString(2)
while b1.length < 5
b1="0"+b1
b2=(codebar.charCodeAt(2)-64).toString(2)
while b2.length < 5
b2="0"+b2
barcode = b0+b1+b2
barcode = barcode.replace /0/g, "I"
barcode = barcode.replace /1/g, "l"
if barcode.length == 15
res.send "#{barcode}"
else
res.send "Sorry, I can't encode that. :("
| 49241 | # Description
# Encode player names that exploit sans-serif to obfuscate their identities.
#
# Configuration:
# None
#
# Commands:
# hubot cb <ABC> - translate ABC to obfuscated identity
#
# Notes:
# None
#
# Author:
# <NAME> <<EMAIL>>
module.exports = (robot) ->
robot.respond /(codebar|cb) (.+)/i, (res) ->
codebar = res.match[2]
b0=(codebar.charCodeAt(0)-64).toString(2)
while b0.length < 5
b0="0"+b0
b1=(codebar.charCodeAt(1)-64).toString(2)
while b1.length < 5
b1="0"+b1
b2=(codebar.charCodeAt(2)-64).toString(2)
while b2.length < 5
b2="0"+b2
barcode = b0+b1+b2
barcode = barcode.replace /0/g, "I"
barcode = barcode.replace /1/g, "l"
if barcode.length == 15
res.send "#{barcode}"
else
res.send "Sorry, I can't encode that. :("
| true | # Description
# Encode player names that exploit sans-serif to obfuscate their identities.
#
# Configuration:
# None
#
# Commands:
# hubot cb <ABC> - translate ABC to obfuscated identity
#
# Notes:
# None
#
# Author:
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
module.exports = (robot) ->
robot.respond /(codebar|cb) (.+)/i, (res) ->
codebar = res.match[2]
b0=(codebar.charCodeAt(0)-64).toString(2)
while b0.length < 5
b0="0"+b0
b1=(codebar.charCodeAt(1)-64).toString(2)
while b1.length < 5
b1="0"+b1
b2=(codebar.charCodeAt(2)-64).toString(2)
while b2.length < 5
b2="0"+b2
barcode = b0+b1+b2
barcode = barcode.replace /0/g, "I"
barcode = barcode.replace /1/g, "l"
if barcode.length == 15
res.send "#{barcode}"
else
res.send "Sorry, I can't encode that. :("
|
[
{
"context": " Version: 0.1.1\n# * License: MIT / BSD\n# * By: Simon Waldherr\n# *\n# \n\n#jslint browser: true, indent: 2 \n\n#\n# R",
"end": 97,
"score": 0.9998847842216492,
"start": 83,
"tag": "NAME",
"value": "Simon Waldherr"
}
] | colorconverter.coffee | SimonWaldherr/ColorConverter.js | 21 | #
# *
# * ColorConverter .js
# * Version: 0.1.1
# * License: MIT / BSD
# * By: Simon Waldherr
# *
#
#jslint browser: true, indent: 2
#
# RGB2HSL
# HSL2RGB
# RGB2CMYK
# CMYK2RGB
# HEX2RGB
# RGB2HEX
# RGB2YUV
# YUV2RGB
# RGB2HSV
# HSV2RGB
# HSL2Hex
# Hex2HSL
# complexity2int
# mixRGB
# parse
#
colorconv =
RGB2HSL: (RGB) ->
"use strict"
r = Math.max(Math.min(parseInt(RGB[0], 10) / 255, 1), 0)
g = Math.max(Math.min(parseInt(RGB[1], 10) / 255, 1), 0)
b = Math.max(Math.min(parseInt(RGB[2], 10) / 255, 1), 0)
max = Math.max(r, g, b)
min = Math.min(r, g, b)
l = (max + min) / 2
d = undefined
h = undefined
s = undefined
if max isnt min
d = max - min
s = (if l > 0.5 then d / (2 - max - min) else d / (max + min))
if max is r
h = (g - b) / d + ((if g < b then 6 else 0))
else if max is g
h = (b - r) / d + 2
else
h = (r - g) / d + 4
h = h / 6
else
h = s = 0
[
Math.round(h * 360)
Math.round(s * 100)
Math.round(l * 100)
]
HSL2RGB: (HSL) ->
"use strict"
h = Math.max(Math.min(parseInt(HSL[0], 10), 360), 0) / 360
s = Math.max(Math.min(parseInt(HSL[1], 10), 100), 0) / 100
l = Math.max(Math.min(parseInt(HSL[2], 10), 100), 0) / 100
v = undefined
min = undefined
sv = undefined
six = undefined
fract = undefined
vsfract = undefined
r = undefined
g = undefined
b = undefined
if l <= 0.5
v = l * (1 + s)
else
v = l + s - l * s
if v is 0
return [
0
0
0
]
min = 2 * l - v
sv = (v - min) / v
h = 6 * h
six = Math.floor(h)
fract = h - six
vsfract = v * sv * fract
switch six
when 1
r = v - vsfract
g = v
b = min
when 2
r = min
g = v
b = min + vsfract
when 3
r = min
g = v - vsfract
b = v
when 4
r = min + vsfract
g = min
b = v
when 5
r = v
g = min
b = v - vsfract
else
r = v
g = min + vsfract
b = min
[
Math.round(r * 255)
Math.round(g * 255)
Math.round(b * 255)
]
RGB2CMYK: (RGB) ->
"use strict"
red = Math.max(Math.min(parseInt(RGB[0], 10), 255), 0)
green = Math.max(Math.min(parseInt(RGB[1], 10), 255), 0)
blue = Math.max(Math.min(parseInt(RGB[2], 10), 255), 0)
cyan = 1 - red
magenta = 1 - green
yellow = 1 - blue
black = 1
if red or green or blue
black = Math.min(cyan, Math.min(magenta, yellow))
cyan = (cyan - black) / (1 - black)
magenta = (magenta - black) / (1 - black)
yellow = (yellow - black) / (1 - black)
else
black = 1
[
Math.round(cyan * 255)
Math.round(magenta * 255)
Math.round(yellow * 255)
Math.round(black + 254)
]
CMYK2RGB: (CMYK) ->
"use strict"
cyan = Math.max(Math.min(parseInt(CMYK[0], 10) / 255, 1), 0)
magenta = Math.max(Math.min(parseInt(CMYK[1], 10) / 255, 1), 0)
yellow = Math.max(Math.min(parseInt(CMYK[2], 10) / 255, 1), 0)
black = Math.max(Math.min(parseInt(CMYK[3], 10) / 255, 1), 0)
red = (1 - cyan * (1 - black) - black)
green = (1 - magenta * (1 - black) - black)
blue = (1 - yellow * (1 - black) - black)
[
Math.round(red * 255)
Math.round(green * 255)
Math.round(blue * 255)
]
HEX2RGB: (hex) ->
"use strict"
hex = hex.substr(1) if hex.charAt(0) is "#"
return false if (hex.length < 2) or (hex.length > 6)
values = hex.split("")
r = undefined
g = undefined
b = undefined
if hex.length is 2
r = parseInt(values[0].toString() + values[1].toString(), 16)
g = r
b = r
else if hex.length is 3
r = parseInt(values[0].toString() + values[0].toString(), 16)
g = parseInt(values[1].toString() + values[1].toString(), 16)
b = parseInt(values[2].toString() + values[2].toString(), 16)
else if hex.length is 6
r = parseInt(values[0].toString() + values[1].toString(), 16)
g = parseInt(values[2].toString() + values[3].toString(), 16)
b = parseInt(values[4].toString() + values[5].toString(), 16)
else
return false
[
r
g
b
]
RGB2HEX: (RGB) ->
"use strict"
hexr = Math.max(Math.min(parseInt(RGB[0], 10), 255), 0)
hexg = Math.max(Math.min(parseInt(RGB[1], 10), 255), 0)
hexb = Math.max(Math.min(parseInt(RGB[2], 10), 255), 0)
hexr = (if hexr > 15 then hexr.toString(16) else "0" + hexr.toString(16))
hexg = (if hexg > 15 then hexg.toString(16) else "0" + hexg.toString(16))
hexb = (if hexb > 15 then hexb.toString(16) else "0" + hexb.toString(16))
hexr + hexg + hexb
RGB2YUV: (RGB) ->
"use strict"
r = parseInt(RGB[0], 10)
g = parseInt(RGB[1], 10)
b = parseInt(RGB[2], 10)
y = undefined
u = undefined
v = undefined
y = Math.round(0.299 * r + 0.587 * g + 0.114 * b)
u = Math.round((((b - y) * 0.493) + 111) / 222 * 255)
v = Math.round((((r - y) * 0.877) + 155) / 312 * 255)
[
y
u
v
]
YUV2RGB: (YUV) ->
"use strict"
y = parseInt(YUV[0], 10)
u = parseInt(YUV[1], 10) / 255 * 222 - 111
v = parseInt(YUV[2], 10) / 255 * 312 - 155
r = undefined
g = undefined
b = undefined
r = Math.round(y + v / 0.877)
g = Math.round(y - 0.39466 * u - 0.5806 * v)
b = Math.round(y + u / 0.493)
[
r
g
b
]
RGB2HSV: (RGB) ->
"use strict"
r = parseInt(RGB[0], 10) / 255
g = parseInt(RGB[1], 10) / 255
b = parseInt(RGB[2], 10) / 255
max = Math.max(r, g, b)
min = Math.min(r, g, b)
d = max - min
v = max
h = undefined
s = undefined
if max is 0
s = 0
else
s = d / max
if max is min
h = 0
else
switch max
when r
h = (g - b) / d + ((if g < b then 6 else 0))
when g
h = (b - r) / d + 2
when b
h = (r - g) / d + 4
h = h / 6
[
h
s
v
]
HSV2RGB: (HSV) ->
"use strict"
r = undefined
g = undefined
b = undefined
h = HSV[0]
s = HSV[1]
v = HSV[2]
i = Math.floor(h * 6)
f = h * 6 - i
p = v * (1 - s)
q = v * (1 - f * s)
t = v * (1 - (1 - f) * s)
switch i % 6
when 0
r = v
g = t
b = p
when 1
r = q
g = v
b = p
when 2
r = p
g = v
b = t
when 3
r = p
g = q
b = v
when 4
r = t
g = p
b = v
when 5
r = v
g = p
b = q
[
r * 255
g * 255
b * 255
]
HSL2HEX: (HSL) ->
"use strict"
colorconv.RGB2HEX colorconv.HSL2RGB(HSL)
HEX2HSL: (hex) ->
"use strict"
colorconv.RGB2HSL colorconv.HEX2RGB(hex)
complexity2int: (string) ->
"use strict"
valunicode = undefined
keys = string.split("")
numbers = 1
uletter = 1
lletter = 1
special = 1
complex = 0
i = undefined
i = 0
while i < keys.length
valunicode = keys[i].charCodeAt(0)
if (valunicode > 0x40) and (valunicode < 0x5B)
#Großbuchstaben A-Z
uletter += 1
else if (valunicode > 0x60) and (valunicode < 0x7B)
#Kleinbuchstaben a-z
lletter += 1
else if (valunicode > 0x2F) and (valunicode < 0x3A)
#Zahlen 0-9
numbers += 1
#Sonderzeichen
else special += 1 if (valunicode > 0x20) and (valunicode < 0x7F)
i += 1
complex = ((uletter * lletter * numbers * special) + Math.round(uletter * 1.8 + lletter * 1.5 + numbers + special * 2)) - 6
complex
int2RGB: (intval) ->
"use strict"
intval = parseInt(intval, 10) if (typeof intval isnt "number") and (intval isnt false) and (intval isnt true)
if typeof intval is "number"
if (intval < 115) and (intval > 1)
return [
255
153 + intval
153 - intval
]
if (intval > 115) and (intval < 230)
return [
255 - intval
243
63
]
if (intval > 230) or (intval is true)
return [
145
243
63
]
if intval is "none"
return [
204
204
204
]
if intval is true
return [
204
204
204
]
false
complexity2RGB: (string) ->
"use strict"
colorconv.int2RGB colorconv.complexity2int(string)
mixRGB: (RGB1, RGB2) ->
"use strict"
r = undefined
g = undefined
b = undefined
r = parseInt((RGB1[0] + RGB2[0]) / 2, 10)
g = parseInt((RGB1[1] + RGB2[1]) / 2, 10)
b = parseInt((RGB1[2] + RGB2[2]) / 2, 10)
[
r
g
b
]
parse: (input) ->
"use strict"
geregext = undefined
pattern = /((rgb|hsl|#|yuv)(\(([%, ]*([\d]+)[%, ]+([\d]+)[%, ]+([\d]+)[%, ]*)+\)|([a-f0-9]+)))/g
geregext = pattern.exec(input)
if geregext isnt null
switch geregext[2]
when "#"
return colorconv.HEX2RGB(geregext[3])
when "rgb"
return [
]
when "hsl"
return colorconv.HSL2RGB([
])
when "yuv"
return colorconv.YUV2RGB([
])
else
return false
false
| 30418 | #
# *
# * ColorConverter .js
# * Version: 0.1.1
# * License: MIT / BSD
# * By: <NAME>
# *
#
#jslint browser: true, indent: 2
#
# RGB2HSL
# HSL2RGB
# RGB2CMYK
# CMYK2RGB
# HEX2RGB
# RGB2HEX
# RGB2YUV
# YUV2RGB
# RGB2HSV
# HSV2RGB
# HSL2Hex
# Hex2HSL
# complexity2int
# mixRGB
# parse
#
colorconv =
RGB2HSL: (RGB) ->
"use strict"
r = Math.max(Math.min(parseInt(RGB[0], 10) / 255, 1), 0)
g = Math.max(Math.min(parseInt(RGB[1], 10) / 255, 1), 0)
b = Math.max(Math.min(parseInt(RGB[2], 10) / 255, 1), 0)
max = Math.max(r, g, b)
min = Math.min(r, g, b)
l = (max + min) / 2
d = undefined
h = undefined
s = undefined
if max isnt min
d = max - min
s = (if l > 0.5 then d / (2 - max - min) else d / (max + min))
if max is r
h = (g - b) / d + ((if g < b then 6 else 0))
else if max is g
h = (b - r) / d + 2
else
h = (r - g) / d + 4
h = h / 6
else
h = s = 0
[
Math.round(h * 360)
Math.round(s * 100)
Math.round(l * 100)
]
HSL2RGB: (HSL) ->
"use strict"
h = Math.max(Math.min(parseInt(HSL[0], 10), 360), 0) / 360
s = Math.max(Math.min(parseInt(HSL[1], 10), 100), 0) / 100
l = Math.max(Math.min(parseInt(HSL[2], 10), 100), 0) / 100
v = undefined
min = undefined
sv = undefined
six = undefined
fract = undefined
vsfract = undefined
r = undefined
g = undefined
b = undefined
if l <= 0.5
v = l * (1 + s)
else
v = l + s - l * s
if v is 0
return [
0
0
0
]
min = 2 * l - v
sv = (v - min) / v
h = 6 * h
six = Math.floor(h)
fract = h - six
vsfract = v * sv * fract
switch six
when 1
r = v - vsfract
g = v
b = min
when 2
r = min
g = v
b = min + vsfract
when 3
r = min
g = v - vsfract
b = v
when 4
r = min + vsfract
g = min
b = v
when 5
r = v
g = min
b = v - vsfract
else
r = v
g = min + vsfract
b = min
[
Math.round(r * 255)
Math.round(g * 255)
Math.round(b * 255)
]
RGB2CMYK: (RGB) ->
"use strict"
red = Math.max(Math.min(parseInt(RGB[0], 10), 255), 0)
green = Math.max(Math.min(parseInt(RGB[1], 10), 255), 0)
blue = Math.max(Math.min(parseInt(RGB[2], 10), 255), 0)
cyan = 1 - red
magenta = 1 - green
yellow = 1 - blue
black = 1
if red or green or blue
black = Math.min(cyan, Math.min(magenta, yellow))
cyan = (cyan - black) / (1 - black)
magenta = (magenta - black) / (1 - black)
yellow = (yellow - black) / (1 - black)
else
black = 1
[
Math.round(cyan * 255)
Math.round(magenta * 255)
Math.round(yellow * 255)
Math.round(black + 254)
]
CMYK2RGB: (CMYK) ->
"use strict"
cyan = Math.max(Math.min(parseInt(CMYK[0], 10) / 255, 1), 0)
magenta = Math.max(Math.min(parseInt(CMYK[1], 10) / 255, 1), 0)
yellow = Math.max(Math.min(parseInt(CMYK[2], 10) / 255, 1), 0)
black = Math.max(Math.min(parseInt(CMYK[3], 10) / 255, 1), 0)
red = (1 - cyan * (1 - black) - black)
green = (1 - magenta * (1 - black) - black)
blue = (1 - yellow * (1 - black) - black)
[
Math.round(red * 255)
Math.round(green * 255)
Math.round(blue * 255)
]
HEX2RGB: (hex) ->
"use strict"
hex = hex.substr(1) if hex.charAt(0) is "#"
return false if (hex.length < 2) or (hex.length > 6)
values = hex.split("")
r = undefined
g = undefined
b = undefined
if hex.length is 2
r = parseInt(values[0].toString() + values[1].toString(), 16)
g = r
b = r
else if hex.length is 3
r = parseInt(values[0].toString() + values[0].toString(), 16)
g = parseInt(values[1].toString() + values[1].toString(), 16)
b = parseInt(values[2].toString() + values[2].toString(), 16)
else if hex.length is 6
r = parseInt(values[0].toString() + values[1].toString(), 16)
g = parseInt(values[2].toString() + values[3].toString(), 16)
b = parseInt(values[4].toString() + values[5].toString(), 16)
else
return false
[
r
g
b
]
RGB2HEX: (RGB) ->
"use strict"
hexr = Math.max(Math.min(parseInt(RGB[0], 10), 255), 0)
hexg = Math.max(Math.min(parseInt(RGB[1], 10), 255), 0)
hexb = Math.max(Math.min(parseInt(RGB[2], 10), 255), 0)
hexr = (if hexr > 15 then hexr.toString(16) else "0" + hexr.toString(16))
hexg = (if hexg > 15 then hexg.toString(16) else "0" + hexg.toString(16))
hexb = (if hexb > 15 then hexb.toString(16) else "0" + hexb.toString(16))
hexr + hexg + hexb
RGB2YUV: (RGB) ->
"use strict"
r = parseInt(RGB[0], 10)
g = parseInt(RGB[1], 10)
b = parseInt(RGB[2], 10)
y = undefined
u = undefined
v = undefined
y = Math.round(0.299 * r + 0.587 * g + 0.114 * b)
u = Math.round((((b - y) * 0.493) + 111) / 222 * 255)
v = Math.round((((r - y) * 0.877) + 155) / 312 * 255)
[
y
u
v
]
YUV2RGB: (YUV) ->
"use strict"
y = parseInt(YUV[0], 10)
u = parseInt(YUV[1], 10) / 255 * 222 - 111
v = parseInt(YUV[2], 10) / 255 * 312 - 155
r = undefined
g = undefined
b = undefined
r = Math.round(y + v / 0.877)
g = Math.round(y - 0.39466 * u - 0.5806 * v)
b = Math.round(y + u / 0.493)
[
r
g
b
]
RGB2HSV: (RGB) ->
"use strict"
r = parseInt(RGB[0], 10) / 255
g = parseInt(RGB[1], 10) / 255
b = parseInt(RGB[2], 10) / 255
max = Math.max(r, g, b)
min = Math.min(r, g, b)
d = max - min
v = max
h = undefined
s = undefined
if max is 0
s = 0
else
s = d / max
if max is min
h = 0
else
switch max
when r
h = (g - b) / d + ((if g < b then 6 else 0))
when g
h = (b - r) / d + 2
when b
h = (r - g) / d + 4
h = h / 6
[
h
s
v
]
HSV2RGB: (HSV) ->
"use strict"
r = undefined
g = undefined
b = undefined
h = HSV[0]
s = HSV[1]
v = HSV[2]
i = Math.floor(h * 6)
f = h * 6 - i
p = v * (1 - s)
q = v * (1 - f * s)
t = v * (1 - (1 - f) * s)
switch i % 6
when 0
r = v
g = t
b = p
when 1
r = q
g = v
b = p
when 2
r = p
g = v
b = t
when 3
r = p
g = q
b = v
when 4
r = t
g = p
b = v
when 5
r = v
g = p
b = q
[
r * 255
g * 255
b * 255
]
HSL2HEX: (HSL) ->
"use strict"
colorconv.RGB2HEX colorconv.HSL2RGB(HSL)
HEX2HSL: (hex) ->
"use strict"
colorconv.RGB2HSL colorconv.HEX2RGB(hex)
complexity2int: (string) ->
"use strict"
valunicode = undefined
keys = string.split("")
numbers = 1
uletter = 1
lletter = 1
special = 1
complex = 0
i = undefined
i = 0
while i < keys.length
valunicode = keys[i].charCodeAt(0)
if (valunicode > 0x40) and (valunicode < 0x5B)
#Großbuchstaben A-Z
uletter += 1
else if (valunicode > 0x60) and (valunicode < 0x7B)
#Kleinbuchstaben a-z
lletter += 1
else if (valunicode > 0x2F) and (valunicode < 0x3A)
#Zahlen 0-9
numbers += 1
#Sonderzeichen
else special += 1 if (valunicode > 0x20) and (valunicode < 0x7F)
i += 1
complex = ((uletter * lletter * numbers * special) + Math.round(uletter * 1.8 + lletter * 1.5 + numbers + special * 2)) - 6
complex
int2RGB: (intval) ->
"use strict"
intval = parseInt(intval, 10) if (typeof intval isnt "number") and (intval isnt false) and (intval isnt true)
if typeof intval is "number"
if (intval < 115) and (intval > 1)
return [
255
153 + intval
153 - intval
]
if (intval > 115) and (intval < 230)
return [
255 - intval
243
63
]
if (intval > 230) or (intval is true)
return [
145
243
63
]
if intval is "none"
return [
204
204
204
]
if intval is true
return [
204
204
204
]
false
complexity2RGB: (string) ->
"use strict"
colorconv.int2RGB colorconv.complexity2int(string)
mixRGB: (RGB1, RGB2) ->
"use strict"
r = undefined
g = undefined
b = undefined
r = parseInt((RGB1[0] + RGB2[0]) / 2, 10)
g = parseInt((RGB1[1] + RGB2[1]) / 2, 10)
b = parseInt((RGB1[2] + RGB2[2]) / 2, 10)
[
r
g
b
]
parse: (input) ->
"use strict"
geregext = undefined
pattern = /((rgb|hsl|#|yuv)(\(([%, ]*([\d]+)[%, ]+([\d]+)[%, ]+([\d]+)[%, ]*)+\)|([a-f0-9]+)))/g
geregext = pattern.exec(input)
if geregext isnt null
switch geregext[2]
when "#"
return colorconv.HEX2RGB(geregext[3])
when "rgb"
return [
]
when "hsl"
return colorconv.HSL2RGB([
])
when "yuv"
return colorconv.YUV2RGB([
])
else
return false
false
| true | #
# *
# * ColorConverter .js
# * Version: 0.1.1
# * License: MIT / BSD
# * By: PI:NAME:<NAME>END_PI
# *
#
#jslint browser: true, indent: 2
#
# RGB2HSL
# HSL2RGB
# RGB2CMYK
# CMYK2RGB
# HEX2RGB
# RGB2HEX
# RGB2YUV
# YUV2RGB
# RGB2HSV
# HSV2RGB
# HSL2Hex
# Hex2HSL
# complexity2int
# mixRGB
# parse
#
# colorconv: a set of colour-space conversion helpers (RGB, HSL, CMYK, HEX,
# YUV, HSV) plus small utilities layered on top of them.
colorconv =
  # Converts [r, g, b] (each 0..255) to [h, s, l] with h in 0..360 and
  # s, l in 0..100. Out-of-range input channels are clamped.
  RGB2HSL: (RGB) ->
    "use strict"
    r = Math.max(Math.min(parseInt(RGB[0], 10) / 255, 1), 0)
    g = Math.max(Math.min(parseInt(RGB[1], 10) / 255, 1), 0)
    b = Math.max(Math.min(parseInt(RGB[2], 10) / 255, 1), 0)
    max = Math.max(r, g, b)
    min = Math.min(r, g, b)
    l = (max + min) / 2
    d = undefined
    h = undefined
    s = undefined
    if max isnt min
      d = max - min
      s = (if l > 0.5 then d / (2 - max - min) else d / (max + min))
      # Hue depends on which channel dominates.
      if max is r
        h = (g - b) / d + ((if g < b then 6 else 0))
      else if max is g
        h = (b - r) / d + 2
      else
        h = (r - g) / d + 4
      h = h / 6
    else
      # Achromatic (grey): hue and saturation are zero.
      h = s = 0
    [
      Math.round(h * 360)
      Math.round(s * 100)
      Math.round(l * 100)
    ]
  # Converts [h, s, l] (h 0..360, s/l 0..100) to [r, g, b] (0..255).
  HSL2RGB: (HSL) ->
    "use strict"
    h = Math.max(Math.min(parseInt(HSL[0], 10), 360), 0) / 360
    s = Math.max(Math.min(parseInt(HSL[1], 10), 100), 0) / 100
    l = Math.max(Math.min(parseInt(HSL[2], 10), 100), 0) / 100
    v = undefined
    min = undefined
    sv = undefined
    six = undefined
    fract = undefined
    vsfract = undefined
    r = undefined
    g = undefined
    b = undefined
    if l <= 0.5
      v = l * (1 + s)
    else
      v = l + s - l * s
    # Zero brightness short-circuits to black.
    if v is 0
      return [
        0
        0
        0
      ]
    min = 2 * l - v
    sv = (v - min) / v
    # Split the hue into one of six sectors plus a fractional part.
    h = 6 * h
    six = Math.floor(h)
    fract = h - six
    vsfract = v * sv * fract
    switch six
      when 1
        r = v - vsfract
        g = v
        b = min
      when 2
        r = min
        g = v
        b = min + vsfract
      when 3
        r = min
        g = v - vsfract
        b = v
      when 4
        r = min + vsfract
        g = min
        b = v
      when 5
        r = v
        g = min
        b = v - vsfract
      else
        # Sector 0 (and hue exactly 360, which floors to 6).
        r = v
        g = min + vsfract
        b = min
    [
      Math.round(r * 255)
      Math.round(g * 255)
      Math.round(b * 255)
    ]
RGB2CMYK: (RGB) ->
"use strict"
red = Math.max(Math.min(parseInt(RGB[0], 10), 255), 0)
green = Math.max(Math.min(parseInt(RGB[1], 10), 255), 0)
blue = Math.max(Math.min(parseInt(RGB[2], 10), 255), 0)
cyan = 1 - red
magenta = 1 - green
yellow = 1 - blue
black = 1
if red or green or blue
black = Math.min(cyan, Math.min(magenta, yellow))
cyan = (cyan - black) / (1 - black)
magenta = (magenta - black) / (1 - black)
yellow = (yellow - black) / (1 - black)
else
black = 1
[
Math.round(cyan * 255)
Math.round(magenta * 255)
Math.round(yellow * 255)
Math.round(black + 254)
]
CMYK2RGB: (CMYK) ->
"use strict"
cyan = Math.max(Math.min(parseInt(CMYK[0], 10) / 255, 1), 0)
magenta = Math.max(Math.min(parseInt(CMYK[1], 10) / 255, 1), 0)
yellow = Math.max(Math.min(parseInt(CMYK[2], 10) / 255, 1), 0)
black = Math.max(Math.min(parseInt(CMYK[3], 10) / 255, 1), 0)
red = (1 - cyan * (1 - black) - black)
green = (1 - magenta * (1 - black) - black)
blue = (1 - yellow * (1 - black) - black)
[
Math.round(red * 255)
Math.round(green * 255)
Math.round(blue * 255)
]
HEX2RGB: (hex) ->
"use strict"
hex = hex.substr(1) if hex.charAt(0) is "#"
return false if (hex.length < 2) or (hex.length > 6)
values = hex.split("")
r = undefined
g = undefined
b = undefined
if hex.length is 2
r = parseInt(values[0].toString() + values[1].toString(), 16)
g = r
b = r
else if hex.length is 3
r = parseInt(values[0].toString() + values[0].toString(), 16)
g = parseInt(values[1].toString() + values[1].toString(), 16)
b = parseInt(values[2].toString() + values[2].toString(), 16)
else if hex.length is 6
r = parseInt(values[0].toString() + values[1].toString(), 16)
g = parseInt(values[2].toString() + values[3].toString(), 16)
b = parseInt(values[4].toString() + values[5].toString(), 16)
else
return false
[
r
g
b
]
RGB2HEX: (RGB) ->
"use strict"
hexr = Math.max(Math.min(parseInt(RGB[0], 10), 255), 0)
hexg = Math.max(Math.min(parseInt(RGB[1], 10), 255), 0)
hexb = Math.max(Math.min(parseInt(RGB[2], 10), 255), 0)
hexr = (if hexr > 15 then hexr.toString(16) else "0" + hexr.toString(16))
hexg = (if hexg > 15 then hexg.toString(16) else "0" + hexg.toString(16))
hexb = (if hexb > 15 then hexb.toString(16) else "0" + hexb.toString(16))
hexr + hexg + hexb
  # Converts [r, g, b] (0..255) to a [y, u, v] triplet rescaled to 0..255.
  # NOTE(review): the 111/222 and 155/312 factors look like a custom
  # normalisation of the classic U/V ranges - confirm before relying on
  # interoperability with other YUV implementations.
  RGB2YUV: (RGB) ->
    "use strict"
    r = parseInt(RGB[0], 10)
    g = parseInt(RGB[1], 10)
    b = parseInt(RGB[2], 10)
    y = undefined
    u = undefined
    v = undefined
    y = Math.round(0.299 * r + 0.587 * g + 0.114 * b)
    u = Math.round((((b - y) * 0.493) + 111) / 222 * 255)
    v = Math.round((((r - y) * 0.877) + 155) / 312 * 255)
    [
      y
      u
      v
    ]
  # Inverse of RGB2YUV: maps the rescaled [y, u, v] back to [r, g, b].
  YUV2RGB: (YUV) ->
    "use strict"
    y = parseInt(YUV[0], 10)
    u = parseInt(YUV[1], 10) / 255 * 222 - 111
    v = parseInt(YUV[2], 10) / 255 * 312 - 155
    r = undefined
    g = undefined
    b = undefined
    r = Math.round(y + v / 0.877)
    g = Math.round(y - 0.39466 * u - 0.5806 * v)
    b = Math.round(y + u / 0.493)
    [
      r
      g
      b
    ]
  # Converts [r, g, b] (0..255) to [h, s, v], each returned as a 0..1
  # fraction (unlike RGB2HSL, which scales its result).
  RGB2HSV: (RGB) ->
    "use strict"
    r = parseInt(RGB[0], 10) / 255
    g = parseInt(RGB[1], 10) / 255
    b = parseInt(RGB[2], 10) / 255
    max = Math.max(r, g, b)
    min = Math.min(r, g, b)
    d = max - min
    v = max
    h = undefined
    s = undefined
    if max is 0
      # Black: saturation is defined as 0.
      s = 0
    else
      s = d / max
    if max is min
      # Achromatic: hue is 0 by convention.
      h = 0
    else
      switch max
        when r
          h = (g - b) / d + ((if g < b then 6 else 0))
        when g
          h = (b - r) / d + 2
        when b
          h = (r - g) / d + 4
      h = h / 6
    [
      h
      s
      v
    ]
  # Converts [h, s, v] (each 0..1) to [r, g, b] (0..255). Note the result
  # channels are not rounded.
  HSV2RGB: (HSV) ->
    "use strict"
    r = undefined
    g = undefined
    b = undefined
    h = HSV[0]
    s = HSV[1]
    v = HSV[2]
    # Hue sector (0..5) plus intermediate shading values.
    i = Math.floor(h * 6)
    f = h * 6 - i
    p = v * (1 - s)
    q = v * (1 - f * s)
    t = v * (1 - (1 - f) * s)
    switch i % 6
      when 0
        r = v
        g = t
        b = p
      when 1
        r = q
        g = v
        b = p
      when 2
        r = p
        g = v
        b = t
      when 3
        r = p
        g = q
        b = v
      when 4
        r = t
        g = p
        b = v
      when 5
        r = v
        g = p
        b = q
    [
      r * 255
      g * 255
      b * 255
    ]
  # Composes HSL2RGB and RGB2HEX.
  HSL2HEX: (HSL) ->
    "use strict"
    colorconv.RGB2HEX colorconv.HSL2RGB(HSL)
  # Composes HEX2RGB and RGB2HSL.
  HEX2HSL: (hex) ->
    "use strict"
    colorconv.RGB2HSL colorconv.HEX2RGB(hex)
  # Derives a rough password-complexity score from a string by counting
  # uppercase letters, lowercase letters, digits and printable specials,
  # then combining the counts (product plus weighted sum).
  complexity2int: (string) ->
    "use strict"
    valunicode = undefined
    keys = string.split("")
    # Each counter starts at 1 so the product term never collapses to 0.
    numbers = 1
    uletter = 1
    lletter = 1
    special = 1
    complex = 0
    i = undefined
    i = 0
    while i < keys.length
      valunicode = keys[i].charCodeAt(0)
      if (valunicode > 0x40) and (valunicode < 0x5B)
        # uppercase letters A-Z
        uletter += 1
      else if (valunicode > 0x60) and (valunicode < 0x7B)
        # lowercase letters a-z
        lletter += 1
      else if (valunicode > 0x2F) and (valunicode < 0x3A)
        # digits 0-9
        numbers += 1
      # other printable ASCII characters
      else special += 1 if (valunicode > 0x20) and (valunicode < 0x7F)
      i += 1
    complex = ((uletter * lletter * numbers * special) + Math.round(uletter * 1.8 + lletter * 1.5 + numbers + special * 2)) - 6
    complex
int2RGB: (intval) ->
"use strict"
intval = parseInt(intval, 10) if (typeof intval isnt "number") and (intval isnt false) and (intval isnt true)
if typeof intval is "number"
if (intval < 115) and (intval > 1)
return [
255
153 + intval
153 - intval
]
if (intval > 115) and (intval < 230)
return [
255 - intval
243
63
]
if (intval > 230) or (intval is true)
return [
145
243
63
]
if intval is "none"
return [
204
204
204
]
if intval is true
return [
204
204
204
]
false
  # Convenience wrapper: colour-codes the complexity score of a string.
  complexity2RGB: (string) ->
    "use strict"
    colorconv.int2RGB colorconv.complexity2int(string)
mixRGB: (RGB1, RGB2) ->
"use strict"
r = undefined
g = undefined
b = undefined
r = parseInt((RGB1[0] + RGB2[0]) / 2, 10)
g = parseInt((RGB1[1] + RGB2[1]) / 2, 10)
b = parseInt((RGB1[2] + RGB2[2]) / 2, 10)
[
r
g
b
]
parse: (input) ->
"use strict"
geregext = undefined
pattern = /((rgb|hsl|#|yuv)(\(([%, ]*([\d]+)[%, ]+([\d]+)[%, ]+([\d]+)[%, ]*)+\)|([a-f0-9]+)))/g
geregext = pattern.exec(input)
if geregext isnt null
switch geregext[2]
when "#"
return colorconv.HEX2RGB(geregext[3])
when "rgb"
return [
]
when "hsl"
return colorconv.HSL2RGB([
])
when "yuv"
return colorconv.YUV2RGB([
])
else
return false
false
|
[
{
"context": " Backbone\n) ->\n\n ###*\n # @author David Bouman\n # @module App\n # @submodule ",
"end": 339,
"score": 0.9997648596763611,
"start": 327,
"tag": "NAME",
"value": "David Bouman"
}
] | generators/app/templates/src/models/api-service.coffee | marviq/generator-bat | 3 | 'use strict'
# UMD-style module wrapper: prefer CommonJS, fall back to AMD.
( ( factory ) ->
    if typeof exports is 'object'
        module.exports = factory(
            require( 'backbone' )
        )
    else if typeof define is 'function' and define.amd
        define( [
            'backbone'
        ], factory )
    return
)((
    Backbone
) ->
    ###*
    # @author David Bouman
    # @module App
    # @submodule Models
    ###
    ###*
    # Model for the `{{#crossLink 'ApiServicesCollection'}}{{/crossLink}}`.
    #
    # @class ApiServiceModel
    # @extends Backbone.Model
    # @constructor
    ###
    class ApiServiceModel extends Backbone.Model
        ###*
        # List of [valid attribute names](#attrs).
        #
        # @property schema
        # @type Array[String]
        # @final
        ###
        ###*
        # The `ApiServiceModel`'s unique identifier.
        #
        # @attribute id
        # @type String
        ###
        ###*
        # A url path relative to the API's base `url` for accessing this service's API endpoint.
        #
        # @attribute urlPath
        # @type String
        ###
        schema: [
            'id'
            'urlPath'
        ]
        ###*
        # This method caters for `Backbone.Model.url()` to not break on `ApiServiceModel` values. It is, in short, a hack.
        #
        # Not many libraries do ensure to stringify values before invoking `String` prototype methods on them and `Backbone.Model.url()` is no exception.
        #
        # See:
        # [`String.prototype.replace`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#wiki-document-head)
        # for a complete description of this method's signature.
        #
        # @method replace
        ###
        replace: () ->
            return @toString().replace( arguments... )
        ###*
        # Creates a complete service API endpoint url from the API's base `url` and the model's `urlPath`.
        #
        # Most time you won't need to call this method explicitly; simply provide this model wherever you have a need for this value.
        #
        # @method toString
        #
        # @return {String} The complete url of the service's API endpoint.
        ###
        toString: () ->
            return "#{ @collection.url }/#{ @attributes.urlPath }"
)
| 188399 | 'use strict'
( ( factory ) ->
if typeof exports is 'object'
module.exports = factory(
require( 'backbone' )
)
else if typeof define is 'function' and define.amd
define( [
'backbone'
], factory )
return
)((
Backbone
) ->
###*
# @author <NAME>
# @module App
# @submodule Models
###
###*
# Model for the `{{#crossLink 'ApiServicesCollection'}}{{/crossLink}}`.
#
# @class ApiServiceModel
# @extends Backbone.Model
# @constructor
###
class ApiServiceModel extends Backbone.Model
###*
# List of [valid attribute names](#attrs).
#
# @property schema
# @type Array[String]
# @final
###
###*
# The `ApiServiceModel`'s unique identifier.
#
# @attribute id
# @type String
###
###*
# A url path relative to the API's base `url` for accessing this service's API endpoint.
#
# @attribute urlPath
# @type String
###
schema: [
'id'
'urlPath'
]
###*
# This method caters for `Backbone.Model.url()` to not break on `ApiServiceModel` values. It is, in short, a hack.
#
# Not many libraries do ensure to stringify values before invoking `String` prototype methods on them and `Backbone.Model.url()` is no exception.
#
# See:
# [`String.prototype.replace`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#wiki-document-head)
# for a complete description of this method's signature.
#
# @method replace
###
replace: () ->
return @toString().replace( arguments... )
###*
# Creates a complete service API endpoint url from the API's base `url` and the model's `urlPath`.
#
# Most time you won't need to call this method explicitly; simply provide this model wherever you have a need for this value.
#
# @method toString
#
# @return {String} The complete url of the service's API endpoint.
###
toString: () ->
return "#{ @collection.url }/#{ @attributes.urlPath }"
)
| true | 'use strict'
( ( factory ) ->
if typeof exports is 'object'
module.exports = factory(
require( 'backbone' )
)
else if typeof define is 'function' and define.amd
define( [
'backbone'
], factory )
return
)((
Backbone
) ->
###*
# @author PI:NAME:<NAME>END_PI
# @module App
# @submodule Models
###
###*
# Model for the `{{#crossLink 'ApiServicesCollection'}}{{/crossLink}}`.
#
# @class ApiServiceModel
# @extends Backbone.Model
# @constructor
###
class ApiServiceModel extends Backbone.Model
###*
# List of [valid attribute names](#attrs).
#
# @property schema
# @type Array[String]
# @final
###
###*
# The `ApiServiceModel`'s unique identifier.
#
# @attribute id
# @type String
###
###*
# A url path relative to the API's base `url` for accessing this service's API endpoint.
#
# @attribute urlPath
# @type String
###
schema: [
'id'
'urlPath'
]
###*
# This method caters for `Backbone.Model.url()` to not break on `ApiServiceModel` values. It is, in short, a hack.
#
# Not many libraries do ensure to stringify values before invoking `String` prototype methods on them and `Backbone.Model.url()` is no exception.
#
# See:
# [`String.prototype.replace`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#wiki-document-head)
# for a complete description of this method's signature.
#
# @method replace
###
replace: () ->
return @toString().replace( arguments... )
###*
# Creates a complete service API endpoint url from the API's base `url` and the model's `urlPath`.
#
# Most time you won't need to call this method explicitly; simply provide this model wherever you have a need for this value.
#
# @method toString
#
# @return {String} The complete url of the service's API endpoint.
###
toString: () ->
return "#{ @collection.url }/#{ @attributes.urlPath }"
)
|
[
{
"context": "s file is part of the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyrig",
"end": 74,
"score": 0.9998868107795715,
"start": 61,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "f the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyright and license informa",
"end": 96,
"score": 0.999934196472168,
"start": 76,
"tag": "EMAIL",
"value": "jessym@konsserto.com"
},
{
"context": "enerate and manage directory/file tree\n#\n# @author Jessym Reziga <jessym@konsserto.com>\nclass Filesystem\n\n\n # Tra",
"end": 432,
"score": 0.9998888373374939,
"start": 419,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "ge directory/file tree\n#\n# @author Jessym Reziga <jessym@konsserto.com>\nclass Filesystem\n\n\n # Trace the output stack\n ",
"end": 454,
"score": 0.9999338388442993,
"start": 434,
"tag": "EMAIL",
"value": "jessym@konsserto.com"
}
] | node_modules/konsserto/lib/src/Konsserto/Component/Filesystem/Filesystem.coffee | konsserto/konsserto | 2 | ###
* This file is part of the Konsserto package.
*
* (c) Jessym Reziga <jessym@konsserto.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
filesystem = use('fs');
path_helper = use('path');
Tools = use('@Konsserto/Component/Static/Tools');
# Filesystem contains helper to generate and manage directory/file tree
#
# @author Jessym Reziga <jessym@konsserto.com>
class Filesystem
  # Trace the output stack: strips the current working directory from a
  # path (backslashes normalised to "/") so log output stays
  # project-relative.
  # @param {String} object The path concerned by the trace
  @traceOutput: (object) ->
    out = object.replace(/\\/g, '/')
    root = process.cwd()
    root = root.replace(/\\/g, '/')
    out = out.replace(root, '')
    return out
# Make tree
# @param {String} root The root directory
# @param {Object} tree The tree object
# @param {Number} chmod The access right on the directory
# @param {Boolean} verbose Should the mktree be logged ?
# @return {Boolean} True if created, false in the other cases
@mktree: (root, tree, chmod = 755, verbose = true) ->
if tree.constructor.name == 'Array'
for dir,index in tree
root += dir + '/'
if verbose
console.info('Generated directory : [' + @traceOutput(root) + ']')
try
filesystem.mkdirSync(root, chmod)
catch e
if e.code != 'EEXIST' then throw e
return path_helper.normalize(root)
else if tree.constructor.name == 'Object'
for name,content of tree
if name == ':files'
Filesystem.mkfiles(root, content)
else
dir = root + name + '/'
try
filesystem.mkdirSync(dir, chmod)
catch e
if e.code != 'EEXIST' then throw e
if verbose
console.info('Generated directory : [' + @traceOutput(dir) + ']')
if content != false && content != null && content != undefined
Filesystem.mktree(dir, content)
return true
  # Creates files under `root`.
  # A value starting with "@file:" is copied from the referenced path via
  # streams; any other value is written verbatim as the file content.
  # @param {String} root The root directory
  # @param {Object} files Map of file name to content (or "@file:<path>")
  @mkfiles: (root, files) ->
    for file,content of files
      if content.indexOf('@file:') == 0
        filesystem.createReadStream(content.replace('@file:', '')).pipe(filesystem.createWriteStream(root + file));
      else
        try
          buffFile = filesystem.createWriteStream(root + file)
          buffFile.write(content);
          buffFile.close();
          console.info('Generated file : [' + @traceOutput(root + file) + ']')
        catch e
          throw e
  # Creates a single directory if it does not exist yet.
  # @param {String} dir The directory to make
  # @param {Number} chmod The mode used on creation
  # @param {Boolean} del Should the directory be deleted first ? Default is false
  @mkdir: (dir, chmod, del = false) ->
    if del
      @rmtree(dir)
    filesystem.mkdirSync(dir, chmod) if !filesystem.existsSync(dir)
# Remove tree
# @param {String} dir The directory to remove
@rmtree: (dir) ->
if filesystem.existsSync(dir)
list = filesystem.readdirSync(dir)
for i in [0..list.length]
if list[i]?
filename = path.join(dir, list[i])
stat = filesystem.statSync(filename)
if filename == "." || filename == ".."
continue
else if stat.isDirectory()
@rmtree(filename)
else
filesystem.unlinkSync(filename)
try
filesystem.rmdirSync(dir)
catch e
throw e if e.errno != 53
# Copy tree
# @param {String} src The directory to copy
# @param {String} dst The destination folder
@copytree: (src, dst) ->
@mktree(dst + '/', @generatetree(src))
# Generate a tree
# @param {String} src The dsource directory where to gene
# @param {Object} root The root object
# @return {Object} The root object
@generatetree: (src, root = {}) ->
if filesystem.existsSync(src)
list = filesystem.readdirSync(src)
for i in [0..list.length]
if list[i]?
filename = path.join(src, list[i])
stat = filesystem.statSync(filename)
if filename == "." || filename == ".."
continue
else if stat.isDirectory()
root[path.basename(filename)] = {}
@generatetree(filename, root[path.basename(filename)])
else
root ?= {}
root[':files'] ?= {}
root[':files'][path.basename(filename)] = '@file:' + filename
return root
module.exports = Filesystem;
| 131220 | ###
* This file is part of the Konsserto package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
filesystem = use('fs');
path_helper = use('path');
Tools = use('@Konsserto/Component/Static/Tools');
# Filesystem contains helper to generate and manage directory/file tree
#
# @author <NAME> <<EMAIL>>
class Filesystem
# Trace the output stack
# @param {Object} object The object concern by the trace
@traceOutput: (object) ->
out = object.replace(/\\/g, '/')
root = process.cwd()
root = root.replace(/\\/g, '/')
out = out.replace(root, '')
return out
# Make tree
# @param {String} root The root directory
# @param {Object} tree The tree object
# @param {Number} chmod The access right on the directory
# @param {Boolean} verbose Should the mktree be logged ?
# @return {Boolean} True if created, false in the other cases
@mktree: (root, tree, chmod = 755, verbose = true) ->
if tree.constructor.name == 'Array'
for dir,index in tree
root += dir + '/'
if verbose
console.info('Generated directory : [' + @traceOutput(root) + ']')
try
filesystem.mkdirSync(root, chmod)
catch e
if e.code != 'EEXIST' then throw e
return path_helper.normalize(root)
else if tree.constructor.name == 'Object'
for name,content of tree
if name == ':files'
Filesystem.mkfiles(root, content)
else
dir = root + name + '/'
try
filesystem.mkdirSync(dir, chmod)
catch e
if e.code != 'EEXIST' then throw e
if verbose
console.info('Generated directory : [' + @traceOutput(dir) + ']')
if content != false && content != null && content != undefined
Filesystem.mktree(dir, content)
return true
# Make files
# @param {String} root The root directory
# @param [Object] files The files to make
@mkfiles: (root, files) ->
for file,content of files
if content.indexOf('@file:') == 0
filesystem.createReadStream(content.replace('@file:', '')).pipe(filesystem.createWriteStream(root + file));
else
try
buffFile = filesystem.createWriteStream(root + file)
buffFile.write(content);
buffFile.close();
console.info('Generated file : [' + @traceOutput(root + file) + ']')
catch e
throw e
# Make directory
# @param {String} dir The directory to make
# @param {Number} chmod The chmod used to create
# @param {Boolean} Should the directory be deleted before ? Default is false
@mkdir: (dir, chmod, del = false) ->
if del
@rmtree(dir)
filesystem.mkdirSync(dir, chmod) if !filesystem.existsSync(dir)
# Remove tree
# @param {String} dir The directory to remove
@rmtree: (dir) ->
if filesystem.existsSync(dir)
list = filesystem.readdirSync(dir)
for i in [0..list.length]
if list[i]?
filename = path.join(dir, list[i])
stat = filesystem.statSync(filename)
if filename == "." || filename == ".."
continue
else if stat.isDirectory()
@rmtree(filename)
else
filesystem.unlinkSync(filename)
try
filesystem.rmdirSync(dir)
catch e
throw e if e.errno != 53
# Copy tree
# @param {String} src The directory to copy
# @param {String} dst The destination folder
@copytree: (src, dst) ->
@mktree(dst + '/', @generatetree(src))
# Generate a tree
# @param {String} src The dsource directory where to gene
# @param {Object} root The root object
# @return {Object} The root object
@generatetree: (src, root = {}) ->
if filesystem.existsSync(src)
list = filesystem.readdirSync(src)
for i in [0..list.length]
if list[i]?
filename = path.join(src, list[i])
stat = filesystem.statSync(filename)
if filename == "." || filename == ".."
continue
else if stat.isDirectory()
root[path.basename(filename)] = {}
@generatetree(filename, root[path.basename(filename)])
else
root ?= {}
root[':files'] ?= {}
root[':files'][path.basename(filename)] = '@file:' + filename
return root
module.exports = Filesystem;
| true | ###
* This file is part of the Konsserto package.
*
* (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
filesystem = use('fs');
path_helper = use('path');
Tools = use('@Konsserto/Component/Static/Tools');
# Filesystem contains helper to generate and manage directory/file tree
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
class Filesystem
# Trace the output stack
# @param {Object} object The object concern by the trace
@traceOutput: (object) ->
out = object.replace(/\\/g, '/')
root = process.cwd()
root = root.replace(/\\/g, '/')
out = out.replace(root, '')
return out
# Make tree
# @param {String} root The root directory
# @param {Object} tree The tree object
# @param {Number} chmod The access right on the directory
# @param {Boolean} verbose Should the mktree be logged ?
# @return {Boolean} True if created, false in the other cases
@mktree: (root, tree, chmod = 755, verbose = true) ->
if tree.constructor.name == 'Array'
for dir,index in tree
root += dir + '/'
if verbose
console.info('Generated directory : [' + @traceOutput(root) + ']')
try
filesystem.mkdirSync(root, chmod)
catch e
if e.code != 'EEXIST' then throw e
return path_helper.normalize(root)
else if tree.constructor.name == 'Object'
for name,content of tree
if name == ':files'
Filesystem.mkfiles(root, content)
else
dir = root + name + '/'
try
filesystem.mkdirSync(dir, chmod)
catch e
if e.code != 'EEXIST' then throw e
if verbose
console.info('Generated directory : [' + @traceOutput(dir) + ']')
if content != false && content != null && content != undefined
Filesystem.mktree(dir, content)
return true
# Make files
# @param {String} root The root directory
# @param [Object] files The files to make
@mkfiles: (root, files) ->
for file,content of files
if content.indexOf('@file:') == 0
filesystem.createReadStream(content.replace('@file:', '')).pipe(filesystem.createWriteStream(root + file));
else
try
buffFile = filesystem.createWriteStream(root + file)
buffFile.write(content);
buffFile.close();
console.info('Generated file : [' + @traceOutput(root + file) + ']')
catch e
throw e
# Make directory
# @param {String} dir The directory to make
# @param {Number} chmod The chmod used to create
# @param {Boolean} Should the directory be deleted before ? Default is false
@mkdir: (dir, chmod, del = false) ->
if del
@rmtree(dir)
filesystem.mkdirSync(dir, chmod) if !filesystem.existsSync(dir)
# Remove tree
# @param {String} dir The directory to remove
@rmtree: (dir) ->
if filesystem.existsSync(dir)
list = filesystem.readdirSync(dir)
for i in [0..list.length]
if list[i]?
filename = path.join(dir, list[i])
stat = filesystem.statSync(filename)
if filename == "." || filename == ".."
continue
else if stat.isDirectory()
@rmtree(filename)
else
filesystem.unlinkSync(filename)
try
filesystem.rmdirSync(dir)
catch e
throw e if e.errno != 53
# Copy tree
# @param {String} src The directory to copy
# @param {String} dst The destination folder
@copytree: (src, dst) ->
@mktree(dst + '/', @generatetree(src))
# Generate a tree
# @param {String} src The dsource directory where to gene
# @param {Object} root The root object
# @return {Object} The root object
@generatetree: (src, root = {}) ->
if filesystem.existsSync(src)
list = filesystem.readdirSync(src)
for i in [0..list.length]
if list[i]?
filename = path.join(src, list[i])
stat = filesystem.statSync(filename)
if filename == "." || filename == ".."
continue
else if stat.isDirectory()
root[path.basename(filename)] = {}
@generatetree(filename, root[path.basename(filename)])
else
root ?= {}
root[':files'] ?= {}
root[':files'][path.basename(filename)] = '@file:' + filename
return root
module.exports = Filesystem;
|
[
{
"context": "ould process values\", ->\n val = @schema.val(\" Test@g.com \").val()\n equal val, \"test@g.com\"\n ok n",
"end": 447,
"score": 0.999921441078186,
"start": 437,
"tag": "EMAIL",
"value": "Test@g.com"
},
{
"context": "chema.val(\" Test@g.com \").val()\n equal val, \"test@g.com\"\n ok not @schema.validate()\n\n it \"should ",
"end": 485,
"score": 0.9999254941940308,
"start": 475,
"tag": "EMAIL",
"value": "test@g.com"
},
{
"context": "->\n sc = @schema.clone()\n val = sc.val(\" Test@g.com \").val()\n equal val, \"test@g.com\"\n ok n",
"end": 1239,
"score": 0.9999228715896606,
"start": 1229,
"tag": "EMAIL",
"value": "Test@g.com"
},
{
"context": " = sc.val(\" Test@g.com \").val()\n equal val, \"test@g.com\"\n ok not sc.validate()\n\n it \"should be ab",
"end": 1277,
"score": 0.9999256134033203,
"start": 1267,
"tag": "EMAIL",
"value": "test@g.com"
},
{
"context": " if both are valid\", (done) ->\n @schema.val \"ddddt@g.com\"\n errs = @schema.validate((errs) ->\n ",
"end": 2334,
"score": 0.9999260306358337,
"start": 2323,
"tag": "EMAIL",
"value": "ddddt@g.com"
},
{
"context": "done) ->\n sc = @schema.clone()\n sc.val \"ddddt@g.com\"\n errs = sc.validate((errs) ->\n ok no",
"end": 2548,
"score": 0.9999251365661621,
"start": 2537,
"tag": "EMAIL",
"value": "ddddt@g.com"
}
] | test/and.test.coffee | zzdhidden/EVE | 4 | {assert, ok, fail, equal, notEqual, deepEqual, notDeepEqual, strictEqual, notStrictEqual, eve} = require "./helper"
# Unit tests for the `type.and` combinator: value processing, cloning,
# required/optional embedding, sync/async validation and custom validators.
type = eve.type
describe "type", ->
  describe "and", ->
    # Fresh schema per test: (lowercase + length) AND (trim + email).
    beforeEach ->
      @schema = type.and([ type.string().lowercase().notEmpty().len(3, 12), type.string().trim().notEmpty().email() ])
    it "should have and type", ->
      ok type.and
    it "should process values", ->
      val = @schema.val(" Test@g.com ").val()
      equal val, "test@g.com"
      ok not @schema.validate()
    it "should validate required if required and embedded in object", ->
      sc = type.object(test: type.and([ type.string().len(5), type.string().email() ]).required())
      errs = sc.val(test2: [ "a" ]).validate((errs) ->
        ok errs
        equal errs.messages().length, 1
      )
      ok errs
      equal errs.messages().length, 1
    it "should not validate required if not required and embedded in object", ->
      sc = type.object(test: type.and([ type.string().len(5), type.string().email() ]))
      errs = sc.val(test2: [ "a" ]).validate((errs) ->
        ok not errs
      )
      ok not errs
    it "should process values for clones", ->
      sc = @schema.clone()
      val = sc.val(" Test@g.com ").val()
      equal val, "test@g.com"
      ok not sc.validate()
    it "should be able to validate if both fails", (done) ->
      @schema.val ""
      errs = @schema.validate((errs) ->
        ok errs
        equal errs.messages().length, 2
        done()
      )
      ok errs
    it "should be able to validate if both fails for clones", (done) ->
      sc = @schema.clone()
      sc.val ""
      errs = sc.validate((errs) ->
        ok errs
        equal errs.messages().length, 2
        done()
      )
      ok errs
    it "should be able to validate if one is valid", (done) ->
      @schema.val "test"
      errs = @schema.validate((errs) ->
        ok errs
        equal errs.messages().length, 1
        done()
      )
      ok errs
    it "should be able to validate if one is valid for clones", (done) ->
      sc = @schema.clone()
      sc.val "test"
      errs = sc.validate((errs) ->
        ok errs
        equal errs.messages().length, 1
        done()
      )
      ok errs
    it "should be able to validate if both are valid", (done) ->
      @schema.val "ddddt@g.com"
      errs = @schema.validate((errs) ->
        ok not errs
        done()
      )
    it "should be able to validate if both are valid for clones", (done) ->
      sc = @schema.clone()
      sc.val "ddddt@g.com"
      errs = sc.validate((errs) ->
        ok not errs
        done()
      )
    # Async validators report through the callback; messages from both
    # branches (and the email check) accumulate.
    it "should be able to validate async", (done) ->
      sc = type.and([ type.string().validator((val, next) ->
        setTimeout (->
          next val isnt "admin"
        ), 100
      , "must not be admin"), type.string().trim().email().validator((val, next) ->
        setTimeout (->
          next val.length isnt 5
        ), 100
      , "must not have 5 chars") ])
      sc.val("admin").validate (errs) ->
        ok errs
        equal errs.messages().length, 3
        done()
    it "should support custom validators", ->
      sc = type.and([ type.string().validator((val) ->
        ok this
        equal val, "admin"
        true
      ) ])
      sc.val("admin").validate()
| 91493 | {assert, ok, fail, equal, notEqual, deepEqual, notDeepEqual, strictEqual, notStrictEqual, eve} = require "./helper"
type = eve.type
describe "type", ->
describe "and", ->
beforeEach ->
@schema = type.and([ type.string().lowercase().notEmpty().len(3, 12), type.string().trim().notEmpty().email() ])
it "should have and type", ->
ok type.and
it "should process values", ->
val = @schema.val(" <EMAIL> ").val()
equal val, "<EMAIL>"
ok not @schema.validate()
it "should validate required if required and embedded in object", ->
sc = type.object(test: type.and([ type.string().len(5), type.string().email() ]).required())
errs = sc.val(test2: [ "a" ]).validate((errs) ->
ok errs
equal errs.messages().length, 1
)
ok errs
equal errs.messages().length, 1
it "should not validate required if not required and embedded in object", ->
sc = type.object(test: type.and([ type.string().len(5), type.string().email() ]))
errs = sc.val(test2: [ "a" ]).validate((errs) ->
ok not errs
)
ok not errs
it "should process values for clones", ->
sc = @schema.clone()
val = sc.val(" <EMAIL> ").val()
equal val, "<EMAIL>"
ok not sc.validate()
it "should be able to validate if both fails", (done) ->
@schema.val ""
errs = @schema.validate((errs) ->
ok errs
equal errs.messages().length, 2
done()
)
ok errs
it "should be able to validate if both fails for clones", (done) ->
sc = @schema.clone()
sc.val ""
errs = sc.validate((errs) ->
ok errs
equal errs.messages().length, 2
done()
)
ok errs
it "should be able to validate if one is valid", (done) ->
@schema.val "test"
errs = @schema.validate((errs) ->
ok errs
equal errs.messages().length, 1
done()
)
ok errs
it "should be able to validate if one is valid for clones", (done) ->
sc = @schema.clone()
sc.val "test"
errs = sc.validate((errs) ->
ok errs
equal errs.messages().length, 1
done()
)
ok errs
it "should be able to validate if both are valid", (done) ->
@schema.val "<EMAIL>"
errs = @schema.validate((errs) ->
ok not errs
done()
)
it "should be able to validate if both are valid for clones", (done) ->
sc = @schema.clone()
sc.val "<EMAIL>"
errs = sc.validate((errs) ->
ok not errs
done()
)
it "should be able to validate async", (done) ->
sc = type.and([ type.string().validator((val, next) ->
setTimeout (->
next val isnt "admin"
), 100
, "must not be admin"), type.string().trim().email().validator((val, next) ->
setTimeout (->
next val.length isnt 5
), 100
, "must not have 5 chars") ])
sc.val("admin").validate (errs) ->
ok errs
equal errs.messages().length, 3
done()
it "should support custom validators", ->
sc = type.and([ type.string().validator((val) ->
ok this
equal val, "admin"
true
) ])
sc.val("admin").validate()
| true | {assert, ok, fail, equal, notEqual, deepEqual, notDeepEqual, strictEqual, notStrictEqual, eve} = require "./helper"
type = eve.type
describe "type", ->
describe "and", ->
beforeEach ->
@schema = type.and([ type.string().lowercase().notEmpty().len(3, 12), type.string().trim().notEmpty().email() ])
it "should have and type", ->
ok type.and
it "should process values", ->
val = @schema.val(" PI:EMAIL:<EMAIL>END_PI ").val()
equal val, "PI:EMAIL:<EMAIL>END_PI"
ok not @schema.validate()
it "should validate required if required and embedded in object", ->
sc = type.object(test: type.and([ type.string().len(5), type.string().email() ]).required())
errs = sc.val(test2: [ "a" ]).validate((errs) ->
ok errs
equal errs.messages().length, 1
)
ok errs
equal errs.messages().length, 1
it "should not validate required if not required and embedded in object", ->
sc = type.object(test: type.and([ type.string().len(5), type.string().email() ]))
errs = sc.val(test2: [ "a" ]).validate((errs) ->
ok not errs
)
ok not errs
it "should process values for clones", ->
sc = @schema.clone()
val = sc.val(" PI:EMAIL:<EMAIL>END_PI ").val()
equal val, "PI:EMAIL:<EMAIL>END_PI"
ok not sc.validate()
it "should be able to validate if both fails", (done) ->
@schema.val ""
errs = @schema.validate((errs) ->
ok errs
equal errs.messages().length, 2
done()
)
ok errs
it "should be able to validate if both fails for clones", (done) ->
sc = @schema.clone()
sc.val ""
errs = sc.validate((errs) ->
ok errs
equal errs.messages().length, 2
done()
)
ok errs
it "should be able to validate if one is valid", (done) ->
@schema.val "test"
errs = @schema.validate((errs) ->
ok errs
equal errs.messages().length, 1
done()
)
ok errs
it "should be able to validate if one is valid for clones", (done) ->
sc = @schema.clone()
sc.val "test"
errs = sc.validate((errs) ->
ok errs
equal errs.messages().length, 1
done()
)
ok errs
it "should be able to validate if both are valid", (done) ->
@schema.val "PI:EMAIL:<EMAIL>END_PI"
errs = @schema.validate((errs) ->
ok not errs
done()
)
it "should be able to validate if both are valid for clones", (done) ->
sc = @schema.clone()
sc.val "PI:EMAIL:<EMAIL>END_PI"
errs = sc.validate((errs) ->
ok not errs
done()
)
it "should be able to validate async", (done) ->
sc = type.and([ type.string().validator((val, next) ->
setTimeout (->
next val isnt "admin"
), 100
, "must not be admin"), type.string().trim().email().validator((val, next) ->
setTimeout (->
next val.length isnt 5
), 100
, "must not have 5 chars") ])
sc.val("admin").validate (errs) ->
ok errs
equal errs.messages().length, 3
done()
it "should support custom validators", ->
sc = type.and([ type.string().validator((val) ->
ok this
equal val, "admin"
true
) ])
sc.val("admin").validate()
|
[
{
"context": " Under MIT license, see LICENSE file for details\n Andrey Popp (c) 2013\n\n###\n\nBackbone = re",
"end": 105,
"score": 0.999833345413208,
"start": 94,
"tag": "NAME",
"value": "Andrey Popp"
}
] | ui/index.coffee | andreypopp/wall | 2 | ###*
UI entry point
@jsx React.DOM
Under MIT license, see LICENSE file for details
Andrey Popp (c) 2013
###
Backbone = require 'backbone'
BackboneQueryParameters = require 'backbone-query-parameters'
Backbone.$ = require 'jqueryify'
React = require 'react-tools/build/modules/React'
DOMEvents = require 'react-dom-events'
Control = require './control'
HasModal = require './modal'
{AppEvents, LocationAware,
UserAware} = require './utils'
Dropdown = require './dropdown'
{Item, Items} = require '../models'
_BootstrapModal = require './bootstrap-modal'
router = require './router'
SubmitDialog = require './submit'
ItemScreen = require './item_screen'
ItemsScreen = require './items_screen'
App = React.createClass
mixins: [AppEvents, DOMEvents, LocationAware, UserAware, HasModal]
propTypes:
title: React.PropTypes.string.isRequired
routes:
'': 'items'
'items/:id': 'item'
'~:username': 'user'
'auth/:provider': 'auth'
events:
'click a': 'onClick'
'touchstart a': 'onClick'
onClick: (e) ->
href = e.currentTarget.attributes?.href?.value
if href? and not /https?:/.exec href
e.preventDefault()
this.router.navigate href, trigger: true
getInitialState: ->
{user: this.getUser()}
componentDidMount: ->
this.listenTo this.router, 'route:items', (params) =>
model = new Items(params)
model.fetch().then => this.show new ItemsScreen({model}), suppressNavigation: true
this.listenTo this.router, 'route:item', (id) =>
model = new Item {id}
model.fetch().then => this.show new ItemScreen({model}), suppressNavigation: true
this.listenTo this.router, 'route:auth', (provider) =>
window.open(window.location.pathname)
Backbone.history.history.back()
show: (screen, options = {}) ->
window.scrollTo(0)
this.setState {screen}
screenURL = screen.url()
unless options.suppressNavigation
this.router.navigate screenURL, {trigger: options.trigger} if screenURL?
renderControls: ->
controls = if this.getUser()?
[Control(
class: 'submit', icon: 'pencil', label: 'Submit', tabIndex: 3,
onClick: => this.showModal SubmitDialog()),
Control(
class: 'logout', icon: 'signout', tabIndex: 4,
href: '/auth/logout', label: 'Sign out')]
else
Dropdown({class: 'login', icon: 'signin', label: 'Sign in'},
Control(href: '/auth/facebook', label: 'with Facebook', icon: 'facebook'))
`<div class="Controls">{controls}</div>`
render: ->
screen = this.state?.screen
`<div class="App">
<header>
<h1 class="title"><Control tabIndex="2" href="/" label={this.props.title} /></h1>
{this.renderControls()}
</header>
<div class="screen">{screen}</div>
{this.renderModal()}
</div>`
window.onload = ->
React.initializeTouchEvents(true)
Wall = window.Wall = App(title: __data.title or 'wall')
Wall.settings = __data
React.renderComponent Wall, document.body
Backbone.history.start(pushState: true)
| 218736 | ###*
UI entry point
@jsx React.DOM
Under MIT license, see LICENSE file for details
<NAME> (c) 2013
###
Backbone = require 'backbone'
BackboneQueryParameters = require 'backbone-query-parameters'
Backbone.$ = require 'jqueryify'
React = require 'react-tools/build/modules/React'
DOMEvents = require 'react-dom-events'
Control = require './control'
HasModal = require './modal'
{AppEvents, LocationAware,
UserAware} = require './utils'
Dropdown = require './dropdown'
{Item, Items} = require '../models'
_BootstrapModal = require './bootstrap-modal'
router = require './router'
SubmitDialog = require './submit'
ItemScreen = require './item_screen'
ItemsScreen = require './items_screen'
App = React.createClass
mixins: [AppEvents, DOMEvents, LocationAware, UserAware, HasModal]
propTypes:
title: React.PropTypes.string.isRequired
routes:
'': 'items'
'items/:id': 'item'
'~:username': 'user'
'auth/:provider': 'auth'
events:
'click a': 'onClick'
'touchstart a': 'onClick'
onClick: (e) ->
href = e.currentTarget.attributes?.href?.value
if href? and not /https?:/.exec href
e.preventDefault()
this.router.navigate href, trigger: true
getInitialState: ->
{user: this.getUser()}
componentDidMount: ->
this.listenTo this.router, 'route:items', (params) =>
model = new Items(params)
model.fetch().then => this.show new ItemsScreen({model}), suppressNavigation: true
this.listenTo this.router, 'route:item', (id) =>
model = new Item {id}
model.fetch().then => this.show new ItemScreen({model}), suppressNavigation: true
this.listenTo this.router, 'route:auth', (provider) =>
window.open(window.location.pathname)
Backbone.history.history.back()
show: (screen, options = {}) ->
window.scrollTo(0)
this.setState {screen}
screenURL = screen.url()
unless options.suppressNavigation
this.router.navigate screenURL, {trigger: options.trigger} if screenURL?
renderControls: ->
controls = if this.getUser()?
[Control(
class: 'submit', icon: 'pencil', label: 'Submit', tabIndex: 3,
onClick: => this.showModal SubmitDialog()),
Control(
class: 'logout', icon: 'signout', tabIndex: 4,
href: '/auth/logout', label: 'Sign out')]
else
Dropdown({class: 'login', icon: 'signin', label: 'Sign in'},
Control(href: '/auth/facebook', label: 'with Facebook', icon: 'facebook'))
`<div class="Controls">{controls}</div>`
render: ->
screen = this.state?.screen
`<div class="App">
<header>
<h1 class="title"><Control tabIndex="2" href="/" label={this.props.title} /></h1>
{this.renderControls()}
</header>
<div class="screen">{screen}</div>
{this.renderModal()}
</div>`
window.onload = ->
React.initializeTouchEvents(true)
Wall = window.Wall = App(title: __data.title or 'wall')
Wall.settings = __data
React.renderComponent Wall, document.body
Backbone.history.start(pushState: true)
| true | ###*
UI entry point
@jsx React.DOM
Under MIT license, see LICENSE file for details
PI:NAME:<NAME>END_PI (c) 2013
###
Backbone = require 'backbone'
BackboneQueryParameters = require 'backbone-query-parameters'
Backbone.$ = require 'jqueryify'
React = require 'react-tools/build/modules/React'
DOMEvents = require 'react-dom-events'
Control = require './control'
HasModal = require './modal'
{AppEvents, LocationAware,
UserAware} = require './utils'
Dropdown = require './dropdown'
{Item, Items} = require '../models'
_BootstrapModal = require './bootstrap-modal'
router = require './router'
SubmitDialog = require './submit'
ItemScreen = require './item_screen'
ItemsScreen = require './items_screen'
App = React.createClass
mixins: [AppEvents, DOMEvents, LocationAware, UserAware, HasModal]
propTypes:
title: React.PropTypes.string.isRequired
routes:
'': 'items'
'items/:id': 'item'
'~:username': 'user'
'auth/:provider': 'auth'
events:
'click a': 'onClick'
'touchstart a': 'onClick'
onClick: (e) ->
href = e.currentTarget.attributes?.href?.value
if href? and not /https?:/.exec href
e.preventDefault()
this.router.navigate href, trigger: true
getInitialState: ->
{user: this.getUser()}
componentDidMount: ->
this.listenTo this.router, 'route:items', (params) =>
model = new Items(params)
model.fetch().then => this.show new ItemsScreen({model}), suppressNavigation: true
this.listenTo this.router, 'route:item', (id) =>
model = new Item {id}
model.fetch().then => this.show new ItemScreen({model}), suppressNavigation: true
this.listenTo this.router, 'route:auth', (provider) =>
window.open(window.location.pathname)
Backbone.history.history.back()
show: (screen, options = {}) ->
window.scrollTo(0)
this.setState {screen}
screenURL = screen.url()
unless options.suppressNavigation
this.router.navigate screenURL, {trigger: options.trigger} if screenURL?
renderControls: ->
controls = if this.getUser()?
[Control(
class: 'submit', icon: 'pencil', label: 'Submit', tabIndex: 3,
onClick: => this.showModal SubmitDialog()),
Control(
class: 'logout', icon: 'signout', tabIndex: 4,
href: '/auth/logout', label: 'Sign out')]
else
Dropdown({class: 'login', icon: 'signin', label: 'Sign in'},
Control(href: '/auth/facebook', label: 'with Facebook', icon: 'facebook'))
`<div class="Controls">{controls}</div>`
render: ->
screen = this.state?.screen
`<div class="App">
<header>
<h1 class="title"><Control tabIndex="2" href="/" label={this.props.title} /></h1>
{this.renderControls()}
</header>
<div class="screen">{screen}</div>
{this.renderModal()}
</div>`
window.onload = ->
React.initializeTouchEvents(true)
Wall = window.Wall = App(title: __data.title or 'wall')
Wall.settings = __data
React.renderComponent Wall, document.body
Backbone.history.start(pushState: true)
|
[
{
"context": "{@serverPort}\"\n auth:\n username: 'some-uuid'\n password: 'some-token'\n json:\n ",
"end": 1488,
"score": 0.9890213012695312,
"start": 1479,
"tag": "USERNAME",
"value": "some-uuid"
},
{
"context": " username: 'some-uuid'\n password: 'some-token'\n json:\n something: true\n\n r",
"end": 1521,
"score": 0.9993361830711365,
"start": 1511,
"tag": "PASSWORD",
"value": "some-token"
},
{
"context": "l {\n uuid: 'some-uuid'\n token: 'some-token'\n }\n expect(result.metadata).to.dee",
"end": 2285,
"score": 0.8887411952018738,
"start": 2275,
"tag": "PASSWORD",
"value": "some-token"
}
] | test/integration/meshblu-otp-generate-spec.coffee | octoblu/meshblu-otp-service | 0 | {afterEach, beforeEach, describe, it} = global
{expect} = require 'chai'
request = require 'request'
shmock = require 'shmock'
mongojs = require 'mongojs'
enableDestroy = require 'server-destroy'
Server = require '../../src/server'
Encryption = require '../../src/services/encryption'
{privateKey} = require './secrets'
describe 'Generate', ->
beforeEach (done) ->
@keys = mongojs('mongodb://localhost:27017/meshblu-otp-service-test').collection('keys')
@keys.remove done
beforeEach (done) ->
@meshblu = shmock 0xd00d
enableDestroy @meshblu
serverOptions =
port: undefined,
disableLogging: true
privateKey: privateKey
@encryption = new Encryption {privateKey: privateKey}
meshbluConfig =
server: 'localhost'
port: 0xd00d
@server = new Server serverOptions, {meshbluConfig, @keys}
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@meshblu.destroy done
afterEach (done) ->
@server.stop done
describe 'On POST /generate', ->
beforeEach (done) ->
userAuth = new Buffer('some-uuid:some-token').toString 'base64'
@authDevice = @meshblu
.post '/authenticate'
.set 'Authorization', "Basic #{userAuth}"
.reply 200, uuid: 'some-uuid', token: 'some-token'
options =
uri: '/generate'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'some-uuid'
password: 'some-token'
json:
something: true
request.post options, (error, @response, @body) =>
done error
it 'should auth handler', ->
@authDevice.done()
it 'should return a 201', ->
expect(@response.statusCode).to.equal 201
it 'should find one result', (done) ->
@keys.find {}, (error, result) =>
return done error if error?
expect(result.length).to.equal 1
done()
it 'should return a key', (done) ->
@keys.findOne {key: @body.key}, (error, result) =>
return done error if error?
return done new Error 'Missing record' unless result?
expect(@encryption.decrypt(result.encryptedSecret)).to.deep.equal {
uuid: 'some-uuid'
token: 'some-token'
}
expect(result.metadata).to.deep.equal something: true
done()
| 156124 | {afterEach, beforeEach, describe, it} = global
{expect} = require 'chai'
request = require 'request'
shmock = require 'shmock'
mongojs = require 'mongojs'
enableDestroy = require 'server-destroy'
Server = require '../../src/server'
Encryption = require '../../src/services/encryption'
{privateKey} = require './secrets'
describe 'Generate', ->
beforeEach (done) ->
@keys = mongojs('mongodb://localhost:27017/meshblu-otp-service-test').collection('keys')
@keys.remove done
beforeEach (done) ->
@meshblu = shmock 0xd00d
enableDestroy @meshblu
serverOptions =
port: undefined,
disableLogging: true
privateKey: privateKey
@encryption = new Encryption {privateKey: privateKey}
meshbluConfig =
server: 'localhost'
port: 0xd00d
@server = new Server serverOptions, {meshbluConfig, @keys}
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@meshblu.destroy done
afterEach (done) ->
@server.stop done
describe 'On POST /generate', ->
beforeEach (done) ->
userAuth = new Buffer('some-uuid:some-token').toString 'base64'
@authDevice = @meshblu
.post '/authenticate'
.set 'Authorization', "Basic #{userAuth}"
.reply 200, uuid: 'some-uuid', token: 'some-token'
options =
uri: '/generate'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'some-uuid'
password: '<PASSWORD>'
json:
something: true
request.post options, (error, @response, @body) =>
done error
it 'should auth handler', ->
@authDevice.done()
it 'should return a 201', ->
expect(@response.statusCode).to.equal 201
it 'should find one result', (done) ->
@keys.find {}, (error, result) =>
return done error if error?
expect(result.length).to.equal 1
done()
it 'should return a key', (done) ->
@keys.findOne {key: @body.key}, (error, result) =>
return done error if error?
return done new Error 'Missing record' unless result?
expect(@encryption.decrypt(result.encryptedSecret)).to.deep.equal {
uuid: 'some-uuid'
token: '<PASSWORD>'
}
expect(result.metadata).to.deep.equal something: true
done()
| true | {afterEach, beforeEach, describe, it} = global
{expect} = require 'chai'
request = require 'request'
shmock = require 'shmock'
mongojs = require 'mongojs'
enableDestroy = require 'server-destroy'
Server = require '../../src/server'
Encryption = require '../../src/services/encryption'
{privateKey} = require './secrets'
describe 'Generate', ->
beforeEach (done) ->
@keys = mongojs('mongodb://localhost:27017/meshblu-otp-service-test').collection('keys')
@keys.remove done
beforeEach (done) ->
@meshblu = shmock 0xd00d
enableDestroy @meshblu
serverOptions =
port: undefined,
disableLogging: true
privateKey: privateKey
@encryption = new Encryption {privateKey: privateKey}
meshbluConfig =
server: 'localhost'
port: 0xd00d
@server = new Server serverOptions, {meshbluConfig, @keys}
@server.run =>
@serverPort = @server.address().port
done()
afterEach (done) ->
@meshblu.destroy done
afterEach (done) ->
@server.stop done
describe 'On POST /generate', ->
beforeEach (done) ->
userAuth = new Buffer('some-uuid:some-token').toString 'base64'
@authDevice = @meshblu
.post '/authenticate'
.set 'Authorization', "Basic #{userAuth}"
.reply 200, uuid: 'some-uuid', token: 'some-token'
options =
uri: '/generate'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'some-uuid'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json:
something: true
request.post options, (error, @response, @body) =>
done error
it 'should auth handler', ->
@authDevice.done()
it 'should return a 201', ->
expect(@response.statusCode).to.equal 201
it 'should find one result', (done) ->
@keys.find {}, (error, result) =>
return done error if error?
expect(result.length).to.equal 1
done()
it 'should return a key', (done) ->
@keys.findOne {key: @body.key}, (error, result) =>
return done error if error?
return done new Error 'Missing record' unless result?
expect(@encryption.decrypt(result.encryptedSecret)).to.deep.equal {
uuid: 'some-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
}
expect(result.metadata).to.deep.equal something: true
done()
|
[
{
"context": " name: 'item1'\n hkeys: [\n 'sample1'\n 'sample2'\n ]\n sele",
"end": 6100,
"score": 0.9068337082862854,
"start": 6093,
"tag": "KEY",
"value": "sample1"
},
{
"context": " hkeys: [\n 'sample1'\n 'sample2'\n ]\n selected_hkey_index: 0\n ",
"end": 6122,
"score": 0.9198386073112488,
"start": 6115,
"tag": "KEY",
"value": "sample2"
}
] | coffee_src/components/config_card.coffee | pluripotency/react-parcel-coffee | 0 | h = require 'react-hyperscript'
React = require 'react'
_ = require 'lodash'
class HostTable extends React.Component
constructor: (props) ->
super arguments...
@state =
search: ''
editing_row: ''
reorder: false
temp_rows: []
rows: props.hosts
input_search: (value)=>
@setState
search: value
clear_search: ()=>
@setState
search: ''
set_default: ()=>
@setState
search: ''
editing_row: ''
reorder: false
temp_rows: []
start_edit: (i)=>
()=>
if @state.editing_row!=i
@set_default()
@setState editing_row: i
setTimeout ()=>
document.querySelector("#input#{i}").focus()
, 200
toggle_checked: (i)=> ()=>
rows = @state.rows
rows[i].checked = !rows[i].checked
@setState
rows: rows
select_hkey: (row_index, hkey_index)=>()=>
rows = @state.rows
rows[row_index].selected_hkey_index = hkey_index
@setState
rows: rows
on_click_reorder: (e)=>
e.preventDefault
if @state.reorder
temp_rows = @state.temp_rows
drop_rows = _.reject @state.rows, (r)-> _.find temp_rows, (t)-> t.name == r.name
new_rows = [
temp_rows...
drop_rows...
]
@setState
rows: new_rows
temp_rows: []
reorder: false
else
@set_default()
@setState reorder: true
render: ()=>
rows = _(@state.rows)
.filter (r)=>
if @state.search
r.name.match(new RegExp(@state.search))
else
true
.value()
h 'table.table.table-sm.table-hover', [
h 'thead.bg-info', [
h 'tr', [
h 'th', [h 'i.far.fa-check-square']
h 'th', '#'
(_.map @props.headers, (head)-> h 'th', head)...
h 'th', [
h 'button.btn.btn-sm.btn-primary',
onClick: (e)=>
e.preventDefault()
current_rows = @state.rows
current_rows.push name: ''
@setState
rows: current_rows
editing_rows: current_rows.length-1
, 'Add'
]
]
]
h 'tbody', _.map @state.temp_rows, (row, i)=>
h 'tr', [
h 'td', i+1
h 'td', row.name
]
h 'tbody', _.map rows, (row, i)=>
h 'tr', [
h 'td', [
h 'i.far.fa-check-square',
className: if row.checked then 'fa-check-square' else 'fa-square'
onClick: @toggle_checked(i)
]
h 'td', [
if @state.reorder
h 'button.btn.btn-sm.btn-secondary',
onClick: ()=>
temp_rows = @state.temp_rows
if not _.find(temp_rows, (r)-> row.name == r.name)
temp_rows.push row
@setState temp_rows: temp_rows
, 'select'
else
i+1
]
h 'td', [
if @state.editing_row==i
h '.input-group.input-group-sm', [
h 'input.form-control',
id: "input#{i}"
onChange: (e)=>
rows = @state.rows
rows[i].name = e.target.value
@setState rows: rows
value: row.name
]
else
h 'label', row.name
]
h 'td', [
((row.hkeys?.map (hkey, j)=>
h 'button.btn.btn-sm.btn-info',
className: if row.selected_hkey_index==j then 'active' else ''
onClick: @select_hkey(i, j)
, j
) or [h 'button.btn.btn-sm.btn-danger', [h 'i.fas.fa-sync']])...
]
h 'td', [
if @state.editing_row !=''
if @state.editing_row == i
h 'button.btn.btn-sm.btn-primary',
onClick: ()=>
rows = @state.rows
@setState
editing_row: ''
rows: _.reject rows, (r)-> r.name == ''
, 'Save'
else
h 'div'
else
h 'button.btn.btn-sm.btn-primary',
onClick: @start_edit(i)
, 'Edit'
]
]
h 'tfoot', [
h 'tr', [
h 'td',
colSpan: 2
, [
h 'button.btn.btn-sm',
onClick: @on_click_reorder
, if @state.reorder then 'done' else 'reorder'
]
h 'td',
colSpan: 2
, [
h SearchForm,
input_search: @input_search
clear_search: @clear_search
set_default: @set_default
]
h 'td', [
h 'button.btn.btn-sm.btn-primary',
onClick: @props.run
, 'Run'
]
]
]
]
class SearchForm extends React.Component
constructor: ->
super arguments...
input_search: (e)=>
@props.input_search(e.target.value)
set_default: ()=>
@props.set_default()
clear_search: ()=>
@props.clear_search()
render: ()=>
h '.input-group.input-group-sm.mb-3', [
h 'input.form-control',
onFocus: @set_default
onChange: @input_search
h '.input-group-append', [
h 'span.input-group-text',
onClick: @clear_search
, 'X'
h 'span.input-group-text', 'Search'
]
]
class HostsBox extends React.Component
render: ()=>
h '.card',
style:
position: 'absolute'
top: '45px'
margin: '15px'
, [
h '.card-header.bg-info', [
'Hosts'
h 'button.close', 'x'
]
h HostTable,
headers: [
'hostname'
'hkeys'
]
hosts: [
name: 'item1'
hkeys: [
'sample1'
'sample2'
]
selected_hkey_index: 0
checked: true
,
name: 'item2'
]
run: ()=>
]
class Background extends React.Component
constructor: ->
super arguments...
render: ()=>
h 'div',
style:
position: 'fixed'
top: '0'
left : '0'
width: '100%'
height: '100%'
backgroundColor: '#50c6dd'
, [
h HostsBox
]
module.exports = Background
| 144972 | h = require 'react-hyperscript'
React = require 'react'
_ = require 'lodash'
class HostTable extends React.Component
constructor: (props) ->
super arguments...
@state =
search: ''
editing_row: ''
reorder: false
temp_rows: []
rows: props.hosts
input_search: (value)=>
@setState
search: value
clear_search: ()=>
@setState
search: ''
set_default: ()=>
@setState
search: ''
editing_row: ''
reorder: false
temp_rows: []
start_edit: (i)=>
()=>
if @state.editing_row!=i
@set_default()
@setState editing_row: i
setTimeout ()=>
document.querySelector("#input#{i}").focus()
, 200
toggle_checked: (i)=> ()=>
rows = @state.rows
rows[i].checked = !rows[i].checked
@setState
rows: rows
select_hkey: (row_index, hkey_index)=>()=>
rows = @state.rows
rows[row_index].selected_hkey_index = hkey_index
@setState
rows: rows
on_click_reorder: (e)=>
e.preventDefault
if @state.reorder
temp_rows = @state.temp_rows
drop_rows = _.reject @state.rows, (r)-> _.find temp_rows, (t)-> t.name == r.name
new_rows = [
temp_rows...
drop_rows...
]
@setState
rows: new_rows
temp_rows: []
reorder: false
else
@set_default()
@setState reorder: true
render: ()=>
rows = _(@state.rows)
.filter (r)=>
if @state.search
r.name.match(new RegExp(@state.search))
else
true
.value()
h 'table.table.table-sm.table-hover', [
h 'thead.bg-info', [
h 'tr', [
h 'th', [h 'i.far.fa-check-square']
h 'th', '#'
(_.map @props.headers, (head)-> h 'th', head)...
h 'th', [
h 'button.btn.btn-sm.btn-primary',
onClick: (e)=>
e.preventDefault()
current_rows = @state.rows
current_rows.push name: ''
@setState
rows: current_rows
editing_rows: current_rows.length-1
, 'Add'
]
]
]
h 'tbody', _.map @state.temp_rows, (row, i)=>
h 'tr', [
h 'td', i+1
h 'td', row.name
]
h 'tbody', _.map rows, (row, i)=>
h 'tr', [
h 'td', [
h 'i.far.fa-check-square',
className: if row.checked then 'fa-check-square' else 'fa-square'
onClick: @toggle_checked(i)
]
h 'td', [
if @state.reorder
h 'button.btn.btn-sm.btn-secondary',
onClick: ()=>
temp_rows = @state.temp_rows
if not _.find(temp_rows, (r)-> row.name == r.name)
temp_rows.push row
@setState temp_rows: temp_rows
, 'select'
else
i+1
]
h 'td', [
if @state.editing_row==i
h '.input-group.input-group-sm', [
h 'input.form-control',
id: "input#{i}"
onChange: (e)=>
rows = @state.rows
rows[i].name = e.target.value
@setState rows: rows
value: row.name
]
else
h 'label', row.name
]
h 'td', [
((row.hkeys?.map (hkey, j)=>
h 'button.btn.btn-sm.btn-info',
className: if row.selected_hkey_index==j then 'active' else ''
onClick: @select_hkey(i, j)
, j
) or [h 'button.btn.btn-sm.btn-danger', [h 'i.fas.fa-sync']])...
]
h 'td', [
if @state.editing_row !=''
if @state.editing_row == i
h 'button.btn.btn-sm.btn-primary',
onClick: ()=>
rows = @state.rows
@setState
editing_row: ''
rows: _.reject rows, (r)-> r.name == ''
, 'Save'
else
h 'div'
else
h 'button.btn.btn-sm.btn-primary',
onClick: @start_edit(i)
, 'Edit'
]
]
h 'tfoot', [
h 'tr', [
h 'td',
colSpan: 2
, [
h 'button.btn.btn-sm',
onClick: @on_click_reorder
, if @state.reorder then 'done' else 'reorder'
]
h 'td',
colSpan: 2
, [
h SearchForm,
input_search: @input_search
clear_search: @clear_search
set_default: @set_default
]
h 'td', [
h 'button.btn.btn-sm.btn-primary',
onClick: @props.run
, 'Run'
]
]
]
]
class SearchForm extends React.Component
constructor: ->
super arguments...
input_search: (e)=>
@props.input_search(e.target.value)
set_default: ()=>
@props.set_default()
clear_search: ()=>
@props.clear_search()
render: ()=>
h '.input-group.input-group-sm.mb-3', [
h 'input.form-control',
onFocus: @set_default
onChange: @input_search
h '.input-group-append', [
h 'span.input-group-text',
onClick: @clear_search
, 'X'
h 'span.input-group-text', 'Search'
]
]
class HostsBox extends React.Component
render: ()=>
h '.card',
style:
position: 'absolute'
top: '45px'
margin: '15px'
, [
h '.card-header.bg-info', [
'Hosts'
h 'button.close', 'x'
]
h HostTable,
headers: [
'hostname'
'hkeys'
]
hosts: [
name: 'item1'
hkeys: [
'<KEY>'
'<KEY>'
]
selected_hkey_index: 0
checked: true
,
name: 'item2'
]
run: ()=>
]
class Background extends React.Component
constructor: ->
super arguments...
render: ()=>
h 'div',
style:
position: 'fixed'
top: '0'
left : '0'
width: '100%'
height: '100%'
backgroundColor: '#50c6dd'
, [
h HostsBox
]
module.exports = Background
| true | h = require 'react-hyperscript'
React = require 'react'
_ = require 'lodash'
class HostTable extends React.Component
constructor: (props) ->
super arguments...
@state =
search: ''
editing_row: ''
reorder: false
temp_rows: []
rows: props.hosts
input_search: (value)=>
@setState
search: value
clear_search: ()=>
@setState
search: ''
set_default: ()=>
@setState
search: ''
editing_row: ''
reorder: false
temp_rows: []
start_edit: (i)=>
()=>
if @state.editing_row!=i
@set_default()
@setState editing_row: i
setTimeout ()=>
document.querySelector("#input#{i}").focus()
, 200
toggle_checked: (i)=> ()=>
rows = @state.rows
rows[i].checked = !rows[i].checked
@setState
rows: rows
select_hkey: (row_index, hkey_index)=>()=>
rows = @state.rows
rows[row_index].selected_hkey_index = hkey_index
@setState
rows: rows
on_click_reorder: (e)=>
e.preventDefault
if @state.reorder
temp_rows = @state.temp_rows
drop_rows = _.reject @state.rows, (r)-> _.find temp_rows, (t)-> t.name == r.name
new_rows = [
temp_rows...
drop_rows...
]
@setState
rows: new_rows
temp_rows: []
reorder: false
else
@set_default()
@setState reorder: true
render: ()=>
rows = _(@state.rows)
.filter (r)=>
if @state.search
r.name.match(new RegExp(@state.search))
else
true
.value()
h 'table.table.table-sm.table-hover', [
h 'thead.bg-info', [
h 'tr', [
h 'th', [h 'i.far.fa-check-square']
h 'th', '#'
(_.map @props.headers, (head)-> h 'th', head)...
h 'th', [
h 'button.btn.btn-sm.btn-primary',
onClick: (e)=>
e.preventDefault()
current_rows = @state.rows
current_rows.push name: ''
@setState
rows: current_rows
editing_rows: current_rows.length-1
, 'Add'
]
]
]
h 'tbody', _.map @state.temp_rows, (row, i)=>
h 'tr', [
h 'td', i+1
h 'td', row.name
]
h 'tbody', _.map rows, (row, i)=>
h 'tr', [
h 'td', [
h 'i.far.fa-check-square',
className: if row.checked then 'fa-check-square' else 'fa-square'
onClick: @toggle_checked(i)
]
h 'td', [
if @state.reorder
h 'button.btn.btn-sm.btn-secondary',
onClick: ()=>
temp_rows = @state.temp_rows
if not _.find(temp_rows, (r)-> row.name == r.name)
temp_rows.push row
@setState temp_rows: temp_rows
, 'select'
else
i+1
]
h 'td', [
if @state.editing_row==i
h '.input-group.input-group-sm', [
h 'input.form-control',
id: "input#{i}"
onChange: (e)=>
rows = @state.rows
rows[i].name = e.target.value
@setState rows: rows
value: row.name
]
else
h 'label', row.name
]
h 'td', [
((row.hkeys?.map (hkey, j)=>
h 'button.btn.btn-sm.btn-info',
className: if row.selected_hkey_index==j then 'active' else ''
onClick: @select_hkey(i, j)
, j
) or [h 'button.btn.btn-sm.btn-danger', [h 'i.fas.fa-sync']])...
]
h 'td', [
if @state.editing_row !=''
if @state.editing_row == i
h 'button.btn.btn-sm.btn-primary',
onClick: ()=>
rows = @state.rows
@setState
editing_row: ''
rows: _.reject rows, (r)-> r.name == ''
, 'Save'
else
h 'div'
else
h 'button.btn.btn-sm.btn-primary',
onClick: @start_edit(i)
, 'Edit'
]
]
h 'tfoot', [
h 'tr', [
h 'td',
colSpan: 2
, [
h 'button.btn.btn-sm',
onClick: @on_click_reorder
, if @state.reorder then 'done' else 'reorder'
]
h 'td',
colSpan: 2
, [
h SearchForm,
input_search: @input_search
clear_search: @clear_search
set_default: @set_default
]
h 'td', [
h 'button.btn.btn-sm.btn-primary',
onClick: @props.run
, 'Run'
]
]
]
]
class SearchForm extends React.Component
constructor: ->
super arguments...
input_search: (e)=>
@props.input_search(e.target.value)
set_default: ()=>
@props.set_default()
clear_search: ()=>
@props.clear_search()
render: ()=>
h '.input-group.input-group-sm.mb-3', [
h 'input.form-control',
onFocus: @set_default
onChange: @input_search
h '.input-group-append', [
h 'span.input-group-text',
onClick: @clear_search
, 'X'
h 'span.input-group-text', 'Search'
]
]
class HostsBox extends React.Component
render: ()=>
h '.card',
style:
position: 'absolute'
top: '45px'
margin: '15px'
, [
h '.card-header.bg-info', [
'Hosts'
h 'button.close', 'x'
]
h HostTable,
headers: [
'hostname'
'hkeys'
]
hosts: [
name: 'item1'
hkeys: [
'PI:KEY:<KEY>END_PI'
'PI:KEY:<KEY>END_PI'
]
selected_hkey_index: 0
checked: true
,
name: 'item2'
]
run: ()=>
]
class Background extends React.Component
constructor: ->
super arguments...
render: ()=>
h 'div',
style:
position: 'fixed'
top: '0'
left : '0'
width: '100%'
height: '100%'
backgroundColor: '#50c6dd'
, [
h HostsBox
]
module.exports = Background
|
[
{
"context": "\n rm \"-rf\", \"guanlecoja-ui\"\n exec \"git clone git@github.com:buildbot/guanlecoja-ui.git\"\n bower_json =\n ",
"end": 444,
"score": 0.8591899871826172,
"start": 430,
"tag": "EMAIL",
"value": "git@github.com"
},
{
"context": "guanlecoja-ui\"\n exec \"git clone git@github.com:buildbot/guanlecoja-ui.git\"\n bower_json =\n name:",
"end": 453,
"score": 0.9995242953300476,
"start": 445,
"tag": "USERNAME",
"value": "buildbot"
}
] | guanlecoja/config.coffee | uglycoyote/guanlecoja-ui | 1 | ### ###############################################################################################
#
# This module contains all configuration for the build process
#
### ###############################################################################################
ANGULAR_TAG = "~1.5.3"
gulp = require("gulp")
require("shelljs/global")
gulp.task "publish", ['default'], ->
rm "-rf", "guanlecoja-ui"
exec "git clone git@github.com:buildbot/guanlecoja-ui.git"
bower_json =
name: "guanlecoja-ui"
version: "1.8.0"
main: ["scripts.js", "styles.css", "fonts/*", "img/*"]
ignore: []
description: "Sets of widgets and integrated bower dependencies useful for dashboard SPAs"
dependencies: {}
cd "guanlecoja-ui"
exec("git reset --hard origin/gh-pages")
cp "-rf", "../static/*", "."
cp "-rf", "../README.md", "."
JSON.stringify(bower_json, null, " ").to("bower.json")
exec("git add .")
exec("git commit -m " + bower_json.version)
exec("git tag " + bower_json.version)
exec("git push origin HEAD:gh-pages")
exec("git push origin " + bower_json.version)
gulp.task "readme", ->
gulp.src("Readme.md").pipe gulp.dest(config.dir.build)
config =
### ###########################################################################################
# Name of the module
### ###########################################################################################
name: 'guanlecoja.ui'
### ###########################################################################################
# Directories
### ###########################################################################################
dir:
# The build folder is where the app resides once it's completely built
build: 'static'
devserver:
# development server port
port: 8080
sourcemaps: true
vendors_apart: true
### ###########################################################################################
# Bower dependancies configuration
### ###########################################################################################
bower:
deps:
jquery:
version: '~2.2.3'
files: 'dist/jquery.js'
angular:
version: ANGULAR_TAG
files: 'angular.js'
"angular-animate":
version: ANGULAR_TAG
files: 'angular-animate.js'
"angular-bootstrap":
version: '~1.1.0'
files: 'ui-bootstrap-tpls.js'
"angular-ui-router":
version: '~0.2.18'
files: 'release/angular-ui-router.js'
"angular-recursion":
version: '~1.0.5'
files: 'angular-recursion.js'
lodash:
version: "~4.11.1"
files: 'dist/lodash.js'
testdeps:
"angular-mocks":
version: ANGULAR_TAG
files: "angular-mocks.js"
"angular-sanitize":
version: ANGULAR_TAG
files: "angular-sanitize.js"
"showdown":
version: '0.3.1'
files: "compressed/showdown.js"
buildtasks: ['scripts', 'styles', 'fonts', 'imgs',
'index', 'tests', 'generatedfixtures', 'fixtures', 'vendors', 'templates', 'readme']
module.exports = config
| 25696 | ### ###############################################################################################
#
# This module contains all configuration for the build process
#
### ###############################################################################################
ANGULAR_TAG = "~1.5.3"
gulp = require("gulp")
require("shelljs/global")
gulp.task "publish", ['default'], ->
rm "-rf", "guanlecoja-ui"
exec "git clone <EMAIL>:buildbot/guanlecoja-ui.git"
bower_json =
name: "guanlecoja-ui"
version: "1.8.0"
main: ["scripts.js", "styles.css", "fonts/*", "img/*"]
ignore: []
description: "Sets of widgets and integrated bower dependencies useful for dashboard SPAs"
dependencies: {}
cd "guanlecoja-ui"
exec("git reset --hard origin/gh-pages")
cp "-rf", "../static/*", "."
cp "-rf", "../README.md", "."
JSON.stringify(bower_json, null, " ").to("bower.json")
exec("git add .")
exec("git commit -m " + bower_json.version)
exec("git tag " + bower_json.version)
exec("git push origin HEAD:gh-pages")
exec("git push origin " + bower_json.version)
gulp.task "readme", ->
gulp.src("Readme.md").pipe gulp.dest(config.dir.build)
config =
### ###########################################################################################
# Name of the module
### ###########################################################################################
name: 'guanlecoja.ui'
### ###########################################################################################
# Directories
### ###########################################################################################
dir:
# The build folder is where the app resides once it's completely built
build: 'static'
devserver:
# development server port
port: 8080
sourcemaps: true
vendors_apart: true
### ###########################################################################################
# Bower dependancies configuration
### ###########################################################################################
bower:
deps:
jquery:
version: '~2.2.3'
files: 'dist/jquery.js'
angular:
version: ANGULAR_TAG
files: 'angular.js'
"angular-animate":
version: ANGULAR_TAG
files: 'angular-animate.js'
"angular-bootstrap":
version: '~1.1.0'
files: 'ui-bootstrap-tpls.js'
"angular-ui-router":
version: '~0.2.18'
files: 'release/angular-ui-router.js'
"angular-recursion":
version: '~1.0.5'
files: 'angular-recursion.js'
lodash:
version: "~4.11.1"
files: 'dist/lodash.js'
testdeps:
"angular-mocks":
version: ANGULAR_TAG
files: "angular-mocks.js"
"angular-sanitize":
version: ANGULAR_TAG
files: "angular-sanitize.js"
"showdown":
version: '0.3.1'
files: "compressed/showdown.js"
buildtasks: ['scripts', 'styles', 'fonts', 'imgs',
'index', 'tests', 'generatedfixtures', 'fixtures', 'vendors', 'templates', 'readme']
module.exports = config
| true | ### ###############################################################################################
#
# This module contains all configuration for the build process
#
### ###############################################################################################
ANGULAR_TAG = "~1.5.3"
gulp = require("gulp")
require("shelljs/global")
gulp.task "publish", ['default'], ->
rm "-rf", "guanlecoja-ui"
exec "git clone PI:EMAIL:<EMAIL>END_PI:buildbot/guanlecoja-ui.git"
bower_json =
name: "guanlecoja-ui"
version: "1.8.0"
main: ["scripts.js", "styles.css", "fonts/*", "img/*"]
ignore: []
description: "Sets of widgets and integrated bower dependencies useful for dashboard SPAs"
dependencies: {}
cd "guanlecoja-ui"
exec("git reset --hard origin/gh-pages")
cp "-rf", "../static/*", "."
cp "-rf", "../README.md", "."
JSON.stringify(bower_json, null, " ").to("bower.json")
exec("git add .")
exec("git commit -m " + bower_json.version)
exec("git tag " + bower_json.version)
exec("git push origin HEAD:gh-pages")
exec("git push origin " + bower_json.version)
gulp.task "readme", ->
gulp.src("Readme.md").pipe gulp.dest(config.dir.build)
config =
### ###########################################################################################
# Name of the module
### ###########################################################################################
name: 'guanlecoja.ui'
### ###########################################################################################
# Directories
### ###########################################################################################
dir:
# The build folder is where the app resides once it's completely built
build: 'static'
devserver:
# development server port
port: 8080
sourcemaps: true
vendors_apart: true
### ###########################################################################################
# Bower dependancies configuration
### ###########################################################################################
bower:
deps:
jquery:
version: '~2.2.3'
files: 'dist/jquery.js'
angular:
version: ANGULAR_TAG
files: 'angular.js'
"angular-animate":
version: ANGULAR_TAG
files: 'angular-animate.js'
"angular-bootstrap":
version: '~1.1.0'
files: 'ui-bootstrap-tpls.js'
"angular-ui-router":
version: '~0.2.18'
files: 'release/angular-ui-router.js'
"angular-recursion":
version: '~1.0.5'
files: 'angular-recursion.js'
lodash:
version: "~4.11.1"
files: 'dist/lodash.js'
testdeps:
"angular-mocks":
version: ANGULAR_TAG
files: "angular-mocks.js"
"angular-sanitize":
version: ANGULAR_TAG
files: "angular-sanitize.js"
"showdown":
version: '0.3.1'
files: "compressed/showdown.js"
buildtasks: ['scripts', 'styles', 'fonts', 'imgs',
'index', 'tests', 'generatedfixtures', 'fixtures', 'vendors', 'templates', 'readme']
module.exports = config
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999111294746399,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/profile-page/achievement-badge.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { Img2x } from 'img2x'
import { div, img } from 'react-dom-factories'
el = React.createElement
export class AchievementBadge extends React.PureComponent
constructor: (props) ->
super props
@tooltip = React.createRef()
render: =>
@tooltipId = "#{@props.achievement.slug}-#{osu.uuid()}"
badgeClass = osu.classWithModifiers('badge-achievement', @props.modifiers)
tooltipBadgeClass = 'badge-achievement badge-achievement--dynamic-height'
if !@props.userAchievement?
tooltipBadgeClass += ' badge-achievement--locked'
badgeClass += ' badge-achievement--locked'
div
className: "js-tooltip-achievement #{badgeClass} #{@props.additionalClasses}",
el Img2x,
alt: @props.achievement.name
className: 'badge-achievement__image'
onMouseOver: @onMouseOver
src: @props.achievement.icon_url
div
className: 'hidden'
div
className: 'js-tooltip-achievement--content tooltip-achievement__main'
ref: @tooltip
div
className: 'tooltip-achievement__badge'
div
className: tooltipBadgeClass
el Img2x,
alt: @props.achievement.name
className: 'badge-achievement__image'
src: @props.achievement.icon_url
div
className: 'tooltip-achievement__grouping'
@props.achievement.grouping
div
className: "tooltip-achievement__detail-container #{if @props.achievement.instructions? then 'tooltip-achievement__detail-container--hoverable' else ''}"
div
className: "tooltip-achievement__detail tooltip-achievement__detail--normal"
div
className: 'tooltip-achievement__name'
@props.achievement.name
div
className: 'tooltip-achievement__description'
dangerouslySetInnerHTML:
__html: @props.achievement.description
if @props.achievement.instructions?
div
className: 'tooltip-achievement__detail tooltip-achievement__detail--hover'
div
className: 'tooltip-achievement__instructions'
dangerouslySetInnerHTML:
__html: @props.achievement.instructions
if @props.userAchievement?
div
className: 'tooltip-achievement__date'
dangerouslySetInnerHTML:
__html: osu.trans('users.show.extra.achievements.achieved-on', date: @achievementDateElem())
else
div
className: 'tooltip-achievement__date'
osu.trans('users.show.extra.achievements.locked')
achievementDateElem: =>
ret = document.createElement 'span'
ret.classList.add 'js-tooltip-time'
ret.title = @props.userAchievement.achieved_at
ret.textContent = moment(@props.userAchievement.achieved_at).format 'll'
ret.outerHTML
onMouseOver: (event) =>
event.persist()
elem = event.currentTarget
return if elem._loadedTooltipId == @tooltipId
$content = $(@tooltip.current).clone()
if elem._loadedTooltipId?
elem._loadedTooltipId = @tooltipId
$(elem).qtip 'set', 'content.text': $content
return
elem._loadedTooltipId = @tooltipId
classes = 'qtip tooltip-achievement'
classes += ' tooltip-achievement--locked' if !@props.userAchievement?
options =
overwrite: false
content: $content
position:
my: 'bottom center'
at: 'top center'
viewport: $(window)
adjust:
scroll: false
show:
event: event.type
ready: true
delay: 200
hide:
fixed: true
delay: 200
style:
classes: classes
tip:
width: 30
height: 20
$(elem).qtip options, event
| 135114 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { Img2x } from 'img2x'
import { div, img } from 'react-dom-factories'
el = React.createElement
export class AchievementBadge extends React.PureComponent
constructor: (props) ->
super props
@tooltip = React.createRef()
render: =>
@tooltipId = "#{@props.achievement.slug}-#{osu.uuid()}"
badgeClass = osu.classWithModifiers('badge-achievement', @props.modifiers)
tooltipBadgeClass = 'badge-achievement badge-achievement--dynamic-height'
if !@props.userAchievement?
tooltipBadgeClass += ' badge-achievement--locked'
badgeClass += ' badge-achievement--locked'
div
className: "js-tooltip-achievement #{badgeClass} #{@props.additionalClasses}",
el Img2x,
alt: @props.achievement.name
className: 'badge-achievement__image'
onMouseOver: @onMouseOver
src: @props.achievement.icon_url
div
className: 'hidden'
div
className: 'js-tooltip-achievement--content tooltip-achievement__main'
ref: @tooltip
div
className: 'tooltip-achievement__badge'
div
className: tooltipBadgeClass
el Img2x,
alt: @props.achievement.name
className: 'badge-achievement__image'
src: @props.achievement.icon_url
div
className: 'tooltip-achievement__grouping'
@props.achievement.grouping
div
className: "tooltip-achievement__detail-container #{if @props.achievement.instructions? then 'tooltip-achievement__detail-container--hoverable' else ''}"
div
className: "tooltip-achievement__detail tooltip-achievement__detail--normal"
div
className: 'tooltip-achievement__name'
@props.achievement.name
div
className: 'tooltip-achievement__description'
dangerouslySetInnerHTML:
__html: @props.achievement.description
if @props.achievement.instructions?
div
className: 'tooltip-achievement__detail tooltip-achievement__detail--hover'
div
className: 'tooltip-achievement__instructions'
dangerouslySetInnerHTML:
__html: @props.achievement.instructions
if @props.userAchievement?
div
className: 'tooltip-achievement__date'
dangerouslySetInnerHTML:
__html: osu.trans('users.show.extra.achievements.achieved-on', date: @achievementDateElem())
else
div
className: 'tooltip-achievement__date'
osu.trans('users.show.extra.achievements.locked')
achievementDateElem: =>
ret = document.createElement 'span'
ret.classList.add 'js-tooltip-time'
ret.title = @props.userAchievement.achieved_at
ret.textContent = moment(@props.userAchievement.achieved_at).format 'll'
ret.outerHTML
onMouseOver: (event) =>
event.persist()
elem = event.currentTarget
return if elem._loadedTooltipId == @tooltipId
$content = $(@tooltip.current).clone()
if elem._loadedTooltipId?
elem._loadedTooltipId = @tooltipId
$(elem).qtip 'set', 'content.text': $content
return
elem._loadedTooltipId = @tooltipId
classes = 'qtip tooltip-achievement'
classes += ' tooltip-achievement--locked' if !@props.userAchievement?
options =
overwrite: false
content: $content
position:
my: 'bottom center'
at: 'top center'
viewport: $(window)
adjust:
scroll: false
show:
event: event.type
ready: true
delay: 200
hide:
fixed: true
delay: 200
style:
classes: classes
tip:
width: 30
height: 20
$(elem).qtip options, event
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { Img2x } from 'img2x'
import { div, img } from 'react-dom-factories'
el = React.createElement
export class AchievementBadge extends React.PureComponent
constructor: (props) ->
super props
@tooltip = React.createRef()
render: =>
@tooltipId = "#{@props.achievement.slug}-#{osu.uuid()}"
badgeClass = osu.classWithModifiers('badge-achievement', @props.modifiers)
tooltipBadgeClass = 'badge-achievement badge-achievement--dynamic-height'
if !@props.userAchievement?
tooltipBadgeClass += ' badge-achievement--locked'
badgeClass += ' badge-achievement--locked'
div
className: "js-tooltip-achievement #{badgeClass} #{@props.additionalClasses}",
el Img2x,
alt: @props.achievement.name
className: 'badge-achievement__image'
onMouseOver: @onMouseOver
src: @props.achievement.icon_url
div
className: 'hidden'
div
className: 'js-tooltip-achievement--content tooltip-achievement__main'
ref: @tooltip
div
className: 'tooltip-achievement__badge'
div
className: tooltipBadgeClass
el Img2x,
alt: @props.achievement.name
className: 'badge-achievement__image'
src: @props.achievement.icon_url
div
className: 'tooltip-achievement__grouping'
@props.achievement.grouping
div
className: "tooltip-achievement__detail-container #{if @props.achievement.instructions? then 'tooltip-achievement__detail-container--hoverable' else ''}"
div
className: "tooltip-achievement__detail tooltip-achievement__detail--normal"
div
className: 'tooltip-achievement__name'
@props.achievement.name
div
className: 'tooltip-achievement__description'
dangerouslySetInnerHTML:
__html: @props.achievement.description
if @props.achievement.instructions?
div
className: 'tooltip-achievement__detail tooltip-achievement__detail--hover'
div
className: 'tooltip-achievement__instructions'
dangerouslySetInnerHTML:
__html: @props.achievement.instructions
if @props.userAchievement?
div
className: 'tooltip-achievement__date'
dangerouslySetInnerHTML:
__html: osu.trans('users.show.extra.achievements.achieved-on', date: @achievementDateElem())
else
div
className: 'tooltip-achievement__date'
osu.trans('users.show.extra.achievements.locked')
achievementDateElem: =>
ret = document.createElement 'span'
ret.classList.add 'js-tooltip-time'
ret.title = @props.userAchievement.achieved_at
ret.textContent = moment(@props.userAchievement.achieved_at).format 'll'
ret.outerHTML
onMouseOver: (event) =>
event.persist()
elem = event.currentTarget
return if elem._loadedTooltipId == @tooltipId
$content = $(@tooltip.current).clone()
if elem._loadedTooltipId?
elem._loadedTooltipId = @tooltipId
$(elem).qtip 'set', 'content.text': $content
return
elem._loadedTooltipId = @tooltipId
classes = 'qtip tooltip-achievement'
classes += ' tooltip-achievement--locked' if !@props.userAchievement?
options =
overwrite: false
content: $content
position:
my: 'bottom center'
at: 'top center'
viewport: $(window)
adjust:
scroll: false
show:
event: event.type
ready: true
delay: 200
hide:
fixed: true
delay: 200
style:
classes: classes
tip:
width: 30
height: 20
$(elem).qtip options, event
|
[
{
"context": "Each ->\n process.env.HUBOT_ESA_ACCESS_TOKEN = 'dummy'\n process.env.HUBOT_ESA_TEAM = 'ginger'\n pr",
"end": 464,
"score": 0.6873047947883606,
"start": 459,
"tag": "KEY",
"value": "dummy"
},
{
"context": " process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN = 'purrs'\n process.env.HUBOT_ESA_JUST_EMIT = 'false'\n ",
"end": 683,
"score": 0.6899951696395874,
"start": 678,
"tag": "PASSWORD",
"value": "purrs"
},
{
"context": "ame}/fixtures/stats.json\")\n room.user.say('gingy', '@hubot esa stats')\n setTimeout done, 20",
"end": 1507,
"score": 0.9217277765274048,
"start": 1502,
"tag": "USERNAME",
"value": "gingy"
},
{
"context": " beforeEach (done) ->\n room.user.say('gingy', 'https://ginger.esa.io/posts/1390')\n ",
"end": 2485,
"score": 0.7721875905990601,
"start": 2480,
"tag": "NAME",
"value": "gingy"
},
{
"context": " beforeEach (done) ->\n room.user.say('gingy', 'https://ginger.esa.io/posts/1390#1-1-1')\n ",
"end": 2996,
"score": 0.4817507266998291,
"start": 2992,
"tag": "USERNAME",
"value": "ging"
},
{
"context": "foreEach (done) ->\n room.user.say('gingy', 'https://ginger.esa.io/posts/1390#1-1-1')\n ",
"end": 2997,
"score": 0.9437437057495117,
"start": 2996,
"tag": "NAME",
"value": "y"
},
{
"context": "beforeEach (done) ->\n room.user.say('gingy', 'https://zachary.esa.io/posts/1390')\n ",
"end": 3361,
"score": 0.5990707278251648,
"start": 3360,
"tag": "NAME",
"value": "y"
},
{
"context": " expect(emitted_data.user.screen_name).to.equal 'fukayatsu'\n expect(emitted_data.post.name).to.equa",
"end": 7698,
"score": 0.9852221608161926,
"start": 7689,
"tag": "USERNAME",
"value": "fukayatsu"
},
{
"context": " expect(emitted_data.post.name).to.equal 'たいとる'\n expect(emitted_data.comment).to.equal ",
"end": 7755,
"score": 0.8863059282302856,
"start": 7751,
"tag": "NAME",
"value": "たいとる"
},
{
"context": "ageBody()).contain(\"fukayatsu created a new post: たいとる\\n>Create post.\\nhttps://example.esa.io/posts/1",
"end": 7918,
"score": 0.8461175560951233,
"start": 7917,
"tag": "NAME",
"value": "た"
},
{
"context": " expect(emitted_data.user.screen_name).to.equal 'fukayatsu'\n expect(emitted_data.post.name).to.equa",
"end": 8515,
"score": 0.9844650030136108,
"start": 8506,
"tag": "USERNAME",
"value": "fukayatsu"
},
{
"context": " expect(emitted_data.post.name).to.equal 'たいとる'\n expect(emitted_data.comment).to.equal ",
"end": 8572,
"score": 0.8366013169288635,
"start": 8568,
"tag": "NAME",
"value": "たいとる"
},
{
"context": " expect(emitted_data.user.screen_name).to.equal 'fukayatsu'\n expect(emitted_data.post.name).to.equa",
"end": 9333,
"score": 0.9970962405204773,
"start": 9324,
"tag": "USERNAME",
"value": "fukayatsu"
},
{
"context": " expect(emitted_data.user.screen_name).to.equal 'fukayatsu'\n expect(emitted_data.post.name).to.equa",
"end": 10161,
"score": 0.9974269866943359,
"start": 10152,
"tag": "USERNAME",
"value": "fukayatsu"
},
{
"context": " expect(emitted_data.user.screen_name).to.equal 'fukayatsu'\n expect(emitted_data.post).to.equal nul",
"end": 11011,
"score": 0.9982087016105652,
"start": 11002,
"tag": "USERNAME",
"value": "fukayatsu"
},
{
"context": "ect(lastMessageBody()).contain(\"New member joined: Atsuo Fukaya(fukayatsu)\")\n\n context 'with recently and du",
"end": 11224,
"score": 0.9998030662536621,
"start": 11212,
"tag": "NAME",
"value": "Atsuo Fukaya"
},
{
"context": "\n http_opt['headers']['User-Agent'] = 'gingypurrs'\n http_opt\n\n it 'responds with ",
"end": 12665,
"score": 0.784852921962738,
"start": 12655,
"tag": "USERNAME",
"value": "gingypurrs"
}
] | test/esa-test.coffee | hmsk/hubot-esa | 9 | Helper = require('hubot-test-helper')
chai = require 'chai'
http = require 'http'
nock = require 'nock'
fs = require 'fs'
crypto = require 'crypto'
expect = chai.expect
helper = new Helper('../src/esa.coffee')
process.env.EXPRESS_PORT = 8039
describe 'esa', ->
room = null
esaDeliveryKeyOfBrain = 'esaWebhookDeliveries'
lastMessageBody = ()->
room.messages[room.messages.length - 1][1]
beforeEach ->
process.env.HUBOT_ESA_ACCESS_TOKEN = 'dummy'
process.env.HUBOT_ESA_TEAM = 'ginger'
process.env.HUBOT_ESA_WEBHOOK_DEFAULT_ROOM = 'general'
process.env.HUBOT_ESA_WEBHOOK_ENDPOINT = '/hubot/ginger'
process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN = 'purrs'
process.env.HUBOT_ESA_JUST_EMIT = 'false'
process.env.HUBOT_ESA_DEBUG = 'false'
room = helper.createRoom()
afterEach ->
room.destroy()
describe 'Response to chatroom', ->
nockScope = null
beforeEach ->
nock.disableNetConnect()
nockScope = nock('https://api.esa.io')
afterEach ->
nock.cleanAll()
context 'someone requests stats', ->
emitted = false
emitted_stats = null
beforeEach (done) ->
room.robot.on 'esa.hear.stats', (res, stats) ->
emitted = true
emitted_stats = stats
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/stats")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/stats.json")
room.user.say('gingy', '@hubot esa stats')
setTimeout done, 200
it 'responds stats', ->
expect(lastMessageBody()).contain("Members: 20\nPosts: 1959\nComments: 2695\nStars: 3115\nDaily Active Users: 8\nWeekly Active Users: 14\nMonthly Active Users: 15")
it 'emits esa.hear.stats event with args', ->
expect(emitted).to.equal true
expect(emitted_stats.members).to.equal 20
describe 'post', ->
context 'in own team', ->
emitted = false
emitted_post = null
beforeEach ->
room.robot.on 'esa.hear.post', (res, post) ->
emitted = true
emitted_post = post
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/posts/1390")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/post.json")
context 'someone says post url', ->
beforeEach (done) ->
room.user.say('gingy', 'https://ginger.esa.io/posts/1390')
setTimeout done, 200
it 'send message about post', ->
expect(lastMessageBody()).contain('日報/2015/05/09/hi! #api #dev\nStars: 1, Watchers: 1, Comments: 1, Tasks: 1/1')
it 'emits esa.hear.post event with args', ->
expect(emitted).to.equal true
expect(emitted_post.name).to.equal 'hi!'
context 'someone says post url wtih anchor', ->
beforeEach (done) ->
room.user.say('gingy', 'https://ginger.esa.io/posts/1390#1-1-1')
setTimeout done, 200
it 'send message about post', ->
expect(lastMessageBody()).contain('日報/2015/05/09/hi! #api #dev\nStars: 1, Watchers: 1, Comments: 1, Tasks: 1/1')
context 'in other team, someone says post url', ->
beforeEach (done) ->
room.user.say('gingy', 'https://zachary.esa.io/posts/1390')
setTimeout done, 200
it 'nothing to say', ->
expect(room.messages).to.eql [
['gingy', 'https://zachary.esa.io/posts/1390']
]
describe 'comment', ->
context 'in own team', ->
emitted = false
emitted_comment = null
emitted_post = null
beforeEach (done) ->
room.robot.on 'esa.hear.comment', (res, comment, post) ->
emitted = true
emitted_comment = comment
emitted_post = post
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/comments/2121")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/comment.json")
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/posts/1390")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/post.json")
room.user.say('gingy', 'https://ginger.esa.io/posts/1390#comment-2121')
setTimeout done, 200
it 'send message about comment', ->
expect(lastMessageBody()).contain '読みたい'
expect(lastMessageBody()).contain 'hi!'
it 'emits esa.hear.comment event with args', ->
expect(emitted).to.equal true
expect(emitted_comment.body_md).to.equal '読みたい'
expect(emitted_post.name).to.equal 'hi!'
context 'in other team, someone says comment url', ->
beforeEach (done) ->
room.user.say('gingy', 'https://zachary.esa.io/posts/1390#comment-2121')
setTimeout done, 200
it 'nothing to say', ->
expect(room.messages).to.eql [
['gingy', 'https://zachary.esa.io/posts/1390#comment-2121']
]
describe 'Receive webhook', ->
executeWebhook = (fixture_name, callback, opt_callback) ->
body = JSON.stringify(JSON.parse(fs.readFileSync("#{__dirname}/fixtures/#{fixture_name}.json", 'utf-8')))
http_opt =
hostname: 'localhost'
port: 8039
path: '/hubot/ginger'
method: 'POST'
headers:
'Content-Type': 'application/json'
'User-Agent': 'esa-Hookshot/v1'
'X-Esa-Delivery': '1234'
'X-Esa-Signature': generateSignature(body)
unless opt_callback is undefined
http_opt = opt_callback(http_opt)
req = http.request http_opt, (response) => callback(response)
req.write(body)
req.end()
# https://docs.esa.io/posts/37#3-4-0
generateSignature = (body)->
'sha256=' + crypto.createHmac('sha256', process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN).update(body, 'utf-8').digest('hex')
http_opt = null
emitted = null
emitted_kind = null
emitted_data = null
beforeEach ->
room.robot.brain.set esaDeliveryKeyOfBrain, undefined
emitted = false
emitted_data = null
emitted_kind = null
room.robot.on 'esa.webhook', (kind, data) ->
emitted = true
emitted_kind = kind
emitted_data = data
nock.enableNetConnect()
afterEach ->
nock.disableNetConnect()
describe 'as valid request', ->
context 'with unknown formated body', ->
beforeEach (done) ->
executeWebhook 'webhook_unknown', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal null
expect(emitted_data).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("Unknown kind of Webhook received null")
context 'with post_create event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_create', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_create'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'たいとる'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu created a new post: たいとる\n>Create post.\nhttps://example.esa.io/posts/1253")
context 'with post_update event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_update', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_update'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'たいとる'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu updated the post: たいとる\n>Update post.\nhttps://example.esa.io/posts/1253")
context 'with post_archive event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_archive', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_archive'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'Archived/たいとる'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu archived the post: Archived/たいとる\nhttps://example.esa.io/posts/1253")
context 'with comment_create event data', ->
beforeEach (done) ->
executeWebhook 'webhook_comment_create', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'comment_create'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'Archived/たいとる'
expect(emitted_data.comment.body_md).to.equal 'こめんと'
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu posted a comment to Archived/たいとる\n>こめんと\nhttps://example.esa.io/posts/1253#comment-6385")
context 'with member_join event data', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'member_join'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post).to.equal null
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("New member joined: Atsuo Fukaya(fukayatsu)")
context 'with recently and duplicated hooked data', ->
beforeEach (done) ->
room.robot.brain.set esaDeliveryKeyOfBrain, { '1234': new Date().getTime() - 3600 * 1000 * 10 }
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 409', ->
expect(@res.statusCode).to.equal 409
it 'not emit esa.webhook event', ->
expect(emitted).to.equal false
it 'not sends message', ->
expect(room.messages).to.be.empty
context 'with duplicated but old hooked data', ->
beforeEach (done) ->
room.robot.brain.set esaDeliveryKeyOfBrain, { '1234': new Date().getTime() - 3600 * 1000 * 50 }
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'not emit esa.webhook event', ->
expect(emitted).to.equal true
it 'not sends message', ->
expect(room.messages).to.not.be.empty
describe 'as invalid request', ->
context 'with unkown User-Agent', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
, (http_opt) ->
http_opt['headers']['User-Agent'] = 'gingypurrs'
http_opt
it 'responds with status 401', ->
expect(@res.statusCode).to.equal 401
context 'with invalid signature', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
, (http_opt) ->
http_opt['headers']['X-Esa-Signature'] = 'sha256-soinvalid'
http_opt
it 'responds with status 401', ->
expect(@res.statusCode).to.equal 401
| 154091 | Helper = require('hubot-test-helper')
chai = require 'chai'
http = require 'http'
nock = require 'nock'
fs = require 'fs'
crypto = require 'crypto'
expect = chai.expect
helper = new Helper('../src/esa.coffee')
process.env.EXPRESS_PORT = 8039
describe 'esa', ->
room = null
esaDeliveryKeyOfBrain = 'esaWebhookDeliveries'
lastMessageBody = ()->
room.messages[room.messages.length - 1][1]
beforeEach ->
process.env.HUBOT_ESA_ACCESS_TOKEN = '<KEY>'
process.env.HUBOT_ESA_TEAM = 'ginger'
process.env.HUBOT_ESA_WEBHOOK_DEFAULT_ROOM = 'general'
process.env.HUBOT_ESA_WEBHOOK_ENDPOINT = '/hubot/ginger'
process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN = '<PASSWORD>'
process.env.HUBOT_ESA_JUST_EMIT = 'false'
process.env.HUBOT_ESA_DEBUG = 'false'
room = helper.createRoom()
afterEach ->
room.destroy()
describe 'Response to chatroom', ->
nockScope = null
beforeEach ->
nock.disableNetConnect()
nockScope = nock('https://api.esa.io')
afterEach ->
nock.cleanAll()
context 'someone requests stats', ->
emitted = false
emitted_stats = null
beforeEach (done) ->
room.robot.on 'esa.hear.stats', (res, stats) ->
emitted = true
emitted_stats = stats
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/stats")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/stats.json")
room.user.say('gingy', '@hubot esa stats')
setTimeout done, 200
it 'responds stats', ->
expect(lastMessageBody()).contain("Members: 20\nPosts: 1959\nComments: 2695\nStars: 3115\nDaily Active Users: 8\nWeekly Active Users: 14\nMonthly Active Users: 15")
it 'emits esa.hear.stats event with args', ->
expect(emitted).to.equal true
expect(emitted_stats.members).to.equal 20
describe 'post', ->
context 'in own team', ->
emitted = false
emitted_post = null
beforeEach ->
room.robot.on 'esa.hear.post', (res, post) ->
emitted = true
emitted_post = post
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/posts/1390")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/post.json")
context 'someone says post url', ->
beforeEach (done) ->
room.user.say('<NAME>', 'https://ginger.esa.io/posts/1390')
setTimeout done, 200
it 'send message about post', ->
expect(lastMessageBody()).contain('日報/2015/05/09/hi! #api #dev\nStars: 1, Watchers: 1, Comments: 1, Tasks: 1/1')
it 'emits esa.hear.post event with args', ->
expect(emitted).to.equal true
expect(emitted_post.name).to.equal 'hi!'
context 'someone says post url wtih anchor', ->
beforeEach (done) ->
room.user.say('ging<NAME>', 'https://ginger.esa.io/posts/1390#1-1-1')
setTimeout done, 200
it 'send message about post', ->
expect(lastMessageBody()).contain('日報/2015/05/09/hi! #api #dev\nStars: 1, Watchers: 1, Comments: 1, Tasks: 1/1')
context 'in other team, someone says post url', ->
beforeEach (done) ->
room.user.say('ging<NAME>', 'https://zachary.esa.io/posts/1390')
setTimeout done, 200
it 'nothing to say', ->
expect(room.messages).to.eql [
['gingy', 'https://zachary.esa.io/posts/1390']
]
describe 'comment', ->
context 'in own team', ->
emitted = false
emitted_comment = null
emitted_post = null
beforeEach (done) ->
room.robot.on 'esa.hear.comment', (res, comment, post) ->
emitted = true
emitted_comment = comment
emitted_post = post
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/comments/2121")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/comment.json")
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/posts/1390")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/post.json")
room.user.say('gingy', 'https://ginger.esa.io/posts/1390#comment-2121')
setTimeout done, 200
it 'send message about comment', ->
expect(lastMessageBody()).contain '読みたい'
expect(lastMessageBody()).contain 'hi!'
it 'emits esa.hear.comment event with args', ->
expect(emitted).to.equal true
expect(emitted_comment.body_md).to.equal '読みたい'
expect(emitted_post.name).to.equal 'hi!'
context 'in other team, someone says comment url', ->
beforeEach (done) ->
room.user.say('gingy', 'https://zachary.esa.io/posts/1390#comment-2121')
setTimeout done, 200
it 'nothing to say', ->
expect(room.messages).to.eql [
['gingy', 'https://zachary.esa.io/posts/1390#comment-2121']
]
describe 'Receive webhook', ->
executeWebhook = (fixture_name, callback, opt_callback) ->
body = JSON.stringify(JSON.parse(fs.readFileSync("#{__dirname}/fixtures/#{fixture_name}.json", 'utf-8')))
http_opt =
hostname: 'localhost'
port: 8039
path: '/hubot/ginger'
method: 'POST'
headers:
'Content-Type': 'application/json'
'User-Agent': 'esa-Hookshot/v1'
'X-Esa-Delivery': '1234'
'X-Esa-Signature': generateSignature(body)
unless opt_callback is undefined
http_opt = opt_callback(http_opt)
req = http.request http_opt, (response) => callback(response)
req.write(body)
req.end()
# https://docs.esa.io/posts/37#3-4-0
generateSignature = (body)->
'sha256=' + crypto.createHmac('sha256', process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN).update(body, 'utf-8').digest('hex')
http_opt = null
emitted = null
emitted_kind = null
emitted_data = null
beforeEach ->
room.robot.brain.set esaDeliveryKeyOfBrain, undefined
emitted = false
emitted_data = null
emitted_kind = null
room.robot.on 'esa.webhook', (kind, data) ->
emitted = true
emitted_kind = kind
emitted_data = data
nock.enableNetConnect()
afterEach ->
nock.disableNetConnect()
describe 'as valid request', ->
context 'with unknown formated body', ->
beforeEach (done) ->
executeWebhook 'webhook_unknown', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal null
expect(emitted_data).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("Unknown kind of Webhook received null")
context 'with post_create event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_create', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_create'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal '<NAME>'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu created a new post: <NAME>いとる\n>Create post.\nhttps://example.esa.io/posts/1253")
context 'with post_update event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_update', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_update'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal '<NAME>'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu updated the post: たいとる\n>Update post.\nhttps://example.esa.io/posts/1253")
context 'with post_archive event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_archive', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_archive'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'Archived/たいとる'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu archived the post: Archived/たいとる\nhttps://example.esa.io/posts/1253")
context 'with comment_create event data', ->
beforeEach (done) ->
executeWebhook 'webhook_comment_create', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'comment_create'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'Archived/たいとる'
expect(emitted_data.comment.body_md).to.equal 'こめんと'
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu posted a comment to Archived/たいとる\n>こめんと\nhttps://example.esa.io/posts/1253#comment-6385")
context 'with member_join event data', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'member_join'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post).to.equal null
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("New member joined: <NAME>(fukayatsu)")
context 'with recently and duplicated hooked data', ->
beforeEach (done) ->
room.robot.brain.set esaDeliveryKeyOfBrain, { '1234': new Date().getTime() - 3600 * 1000 * 10 }
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 409', ->
expect(@res.statusCode).to.equal 409
it 'not emit esa.webhook event', ->
expect(emitted).to.equal false
it 'not sends message', ->
expect(room.messages).to.be.empty
context 'with duplicated but old hooked data', ->
beforeEach (done) ->
room.robot.brain.set esaDeliveryKeyOfBrain, { '1234': new Date().getTime() - 3600 * 1000 * 50 }
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'not emit esa.webhook event', ->
expect(emitted).to.equal true
it 'not sends message', ->
expect(room.messages).to.not.be.empty
describe 'as invalid request', ->
context 'with unkown User-Agent', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
, (http_opt) ->
http_opt['headers']['User-Agent'] = 'gingypurrs'
http_opt
it 'responds with status 401', ->
expect(@res.statusCode).to.equal 401
context 'with invalid signature', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
, (http_opt) ->
http_opt['headers']['X-Esa-Signature'] = 'sha256-soinvalid'
http_opt
it 'responds with status 401', ->
expect(@res.statusCode).to.equal 401
| true | Helper = require('hubot-test-helper')
chai = require 'chai'
http = require 'http'
nock = require 'nock'
fs = require 'fs'
crypto = require 'crypto'
expect = chai.expect
helper = new Helper('../src/esa.coffee')
process.env.EXPRESS_PORT = 8039
describe 'esa', ->
room = null
esaDeliveryKeyOfBrain = 'esaWebhookDeliveries'
lastMessageBody = ()->
room.messages[room.messages.length - 1][1]
beforeEach ->
process.env.HUBOT_ESA_ACCESS_TOKEN = 'PI:KEY:<KEY>END_PI'
process.env.HUBOT_ESA_TEAM = 'ginger'
process.env.HUBOT_ESA_WEBHOOK_DEFAULT_ROOM = 'general'
process.env.HUBOT_ESA_WEBHOOK_ENDPOINT = '/hubot/ginger'
process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN = 'PI:PASSWORD:<PASSWORD>END_PI'
process.env.HUBOT_ESA_JUST_EMIT = 'false'
process.env.HUBOT_ESA_DEBUG = 'false'
room = helper.createRoom()
afterEach ->
room.destroy()
describe 'Response to chatroom', ->
nockScope = null
beforeEach ->
nock.disableNetConnect()
nockScope = nock('https://api.esa.io')
afterEach ->
nock.cleanAll()
context 'someone requests stats', ->
emitted = false
emitted_stats = null
beforeEach (done) ->
room.robot.on 'esa.hear.stats', (res, stats) ->
emitted = true
emitted_stats = stats
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/stats")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/stats.json")
room.user.say('gingy', '@hubot esa stats')
setTimeout done, 200
it 'responds stats', ->
expect(lastMessageBody()).contain("Members: 20\nPosts: 1959\nComments: 2695\nStars: 3115\nDaily Active Users: 8\nWeekly Active Users: 14\nMonthly Active Users: 15")
it 'emits esa.hear.stats event with args', ->
expect(emitted).to.equal true
expect(emitted_stats.members).to.equal 20
describe 'post', ->
context 'in own team', ->
emitted = false
emitted_post = null
beforeEach ->
room.robot.on 'esa.hear.post', (res, post) ->
emitted = true
emitted_post = post
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/posts/1390")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/post.json")
context 'someone says post url', ->
beforeEach (done) ->
room.user.say('PI:NAME:<NAME>END_PI', 'https://ginger.esa.io/posts/1390')
setTimeout done, 200
it 'send message about post', ->
expect(lastMessageBody()).contain('日報/2015/05/09/hi! #api #dev\nStars: 1, Watchers: 1, Comments: 1, Tasks: 1/1')
it 'emits esa.hear.post event with args', ->
expect(emitted).to.equal true
expect(emitted_post.name).to.equal 'hi!'
context 'someone says post url wtih anchor', ->
beforeEach (done) ->
room.user.say('gingPI:NAME:<NAME>END_PI', 'https://ginger.esa.io/posts/1390#1-1-1')
setTimeout done, 200
it 'send message about post', ->
expect(lastMessageBody()).contain('日報/2015/05/09/hi! #api #dev\nStars: 1, Watchers: 1, Comments: 1, Tasks: 1/1')
context 'in other team, someone says post url', ->
beforeEach (done) ->
room.user.say('gingPI:NAME:<NAME>END_PI', 'https://zachary.esa.io/posts/1390')
setTimeout done, 200
it 'nothing to say', ->
expect(room.messages).to.eql [
['gingy', 'https://zachary.esa.io/posts/1390']
]
describe 'comment', ->
context 'in own team', ->
emitted = false
emitted_comment = null
emitted_post = null
beforeEach (done) ->
room.robot.on 'esa.hear.comment', (res, comment, post) ->
emitted = true
emitted_comment = comment
emitted_post = post
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/comments/2121")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/comment.json")
nockScope
.get("/v1/teams/#{process.env.HUBOT_ESA_TEAM}/posts/1390")
.query(access_token: process.env.HUBOT_ESA_ACCESS_TOKEN)
.replyWithFile(200, "#{__dirname}/fixtures/post.json")
room.user.say('gingy', 'https://ginger.esa.io/posts/1390#comment-2121')
setTimeout done, 200
it 'send message about comment', ->
expect(lastMessageBody()).contain '読みたい'
expect(lastMessageBody()).contain 'hi!'
it 'emits esa.hear.comment event with args', ->
expect(emitted).to.equal true
expect(emitted_comment.body_md).to.equal '読みたい'
expect(emitted_post.name).to.equal 'hi!'
context 'in other team, someone says comment url', ->
beforeEach (done) ->
room.user.say('gingy', 'https://zachary.esa.io/posts/1390#comment-2121')
setTimeout done, 200
it 'nothing to say', ->
expect(room.messages).to.eql [
['gingy', 'https://zachary.esa.io/posts/1390#comment-2121']
]
describe 'Receive webhook', ->
executeWebhook = (fixture_name, callback, opt_callback) ->
body = JSON.stringify(JSON.parse(fs.readFileSync("#{__dirname}/fixtures/#{fixture_name}.json", 'utf-8')))
http_opt =
hostname: 'localhost'
port: 8039
path: '/hubot/ginger'
method: 'POST'
headers:
'Content-Type': 'application/json'
'User-Agent': 'esa-Hookshot/v1'
'X-Esa-Delivery': '1234'
'X-Esa-Signature': generateSignature(body)
unless opt_callback is undefined
http_opt = opt_callback(http_opt)
req = http.request http_opt, (response) => callback(response)
req.write(body)
req.end()
# https://docs.esa.io/posts/37#3-4-0
generateSignature = (body)->
'sha256=' + crypto.createHmac('sha256', process.env.HUBOT_ESA_WEBHOOK_SECRET_TOKEN).update(body, 'utf-8').digest('hex')
http_opt = null
emitted = null
emitted_kind = null
emitted_data = null
beforeEach ->
room.robot.brain.set esaDeliveryKeyOfBrain, undefined
emitted = false
emitted_data = null
emitted_kind = null
room.robot.on 'esa.webhook', (kind, data) ->
emitted = true
emitted_kind = kind
emitted_data = data
nock.enableNetConnect()
afterEach ->
nock.disableNetConnect()
describe 'as valid request', ->
context 'with unknown formated body', ->
beforeEach (done) ->
executeWebhook 'webhook_unknown', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal null
expect(emitted_data).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("Unknown kind of Webhook received null")
context 'with post_create event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_create', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_create'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'PI:NAME:<NAME>END_PI'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu created a new post: PI:NAME:<NAME>END_PIいとる\n>Create post.\nhttps://example.esa.io/posts/1253")
context 'with post_update event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_update', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_update'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'PI:NAME:<NAME>END_PI'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu updated the post: たいとる\n>Update post.\nhttps://example.esa.io/posts/1253")
context 'with post_archive event data', ->
beforeEach (done) ->
executeWebhook 'webhook_post_archive', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'post_archive'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'Archived/たいとる'
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu archived the post: Archived/たいとる\nhttps://example.esa.io/posts/1253")
context 'with comment_create event data', ->
beforeEach (done) ->
executeWebhook 'webhook_comment_create', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'comment_create'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post.name).to.equal 'Archived/たいとる'
expect(emitted_data.comment.body_md).to.equal 'こめんと'
it 'sends message', ->
expect(lastMessageBody()).contain("fukayatsu posted a comment to Archived/たいとる\n>こめんと\nhttps://example.esa.io/posts/1253#comment-6385")
context 'with member_join event data', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'emits esa.webhook event with args', ->
expect(emitted).to.equal true
expect(emitted_kind).to.equal 'member_join'
expect(emitted_data.team).to.equal 'esa'
expect(emitted_data.user.screen_name).to.equal 'fukayatsu'
expect(emitted_data.post).to.equal null
expect(emitted_data.comment).to.equal null
it 'sends message', ->
expect(lastMessageBody()).contain("New member joined: PI:NAME:<NAME>END_PI(fukayatsu)")
context 'with recently and duplicated hooked data', ->
beforeEach (done) ->
room.robot.brain.set esaDeliveryKeyOfBrain, { '1234': new Date().getTime() - 3600 * 1000 * 10 }
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 409', ->
expect(@res.statusCode).to.equal 409
it 'not emit esa.webhook event', ->
expect(emitted).to.equal false
it 'not sends message', ->
expect(room.messages).to.be.empty
context 'with duplicated but old hooked data', ->
beforeEach (done) ->
room.robot.brain.set esaDeliveryKeyOfBrain, { '1234': new Date().getTime() - 3600 * 1000 * 50 }
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
it 'responds with status 204', ->
expect(@res.statusCode).to.equal 204
it 'not emit esa.webhook event', ->
expect(emitted).to.equal true
it 'not sends message', ->
expect(room.messages).to.not.be.empty
describe 'as invalid request', ->
context 'with unkown User-Agent', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
, (http_opt) ->
http_opt['headers']['User-Agent'] = 'gingypurrs'
http_opt
it 'responds with status 401', ->
expect(@res.statusCode).to.equal 401
context 'with invalid signature', ->
beforeEach (done) ->
executeWebhook 'webhook_member_join', (response) =>
@res = response
done()
, (http_opt) ->
http_opt['headers']['X-Esa-Signature'] = 'sha256-soinvalid'
http_opt
it 'responds with status 401', ->
expect(@res.statusCode).to.equal 401
|
[
{
"context": "iew Tests for no-negated-condition rule.\n# @author Alberto Rodríguez\n###\n'use strict'\n\n#------------------------------",
"end": 85,
"score": 0.9998549818992615,
"start": 68,
"tag": "NAME",
"value": "Alberto Rodríguez"
}
] | src/tests/rules/no-negated-condition.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-negated-condition rule.
# @author Alberto Rodríguez
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/no-negated-condition'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-negated-condition', rule,
# Examples of code that should not trigger the rule
valid: [
'if (a) then ;'
'if (a) then ; else ;'
,
code: 'if (!a) then ;'
options: [requireElse: yes]
,
code: 'x if (!a)'
options: [requireElse: yes]
,
code: 'if (!a) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'if (!a) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
'if (a == b) then ;'
'if (a == b) then ; else ;'
,
code: 'if (a != b) then ;'
options: [requireElse: yes]
,
code: 'if (a != b) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'if (a != b) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
code: 'if (a isnt b) then ;'
options: [requireElse: yes]
,
'if (a is b) then ; else ;'
'(if a then b else c)'
'unless (a) then ;'
'unless (a) then ; else ;'
,
code: 'unless (!a) then ;'
options: [requireElse: yes]
,
code: 'x unless (!a)'
options: [requireElse: yes]
,
code: 'unless (!a) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'unless (!a) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
'unless (a == b) then ;'
'unless (a == b) then ; else ;'
,
code: 'unless (a != b) then ;'
options: [requireElse: yes]
,
code: 'unless (a != b) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'unless (a != b) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
code: 'unless (a isnt b) then ;'
options: [requireElse: yes]
,
'unless (a is b) then ; else ;'
'(unless a then b else c)'
]
# Examples of code that should trigger the rule
invalid: [
code: 'if (!a) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a isnt b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(if !a then b else c)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(if a != b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(if a isnt b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'if (!a) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (!a) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (!a) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a isnt b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'x if (a isnt b)'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(if (a isnt b) then c)'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(x if (a isnt b))'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'unless (!a) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a isnt b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(unless !a then b else c)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(unless a != b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(unless a isnt b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'unless (!a) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (!a) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (!a) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'x unless (a isnt b)'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a isnt b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(unless (a isnt b) then c)'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(x unless (a isnt b))'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
]
| 47919 | ###*
# @fileoverview Tests for no-negated-condition rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/no-negated-condition'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-negated-condition', rule,
# Examples of code that should not trigger the rule
valid: [
'if (a) then ;'
'if (a) then ; else ;'
,
code: 'if (!a) then ;'
options: [requireElse: yes]
,
code: 'x if (!a)'
options: [requireElse: yes]
,
code: 'if (!a) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'if (!a) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
'if (a == b) then ;'
'if (a == b) then ; else ;'
,
code: 'if (a != b) then ;'
options: [requireElse: yes]
,
code: 'if (a != b) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'if (a != b) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
code: 'if (a isnt b) then ;'
options: [requireElse: yes]
,
'if (a is b) then ; else ;'
'(if a then b else c)'
'unless (a) then ;'
'unless (a) then ; else ;'
,
code: 'unless (!a) then ;'
options: [requireElse: yes]
,
code: 'x unless (!a)'
options: [requireElse: yes]
,
code: 'unless (!a) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'unless (!a) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
'unless (a == b) then ;'
'unless (a == b) then ; else ;'
,
code: 'unless (a != b) then ;'
options: [requireElse: yes]
,
code: 'unless (a != b) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'unless (a != b) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
code: 'unless (a isnt b) then ;'
options: [requireElse: yes]
,
'unless (a is b) then ; else ;'
'(unless a then b else c)'
]
# Examples of code that should trigger the rule
invalid: [
code: 'if (!a) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a isnt b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(if !a then b else c)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(if a != b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(if a isnt b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'if (!a) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (!a) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (!a) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a isnt b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'x if (a isnt b)'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(if (a isnt b) then c)'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(x if (a isnt b))'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'unless (!a) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a isnt b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(unless !a then b else c)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(unless a != b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(unless a isnt b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'unless (!a) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (!a) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (!a) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'x unless (a isnt b)'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a isnt b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(unless (a isnt b) then c)'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(x unless (a isnt b))'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
]
| true | ###*
# @fileoverview Tests for no-negated-condition rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/no-negated-condition'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-negated-condition', rule,
# Examples of code that should not trigger the rule
valid: [
'if (a) then ;'
'if (a) then ; else ;'
,
code: 'if (!a) then ;'
options: [requireElse: yes]
,
code: 'x if (!a)'
options: [requireElse: yes]
,
code: 'if (!a) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'if (!a) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
'if (a == b) then ;'
'if (a == b) then ; else ;'
,
code: 'if (a != b) then ;'
options: [requireElse: yes]
,
code: 'if (a != b) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'if (a != b) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
code: 'if (a isnt b) then ;'
options: [requireElse: yes]
,
'if (a is b) then ; else ;'
'(if a then b else c)'
'unless (a) then ;'
'unless (a) then ; else ;'
,
code: 'unless (!a) then ;'
options: [requireElse: yes]
,
code: 'x unless (!a)'
options: [requireElse: yes]
,
code: 'unless (!a) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'unless (!a) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
'unless (a == b) then ;'
'unless (a == b) then ; else ;'
,
code: 'unless (a != b) then ;'
options: [requireElse: yes]
,
code: 'unless (a != b) then ; else if (b) then ;'
options: [requireElse: yes]
,
code: 'unless (a != b) then ; else if (b) then ; else ;'
options: [requireElse: yes]
,
code: 'unless (a isnt b) then ;'
options: [requireElse: yes]
,
'unless (a is b) then ; else ;'
'(unless a then b else c)'
]
# Examples of code that should trigger the rule
invalid: [
code: 'if (!a) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a isnt b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(if !a then b else c)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(if a != b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(if a isnt b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'if (!a) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (!a) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (!a) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a != b) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'if (a isnt b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'x if (a isnt b)'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(if (a isnt b) then c)'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(x if (a isnt b))'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'unless (!a) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a isnt b) then ; else ;'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(unless !a then b else c)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(unless a != b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(unless a isnt b then c else d)'
options: [requireElse: yes]
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: 'unless (!a) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (!a) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (!a) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else if (b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a != b) then ; else if (b) then ; else ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'x unless (a isnt b)'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: 'unless (a isnt b) then ;'
errors: [
message: 'Unexpected negated condition.'
type: 'IfStatement'
]
,
code: '(unless (a isnt b) then c)'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
,
code: '(x unless (a isnt b))'
errors: [
message: 'Unexpected negated condition.'
type: 'ConditionalExpression'
]
]
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9989864230155945,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-zlib-write-after-flush.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Make sure that the flush flag was set back to normal
# make sure that flush/write doesn't trigger an assert failure
write = ->
gzip.write input
gzip.end()
gunz.read 0
return
common = require("../common")
assert = require("assert")
zlib = require("zlib")
fs = require("fs")
gzip = zlib.createGzip()
gunz = zlib.createUnzip()
gzip.pipe gunz
output = ""
input = "A line of data\n"
gunz.setEncoding "utf8"
gunz.on "data", (c) ->
output += c
return
process.on "exit", ->
assert.equal output, input
assert.equal gzip._flushFlag, zlib.Z_NO_FLUSH
console.log "ok"
return
gzip.flush()
write()
| 147233 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Make sure that the flush flag was set back to normal
# make sure that flush/write doesn't trigger an assert failure
write = ->
gzip.write input
gzip.end()
gunz.read 0
return
common = require("../common")
assert = require("assert")
zlib = require("zlib")
fs = require("fs")
gzip = zlib.createGzip()
gunz = zlib.createUnzip()
gzip.pipe gunz
output = ""
input = "A line of data\n"
gunz.setEncoding "utf8"
gunz.on "data", (c) ->
output += c
return
process.on "exit", ->
assert.equal output, input
assert.equal gzip._flushFlag, zlib.Z_NO_FLUSH
console.log "ok"
return
gzip.flush()
write()
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Make sure that the flush flag was set back to normal
# make sure that flush/write doesn't trigger an assert failure
write = ->
gzip.write input
gzip.end()
gunz.read 0
return
common = require("../common")
assert = require("assert")
zlib = require("zlib")
fs = require("fs")
gzip = zlib.createGzip()
gunz = zlib.createUnzip()
gzip.pipe gunz
output = ""
input = "A line of data\n"
gunz.setEncoding "utf8"
gunz.on "data", (c) ->
output += c
return
process.on "exit", ->
assert.equal output, input
assert.equal gzip._flushFlag, zlib.Z_NO_FLUSH
console.log "ok"
return
gzip.flush()
write()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.