entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "ogin: (custom_tag) ->\n @_send('login', {userId: @username, password: @password}, custom_tag)\n return\n\n ",
"end": 3587,
"score": 0.9708515405654907,
"start": 3578,
"tag": "USERNAME",
"value": "@username"
},
{
"context": "\n @_send('login', {userId: @username, password: @password}, custom_tag)\n return\n\n logout: (custom_tag) ",
"end": 3608,
"score": 0.9881646037101746,
"start": 3599,
"tag": "PASSWORD",
"value": "@password"
}
] | src/xapi-wrapper.coffee | pgorzelany/xapi-wrapper | 2 | Connector = require('xapi-connector')
Emitter = require('events').EventEmitter
print = (msg) -> console.log(msg)
class Wrapper
constructor: (@server_url, @conn_port, @stream_port, @username, @password) ->
@conn_status = 0
@stream_status = 0
@_req_id = 0
@_stream_session_id = null
@_requests = {}
@_emitter = new Emitter()
@_streamEmitter = new Emitter()
@_connector = new Connector(@server_url, @conn_port, @stream_port, @username, @password)
@_connector.on('open', () =>
@conn_status = 1
@_emitter.emit('open')
)
@_connector.on('close', () =>
@conn_status = 2
print("CONNECTION CLOSED")
@_emitter.emit('close')
)
@_connector.on('error', (err) =>
@conn_status = 3
print("CONNECTION ERROR")
@_emitter.emit('error', err)
)
@_connector.on('message', (msg) =>
#console.log("Received a msg #{msg}")
try
res = JSON.parse(msg)
req_id = parseInt(res.customTag)
req = @_requests[req_id]
if req.customTag? then res.customTag = req.customTag else delete res.customTag
if res.status == true
#print("req_id: #{req_id}, requests: #{JSON.stringify(@_requests)}")
#@_emitter.emit(req_id, null, req, res) #emits the req_id, this enables callbacks for individual requests
@_emitter.emit('_message', req, res, @) #emits a private _message event and passes every message, this enables plugins
@_emitter.emit(req.command, req, res, @) #emits the command name, this enables event handlers for commands
else
@_emitter.emit('apiError', req, res)
catch e
console.log(e)
)
@_connector.onStream('message', (msg) =>
#console.log("Received a stream msg #{msg}")
try
msg = JSON.parse(msg)
@_streamEmitter.emit('_message', msg, @) #enables plugins for stream
@_streamEmitter.emit(msg.command, msg, @)
catch e
console.log(e)
)
@_connector.onStream('open', () =>
@stream_status = 1
@_streamEmitter.emit('open')
)
@_connector.onStream('close', () =>
@stream_status = 2
print("STREAM CLOSED")
@_streamEmitter.emit('close')
)
@_connector.onStream('error', (err) =>
@stream_status = 3
print("STREAM ERROR")
@_streamEmitter.emit('error', err)
)
@on('login', (req, res) =>
@_stream_session_id = res.streamSessionId
)
on: (event, callback) ->
@_emitter.on(event, callback)
return
onStream: (event, callback) ->
@_streamEmitter.on(event, callback)
return
#EXPERIMENTAL
use: (plugin) ->
plugin(@)
return
getQue: () -> @_connector.getQue()
getStreamQue: () -> @_connector.getStreamQue()
_send: (command, args, custom_tag) ->
req_id = @_req_id += 1
#if callback? then @.on(req_id, callback)
@_requests[req_id] =
command: command,
arguments: args if args?
customTag: custom_tag if custom_tag?
req = @_connector.buildCommand(command, args, req_id.toString())
#console.log("Sending message #{req}")
@_connector.send(req)
return
_sendStream: (msg) ->
#print(msg)
@_connector.sendStream(msg)
return
connect: () ->
@_connector.connect()
return
disconnect: () ->
@_connector.disconnect()
return
connectStream: () ->
@_connector.connectStream()
return
disconnectStream: () ->
@_connector.disconnectStream()
return
login: (custom_tag) ->
@_send('login', {userId: @username, password: @password}, custom_tag)
return
logout: (custom_tag) ->
@_send('logout', null, custom_tag)
return
ping: (custom_tag) ->
@_send('ping', null, custom_tag)
return
addOrder: (args, custom_tag) ->
@_send('addOrder', args, custom_tag)
return
closePosition: (args, custom_tag) ->
@_send('closePosition', args, custom_tag)
return
closePositions: (args, custom_tag) ->
@_send('closePositions', args, custom_tag)
return
deletePending: (args, custom_tag) ->
@_send('deletePending', args, custom_tag)
return
getAccountIndicators: (custom_tag) ->
@_send('getAccountIndicators', null, custom_tag)
return
getAccountInfo: (custom_tag) ->
@_send('getAccountInfo', null, custom_tag)
return
getAllSymbols: (custom_tag) ->
@_send('getAllSymbols', null, custom_tag)
return
getCalendar: (custom_tag) ->
@_send('getCalendar', null, custom_tag)
return
getCandles: (args, custom_tag) ->
@_send('getCandles', args, custom_tag)
return
getCashOperationsHistory: (args, custom_tag) ->
@_send('getCashOperationsHistory', args, custom_tag)
return
getCommisionsDef: (args, custom_tag) ->
@_send('getCommisionsDef', args, custom_tag)
return
getlbsHistory: (args, custom_tag) ->
@_send('getlbsHistory', args, custom_tag)
return
getMarginTrade: (args, custom_tag) ->
@_send('getMarginTrade', args, custom_tag)
return
getNews: (args, custom_tag) ->
@_send('getNews', args, custom_tag)
return
getOrderStatus: (args, custom_tag) ->
@_send('getOrderStatus', args, custom_tag)
return
getProfitCalculations: (args, custom_tag) ->
@_send('getProfitCalculations', args, custom_tag)
return
getServerTime: (args, custom_tag) ->
@_send('getServerTime', args, custom_tag)
return
getStepRules: (custom_tag) ->
@_send('getStepRules', null, custom_tag)
return
getSymbol: (args, custom_tag) ->
@_send('getSymbol', args, custom_tag)
return
getTickPrices: (args, custom_tag) ->
@_send('getTickPrices', args, custom_tag)
return
getTradeRecords: (args, custom_tag) ->
@_send('getTradeRecords', args, custom_tag)
return
getTrades: (custom_tag) ->
@_send('getTrades', null, custom_tag)
return
getTradesHistory: (args, custom_tag) ->
@_send('getTradesHistory', args, custom_tag)
return
getTradingHours: (args, custom_tag) ->
@_send('getTradingHours', args, custom_tag)
return
getVersion: (custom_tag) ->
@_send('getVersion', null, custom_tag)
return
modifyPending: (args, custom_tag) ->
@_send('modifyPending', args, custom_tag)
return
modifyPosition: (args, custom_tag) ->
@_send('modifyPosition', args, custom_tag)
return
subscribeAccountIndicators: () ->
@_sendStream(@_connector.buildStreamCommand('getAccountIndicators', @_stream_session_id))
return
subscribeCandles: (args) ->
@_sendStream(@_connector.buildStreamCommand('getCandles', @_stream_session_id, args))
return
subscribeKeepAlive: () ->
@_sendStream(@_connector.buildStreamCommand('getKeepAlive', @_stream_session_id))
return
subscribeNews: () ->
@_sendStream(@_connector.buildStreamCommand('getNews', @_stream_session_id))
return
subscribeOrderStatus: () ->
@_sendStream(@_connector.buildStreamCommand('getOrderStatus', @_stream_session_id))
return
subscribeProfits: () ->
@_sendStream(@_connector.buildStreamCommand('getProfits', @_stream_session_id))
return
subscribeTickPrices: (args) ->
@_sendStream(@_connector.buildStreamCommand('getTickPrices', @_stream_session_id, args))
return
subscribeTrades: () ->
@_sendStream(@_connector.buildStreamCommand('getTrades', @_stream_session_id))
return
unsubscribeAccountIndicators: () ->
@_sendStream(@_connector.buildStreamCommand('stopAccountIndicators'))
return
unsubscribeCandles: (args) ->
@_sendStream(@_connector.buildStreamCommand('stopCandles', args))
return
unsubscribeKeepAlive: () ->
@_sendStream(@_connector.buildStreamCommand('stopKeepAlive'))
return
unsubscribeNews: () ->
@_sendStream(@_connector.buildStreamCommand('stopNews'))
return
unsubscribeOrderStatus: () ->
@_sendStream(@_connector.buildStreamCommand('stopOrderStatus'))
return
unsubscribeProfits: () ->
@_sendStream(@_connector.buildStreamCommand('stopProfits'))
return
unsubscribeTickPrices: (args) ->
@_sendStream(@_connector.buildStreamCommand('stopTickPrices', args))
return
unsubscribeTrades: () ->
@_sendStream(@_connector.buildStreamCommand('stopTrades'))
return
module.exports = Wrapper
| 161578 | Connector = require('xapi-connector')
Emitter = require('events').EventEmitter
print = (msg) -> console.log(msg)
class Wrapper
constructor: (@server_url, @conn_port, @stream_port, @username, @password) ->
@conn_status = 0
@stream_status = 0
@_req_id = 0
@_stream_session_id = null
@_requests = {}
@_emitter = new Emitter()
@_streamEmitter = new Emitter()
@_connector = new Connector(@server_url, @conn_port, @stream_port, @username, @password)
@_connector.on('open', () =>
@conn_status = 1
@_emitter.emit('open')
)
@_connector.on('close', () =>
@conn_status = 2
print("CONNECTION CLOSED")
@_emitter.emit('close')
)
@_connector.on('error', (err) =>
@conn_status = 3
print("CONNECTION ERROR")
@_emitter.emit('error', err)
)
@_connector.on('message', (msg) =>
#console.log("Received a msg #{msg}")
try
res = JSON.parse(msg)
req_id = parseInt(res.customTag)
req = @_requests[req_id]
if req.customTag? then res.customTag = req.customTag else delete res.customTag
if res.status == true
#print("req_id: #{req_id}, requests: #{JSON.stringify(@_requests)}")
#@_emitter.emit(req_id, null, req, res) #emits the req_id, this enables callbacks for individual requests
@_emitter.emit('_message', req, res, @) #emits a private _message event and passes every message, this enables plugins
@_emitter.emit(req.command, req, res, @) #emits the command name, this enables event handlers for commands
else
@_emitter.emit('apiError', req, res)
catch e
console.log(e)
)
@_connector.onStream('message', (msg) =>
#console.log("Received a stream msg #{msg}")
try
msg = JSON.parse(msg)
@_streamEmitter.emit('_message', msg, @) #enables plugins for stream
@_streamEmitter.emit(msg.command, msg, @)
catch e
console.log(e)
)
@_connector.onStream('open', () =>
@stream_status = 1
@_streamEmitter.emit('open')
)
@_connector.onStream('close', () =>
@stream_status = 2
print("STREAM CLOSED")
@_streamEmitter.emit('close')
)
@_connector.onStream('error', (err) =>
@stream_status = 3
print("STREAM ERROR")
@_streamEmitter.emit('error', err)
)
@on('login', (req, res) =>
@_stream_session_id = res.streamSessionId
)
on: (event, callback) ->
@_emitter.on(event, callback)
return
onStream: (event, callback) ->
@_streamEmitter.on(event, callback)
return
#EXPERIMENTAL
use: (plugin) ->
plugin(@)
return
getQue: () -> @_connector.getQue()
getStreamQue: () -> @_connector.getStreamQue()
_send: (command, args, custom_tag) ->
req_id = @_req_id += 1
#if callback? then @.on(req_id, callback)
@_requests[req_id] =
command: command,
arguments: args if args?
customTag: custom_tag if custom_tag?
req = @_connector.buildCommand(command, args, req_id.toString())
#console.log("Sending message #{req}")
@_connector.send(req)
return
_sendStream: (msg) ->
#print(msg)
@_connector.sendStream(msg)
return
connect: () ->
@_connector.connect()
return
disconnect: () ->
@_connector.disconnect()
return
connectStream: () ->
@_connector.connectStream()
return
disconnectStream: () ->
@_connector.disconnectStream()
return
login: (custom_tag) ->
@_send('login', {userId: @username, password: <PASSWORD>}, custom_tag)
return
logout: (custom_tag) ->
@_send('logout', null, custom_tag)
return
ping: (custom_tag) ->
@_send('ping', null, custom_tag)
return
addOrder: (args, custom_tag) ->
@_send('addOrder', args, custom_tag)
return
closePosition: (args, custom_tag) ->
@_send('closePosition', args, custom_tag)
return
closePositions: (args, custom_tag) ->
@_send('closePositions', args, custom_tag)
return
deletePending: (args, custom_tag) ->
@_send('deletePending', args, custom_tag)
return
getAccountIndicators: (custom_tag) ->
@_send('getAccountIndicators', null, custom_tag)
return
getAccountInfo: (custom_tag) ->
@_send('getAccountInfo', null, custom_tag)
return
getAllSymbols: (custom_tag) ->
@_send('getAllSymbols', null, custom_tag)
return
getCalendar: (custom_tag) ->
@_send('getCalendar', null, custom_tag)
return
getCandles: (args, custom_tag) ->
@_send('getCandles', args, custom_tag)
return
getCashOperationsHistory: (args, custom_tag) ->
@_send('getCashOperationsHistory', args, custom_tag)
return
getCommisionsDef: (args, custom_tag) ->
@_send('getCommisionsDef', args, custom_tag)
return
getlbsHistory: (args, custom_tag) ->
@_send('getlbsHistory', args, custom_tag)
return
getMarginTrade: (args, custom_tag) ->
@_send('getMarginTrade', args, custom_tag)
return
getNews: (args, custom_tag) ->
@_send('getNews', args, custom_tag)
return
getOrderStatus: (args, custom_tag) ->
@_send('getOrderStatus', args, custom_tag)
return
getProfitCalculations: (args, custom_tag) ->
@_send('getProfitCalculations', args, custom_tag)
return
getServerTime: (args, custom_tag) ->
@_send('getServerTime', args, custom_tag)
return
getStepRules: (custom_tag) ->
@_send('getStepRules', null, custom_tag)
return
getSymbol: (args, custom_tag) ->
@_send('getSymbol', args, custom_tag)
return
getTickPrices: (args, custom_tag) ->
@_send('getTickPrices', args, custom_tag)
return
getTradeRecords: (args, custom_tag) ->
@_send('getTradeRecords', args, custom_tag)
return
getTrades: (custom_tag) ->
@_send('getTrades', null, custom_tag)
return
getTradesHistory: (args, custom_tag) ->
@_send('getTradesHistory', args, custom_tag)
return
getTradingHours: (args, custom_tag) ->
@_send('getTradingHours', args, custom_tag)
return
getVersion: (custom_tag) ->
@_send('getVersion', null, custom_tag)
return
modifyPending: (args, custom_tag) ->
@_send('modifyPending', args, custom_tag)
return
modifyPosition: (args, custom_tag) ->
@_send('modifyPosition', args, custom_tag)
return
subscribeAccountIndicators: () ->
@_sendStream(@_connector.buildStreamCommand('getAccountIndicators', @_stream_session_id))
return
subscribeCandles: (args) ->
@_sendStream(@_connector.buildStreamCommand('getCandles', @_stream_session_id, args))
return
subscribeKeepAlive: () ->
@_sendStream(@_connector.buildStreamCommand('getKeepAlive', @_stream_session_id))
return
subscribeNews: () ->
@_sendStream(@_connector.buildStreamCommand('getNews', @_stream_session_id))
return
subscribeOrderStatus: () ->
@_sendStream(@_connector.buildStreamCommand('getOrderStatus', @_stream_session_id))
return
subscribeProfits: () ->
@_sendStream(@_connector.buildStreamCommand('getProfits', @_stream_session_id))
return
subscribeTickPrices: (args) ->
@_sendStream(@_connector.buildStreamCommand('getTickPrices', @_stream_session_id, args))
return
subscribeTrades: () ->
@_sendStream(@_connector.buildStreamCommand('getTrades', @_stream_session_id))
return
unsubscribeAccountIndicators: () ->
@_sendStream(@_connector.buildStreamCommand('stopAccountIndicators'))
return
unsubscribeCandles: (args) ->
@_sendStream(@_connector.buildStreamCommand('stopCandles', args))
return
unsubscribeKeepAlive: () ->
@_sendStream(@_connector.buildStreamCommand('stopKeepAlive'))
return
unsubscribeNews: () ->
@_sendStream(@_connector.buildStreamCommand('stopNews'))
return
unsubscribeOrderStatus: () ->
@_sendStream(@_connector.buildStreamCommand('stopOrderStatus'))
return
unsubscribeProfits: () ->
@_sendStream(@_connector.buildStreamCommand('stopProfits'))
return
unsubscribeTickPrices: (args) ->
@_sendStream(@_connector.buildStreamCommand('stopTickPrices', args))
return
unsubscribeTrades: () ->
@_sendStream(@_connector.buildStreamCommand('stopTrades'))
return
module.exports = Wrapper
| true | Connector = require('xapi-connector')
Emitter = require('events').EventEmitter
print = (msg) -> console.log(msg)
class Wrapper
constructor: (@server_url, @conn_port, @stream_port, @username, @password) ->
@conn_status = 0
@stream_status = 0
@_req_id = 0
@_stream_session_id = null
@_requests = {}
@_emitter = new Emitter()
@_streamEmitter = new Emitter()
@_connector = new Connector(@server_url, @conn_port, @stream_port, @username, @password)
@_connector.on('open', () =>
@conn_status = 1
@_emitter.emit('open')
)
@_connector.on('close', () =>
@conn_status = 2
print("CONNECTION CLOSED")
@_emitter.emit('close')
)
@_connector.on('error', (err) =>
@conn_status = 3
print("CONNECTION ERROR")
@_emitter.emit('error', err)
)
@_connector.on('message', (msg) =>
#console.log("Received a msg #{msg}")
try
res = JSON.parse(msg)
req_id = parseInt(res.customTag)
req = @_requests[req_id]
if req.customTag? then res.customTag = req.customTag else delete res.customTag
if res.status == true
#print("req_id: #{req_id}, requests: #{JSON.stringify(@_requests)}")
#@_emitter.emit(req_id, null, req, res) #emits the req_id, this enables callbacks for individual requests
@_emitter.emit('_message', req, res, @) #emits a private _message event and passes every message, this enables plugins
@_emitter.emit(req.command, req, res, @) #emits the command name, this enables event handlers for commands
else
@_emitter.emit('apiError', req, res)
catch e
console.log(e)
)
@_connector.onStream('message', (msg) =>
#console.log("Received a stream msg #{msg}")
try
msg = JSON.parse(msg)
@_streamEmitter.emit('_message', msg, @) #enables plugins for stream
@_streamEmitter.emit(msg.command, msg, @)
catch e
console.log(e)
)
@_connector.onStream('open', () =>
@stream_status = 1
@_streamEmitter.emit('open')
)
@_connector.onStream('close', () =>
@stream_status = 2
print("STREAM CLOSED")
@_streamEmitter.emit('close')
)
@_connector.onStream('error', (err) =>
@stream_status = 3
print("STREAM ERROR")
@_streamEmitter.emit('error', err)
)
@on('login', (req, res) =>
@_stream_session_id = res.streamSessionId
)
on: (event, callback) ->
@_emitter.on(event, callback)
return
onStream: (event, callback) ->
@_streamEmitter.on(event, callback)
return
#EXPERIMENTAL
use: (plugin) ->
plugin(@)
return
getQue: () -> @_connector.getQue()
getStreamQue: () -> @_connector.getStreamQue()
_send: (command, args, custom_tag) ->
req_id = @_req_id += 1
#if callback? then @.on(req_id, callback)
@_requests[req_id] =
command: command,
arguments: args if args?
customTag: custom_tag if custom_tag?
req = @_connector.buildCommand(command, args, req_id.toString())
#console.log("Sending message #{req}")
@_connector.send(req)
return
_sendStream: (msg) ->
#print(msg)
@_connector.sendStream(msg)
return
connect: () ->
@_connector.connect()
return
disconnect: () ->
@_connector.disconnect()
return
connectStream: () ->
@_connector.connectStream()
return
disconnectStream: () ->
@_connector.disconnectStream()
return
login: (custom_tag) ->
@_send('login', {userId: @username, password: PI:PASSWORD:<PASSWORD>END_PI}, custom_tag)
return
logout: (custom_tag) ->
@_send('logout', null, custom_tag)
return
ping: (custom_tag) ->
@_send('ping', null, custom_tag)
return
addOrder: (args, custom_tag) ->
@_send('addOrder', args, custom_tag)
return
closePosition: (args, custom_tag) ->
@_send('closePosition', args, custom_tag)
return
closePositions: (args, custom_tag) ->
@_send('closePositions', args, custom_tag)
return
deletePending: (args, custom_tag) ->
@_send('deletePending', args, custom_tag)
return
getAccountIndicators: (custom_tag) ->
@_send('getAccountIndicators', null, custom_tag)
return
getAccountInfo: (custom_tag) ->
@_send('getAccountInfo', null, custom_tag)
return
getAllSymbols: (custom_tag) ->
@_send('getAllSymbols', null, custom_tag)
return
getCalendar: (custom_tag) ->
@_send('getCalendar', null, custom_tag)
return
getCandles: (args, custom_tag) ->
@_send('getCandles', args, custom_tag)
return
getCashOperationsHistory: (args, custom_tag) ->
@_send('getCashOperationsHistory', args, custom_tag)
return
getCommisionsDef: (args, custom_tag) ->
@_send('getCommisionsDef', args, custom_tag)
return
getlbsHistory: (args, custom_tag) ->
@_send('getlbsHistory', args, custom_tag)
return
getMarginTrade: (args, custom_tag) ->
@_send('getMarginTrade', args, custom_tag)
return
getNews: (args, custom_tag) ->
@_send('getNews', args, custom_tag)
return
getOrderStatus: (args, custom_tag) ->
@_send('getOrderStatus', args, custom_tag)
return
getProfitCalculations: (args, custom_tag) ->
@_send('getProfitCalculations', args, custom_tag)
return
getServerTime: (args, custom_tag) ->
@_send('getServerTime', args, custom_tag)
return
getStepRules: (custom_tag) ->
@_send('getStepRules', null, custom_tag)
return
getSymbol: (args, custom_tag) ->
@_send('getSymbol', args, custom_tag)
return
getTickPrices: (args, custom_tag) ->
@_send('getTickPrices', args, custom_tag)
return
getTradeRecords: (args, custom_tag) ->
@_send('getTradeRecords', args, custom_tag)
return
getTrades: (custom_tag) ->
@_send('getTrades', null, custom_tag)
return
getTradesHistory: (args, custom_tag) ->
@_send('getTradesHistory', args, custom_tag)
return
getTradingHours: (args, custom_tag) ->
@_send('getTradingHours', args, custom_tag)
return
getVersion: (custom_tag) ->
@_send('getVersion', null, custom_tag)
return
modifyPending: (args, custom_tag) ->
@_send('modifyPending', args, custom_tag)
return
modifyPosition: (args, custom_tag) ->
@_send('modifyPosition', args, custom_tag)
return
subscribeAccountIndicators: () ->
@_sendStream(@_connector.buildStreamCommand('getAccountIndicators', @_stream_session_id))
return
subscribeCandles: (args) ->
@_sendStream(@_connector.buildStreamCommand('getCandles', @_stream_session_id, args))
return
subscribeKeepAlive: () ->
@_sendStream(@_connector.buildStreamCommand('getKeepAlive', @_stream_session_id))
return
subscribeNews: () ->
@_sendStream(@_connector.buildStreamCommand('getNews', @_stream_session_id))
return
subscribeOrderStatus: () ->
@_sendStream(@_connector.buildStreamCommand('getOrderStatus', @_stream_session_id))
return
subscribeProfits: () ->
@_sendStream(@_connector.buildStreamCommand('getProfits', @_stream_session_id))
return
subscribeTickPrices: (args) ->
@_sendStream(@_connector.buildStreamCommand('getTickPrices', @_stream_session_id, args))
return
subscribeTrades: () ->
@_sendStream(@_connector.buildStreamCommand('getTrades', @_stream_session_id))
return
unsubscribeAccountIndicators: () ->
@_sendStream(@_connector.buildStreamCommand('stopAccountIndicators'))
return
unsubscribeCandles: (args) ->
@_sendStream(@_connector.buildStreamCommand('stopCandles', args))
return
unsubscribeKeepAlive: () ->
@_sendStream(@_connector.buildStreamCommand('stopKeepAlive'))
return
unsubscribeNews: () ->
@_sendStream(@_connector.buildStreamCommand('stopNews'))
return
unsubscribeOrderStatus: () ->
@_sendStream(@_connector.buildStreamCommand('stopOrderStatus'))
return
unsubscribeProfits: () ->
@_sendStream(@_connector.buildStreamCommand('stopProfits'))
return
unsubscribeTickPrices: (args) ->
@_sendStream(@_connector.buildStreamCommand('stopTickPrices', args))
return
unsubscribeTrades: () ->
@_sendStream(@_connector.buildStreamCommand('stopTrades'))
return
module.exports = Wrapper
|
[
{
"context": "\n createdNew: no\n else\n key = @_createHash(url)\n @_insert\n key: key\n ",
"end": 1363,
"score": 0.7981189489364624,
"start": 1350,
"tag": "KEY",
"value": "@_createHash("
}
] | src/unique-shortener.coffee | valiton/node-unique-shortener | 3 | _ = require 'lodash'
base62 = require 'base62'
farmhash = require 'farmhash'
module.exports = class UniqueShortener
httpRegex = /(^$)|(^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$)/i
###*
* create a new Test_lib instance,
*
* @memberOf global
*
* @constructor
* @param {object} config read more about config options in README
* @this {Test_lib}
###
constructor: (@config) ->
@config = _.merge
validation: yes
, @config
###*
* initalize the Test_lib-Instance
*
* @function global.Test_lib.prototype.init
* @returns {this} the current instance for chaining
###
init: (@mongo, @redis, cb) ->
@mongo.collection('urls').ensureIndex {key: 1}, {}, (err, result) =>
if err?
cb? err
else
@mongo.collection('urls').ensureIndex {url: 1}, {}, (err, result) =>
if err?
cb? err
else
cb? null
shorten: (url, cb) ->
if @config.validation and not httpRegex.test url
return cb new Error('InvalidUrl')
# Look up the url first
@_findOne
url: url
, (err, record) =>
# We found one existing, so return it back
if not err? and record?
return cb null,
key: record.key
createdNew: no
else
key = @_createHash(url)
@_insert
key: key
url: url
, (err, record) =>
return cb(err) if err?
return cb null,
key: key
createdNew: yes
resolve: (key, cb) ->
@_findOne
key: key
, (err, record) ->
return cb err if err?
return cb new Error('NotFound') unless record?
cb null, record.url
_findOne: (q, cb) ->
@redis.get JSON.stringify(q), (err, result) =>
if not err? and result?
return cb null, JSON.parse(result)
@mongo.collection('urls').findOne q, {}, (err, result) =>
if err? or not result?
return cb err, result
else
# Cache it
@redis.set JSON.stringify(q), JSON.stringify(result), (err) ->
return cb(err) if err?
cb(null, result)
_insert: (q, cb) ->
@mongo.collection('urls').insert q, (err, result) ->
cb err, result
_createHash: (url)->
hash = farmhash.hash64(new Buffer(url)).value
encodedHash = base62.encode hash
encodedHash
| 17559 | _ = require 'lodash'
base62 = require 'base62'
farmhash = require 'farmhash'
module.exports = class UniqueShortener
httpRegex = /(^$)|(^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$)/i
###*
* create a new Test_lib instance,
*
* @memberOf global
*
* @constructor
* @param {object} config read more about config options in README
* @this {Test_lib}
###
constructor: (@config) ->
@config = _.merge
validation: yes
, @config
###*
* initalize the Test_lib-Instance
*
* @function global.Test_lib.prototype.init
* @returns {this} the current instance for chaining
###
init: (@mongo, @redis, cb) ->
@mongo.collection('urls').ensureIndex {key: 1}, {}, (err, result) =>
if err?
cb? err
else
@mongo.collection('urls').ensureIndex {url: 1}, {}, (err, result) =>
if err?
cb? err
else
cb? null
shorten: (url, cb) ->
if @config.validation and not httpRegex.test url
return cb new Error('InvalidUrl')
# Look up the url first
@_findOne
url: url
, (err, record) =>
# We found one existing, so return it back
if not err? and record?
return cb null,
key: record.key
createdNew: no
else
key = <KEY>url)
@_insert
key: key
url: url
, (err, record) =>
return cb(err) if err?
return cb null,
key: key
createdNew: yes
resolve: (key, cb) ->
@_findOne
key: key
, (err, record) ->
return cb err if err?
return cb new Error('NotFound') unless record?
cb null, record.url
_findOne: (q, cb) ->
@redis.get JSON.stringify(q), (err, result) =>
if not err? and result?
return cb null, JSON.parse(result)
@mongo.collection('urls').findOne q, {}, (err, result) =>
if err? or not result?
return cb err, result
else
# Cache it
@redis.set JSON.stringify(q), JSON.stringify(result), (err) ->
return cb(err) if err?
cb(null, result)
_insert: (q, cb) ->
@mongo.collection('urls').insert q, (err, result) ->
cb err, result
_createHash: (url)->
hash = farmhash.hash64(new Buffer(url)).value
encodedHash = base62.encode hash
encodedHash
| true | _ = require 'lodash'
base62 = require 'base62'
farmhash = require 'farmhash'
module.exports = class UniqueShortener
httpRegex = /(^$)|(^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$)/i
###*
* create a new Test_lib instance,
*
* @memberOf global
*
* @constructor
* @param {object} config read more about config options in README
* @this {Test_lib}
###
constructor: (@config) ->
@config = _.merge
validation: yes
, @config
###*
* initalize the Test_lib-Instance
*
* @function global.Test_lib.prototype.init
* @returns {this} the current instance for chaining
###
init: (@mongo, @redis, cb) ->
@mongo.collection('urls').ensureIndex {key: 1}, {}, (err, result) =>
if err?
cb? err
else
@mongo.collection('urls').ensureIndex {url: 1}, {}, (err, result) =>
if err?
cb? err
else
cb? null
shorten: (url, cb) ->
if @config.validation and not httpRegex.test url
return cb new Error('InvalidUrl')
# Look up the url first
@_findOne
url: url
, (err, record) =>
# We found one existing, so return it back
if not err? and record?
return cb null,
key: record.key
createdNew: no
else
key = PI:KEY:<KEY>END_PIurl)
@_insert
key: key
url: url
, (err, record) =>
return cb(err) if err?
return cb null,
key: key
createdNew: yes
resolve: (key, cb) ->
@_findOne
key: key
, (err, record) ->
return cb err if err?
return cb new Error('NotFound') unless record?
cb null, record.url
_findOne: (q, cb) ->
@redis.get JSON.stringify(q), (err, result) =>
if not err? and result?
return cb null, JSON.parse(result)
@mongo.collection('urls').findOne q, {}, (err, result) =>
if err? or not result?
return cb err, result
else
# Cache it
@redis.set JSON.stringify(q), JSON.stringify(result), (err) ->
return cb(err) if err?
cb(null, result)
_insert: (q, cb) ->
@mongo.collection('urls').insert q, (err, result) ->
cb err, result
_createHash: (url)->
hash = farmhash.hash64(new Buffer(url)).value
encodedHash = base62.encode hash
encodedHash
|
[
{
"context": "###\n termap - Terminal Map Viewer\n by Michael Strassburger <codepoet@cpan.org>\n\n Simple pixel to barille ch",
"end": 60,
"score": 0.9998748898506165,
"start": 40,
"tag": "NAME",
"value": "Michael Strassburger"
},
{
"context": " - Terminal Map Viewer\n by Michael Strassburger <codepoet@cpan.org>\n\n Simple pixel to barille character mapper\n\n I",
"end": 79,
"score": 0.9999316930770874,
"start": 62,
"tag": "EMAIL",
"value": "codepoet@cpan.org"
},
{
"context": "ion inspired by node-drawille (https://github.com/madbence/node-drawille)\n * added color support\n * added ",
"end": 197,
"score": 0.9927468299865723,
"start": 189,
"tag": "USERNAME",
"value": "madbence"
}
] | src/BrailleBuffer.coffee | 82ndAirborneDiv/mapsc | 1 | ###
termap - Terminal Map Viewer
by Michael Strassburger <codepoet@cpan.org>
Simple pixel to barille character mapper
Implementation inspired by node-drawille (https://github.com/madbence/node-drawille)
* added color support
* added text label support
* general optimizations
Will either be merged into node-drawille or become an own module at some point
###
stringWidth = require 'string-width'
config = require './config'
utils = require './utils'
module.exports = class BrailleBuffer
brailleMap: [[0x1, 0x8],[0x2, 0x10],[0x4, 0x20],[0x40, 0x80]]
asciiMap:
# "▬": [2+32, 4+64]
# "¯": [1+16]
"▀": [1+2+16+32]
"▄": [4+8+64+128]
"■": [2+4+32+64]
"▌": [1+2+4+8]
"▐": [16+32+64+128]
# "▓": [1+4+32+128, 2+8+16+64]
"█": [255]
pixelBuffer: null
charBuffer: null
foregroundBuffer: null
backgroundBuffer: null
asciiToBraille: []
globalBackground: null
termReset: "\x1B[39;49m"
constructor: (@width, @height) ->
size = @width*@height/8
@pixelBuffer = new Buffer size
@foregroundBuffer = new Buffer size
@backgroundBuffer = new Buffer size
@_mapBraille()
@clear()
clear: ->
@pixelBuffer.fill 0
@charBuffer = []
@foregroundBuffer.fill 0
@backgroundBuffer.fill 0
setGlobalBackground: (@globalBackground) ->
setBackground: (x, y, color) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
@backgroundBuffer[idx] = color
setPixel: (x, y, color) ->
@_locate x, y, (idx, mask) =>
@pixelBuffer[idx] |= mask
@foregroundBuffer[idx] = color
unsetPixel: (x, y) ->
@_locate x, y, (idx, mask) =>
@pixelBuffer[idx] &= ~mask
_project: (x, y) ->
(x>>1) + (@width>>1)*(y>>2)
_locate: (x, y, cb) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
mask = @brailleMap[y&3][x&1]
cb idx, mask
_mapBraille: ->
@asciiToBraille = [" "]
masks = []
for char, bits of @asciiMap
continue unless bits instanceof Array
masks.push mask: mask, char: char for mask in bits
for i in [1..255]
braille = (i&7) + ((i&56)<<1) + ((i&64)>>3) + (i&128)
@asciiToBraille[i] = masks.reduce(((best, mask) ->
covered = utils.population(mask.mask&braille)
if not best or best.covered < covered
char: mask.char, covered: covered
else
best
), undefined).char
_termColor: (foreground, background) ->
background = background or @globalBackground
if foreground and background
"\x1B[38;5;#{foreground};48;5;#{background}m"
else if foreground
"\x1B[49;38;5;#{foreground}m"
else if background
"\x1B[39;48;5;#{background}m"
else
@termReset
frame: ->
output = []
currentColor = null
skip = 0
for y in [0...@height/4]
skip = 0
for x in [0...@width/2]
idx = y*@width/2 + x
if idx and not x
output.push config.delimeter
if currentColor isnt colorCode = @_termColor @foregroundBuffer[idx], @backgroundBuffer[idx]
output.push currentColor = colorCode
output.push if char = @charBuffer[idx]
skip += stringWidth(char)-1
if skip+x >= @width/2
''
else
char
else
if not skip
if config.useBraille
String.fromCharCode 0x2800+@pixelBuffer[idx]
else
@asciiToBraille[@pixelBuffer[idx]]
else
skip--
''
output.push @termReset+config.delimeter
output.join ''
setChar: (char, x, y, color) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
@charBuffer[idx] = char
@foregroundBuffer[idx] = color
writeText: (text, x, y, color, center = true) ->
x -= text.length/2+1 if center
@setChar text.charAt(i), x+i*2, y, color for i in [0...text.length]
| 191800 | ###
termap - Terminal Map Viewer
by <NAME> <<EMAIL>>
Simple pixel to barille character mapper
Implementation inspired by node-drawille (https://github.com/madbence/node-drawille)
* added color support
* added text label support
* general optimizations
Will either be merged into node-drawille or become an own module at some point
###
stringWidth = require 'string-width'
config = require './config'
utils = require './utils'
module.exports = class BrailleBuffer
brailleMap: [[0x1, 0x8],[0x2, 0x10],[0x4, 0x20],[0x40, 0x80]]
asciiMap:
# "▬": [2+32, 4+64]
# "¯": [1+16]
"▀": [1+2+16+32]
"▄": [4+8+64+128]
"■": [2+4+32+64]
"▌": [1+2+4+8]
"▐": [16+32+64+128]
# "▓": [1+4+32+128, 2+8+16+64]
"█": [255]
pixelBuffer: null
charBuffer: null
foregroundBuffer: null
backgroundBuffer: null
asciiToBraille: []
globalBackground: null
termReset: "\x1B[39;49m"
constructor: (@width, @height) ->
size = @width*@height/8
@pixelBuffer = new Buffer size
@foregroundBuffer = new Buffer size
@backgroundBuffer = new Buffer size
@_mapBraille()
@clear()
clear: ->
@pixelBuffer.fill 0
@charBuffer = []
@foregroundBuffer.fill 0
@backgroundBuffer.fill 0
setGlobalBackground: (@globalBackground) ->
setBackground: (x, y, color) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
@backgroundBuffer[idx] = color
setPixel: (x, y, color) ->
@_locate x, y, (idx, mask) =>
@pixelBuffer[idx] |= mask
@foregroundBuffer[idx] = color
unsetPixel: (x, y) ->
@_locate x, y, (idx, mask) =>
@pixelBuffer[idx] &= ~mask
_project: (x, y) ->
(x>>1) + (@width>>1)*(y>>2)
_locate: (x, y, cb) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
mask = @brailleMap[y&3][x&1]
cb idx, mask
_mapBraille: ->
@asciiToBraille = [" "]
masks = []
for char, bits of @asciiMap
continue unless bits instanceof Array
masks.push mask: mask, char: char for mask in bits
for i in [1..255]
braille = (i&7) + ((i&56)<<1) + ((i&64)>>3) + (i&128)
@asciiToBraille[i] = masks.reduce(((best, mask) ->
covered = utils.population(mask.mask&braille)
if not best or best.covered < covered
char: mask.char, covered: covered
else
best
), undefined).char
_termColor: (foreground, background) ->
background = background or @globalBackground
if foreground and background
"\x1B[38;5;#{foreground};48;5;#{background}m"
else if foreground
"\x1B[49;38;5;#{foreground}m"
else if background
"\x1B[39;48;5;#{background}m"
else
@termReset
frame: ->
output = []
currentColor = null
skip = 0
for y in [0...@height/4]
skip = 0
for x in [0...@width/2]
idx = y*@width/2 + x
if idx and not x
output.push config.delimeter
if currentColor isnt colorCode = @_termColor @foregroundBuffer[idx], @backgroundBuffer[idx]
output.push currentColor = colorCode
output.push if char = @charBuffer[idx]
skip += stringWidth(char)-1
if skip+x >= @width/2
''
else
char
else
if not skip
if config.useBraille
String.fromCharCode 0x2800+@pixelBuffer[idx]
else
@asciiToBraille[@pixelBuffer[idx]]
else
skip--
''
output.push @termReset+config.delimeter
output.join ''
setChar: (char, x, y, color) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
@charBuffer[idx] = char
@foregroundBuffer[idx] = color
writeText: (text, x, y, color, center = true) ->
x -= text.length/2+1 if center
@setChar text.charAt(i), x+i*2, y, color for i in [0...text.length]
| true | ###
termap - Terminal Map Viewer
by PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Simple pixel to barille character mapper
Implementation inspired by node-drawille (https://github.com/madbence/node-drawille)
* added color support
* added text label support
* general optimizations
Will either be merged into node-drawille or become an own module at some point
###
stringWidth = require 'string-width'
config = require './config'
utils = require './utils'
module.exports = class BrailleBuffer
brailleMap: [[0x1, 0x8],[0x2, 0x10],[0x4, 0x20],[0x40, 0x80]]
asciiMap:
# "▬": [2+32, 4+64]
# "¯": [1+16]
"▀": [1+2+16+32]
"▄": [4+8+64+128]
"■": [2+4+32+64]
"▌": [1+2+4+8]
"▐": [16+32+64+128]
# "▓": [1+4+32+128, 2+8+16+64]
"█": [255]
pixelBuffer: null
charBuffer: null
foregroundBuffer: null
backgroundBuffer: null
asciiToBraille: []
globalBackground: null
termReset: "\x1B[39;49m"
constructor: (@width, @height) ->
size = @width*@height/8
@pixelBuffer = new Buffer size
@foregroundBuffer = new Buffer size
@backgroundBuffer = new Buffer size
@_mapBraille()
@clear()
clear: ->
@pixelBuffer.fill 0
@charBuffer = []
@foregroundBuffer.fill 0
@backgroundBuffer.fill 0
setGlobalBackground: (@globalBackground) ->
setBackground: (x, y, color) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
@backgroundBuffer[idx] = color
setPixel: (x, y, color) ->
@_locate x, y, (idx, mask) =>
@pixelBuffer[idx] |= mask
@foregroundBuffer[idx] = color
unsetPixel: (x, y) ->
@_locate x, y, (idx, mask) =>
@pixelBuffer[idx] &= ~mask
_project: (x, y) ->
(x>>1) + (@width>>1)*(y>>2)
_locate: (x, y, cb) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
mask = @brailleMap[y&3][x&1]
cb idx, mask
_mapBraille: ->
@asciiToBraille = [" "]
masks = []
for char, bits of @asciiMap
continue unless bits instanceof Array
masks.push mask: mask, char: char for mask in bits
for i in [1..255]
braille = (i&7) + ((i&56)<<1) + ((i&64)>>3) + (i&128)
@asciiToBraille[i] = masks.reduce(((best, mask) ->
covered = utils.population(mask.mask&braille)
if not best or best.covered < covered
char: mask.char, covered: covered
else
best
), undefined).char
_termColor: (foreground, background) ->
background = background or @globalBackground
if foreground and background
"\x1B[38;5;#{foreground};48;5;#{background}m"
else if foreground
"\x1B[49;38;5;#{foreground}m"
else if background
"\x1B[39;48;5;#{background}m"
else
@termReset
frame: ->
output = []
currentColor = null
skip = 0
for y in [0...@height/4]
skip = 0
for x in [0...@width/2]
idx = y*@width/2 + x
if idx and not x
output.push config.delimeter
if currentColor isnt colorCode = @_termColor @foregroundBuffer[idx], @backgroundBuffer[idx]
output.push currentColor = colorCode
output.push if char = @charBuffer[idx]
skip += stringWidth(char)-1
if skip+x >= @width/2
''
else
char
else
if not skip
if config.useBraille
String.fromCharCode 0x2800+@pixelBuffer[idx]
else
@asciiToBraille[@pixelBuffer[idx]]
else
skip--
''
output.push @termReset+config.delimeter
output.join ''
setChar: (char, x, y, color) ->
return unless 0 <= x < @width and 0 <= y < @height
idx = @_project x, y
@charBuffer[idx] = char
@foregroundBuffer[idx] = color
writeText: (text, x, y, color, center = true) ->
x -= text.length/2+1 if center
@setChar text.charAt(i), x+i*2, y, color for i in [0...text.length]
|
[
{
"context": "token-manager')\n\nDEFAULT_ADMIN_USER =\n email: 'loopback-with-admin@example.com'\n id: 'loopback-with-admin-user-id'\n passwo",
"end": 132,
"score": 0.9999200701713562,
"start": 101,
"tag": "EMAIL",
"value": "loopback-with-admin@example.com"
},
{
"context": " id: 'loopback-with-admin-user-id'\n password: 'admin-user-password' # No worry, noone can login through REST API.\n\nO",
"end": 206,
"score": 0.9992610216140747,
"start": 187,
"tag": "PASSWORD",
"value": "admin-user-password"
},
{
"context": "\n\nONE_YEAR = 60 * 60 * 24 * 365\n\nDEFAULT_TOKEN = 'loopback-with-admin-token'\n\npromisify = (fn) ->\n new Promise (y, n) =>\n ",
"end": 328,
"score": 0.9735371470451355,
"start": 303,
"tag": "PASSWORD",
"value": "loopback-with-admin-token"
},
{
"context": " access token.\n @param {String} [options.email=loopback-with-admin@example.com] email address for admin user\n @param {String}",
"end": 775,
"score": 0.9999220967292786,
"start": 744,
"tag": "EMAIL",
"value": "loopback-with-admin@example.com"
},
{
"context": " admin user\n @param {String} [options.password=admin-user-password] password of admin user\n ###\n constructor: ",
"end": 941,
"score": 0.9953728914260864,
"start": 922,
"tag": "PASSWORD",
"value": "admin-user-password"
},
{
"context": "am {String} [options.password=admin-user-password] password of admin user\n ###\n constructor: (options =",
"end": 951,
"score": 0.5248528122901917,
"start": 943,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "ns.password=admin-user-password] password of admin user\n ###\n constructor: (options = {}) ->\n\n ",
"end": 965,
"score": 0.5349631309509277,
"start": 961,
"tag": "PASSWORD",
"value": "user"
},
{
"context": " id or DEFAULT_ADMIN_USER.id\n password: password or DEFAULT_ADMIN_USER.password\n\n @tokensById ",
"end": 1269,
"score": 0.981355607509613,
"start": 1258,
"tag": "PASSWORD",
"value": "password or"
},
{
"context": "LT_ADMIN_USER.id\n password: password or DEFAULT_ADMIN_USER.password\n\n @tokensById = {}\n\n\n\n ###*\n Set fet",
"end": 1297,
"score": 0.9797292947769165,
"start": 1270,
"tag": "PASSWORD",
"value": "DEFAULT_ADMIN_USER.password"
},
{
"context": "ser: ->\n ____(\"creating admin user. id: #{@adminUser.id}\")\n { User } = @models\n\n pro",
"end": 4723,
"score": 0.598662257194519,
"start": 4718,
"tag": "USERNAME",
"value": "admin"
},
{
"context": " promisify (cb) =>\n User.create @adminUser, cb\n\n\n ###*\n Create admin role, called once",
"end": 4822,
"score": 0.6524626016616821,
"start": 4813,
"tag": "USERNAME",
"value": "adminUser"
},
{
"context": "ype: RoleMapping.USER\n principalId: @adminUser.id\n\n promisify (cb) =>\n ",
"end": 5221,
"score": 0.888824462890625,
"start": 5211,
"tag": "USERNAME",
"value": "@adminUser"
}
] | src/server/admin-token-manager.coffee | CureApp/loopback-with-admin | 13 | ____ = require('debug')('loopback-with-admin:admin-token-manager')
DEFAULT_ADMIN_USER =
email: 'loopback-with-admin@example.com'
id: 'loopback-with-admin-user-id'
password: 'admin-user-password' # No worry, noone can login through REST API.
ONE_YEAR = 60 * 60 * 24 * 365
DEFAULT_TOKEN = 'loopback-with-admin-token'
promisify = (fn) ->
new Promise (y, n) =>
cb = (e, o) => if e? then n(e) else y(o)
fn(cb)
###*
Admin token manager
@class AdminTokenManager
###
class AdminTokenManager
###*
@param {Function|Array(String)} [options.fetch] function to return admin tokens (or promise of it). When string[] is given, these value are used for the admin access token.
@param {String} [options.email=loopback-with-admin@example.com] email address for admin user
@param {String} [options.id=loopback-with-admin-user-id] id of admin user
@param {String} [options.password=admin-user-password] password of admin user
###
constructor: (options = {}) ->
{ fetch, email, id, password } = options
@fetch = @constructor.createFetchFunction(fetch)
@adminUser =
email: email or DEFAULT_ADMIN_USER.email
id: id or DEFAULT_ADMIN_USER.id
password: password or DEFAULT_ADMIN_USER.password
@tokensById = {}
###*
Set fetched tokens as admin tokens.
@public
@method init
@param {Object} models app.models in LoopBack
@return {Promise}
###
init: (@models) ->
@createAdminUser()
.then =>
@createAdminRole()
.then =>
@fetch()
.then (tokenStrs) =>
if not @validTokenStrs(tokenStrs)
throw @invalidTokenError(tokenStrs)
@updateTokens(tokenStrs)
###*
Refresh admin tokens.
@public
@method refreshTokens
@return {Promise}
###
refreshTokens: ->
@fetch().then (tokenStrs) =>
if not @validTokenStrs(tokenStrs)
console.error("""
AdminTokenManager: Fetched tokens are not valid!
Results: #{tokenStrs}
""")
return Promise.resolve(false)
@updateTokens(tokenStrs)
###*
Get current tokens
@public
@method getCurrentTokens
@return {Array(String)}
###
getCurrentTokens: ->
Object.keys @tokensById
###*
Save new tokens and destroy old tokens.
@private
###
updateTokens: (tokenStrs) ->
tokens = tokenStrs.map (tokenStr) => new AdminToken(tokenStr, @adminUser.id)
Promise.all(tokens.map (token) => @setNew token).then =>
promises = []
for tokenStr of @tokensById when tokenStr not in tokenStrs
promises.push @destroy(tokenStr)
Promise.all promises
.then =>
____("tokens: #{Object.keys(@tokensById).join(',')}")
###*
set new token
@private
###
setNew: (token) ->
{ AccessToken } = @models
@findById(token.id).then (foundToken) =>
if foundToken?
____("token: #{token.id} already exists.")
if foundToken.userId isnt @adminUser.id
console.error """
AdminTokenManager: The token `#{token.id}` is already exist for non-admin user. Skip creating.
"""
console.error()
return false
____("saving token: #{token.id}")
promisify (cb) =>
AccessToken.create token, cb
.then => true
.then (tokenIsSavedNow) =>
@tokensById[token.id] = token
###*
Destroy the token
@private
###
destroy: (tokenStr) ->
@findById(tokenStr).then (foundToken) =>
# check if the token to be deleted is admin token
if foundToken.userId isnt @adminUser.id
console.error """
AdminTokenManager: The token `#{token.id}` is not the admin token. Skip destroying.
"""
return false
{ AccessToken } = @models
promisify (cb) =>
AccessToken.destroyById tokenStr, cb
.then =>
delete @tokensById[tokenStr]
###*
Find AccessToken model by tokenStr
@private
###
findById: (tokenStr) ->
{ AccessToken } = @models
promisify (cb) =>
AccessToken.findById tokenStr, cb
###*
Create admin user, called once in 'init' function.
@private
###
createAdminUser: ->
____("creating admin user. id: #{@adminUser.id}")
{ User } = @models
promisify (cb) =>
User.create @adminUser, cb
###*
Create admin role, called once in 'init' function.
@private
###
createAdminRole: ->
____("creating admin role.")
{ Role, RoleMapping } = @models
promisify (cb) =>
Role.create name: 'admin', cb
.then (role) =>
principal =
principalType: RoleMapping.USER
principalId: @adminUser.id
promisify (cb) =>
role.principals.create principal, cb
###*
Check the fetched results are valid
@private
###
validTokenStrs: (tokenStrs) ->
Array.isArray(tokenStrs) and tokenStrs.length > 0 and tokenStrs.every (v) -> typeof v is 'string'
###*
Create an error to indicate the tokenStrs are invalid
@private
###
invalidTokenError: (tokenStrs) ->
new Error """
AdminTokenManager could not fetch valid access tokens.
Result: '#{tokenStrs}'
Check if the valid function is passed to the 3rd arugment of run() method.
var fn = function() {
return Promise.resolve(['token1', 'token2', 'token3'])
};
require('loopback-with-admin').run(models, config, { admin: {fetch: fn} })
"""
###*
Create valid fetch function
@private
@static
###
@createFetchFunction: (fetch) ->
if not fetch?
return => Promise.resolve([DEFAULT_TOKEN])
if typeof fetch is 'string'
return => Promise.resolve([fetch])
if Array.isArray fetch
return => Promise.resolve(fetch.slice())
if typeof fetch isnt 'function'
return => Promise.resolve([DEFAULT_TOKEN])
# if typeof fetch is 'function'
return =>
Promise.resolve(fetch()).then (results) =>
if typeof results is 'string'
return [results]
if Array.isArray results
return results
return [] # will throw error in init()
###*
Admin token
@class AdminToken
@private
###
class AdminToken
constructor: (@id, @userId) ->
@ttl = ONE_YEAR
@isAdmin = true
module.exports = AdminTokenManager
| 78631 | ____ = require('debug')('loopback-with-admin:admin-token-manager')
DEFAULT_ADMIN_USER =
email: '<EMAIL>'
id: 'loopback-with-admin-user-id'
password: '<PASSWORD>' # No worry, noone can login through REST API.
ONE_YEAR = 60 * 60 * 24 * 365
DEFAULT_TOKEN = '<PASSWORD>'
promisify = (fn) ->
new Promise (y, n) =>
cb = (e, o) => if e? then n(e) else y(o)
fn(cb)
###*
Admin token manager
@class AdminTokenManager
###
class AdminTokenManager
###*
@param {Function|Array(String)} [options.fetch] function to return admin tokens (or promise of it). When string[] is given, these value are used for the admin access token.
@param {String} [options.email=<EMAIL>] email address for admin user
@param {String} [options.id=loopback-with-admin-user-id] id of admin user
@param {String} [options.password=<PASSWORD>] <PASSWORD> of admin <PASSWORD>
###
constructor: (options = {}) ->
{ fetch, email, id, password } = options
@fetch = @constructor.createFetchFunction(fetch)
@adminUser =
email: email or DEFAULT_ADMIN_USER.email
id: id or DEFAULT_ADMIN_USER.id
password: <PASSWORD> <PASSWORD>
@tokensById = {}
###*
Set fetched tokens as admin tokens.
@public
@method init
@param {Object} models app.models in LoopBack
@return {Promise}
###
init: (@models) ->
@createAdminUser()
.then =>
@createAdminRole()
.then =>
@fetch()
.then (tokenStrs) =>
if not @validTokenStrs(tokenStrs)
throw @invalidTokenError(tokenStrs)
@updateTokens(tokenStrs)
###*
Refresh admin tokens.
@public
@method refreshTokens
@return {Promise}
###
refreshTokens: ->
@fetch().then (tokenStrs) =>
if not @validTokenStrs(tokenStrs)
console.error("""
AdminTokenManager: Fetched tokens are not valid!
Results: #{tokenStrs}
""")
return Promise.resolve(false)
@updateTokens(tokenStrs)
###*
Get current tokens
@public
@method getCurrentTokens
@return {Array(String)}
###
getCurrentTokens: ->
Object.keys @tokensById
###*
Save new tokens and destroy old tokens.
@private
###
updateTokens: (tokenStrs) ->
tokens = tokenStrs.map (tokenStr) => new AdminToken(tokenStr, @adminUser.id)
Promise.all(tokens.map (token) => @setNew token).then =>
promises = []
for tokenStr of @tokensById when tokenStr not in tokenStrs
promises.push @destroy(tokenStr)
Promise.all promises
.then =>
____("tokens: #{Object.keys(@tokensById).join(',')}")
###*
set new token
@private
###
setNew: (token) ->
{ AccessToken } = @models
@findById(token.id).then (foundToken) =>
if foundToken?
____("token: #{token.id} already exists.")
if foundToken.userId isnt @adminUser.id
console.error """
AdminTokenManager: The token `#{token.id}` is already exist for non-admin user. Skip creating.
"""
console.error()
return false
____("saving token: #{token.id}")
promisify (cb) =>
AccessToken.create token, cb
.then => true
.then (tokenIsSavedNow) =>
@tokensById[token.id] = token
###*
Destroy the token
@private
###
destroy: (tokenStr) ->
@findById(tokenStr).then (foundToken) =>
# check if the token to be deleted is admin token
if foundToken.userId isnt @adminUser.id
console.error """
AdminTokenManager: The token `#{token.id}` is not the admin token. Skip destroying.
"""
return false
{ AccessToken } = @models
promisify (cb) =>
AccessToken.destroyById tokenStr, cb
.then =>
delete @tokensById[tokenStr]
###*
Find AccessToken model by tokenStr
@private
###
findById: (tokenStr) ->
{ AccessToken } = @models
promisify (cb) =>
AccessToken.findById tokenStr, cb
###*
Create admin user, called once in 'init' function.
@private
###
createAdminUser: ->
____("creating admin user. id: #{@adminUser.id}")
{ User } = @models
promisify (cb) =>
User.create @adminUser, cb
###*
Create admin role, called once in 'init' function.
@private
###
createAdminRole: ->
____("creating admin role.")
{ Role, RoleMapping } = @models
promisify (cb) =>
Role.create name: 'admin', cb
.then (role) =>
principal =
principalType: RoleMapping.USER
principalId: @adminUser.id
promisify (cb) =>
role.principals.create principal, cb
###*
Check the fetched results are valid
@private
###
validTokenStrs: (tokenStrs) ->
Array.isArray(tokenStrs) and tokenStrs.length > 0 and tokenStrs.every (v) -> typeof v is 'string'
###*
Create an error to indicate the tokenStrs are invalid
@private
###
invalidTokenError: (tokenStrs) ->
new Error """
AdminTokenManager could not fetch valid access tokens.
Result: '#{tokenStrs}'
Check if the valid function is passed to the 3rd arugment of run() method.
var fn = function() {
return Promise.resolve(['token1', 'token2', 'token3'])
};
require('loopback-with-admin').run(models, config, { admin: {fetch: fn} })
"""
###*
Create valid fetch function
@private
@static
###
@createFetchFunction: (fetch) ->
if not fetch?
return => Promise.resolve([DEFAULT_TOKEN])
if typeof fetch is 'string'
return => Promise.resolve([fetch])
if Array.isArray fetch
return => Promise.resolve(fetch.slice())
if typeof fetch isnt 'function'
return => Promise.resolve([DEFAULT_TOKEN])
# if typeof fetch is 'function'
return =>
Promise.resolve(fetch()).then (results) =>
if typeof results is 'string'
return [results]
if Array.isArray results
return results
return [] # will throw error in init()
###*
Admin token
@class AdminToken
@private
###
class AdminToken
constructor: (@id, @userId) ->
@ttl = ONE_YEAR
@isAdmin = true
module.exports = AdminTokenManager
| true | ____ = require('debug')('loopback-with-admin:admin-token-manager')
DEFAULT_ADMIN_USER =
email: 'PI:EMAIL:<EMAIL>END_PI'
id: 'loopback-with-admin-user-id'
password: 'PI:PASSWORD:<PASSWORD>END_PI' # No worry, noone can login through REST API.
ONE_YEAR = 60 * 60 * 24 * 365
DEFAULT_TOKEN = 'PI:PASSWORD:<PASSWORD>END_PI'
promisify = (fn) ->
new Promise (y, n) =>
cb = (e, o) => if e? then n(e) else y(o)
fn(cb)
###*
Admin token manager
@class AdminTokenManager
###
class AdminTokenManager
###*
@param {Function|Array(String)} [options.fetch] function to return admin tokens (or promise of it). When string[] is given, these value are used for the admin access token.
@param {String} [options.email=PI:EMAIL:<EMAIL>END_PI] email address for admin user
@param {String} [options.id=loopback-with-admin-user-id] id of admin user
@param {String} [options.password=PI:PASSWORD:<PASSWORD>END_PI] PI:PASSWORD:<PASSWORD>END_PI of admin PI:PASSWORD:<PASSWORD>END_PI
###
constructor: (options = {}) ->
{ fetch, email, id, password } = options
@fetch = @constructor.createFetchFunction(fetch)
@adminUser =
email: email or DEFAULT_ADMIN_USER.email
id: id or DEFAULT_ADMIN_USER.id
password: PI:PASSWORD:<PASSWORD>END_PI PI:PASSWORD:<PASSWORD>END_PI
@tokensById = {}
###*
Set fetched tokens as admin tokens.
@public
@method init
@param {Object} models app.models in LoopBack
@return {Promise}
###
init: (@models) ->
@createAdminUser()
.then =>
@createAdminRole()
.then =>
@fetch()
.then (tokenStrs) =>
if not @validTokenStrs(tokenStrs)
throw @invalidTokenError(tokenStrs)
@updateTokens(tokenStrs)
###*
Refresh admin tokens.
@public
@method refreshTokens
@return {Promise}
###
refreshTokens: ->
@fetch().then (tokenStrs) =>
if not @validTokenStrs(tokenStrs)
console.error("""
AdminTokenManager: Fetched tokens are not valid!
Results: #{tokenStrs}
""")
return Promise.resolve(false)
@updateTokens(tokenStrs)
###*
Get current tokens
@public
@method getCurrentTokens
@return {Array(String)}
###
getCurrentTokens: ->
Object.keys @tokensById
###*
Save new tokens and destroy old tokens.
@private
###
updateTokens: (tokenStrs) ->
tokens = tokenStrs.map (tokenStr) => new AdminToken(tokenStr, @adminUser.id)
Promise.all(tokens.map (token) => @setNew token).then =>
promises = []
for tokenStr of @tokensById when tokenStr not in tokenStrs
promises.push @destroy(tokenStr)
Promise.all promises
.then =>
____("tokens: #{Object.keys(@tokensById).join(',')}")
###*
set new token
@private
###
setNew: (token) ->
{ AccessToken } = @models
@findById(token.id).then (foundToken) =>
if foundToken?
____("token: #{token.id} already exists.")
if foundToken.userId isnt @adminUser.id
console.error """
AdminTokenManager: The token `#{token.id}` is already exist for non-admin user. Skip creating.
"""
console.error()
return false
____("saving token: #{token.id}")
promisify (cb) =>
AccessToken.create token, cb
.then => true
.then (tokenIsSavedNow) =>
@tokensById[token.id] = token
###*
Destroy the token
@private
###
destroy: (tokenStr) ->
@findById(tokenStr).then (foundToken) =>
# check if the token to be deleted is admin token
if foundToken.userId isnt @adminUser.id
console.error """
AdminTokenManager: The token `#{token.id}` is not the admin token. Skip destroying.
"""
return false
{ AccessToken } = @models
promisify (cb) =>
AccessToken.destroyById tokenStr, cb
.then =>
delete @tokensById[tokenStr]
###*
Find AccessToken model by tokenStr
@private
###
findById: (tokenStr) ->
{ AccessToken } = @models
promisify (cb) =>
AccessToken.findById tokenStr, cb
###*
Create admin user, called once in 'init' function.
@private
###
createAdminUser: ->
____("creating admin user. id: #{@adminUser.id}")
{ User } = @models
promisify (cb) =>
User.create @adminUser, cb
###*
Create admin role, called once in 'init' function.
@private
###
createAdminRole: ->
____("creating admin role.")
{ Role, RoleMapping } = @models
promisify (cb) =>
Role.create name: 'admin', cb
.then (role) =>
principal =
principalType: RoleMapping.USER
principalId: @adminUser.id
promisify (cb) =>
role.principals.create principal, cb
###*
Check the fetched results are valid
@private
###
validTokenStrs: (tokenStrs) ->
Array.isArray(tokenStrs) and tokenStrs.length > 0 and tokenStrs.every (v) -> typeof v is 'string'
###*
Create an error to indicate the tokenStrs are invalid
@private
###
invalidTokenError: (tokenStrs) ->
new Error """
AdminTokenManager could not fetch valid access tokens.
Result: '#{tokenStrs}'
Check if the valid function is passed to the 3rd arugment of run() method.
var fn = function() {
return Promise.resolve(['token1', 'token2', 'token3'])
};
require('loopback-with-admin').run(models, config, { admin: {fetch: fn} })
"""
###*
Create valid fetch function
@private
@static
###
@createFetchFunction: (fetch) ->
if not fetch?
return => Promise.resolve([DEFAULT_TOKEN])
if typeof fetch is 'string'
return => Promise.resolve([fetch])
if Array.isArray fetch
return => Promise.resolve(fetch.slice())
if typeof fetch isnt 'function'
return => Promise.resolve([DEFAULT_TOKEN])
# if typeof fetch is 'function'
return =>
Promise.resolve(fetch()).then (results) =>
if typeof results is 'string'
return [results]
if Array.isArray results
return results
return [] # will throw error in init()
###*
Admin token
@class AdminToken
@private
###
class AdminToken
constructor: (@id, @userId) ->
@ttl = ONE_YEAR
@isAdmin = true
module.exports = AdminTokenManager
|
[
{
"context": "SharedObject = (obj)->\n obj.$$hashKey=\"cb_#{uuid++}\"\n return obj\n\nmodule.exports = Browser\n",
"end": 18939,
"score": 0.6626254916191101,
"start": 18926,
"tag": "KEY",
"value": "cb_#{uuid++}\""
}
] | src/api/browser.coffee | bladepan/cloudbrowser | 0 | Async = require('async')
lodash = require('lodash')
Components = require('../server/components')
User = require('../server/user')
cloudbrowserError = require('../shared/cloudbrowser_error')
{areArgsValid} = require('./utils')
routes = require('../server/application_manager/routes')
uuid = 0
# Permission checks are included wherever possible and a note is made if
# missing. Details like name, id, url etc. are available to everybody.
###*
Event to indicate that the current browser has been shared with
another user
@event Browser#share
###
###*
Event to indicate that the current browser has been renamed
@event Browser#rename
@type {String}
###
###*
API for browsers (internal object).
@class Browser
@param {Object} options
@param {User} options.userCtx The current user.
@param {Cloudbrowser} options.cbCtx The cloudbrowser API object.
@param {BrowserServer} options.browser The browser.
@fires Browser#share
@fires Browser#rename
###
class Browser
constructor : (options) ->
{cbServer, browser, cbCtx, userCtx, appConfig, appInstanceConfig} = options
if not cbServer? or not appConfig? or not appInstanceConfig?
console.log "browser api missing elements"
err = new Error()
console.log err.stack
###*
Gets the ID of the instance.
@method getID
@return {String}
@instance
@memberOf Browser
###
@getID = () ->
return browser.id
@getWorkerID = () ->
return browser.workerId
###*
Gets the url of the instance.
@method getURL
@return {String}
@instance
@memberOf Browser
###
@getURL = () ->
browserUrl = routes.buildBrowserPath(browser.mountPoint, browser.appInstanceId, browser.id)
return "#{cbServer.config.getHttpAddr()}#{browserUrl}"
###*
Gets the date of creation of the instance.
@method getDateCreated
@return {Date}
@instance
@memberOf Browser
###
@getDateCreated = () ->
return browser.dateCreated
###*
Gets the name of the instance.
@method getName
@return {String}
@instance
@memberOf Browser
###
@getName = () ->
return browser.name
###*
Creates a new component. This is called only when the browser is a local object.
@method createComponent
@param {String} name The registered name of the component.
@param {DOMNode} target The DOM node in which the component will be embedded.
@param {Object} options Extra options to customize the component.
@return {DOMNode}
@instance
@memberof Browser
###
@createComponent = (name, target, options) ->
return if typeof name isnt "string" or not target or not target.__nodeID
domBrowser = browser.getBrowser()
domBrowser.createComponent(name, target, options)
return target
###*
Gets the Application API object.
@method getAppConfig
@return {AppConfig}
@memberof Browser
@instance
###
@getAppConfig = () ->
mountPoint = browser.mountPoint
AppConfig = require("./application_config")
app = cbServer.applicationManager.find(mountPoint)
return new AppConfig({
cbServer : cbServer
cbCtx : cbCtx
userCtx : userCtx
app : app
})
###*
Closes the browser.
@method close
@memberof Browser
@instance
@param {errorCallback} callback
###
@close = (callback) ->
# get appInstance by direct property reference. both bserver and appInstance could be remote object
appInstance = browser.appInstance
appInstance.removeBrowser(browser.id, userCtx, callback)
return
###*
Redirects all clients that are connected to the current
instance to the given URL.
@method redirect
@param {String} url
@memberof Browser
@instance
###
@redirect = (url) ->
browser.redirect(url)
return
###*
Gets the email ID that is stored in the session
@method getResetEmail
@param {emailCallback} callback
@memberof Browser
@instance
###
@getResetEmail = (callback) ->
sessionManager = cbServer.sessionManager
browser.getFirstSession((err, session) ->
return callback(err) if err
callback(null,
sessionManager.findPropOnSession(session, 'resetuser'))
)
return
###*
Gets the user that created the instance.
@method getCreator
@return {String}
@instance
@memberof Browser
###
@getCreator = () ->
return browser.creator?.getEmail()
###*
Registers a listener for an event on the browser instance.
@method addEventListener
@param {String} event
@param {errorCallback} callback
@instance
@memberof Browser
###
@addEventListener = (eventName, callback) ->
if typeof callback isnt "function" then return
validEvents = ["share", "rename", "connect", "disconnect"]
if typeof eventName isnt "string" or validEvents.indexOf(eventName) is -1
return
callbackRegistered = callback
if @isAssocWithCurrentUser() and eventName is 'share'
callbackRegistered = (userInfo) ->
newUserInfo = {}
newUserInfo.role = userInfo.role
newUserInfo.user = User.getEmail(userInfo.user)
callback(newUserInfo)
# this is really nasty, now the browser object is stale
cbCtx.addEventListener(browser, eventName, callbackRegistered)
return
###*
Checks if the current user has some permission
associated with this browser
@method isAssocWithCurrentUser
@return {Bool}
@instance
@memberof Browser
###
@isAssocWithCurrentUser = () ->
appConfig = @getAppConfig()
if not appConfig.isAuthConfigured() or
browser.isOwner(userCtx) or
browser.isReaderWriter(userCtx) or
browser.isReader(userCtx) or
appConfig.isOwner()
return true
else
return false
###*
Gets all users that have the permission only to read and
write to the instance.
@method getReaderWriters
@return {Array<User>}
@instance
@memberof Browser
###
        @getReaderWriters = () ->
            # Returns the email addresses of all reader-writers.
            # There will not be any users in case authentication has
            # not been enabled
            users = []
            if typeof browser.getReaderWriters isnt "function"
                return users
            if @isAssocWithCurrentUser()
                # NOTE(review): reads browser.readwrite directly rather than
                # calling getReaderWriters() — confirm the property is
                # always populated on this object.
                users.push(rw.getEmail()) for rw in browser.readwrite
            return users
###*
Gets all users that have the permission only to read
@method getReaders
@return {Array<User>}
@instance
@memberof Browser
###
        @getReaders = () ->
            # Returns the email addresses of all read-only users.
            # There will not be any users in case authentication has
            # not been enabled
            users = []
            if typeof browser.getReaders isnt "function"
                return users
            if @isAssocWithCurrentUser()
                # NOTE(review): reads browser.readonly directly rather than
                # calling getReaders() — confirm the property is always
                # populated on this object.
                users.push(rw.getEmail()) for rw in browser.readonly
            return users
###*
    Gets all users that are the owners of the instance.
    (When only the number of owners is needed, a dedicated count
    method would be faster than building this list and taking
    its length.)
@method getOwners
@return {Array<User>}
@instance
@memberof Browser
###
        @getOwners = () ->
            # Returns the email addresses of all owners.
            # There will not be any users in case authentication has
            # not been enabled
            users = []
            if typeof browser.getOwners isnt "function"
                return users
            if @isAssocWithCurrentUser()
                # NOTE(review): reads browser.own directly rather than
                # calling getOwners() — confirm the property is always
                # populated on this object.
                users.push(rw.getEmail()) for rw in browser.own
            return users
###*
Checks if the user is a reader-writer of the instance.
@method isReaderWriter
@param {String} user
@return {Bool}
@instance
@memberof Browser
###
@isReaderWriter = (emailID) ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isReaderWriter isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isReaderWriter(userCtx) then return true
else return false
###*
Checks if the user is a reader of the instance.
@method isReader
@param {String} emailID
@return {Bool}
@memberof Browser
@instance
###
@isReader = (emailID) ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isReader isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isReader(userCtx) then return true
else return false
###*
Checks if the user is an owner of the instance
@method isOwner
@param {String} user
@return {Bool}
@instance
@memberof Browser
###
@isOwner = () ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isOwner isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isOwner(userCtx) then return true
else return false
        # [user],[callback]
        @getUserPrevilege = ()->
            # Flexible arity: (callback) queries the current user's
            # privilege, (user, callback) queries the given user's.
            switch arguments.length
                when 1
                    user = userCtx
                    callback = arguments[0]
                when 2
                    user = arguments[0]
                    callback = arguments[1]
            # NOTE(review): with zero arguments `callback` is undefined and
            # the lines below would throw — confirm all callers pass at
            # least one argument.
            return callback(null, null) if typeof browser.getUserPrevilege isnt 'function'
            browser.getUserPrevilege(user, callback)
            return
###*
Adds a user as a readerwriter of the current browser
@method addReaderWriter
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addReaderWriter = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addReaderWriter isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addReaderWriter"))
@grantPermissions('readwrite', new User(emailID), callback)
###*
Adds a user as an owner of the current browser
@method addOwner
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addOwner = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addOwner isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addOwner"))
@grantPermissions('own', new User(emailID), callback)
###*
Adds a user as a reader of the current browser
@method addReader
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addReader = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addReader isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addReader"))
@grantPermissions('readonly', new User(emailID), callback)
###*
Grants the user a role/permission on the browser.
@method grantPermissions
@param {String} permission
@param {User} user
@param {errorCallback} callback
@instance
@memberof Browser
###
        @grantPermissions = (permission, user, callback) ->
            # Three-step pipeline: verify the caller owns this browser,
            # persist the permission record, then attach the user to the
            # live browser object.
            {mountPoint, id} = browser
            permissionManager = cbServer.permissionManager
            Async.waterfall([
                (next)->
                    # Only owners may grant permissions.
                    browser.getUserPrevilege(userCtx, next)
                (result, next)->
                    if result isnt 'own'
                        next(cloudbrowserError("PERM_DENIED"))
                    else
                        # Persist the permission record for this browser.
                        permissionManager.addBrowserPermRec
                            user : user
                            mountPoint : mountPoint
                            browserID : id
                            permission : permission
                            callback : next
                (updateReturn, next)->
                    # Reflect the new permission on the browser itself.
                    browser.addUser({
                        user : user
                        permission : permission
                    }, next)
            ],(err)->
                callback err
            )
###*
Renames the instance.
@method rename
@param {String} newName
@fires Browser#rename
@instance
@memberof Browser
###
@rename = (newName) ->
if typeof newName isnt "string" then return
if browser.isOwner(userCtx)
browser.setName(newName)
browser.emit('rename', newName)
return
###*
Gets the application instance associated with the current browser
@method getAppInstanceConfig
@return {AppInstance}
@instance
@memberof Browser
###
        @getAppInstanceConfig = () ->
            # Wraps the owning app instance in its API object; returns
            # undefined when there is none or the caller lacks access.
            appInstance = browser.getAppInstance()
            if not appInstance then return
            if @isAssocWithCurrentUser()
                # Required lazily — presumably to avoid a circular require
                # at module load time; TODO confirm.
                AppInstance = require('./app_instance')
                return new AppInstance
                    cbCtx : cbCtx
                    userCtx : userCtx
                    appInstance : appInstance
                    cbServer : cbServer
        @getAppInstanceId = ()->
            # ID of the application instance this browser belongs to.
            return browser.appInstanceId
###*
Gets the local state with the current browser
@method getLocalState
@return {Object} Custom object provided by the application in the application state file
@instance
@memberof Browser
###
        @getLocalState = (property) ->
            # Returns undefined when the caller has no association with
            # this browser.
            if @isAssocWithCurrentUser()
                return browser.getLocalState(property)
###*
Gets information about the users connected to the current browser
@method getConnectedClients
@return {Array<{{address: String, email: String}}>}
@instance
@memberof Browser
###
        @getConnectedClients = () ->
            # Returns undefined when the caller has no association with
            # this browser.
            if @isAssocWithCurrentUser()
                return browser.getConnectedClients()
        @getUsers = (callback)->
            # Maps the browser's user records to plain email strings so the
            # result is serializable across the API boundary. Values may be
            # arrays (permission lists) or single User objects.
            browser.getUsers((err, users)->
                return callback(err) if err
                result ={}
                for k, v of users
                    if lodash.isArray(v)
                        result[k]= lodash.map(v, (u)->
                            return u.getEmail()
                        )
                    else
                        result[k] = v.getEmail()
                callback null, result
            )
        # only makes sense when it is the current (local) browser
        @getLogger = ()->
            return browser._logger
        # hack : share object among angularJS instances
        @createSharedObject = (obj)->
            # Pre-assigns Angular's $$hashKey identity tag (unique via the
            # module-level uuid counter) so the same object is recognized
            # across instances.
            obj.$$hashKey="cb_#{uuid++}"
            return obj
module.exports = Browser
Async = require('async')
lodash = require('lodash')
Components = require('../server/components')
User = require('../server/user')
cloudbrowserError = require('../shared/cloudbrowser_error')
{areArgsValid} = require('./utils')
routes = require('../server/application_manager/routes')
uuid = 0
# Permission checks are included wherever possible and a note is made if
# missing. Details like name, id, url etc. are available to everybody.
###*
Event to indicate that the current browser has been shared with
another user
@event Browser#share
###
###*
Event to indicate that the current browser has been renamed
@event Browser#rename
@type {String}
###
###*
API for browsers (internal object).
@class Browser
@param {Object} options
@param {User} options.userCtx The current user.
@param {Cloudbrowser} options.cbCtx The cloudbrowser API object.
@param {BrowserServer} options.browser The browser.
@fires Browser#share
@fires Browser#rename
###
class Browser
constructor : (options) ->
{cbServer, browser, cbCtx, userCtx, appConfig, appInstanceConfig} = options
if not cbServer? or not appConfig? or not appInstanceConfig?
console.log "browser api missing elements"
err = new Error()
console.log err.stack
###*
Gets the ID of the instance.
@method getID
@return {String}
@instance
@memberOf Browser
###
@getID = () ->
return browser.id
@getWorkerID = () ->
return browser.workerId
###*
Gets the url of the instance.
@method getURL
@return {String}
@instance
@memberOf Browser
###
@getURL = () ->
browserUrl = routes.buildBrowserPath(browser.mountPoint, browser.appInstanceId, browser.id)
return "#{cbServer.config.getHttpAddr()}#{browserUrl}"
###*
Gets the date of creation of the instance.
@method getDateCreated
@return {Date}
@instance
@memberOf Browser
###
@getDateCreated = () ->
return browser.dateCreated
###*
Gets the name of the instance.
@method getName
@return {String}
@instance
@memberOf Browser
###
@getName = () ->
return browser.name
###*
Creates a new component. This is called only when the browser is a local object.
@method createComponent
@param {String} name The registered name of the component.
@param {DOMNode} target The DOM node in which the component will be embedded.
@param {Object} options Extra options to customize the component.
@return {DOMNode}
@instance
@memberof Browser
###
@createComponent = (name, target, options) ->
return if typeof name isnt "string" or not target or not target.__nodeID
domBrowser = browser.getBrowser()
domBrowser.createComponent(name, target, options)
return target
###*
Gets the Application API object.
@method getAppConfig
@return {AppConfig}
@memberof Browser
@instance
###
@getAppConfig = () ->
mountPoint = browser.mountPoint
AppConfig = require("./application_config")
app = cbServer.applicationManager.find(mountPoint)
return new AppConfig({
cbServer : cbServer
cbCtx : cbCtx
userCtx : userCtx
app : app
})
###*
Closes the browser.
@method close
@memberof Browser
@instance
@param {errorCallback} callback
###
@close = (callback) ->
# get appInstance by direct property reference. both bserver and appInstance could be remote object
appInstance = browser.appInstance
appInstance.removeBrowser(browser.id, userCtx, callback)
return
###*
Redirects all clients that are connected to the current
instance to the given URL.
@method redirect
@param {String} url
@memberof Browser
@instance
###
@redirect = (url) ->
browser.redirect(url)
return
###*
Gets the email ID that is stored in the session
@method getResetEmail
@param {emailCallback} callback
@memberof Browser
@instance
###
@getResetEmail = (callback) ->
sessionManager = cbServer.sessionManager
browser.getFirstSession((err, session) ->
return callback(err) if err
callback(null,
sessionManager.findPropOnSession(session, 'resetuser'))
)
return
###*
Gets the user that created the instance.
@method getCreator
@return {String}
@instance
@memberof Browser
###
@getCreator = () ->
return browser.creator?.getEmail()
###*
Registers a listener for an event on the browser instance.
@method addEventListener
@param {String} event
@param {errorCallback} callback
@instance
@memberof Browser
###
@addEventListener = (eventName, callback) ->
if typeof callback isnt "function" then return
validEvents = ["share", "rename", "connect", "disconnect"]
if typeof eventName isnt "string" or validEvents.indexOf(eventName) is -1
return
callbackRegistered = callback
if @isAssocWithCurrentUser() and eventName is 'share'
callbackRegistered = (userInfo) ->
newUserInfo = {}
newUserInfo.role = userInfo.role
newUserInfo.user = User.getEmail(userInfo.user)
callback(newUserInfo)
# this is really nasty, now the browser object is stale
cbCtx.addEventListener(browser, eventName, callbackRegistered)
return
###*
Checks if the current user has some permission
associated with this browser
@method isAssocWithCurrentUser
@return {Bool}
@instance
@memberof Browser
###
@isAssocWithCurrentUser = () ->
appConfig = @getAppConfig()
if not appConfig.isAuthConfigured() or
browser.isOwner(userCtx) or
browser.isReaderWriter(userCtx) or
browser.isReader(userCtx) or
appConfig.isOwner()
return true
else
return false
###*
Gets all users that have the permission only to read and
write to the instance.
@method getReaderWriters
@return {Array<User>}
@instance
@memberof Browser
###
@getReaderWriters = () ->
# There will not be any users in case authentication has
# not been enabled
users = []
if typeof browser.getReaderWriters isnt "function"
return users
if @isAssocWithCurrentUser()
users.push(rw.getEmail()) for rw in browser.readwrite
return users
###*
Gets all users that have the permission only to read
@method getReaders
@return {Array<User>}
@instance
@memberof Browser
###
@getReaders = () ->
# There will not be any users in case authentication has
# not been enabled
users = []
if typeof browser.getReaders isnt "function"
return users
if @isAssocWithCurrentUser()
users.push(rw.getEmail()) for rw in browser.readonly
return users
###*
Gets all users that are the owners of the instance
There is a separate method for this as it is faster to get only the
number of owners than to construct a list of them using
getOwners and then get that number.
@method getOwners
@return {Array<User>}
@instance
@memberof Browser
###
@getOwners = () ->
# There will not be any users in case authentication has
# not been enabled
users = []
if typeof browser.getOwners isnt "function"
return users
if @isAssocWithCurrentUser()
users.push(rw.getEmail()) for rw in browser.own
return users
###*
Checks if the user is a reader-writer of the instance.
@method isReaderWriter
@param {String} user
@return {Bool}
@instance
@memberof Browser
###
@isReaderWriter = (emailID) ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isReaderWriter isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isReaderWriter(userCtx) then return true
else return false
###*
Checks if the user is a reader of the instance.
@method isReader
@param {String} emailID
@return {Bool}
@memberof Browser
@instance
###
@isReader = (emailID) ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isReader isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isReader(userCtx) then return true
else return false
###*
Checks if the user is an owner of the instance
@method isOwner
@param {String} user
@return {Bool}
@instance
@memberof Browser
###
@isOwner = () ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isOwner isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isOwner(userCtx) then return true
else return false
# [user],[callback]
@getUserPrevilege = ()->
switch arguments.length
when 1
user = userCtx
callback = arguments[0]
when 2
user = arguments[0]
callback = arguments[1]
return callback(null, null) if typeof browser.getUserPrevilege isnt 'function'
browser.getUserPrevilege(user, callback)
return
###*
Adds a user as a readerwriter of the current browser
@method addReaderWriter
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addReaderWriter = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addReaderWriter isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addReaderWriter"))
@grantPermissions('readwrite', new User(emailID), callback)
###*
Adds a user as an owner of the current browser
@method addOwner
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addOwner = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addOwner isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addOwner"))
@grantPermissions('own', new User(emailID), callback)
###*
Adds a user as a reader of the current browser
@method addReader
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addReader = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addReader isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addReader"))
@grantPermissions('readonly', new User(emailID), callback)
###*
Grants the user a role/permission on the browser.
@method grantPermissions
@param {String} permission
@param {User} user
@param {errorCallback} callback
@instance
@memberof Browser
###
@grantPermissions = (permission, user, callback) ->
{mountPoint, id} = browser
permissionManager = cbServer.permissionManager
Async.waterfall([
(next)->
browser.getUserPrevilege(userCtx, next)
(result, next)->
if result isnt 'own'
next(cloudbrowserError("PERM_DENIED"))
else
permissionManager.addBrowserPermRec
user : user
mountPoint : mountPoint
browserID : id
permission : permission
callback : next
(updateReturn, next)->
browser.addUser({
user : user
permission : permission
}, next)
],(err)->
callback err
)
###*
Renames the instance.
@method rename
@param {String} newName
@fires Browser#rename
@instance
@memberof Browser
###
@rename = (newName) ->
if typeof newName isnt "string" then return
if browser.isOwner(userCtx)
browser.setName(newName)
browser.emit('rename', newName)
return
###*
Gets the application instance associated with the current browser
@method getAppInstanceConfig
@return {AppInstance}
@instance
@memberof Browser
###
@getAppInstanceConfig = () ->
appInstance = browser.getAppInstance()
if not appInstance then return
if @isAssocWithCurrentUser()
AppInstance = require('./app_instance')
return new AppInstance
cbCtx : cbCtx
userCtx : userCtx
appInstance : appInstance
cbServer : cbServer
@getAppInstanceId = ()->
return browser.appInstanceId
###*
Gets the local state with the current browser
@method getLocalState
@return {Object} Custom object provided by the application in the application state file
@instance
@memberof Browser
###
@getLocalState = (property) ->
if @isAssocWithCurrentUser()
return browser.getLocalState(property)
###*
Gets information about the users connected to the current browser
@method getConnectedClients
@return {Array<{{address: String, email: String}}>}
@instance
@memberof Browser
###
@getConnectedClients = () ->
if @isAssocWithCurrentUser()
return browser.getConnectedClients()
@getUsers = (callback)->
browser.getUsers((err, users)->
return callback(err) if err
result ={}
for k, v of users
if lodash.isArray(v)
result[k]= lodash.map(v, (u)->
return u.getEmail()
)
else
result[k] = v.getEmail()
callback null, result
)
# only make sence when it is the currentBrowser
@getLogger = ()->
return browser._logger
# hack : share object among angularJS instances
@createSharedObject = (obj)->
            obj.$$hashKey="cb_#{uuid++}"
return obj
module.exports = Browser
Async = require('async')
lodash = require('lodash')
Components = require('../server/components')
User = require('../server/user')
cloudbrowserError = require('../shared/cloudbrowser_error')
{areArgsValid} = require('./utils')
routes = require('../server/application_manager/routes')
uuid = 0
# Permission checks are included wherever possible and a note is made if
# missing. Details like name, id, url etc. are available to everybody.
###*
Event to indicate that the current browser has been shared with
another user
@event Browser#share
###
###*
Event to indicate that the current browser has been renamed
@event Browser#rename
@type {String}
###
###*
API for browsers (internal object).
@class Browser
@param {Object} options
@param {User} options.userCtx The current user.
@param {Cloudbrowser} options.cbCtx The cloudbrowser API object.
@param {BrowserServer} options.browser The browser.
@fires Browser#share
@fires Browser#rename
###
class Browser
constructor : (options) ->
{cbServer, browser, cbCtx, userCtx, appConfig, appInstanceConfig} = options
if not cbServer? or not appConfig? or not appInstanceConfig?
console.log "browser api missing elements"
err = new Error()
console.log err.stack
###*
Gets the ID of the instance.
@method getID
@return {String}
@instance
@memberOf Browser
###
@getID = () ->
return browser.id
@getWorkerID = () ->
return browser.workerId
###*
Gets the url of the instance.
@method getURL
@return {String}
@instance
@memberOf Browser
###
@getURL = () ->
browserUrl = routes.buildBrowserPath(browser.mountPoint, browser.appInstanceId, browser.id)
return "#{cbServer.config.getHttpAddr()}#{browserUrl}"
###*
Gets the date of creation of the instance.
@method getDateCreated
@return {Date}
@instance
@memberOf Browser
###
@getDateCreated = () ->
return browser.dateCreated
###*
Gets the name of the instance.
@method getName
@return {String}
@instance
@memberOf Browser
###
@getName = () ->
return browser.name
###*
Creates a new component. This is called only when the browser is a local object.
@method createComponent
@param {String} name The registered name of the component.
@param {DOMNode} target The DOM node in which the component will be embedded.
@param {Object} options Extra options to customize the component.
@return {DOMNode}
@instance
@memberof Browser
###
@createComponent = (name, target, options) ->
return if typeof name isnt "string" or not target or not target.__nodeID
domBrowser = browser.getBrowser()
domBrowser.createComponent(name, target, options)
return target
###*
Gets the Application API object.
@method getAppConfig
@return {AppConfig}
@memberof Browser
@instance
###
@getAppConfig = () ->
mountPoint = browser.mountPoint
AppConfig = require("./application_config")
app = cbServer.applicationManager.find(mountPoint)
return new AppConfig({
cbServer : cbServer
cbCtx : cbCtx
userCtx : userCtx
app : app
})
###*
Closes the browser.
@method close
@memberof Browser
@instance
@param {errorCallback} callback
###
@close = (callback) ->
# get appInstance by direct property reference. both bserver and appInstance could be remote object
appInstance = browser.appInstance
appInstance.removeBrowser(browser.id, userCtx, callback)
return
###*
Redirects all clients that are connected to the current
instance to the given URL.
@method redirect
@param {String} url
@memberof Browser
@instance
###
@redirect = (url) ->
browser.redirect(url)
return
###*
Gets the email ID that is stored in the session
@method getResetEmail
@param {emailCallback} callback
@memberof Browser
@instance
###
@getResetEmail = (callback) ->
sessionManager = cbServer.sessionManager
browser.getFirstSession((err, session) ->
return callback(err) if err
callback(null,
sessionManager.findPropOnSession(session, 'resetuser'))
)
return
###*
Gets the user that created the instance.
@method getCreator
@return {String}
@instance
@memberof Browser
###
@getCreator = () ->
return browser.creator?.getEmail()
###*
Registers a listener for an event on the browser instance.
@method addEventListener
@param {String} event
@param {errorCallback} callback
@instance
@memberof Browser
###
@addEventListener = (eventName, callback) ->
if typeof callback isnt "function" then return
validEvents = ["share", "rename", "connect", "disconnect"]
if typeof eventName isnt "string" or validEvents.indexOf(eventName) is -1
return
callbackRegistered = callback
if @isAssocWithCurrentUser() and eventName is 'share'
callbackRegistered = (userInfo) ->
newUserInfo = {}
newUserInfo.role = userInfo.role
newUserInfo.user = User.getEmail(userInfo.user)
callback(newUserInfo)
# this is really nasty, now the browser object is stale
cbCtx.addEventListener(browser, eventName, callbackRegistered)
return
###*
Checks if the current user has some permission
associated with this browser
@method isAssocWithCurrentUser
@return {Bool}
@instance
@memberof Browser
###
@isAssocWithCurrentUser = () ->
appConfig = @getAppConfig()
if not appConfig.isAuthConfigured() or
browser.isOwner(userCtx) or
browser.isReaderWriter(userCtx) or
browser.isReader(userCtx) or
appConfig.isOwner()
return true
else
return false
###*
Gets all users that have the permission only to read and
write to the instance.
@method getReaderWriters
@return {Array<User>}
@instance
@memberof Browser
###
@getReaderWriters = () ->
# There will not be any users in case authentication has
# not been enabled
users = []
if typeof browser.getReaderWriters isnt "function"
return users
if @isAssocWithCurrentUser()
users.push(rw.getEmail()) for rw in browser.readwrite
return users
###*
Gets all users that have the permission only to read
@method getReaders
@return {Array<User>}
@instance
@memberof Browser
###
@getReaders = () ->
# There will not be any users in case authentication has
# not been enabled
users = []
if typeof browser.getReaders isnt "function"
return users
if @isAssocWithCurrentUser()
users.push(rw.getEmail()) for rw in browser.readonly
return users
###*
Gets all users that are the owners of the instance
There is a separate method for this as it is faster to get only the
number of owners than to construct a list of them using
getOwners and then get that number.
@method getOwners
@return {Array<User>}
@instance
@memberof Browser
###
@getOwners = () ->
# There will not be any users in case authentication has
# not been enabled
users = []
if typeof browser.getOwners isnt "function"
return users
if @isAssocWithCurrentUser()
users.push(rw.getEmail()) for rw in browser.own
return users
###*
Checks if the user is a reader-writer of the instance.
@method isReaderWriter
@param {String} user
@return {Bool}
@instance
@memberof Browser
###
@isReaderWriter = (emailID) ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isReaderWriter isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isReaderWriter(userCtx) then return true
else return false
###*
Checks if the user is a reader of the instance.
@method isReader
@param {String} emailID
@return {Bool}
@memberof Browser
@instance
###
@isReader = (emailID) ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isReader isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isReader(userCtx) then return true
else return false
###*
Checks if the user is an owner of the instance
@method isOwner
@param {String} user
@return {Bool}
@instance
@memberof Browser
###
@isOwner = () ->
# There will not be any users in case authentication has
# not been enabled
return if typeof browser.isOwner isnt "function"
switch arguments.length
# Check for current user
when 0 then
# Check for given user
when 1
emailID = arguments[0]
if not areArgsValid [
{item : emailID, type : "string"}
] then return
userCtx = new User(emailID)
else return
if @isAssocWithCurrentUser()
if browser.isOwner(userCtx) then return true
else return false
# [user],[callback]
@getUserPrevilege = ()->
switch arguments.length
when 1
user = userCtx
callback = arguments[0]
when 2
user = arguments[0]
callback = arguments[1]
return callback(null, null) if typeof browser.getUserPrevilege isnt 'function'
browser.getUserPrevilege(user, callback)
return
###*
Adds a user as a readerwriter of the current browser
@method addReaderWriter
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addReaderWriter = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addReaderWriter isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addReaderWriter"))
@grantPermissions('readwrite', new User(emailID), callback)
###*
Adds a user as an owner of the current browser
@method addOwner
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addOwner = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addOwner isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addOwner"))
@grantPermissions('own', new User(emailID), callback)
###*
Adds a user as a reader of the current browser
@method addReader
@param {String} emailID
@param {errorCallback} callback
@instance
@memberof Browser
###
@addReader = (emailID, callback) ->
return if not areArgsValid [
{item : emailID, type : "string", action : callback}
]
# There will not be any users in case authentication has
# not been enabled
if typeof browser.addReader isnt "function"
return callback?(cloudbrowserError('API_INVALID', "- addReader"))
@grantPermissions('readonly', new User(emailID), callback)
###*
Grants the user a role/permission on the browser.
@method grantPermissions
@param {String} permission
@param {User} user
@param {errorCallback} callback
@instance
@memberof Browser
###
@grantPermissions = (permission, user, callback) ->
{mountPoint, id} = browser
permissionManager = cbServer.permissionManager
Async.waterfall([
(next)->
browser.getUserPrevilege(userCtx, next)
(result, next)->
if result isnt 'own'
next(cloudbrowserError("PERM_DENIED"))
else
permissionManager.addBrowserPermRec
user : user
mountPoint : mountPoint
browserID : id
permission : permission
callback : next
(updateReturn, next)->
browser.addUser({
user : user
permission : permission
}, next)
],(err)->
callback err
)
###*
Renames the instance.
@method rename
@param {String} newName
@fires Browser#rename
@instance
@memberof Browser
###
@rename = (newName) ->
if typeof newName isnt "string" then return
if browser.isOwner(userCtx)
browser.setName(newName)
browser.emit('rename', newName)
return
###*
Gets the application instance associated with the current browser
@method getAppInstanceConfig
@return {AppInstance}
@instance
@memberof Browser
###
@getAppInstanceConfig = () ->
appInstance = browser.getAppInstance()
if not appInstance then return
if @isAssocWithCurrentUser()
AppInstance = require('./app_instance')
return new AppInstance
cbCtx : cbCtx
userCtx : userCtx
appInstance : appInstance
cbServer : cbServer
@getAppInstanceId = ()->
return browser.appInstanceId
###*
Gets the local state with the current browser
@method getLocalState
@return {Object} Custom object provided by the application in the application state file
@instance
@memberof Browser
###
@getLocalState = (property) ->
if @isAssocWithCurrentUser()
return browser.getLocalState(property)
###*
Gets information about the users connected to the current browser
@method getConnectedClients
@return {Array<{{address: String, email: String}}>}
@instance
@memberof Browser
###
@getConnectedClients = () ->
if @isAssocWithCurrentUser()
return browser.getConnectedClients()
@getUsers = (callback)->
browser.getUsers((err, users)->
return callback(err) if err
result ={}
for k, v of users
if lodash.isArray(v)
result[k]= lodash.map(v, (u)->
return u.getEmail()
)
else
result[k] = v.getEmail()
callback null, result
)
# only make sence when it is the currentBrowser
@getLogger = ()->
return browser._logger
# hack : share object among angularJS instances
@createSharedObject = (obj)->
obj.$$hashKey="PI:KEY:<KEY>END_PI
return obj
module.exports = Browser
|
[
{
"context": "Each ->\n co =>\n yield @room.user.say 'alice', '@hubot bikeshedding'\n yield new Promise",
"end": 653,
"score": 0.5896881818771362,
"start": 648,
"tag": "NAME",
"value": "alice"
}
] | test/bikeshed_test.coffee | hubot-scripts/hubot-bikeshed | 3 | chai = require 'chai'
sinon = require 'sinon'
chai.use require 'sinon-chai'
co = require('co')
Helper = require('hubot-test-helper')
helper = new Helper('../src/bikeshed.coffee')
expect = chai.expect
describe 'bikeshed', ->
beforeEach ->
@robot =
respond: sinon.spy()
hear: sinon.spy()
@room = helper.createRoom()
require('../src/bikeshed')(@robot)
afterEach ->
@room.destroy()
context 'registration', ->
it 'registers a hear listener', ->
expect(@robot.hear).to.have.been.calledWith(/What is bikeshedding\?/i)
context 'response', ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot bikeshedding'
yield new Promise((resolve, reject) ->
setTimeout(resolve, 1500);
)
it 'should contain a hexcode', ->
expect(@room.messages[1][1]).to.contain('I suggest we should use #')
| 117713 | chai = require 'chai'
sinon = require 'sinon'
chai.use require 'sinon-chai'
co = require('co')
Helper = require('hubot-test-helper')
helper = new Helper('../src/bikeshed.coffee')
expect = chai.expect
describe 'bikeshed', ->
beforeEach ->
@robot =
respond: sinon.spy()
hear: sinon.spy()
@room = helper.createRoom()
require('../src/bikeshed')(@robot)
afterEach ->
@room.destroy()
context 'registration', ->
it 'registers a hear listener', ->
expect(@robot.hear).to.have.been.calledWith(/What is bikeshedding\?/i)
context 'response', ->
beforeEach ->
co =>
yield @room.user.say '<NAME>', '@hubot bikeshedding'
yield new Promise((resolve, reject) ->
setTimeout(resolve, 1500);
)
it 'should contain a hexcode', ->
expect(@room.messages[1][1]).to.contain('I suggest we should use #')
| true | chai = require 'chai'
sinon = require 'sinon'
chai.use require 'sinon-chai'
co = require('co')
Helper = require('hubot-test-helper')
helper = new Helper('../src/bikeshed.coffee')
expect = chai.expect
describe 'bikeshed', ->
beforeEach ->
@robot =
respond: sinon.spy()
hear: sinon.spy()
@room = helper.createRoom()
require('../src/bikeshed')(@robot)
afterEach ->
@room.destroy()
context 'registration', ->
it 'registers a hear listener', ->
expect(@robot.hear).to.have.been.calledWith(/What is bikeshedding\?/i)
context 'response', ->
beforeEach ->
co =>
yield @room.user.say 'PI:NAME:<NAME>END_PI', '@hubot bikeshedding'
yield new Promise((resolve, reject) ->
setTimeout(resolve, 1500);
)
it 'should contain a hexcode', ->
expect(@room.messages[1][1]).to.contain('I suggest we should use #')
|
[
{
"context": "\n\n <input id=\"name\" type='text' placeholder='Your Name' value={nameVal}\n onKeyDown={onKeyDown} onInput",
"end": 421,
"score": 0.5178046822547913,
"start": 417,
"tag": "NAME",
"value": "Name"
}
] | client/Name.coffee | cyechow/cocreate | 181 | import React from 'react'
import {useTracker} from 'meteor/react-meteor-data'
import storage from './lib/storage'
export name = new storage.StringVariable 'name', ''
export Name = React.memo ->
nameVal = useTracker ->
name.get()
, []
onKeyDown = (e) ->
e.stopPropagation() # avoid width setting hotkey
onInput = (e) ->
name.set e.target.value
<input id="name" type='text' placeholder='Your Name' value={nameVal}
onKeyDown={onKeyDown} onInput={onInput}/>
Name.displayName = 'Name'
| 118479 | import React from 'react'
import {useTracker} from 'meteor/react-meteor-data'
import storage from './lib/storage'
export name = new storage.StringVariable 'name', ''
export Name = React.memo ->
nameVal = useTracker ->
name.get()
, []
onKeyDown = (e) ->
e.stopPropagation() # avoid width setting hotkey
onInput = (e) ->
name.set e.target.value
<input id="name" type='text' placeholder='Your <NAME>' value={nameVal}
onKeyDown={onKeyDown} onInput={onInput}/>
Name.displayName = 'Name'
| true | import React from 'react'
import {useTracker} from 'meteor/react-meteor-data'
import storage from './lib/storage'
export name = new storage.StringVariable 'name', ''
export Name = React.memo ->
nameVal = useTracker ->
name.get()
, []
onKeyDown = (e) ->
e.stopPropagation() # avoid width setting hotkey
onInput = (e) ->
name.set e.target.value
<input id="name" type='text' placeholder='Your PI:NAME:<NAME>END_PI' value={nameVal}
onKeyDown={onKeyDown} onInput={onInput}/>
Name.displayName = 'Name'
|
[
{
"context": "newData.pass, (hash) ->\n newData.pass = hash\n # append date stamp when record was c",
"end": 1428,
"score": 0.8927034139633179,
"start": 1424,
"tag": "PASSWORD",
"value": "hash"
},
{
"context": "counts.find { $and: [ {\n email: email\n pass: passHash\n } ] }, (e, o) ->\n callback if o then 'ok' el",
"end": 2969,
"score": 0.8273903727531433,
"start": 2961,
"tag": "PASSWORD",
"value": "passHash"
}
] | app/modules/account-manager.coffee | VirsixInc/newBrainrushLMS | 0 | crypto = require('crypto')
MongoDB = require('mongodb').Db
Server = require('mongodb').Server
moment = require('moment')
dbPort = 27017
dbHost = 'localhost'
dbName = 'node-login'
### establish the database connection ###
db = new MongoDB(dbName, new Server(dbHost, dbPort, auto_reconnect: true), w: 1)
db.open (e, d) ->
if e
console.log e
else
console.log 'connected to database :: ' + dbName
return
accounts = db.collection('accounts')
### login validation methods ###
exports.autoLogin = (user, pass, callback) ->
accounts.findOne { user: user }, (e, o) ->
if o
if o.pass == pass then callback(o) else callback(null)
else
callback null
return
return
exports.manualLogin = (user, pass, callback) ->
accounts.findOne { user: user }, (e, o) ->
if o == null
callback 'user-not-found'
else
validatePassword pass, o.pass, (err, res) ->
if res
callback null, o
else
callback 'invalid-password'
return
return
return
### record insertion, update & deletion methods ###
exports.addNewAccount = (newData, callback) ->
accounts.findOne { user: newData.user }, (e, o) ->
if o
callback 'username-taken'
else
accounts.findOne { email: newData.email }, (e, o) ->
if o
callback 'email-taken'
else
saltAndHash newData.pass, (hash) ->
newData.pass = hash
# append date stamp when record was created //
newData.date = moment().format('MMMM Do YYYY, h:mm:ss a')
accounts.insert newData, { safe: true }, callback
return
return
return
return
exports.updateAccount = (newData, callback) ->
accounts.findOne { user: newData.user }, (e, o) ->
o.name = newData.name
o.email = newData.email
o.country = newData.country
if newData.pass == ''
accounts.save o, { safe: true }, (err) ->
if err
callback err
else
callback null, o
return
else
saltAndHash newData.pass, (hash) ->
o.pass = hash
accounts.save o, { safe: true }, (err) ->
if err
callback err
else
callback null, o
return
return
return
return
exports.updatePassword = (email, newPass, callback) ->
accounts.findOne { email: email }, (e, o) ->
if e
callback e, null
else
saltAndHash newPass, (hash) ->
o.pass = hash
accounts.save o, { safe: true }, callback
return
return
return
### account lookup methods ###
exports.deleteAccount = (id, callback) ->
accounts.remove { _id: getObjectId(id) }, callback
return
exports.getAccountByEmail = (email, callback) ->
accounts.findOne { email: email }, (e, o) ->
callback o
return
return
exports.validateResetLink = (email, passHash, callback) ->
accounts.find { $and: [ {
email: email
pass: passHash
} ] }, (e, o) ->
callback if o then 'ok' else null
return
return
exports.getAllRecords = (callback) ->
accounts.find().toArray (e, res) ->
if e
callback e
else
callback null, res
return
return
exports.delAllRecords = (callback) ->
accounts.remove {}, callback
# reset accounts collection for testing //
return
### private encryption & validation methods ###
generateSalt = ->
set = '0123456789abcdefghijklmnopqurstuvwxyzABCDEFGHIJKLMNOPQURSTUVWXYZ'
salt = ''
i = 0
while i < 10
p = Math.floor(Math.random() * set.length)
salt += set[p]
i++
salt
md5 = (str) ->
crypto.createHash('md5').update(str).digest 'hex'
saltAndHash = (pass, callback) ->
salt = generateSalt()
callback salt + md5(pass + salt)
return
validatePassword = (plainPass, hashedPass, callback) ->
salt = hashedPass.substr(0, 10)
validHash = salt + md5(plainPass + salt)
callback null, hashedPass == validHash
return
### auxiliary methods ###
getObjectId = (id) ->
accounts.db.bson_serializer.ObjectID.createFromHexString id
findById = (id, callback) ->
accounts.findOne { _id: getObjectId(id) }, (e, res) ->
if e
callback e
else
callback null, res
return
return
findByMultipleFields = (a, callback) ->
# this takes an array of name/val pairs to search against {fieldName : 'value'} //
accounts.find($or: a).toArray (e, results) ->
if e
callback e
else
callback null, results
return
return
| 175460 | crypto = require('crypto')
MongoDB = require('mongodb').Db
Server = require('mongodb').Server
moment = require('moment')
dbPort = 27017
dbHost = 'localhost'
dbName = 'node-login'
### establish the database connection ###
db = new MongoDB(dbName, new Server(dbHost, dbPort, auto_reconnect: true), w: 1)
db.open (e, d) ->
if e
console.log e
else
console.log 'connected to database :: ' + dbName
return
accounts = db.collection('accounts')
### login validation methods ###
exports.autoLogin = (user, pass, callback) ->
accounts.findOne { user: user }, (e, o) ->
if o
if o.pass == pass then callback(o) else callback(null)
else
callback null
return
return
exports.manualLogin = (user, pass, callback) ->
accounts.findOne { user: user }, (e, o) ->
if o == null
callback 'user-not-found'
else
validatePassword pass, o.pass, (err, res) ->
if res
callback null, o
else
callback 'invalid-password'
return
return
return
### record insertion, update & deletion methods ###
exports.addNewAccount = (newData, callback) ->
accounts.findOne { user: newData.user }, (e, o) ->
if o
callback 'username-taken'
else
accounts.findOne { email: newData.email }, (e, o) ->
if o
callback 'email-taken'
else
saltAndHash newData.pass, (hash) ->
newData.pass = <PASSWORD>
# append date stamp when record was created //
newData.date = moment().format('MMMM Do YYYY, h:mm:ss a')
accounts.insert newData, { safe: true }, callback
return
return
return
return
exports.updateAccount = (newData, callback) ->
accounts.findOne { user: newData.user }, (e, o) ->
o.name = newData.name
o.email = newData.email
o.country = newData.country
if newData.pass == ''
accounts.save o, { safe: true }, (err) ->
if err
callback err
else
callback null, o
return
else
saltAndHash newData.pass, (hash) ->
o.pass = hash
accounts.save o, { safe: true }, (err) ->
if err
callback err
else
callback null, o
return
return
return
return
exports.updatePassword = (email, newPass, callback) ->
accounts.findOne { email: email }, (e, o) ->
if e
callback e, null
else
saltAndHash newPass, (hash) ->
o.pass = hash
accounts.save o, { safe: true }, callback
return
return
return
### account lookup methods ###
exports.deleteAccount = (id, callback) ->
accounts.remove { _id: getObjectId(id) }, callback
return
exports.getAccountByEmail = (email, callback) ->
accounts.findOne { email: email }, (e, o) ->
callback o
return
return
exports.validateResetLink = (email, passHash, callback) ->
accounts.find { $and: [ {
email: email
pass: <PASSWORD>
} ] }, (e, o) ->
callback if o then 'ok' else null
return
return
exports.getAllRecords = (callback) ->
accounts.find().toArray (e, res) ->
if e
callback e
else
callback null, res
return
return
exports.delAllRecords = (callback) ->
accounts.remove {}, callback
# reset accounts collection for testing //
return
### private encryption & validation methods ###
generateSalt = ->
set = '0123456789abcdefghijklmnopqurstuvwxyzABCDEFGHIJKLMNOPQURSTUVWXYZ'
salt = ''
i = 0
while i < 10
p = Math.floor(Math.random() * set.length)
salt += set[p]
i++
salt
md5 = (str) ->
crypto.createHash('md5').update(str).digest 'hex'
saltAndHash = (pass, callback) ->
salt = generateSalt()
callback salt + md5(pass + salt)
return
validatePassword = (plainPass, hashedPass, callback) ->
salt = hashedPass.substr(0, 10)
validHash = salt + md5(plainPass + salt)
callback null, hashedPass == validHash
return
### auxiliary methods ###
getObjectId = (id) ->
accounts.db.bson_serializer.ObjectID.createFromHexString id
findById = (id, callback) ->
accounts.findOne { _id: getObjectId(id) }, (e, res) ->
if e
callback e
else
callback null, res
return
return
findByMultipleFields = (a, callback) ->
# this takes an array of name/val pairs to search against {fieldName : 'value'} //
accounts.find($or: a).toArray (e, results) ->
if e
callback e
else
callback null, results
return
return
| true | crypto = require('crypto')
MongoDB = require('mongodb').Db
Server = require('mongodb').Server
moment = require('moment')
dbPort = 27017
dbHost = 'localhost'
dbName = 'node-login'
### establish the database connection ###
db = new MongoDB(dbName, new Server(dbHost, dbPort, auto_reconnect: true), w: 1)
db.open (e, d) ->
if e
console.log e
else
console.log 'connected to database :: ' + dbName
return
accounts = db.collection('accounts')
### login validation methods ###
exports.autoLogin = (user, pass, callback) ->
accounts.findOne { user: user }, (e, o) ->
if o
if o.pass == pass then callback(o) else callback(null)
else
callback null
return
return
exports.manualLogin = (user, pass, callback) ->
accounts.findOne { user: user }, (e, o) ->
if o == null
callback 'user-not-found'
else
validatePassword pass, o.pass, (err, res) ->
if res
callback null, o
else
callback 'invalid-password'
return
return
return
### record insertion, update & deletion methods ###
exports.addNewAccount = (newData, callback) ->
accounts.findOne { user: newData.user }, (e, o) ->
if o
callback 'username-taken'
else
accounts.findOne { email: newData.email }, (e, o) ->
if o
callback 'email-taken'
else
saltAndHash newData.pass, (hash) ->
newData.pass = PI:PASSWORD:<PASSWORD>END_PI
# append date stamp when record was created //
newData.date = moment().format('MMMM Do YYYY, h:mm:ss a')
accounts.insert newData, { safe: true }, callback
return
return
return
return
exports.updateAccount = (newData, callback) ->
accounts.findOne { user: newData.user }, (e, o) ->
o.name = newData.name
o.email = newData.email
o.country = newData.country
if newData.pass == ''
accounts.save o, { safe: true }, (err) ->
if err
callback err
else
callback null, o
return
else
saltAndHash newData.pass, (hash) ->
o.pass = hash
accounts.save o, { safe: true }, (err) ->
if err
callback err
else
callback null, o
return
return
return
return
exports.updatePassword = (email, newPass, callback) ->
accounts.findOne { email: email }, (e, o) ->
if e
callback e, null
else
saltAndHash newPass, (hash) ->
o.pass = hash
accounts.save o, { safe: true }, callback
return
return
return
### account lookup methods ###
exports.deleteAccount = (id, callback) ->
accounts.remove { _id: getObjectId(id) }, callback
return
exports.getAccountByEmail = (email, callback) ->
accounts.findOne { email: email }, (e, o) ->
callback o
return
return
exports.validateResetLink = (email, passHash, callback) ->
accounts.find { $and: [ {
email: email
pass: PI:PASSWORD:<PASSWORD>END_PI
} ] }, (e, o) ->
callback if o then 'ok' else null
return
return
exports.getAllRecords = (callback) ->
accounts.find().toArray (e, res) ->
if e
callback e
else
callback null, res
return
return
exports.delAllRecords = (callback) ->
accounts.remove {}, callback
# reset accounts collection for testing //
return
### private encryption & validation methods ###
generateSalt = ->
set = '0123456789abcdefghijklmnopqurstuvwxyzABCDEFGHIJKLMNOPQURSTUVWXYZ'
salt = ''
i = 0
while i < 10
p = Math.floor(Math.random() * set.length)
salt += set[p]
i++
salt
md5 = (str) ->
crypto.createHash('md5').update(str).digest 'hex'
saltAndHash = (pass, callback) ->
salt = generateSalt()
callback salt + md5(pass + salt)
return
validatePassword = (plainPass, hashedPass, callback) ->
salt = hashedPass.substr(0, 10)
validHash = salt + md5(plainPass + salt)
callback null, hashedPass == validHash
return
### auxiliary methods ###
getObjectId = (id) ->
accounts.db.bson_serializer.ObjectID.createFromHexString id
findById = (id, callback) ->
accounts.findOne { _id: getObjectId(id) }, (e, res) ->
if e
callback e
else
callback null, res
return
return
findByMultipleFields = (a, callback) ->
# this takes an array of name/val pairs to search against {fieldName : 'value'} //
accounts.find($or: a).toArray (e, results) ->
if e
callback e
else
callback null, results
return
return
|
[
{
"context": "###\n\t网易音乐下载解析 By Jixun\n\t尝试使用 Coffee Script 写\n###\n\nid: 'music.163',\nname:",
"end": 22,
"score": 0.8951253890991211,
"start": 17,
"tag": "NAME",
"value": "Jixun"
},
{
"context": "arCode !! 参数2 的 index 会玩坏返回值\n\t(dfsid) ->\n\t\tkey = [ 51, 103, 111, 56, 38, 36, 56, 42, 51, 42, 51, 104,",
"end": 4405,
"score": 0.9432037472724915,
"start": 4405,
"tag": "KEY",
"value": ""
},
{
"context": "rCode !! 参数2 的 index 会玩坏返回值\n\t(dfsid) ->\n\t\tkey = [ 51, 103, 111, 56, 38, 36, 56, 42, 51, 42, 51, 104, 48, 107, 40, 50, 41, 50 ]\n\t\tfids = strToKeyCod",
"end": 4453,
"score": 0.9587746262550354,
"start": 4406,
"tag": "KEY",
"value": "51, 103, 111, 56, 38, 36, 56, 42, 51, 42, 51, 1"
},
{
"context": " [ 51, 103, 111, 56, 38, 36, 56, 42, 51, 42, 51, 104, 48, 107, 40, 50, 41, 50 ]\n\t\tfids = strToKeyCode",
"end": 4454,
"score": 0.4494905173778534,
"start": 4453,
"tag": "IP_ADDRESS",
"value": "0"
},
{
"context": "[ 51, 103, 111, 56, 38, 36, 56, 42, 51, 42, 51, 104, 48, 107, 40, 50, 41, 50 ]\n\t\tfids = strToKeyCodes(",
"end": 4456,
"score": 0.7611217498779297,
"start": 4454,
"tag": "KEY",
"value": "4,"
}
] | src/host/music.163.coffee | ningfei/cuwcl4c | 0 | ###
网易音乐下载解析 By Jixun
尝试使用 Coffee Script 写
###
id: 'music.163',
name: '网易音乐下载解析',
host: 'music.163.com',
noSubHost: yes,
noFrame: no,
dl_icon: yes,
css: `<% ~com.163.music.dl.css %>`,
onStart: ->
@regPlayer()
# 优先使用 HTML5 播放器, 如果没有再考虑 Flash 支援
unsafeExec ->
fakePlatForm = navigator.platform + "--Fake-mac"
Object.defineProperty navigator, "platform",
get: -> fakePlatForm
set: -> null
window.GRestrictive = false;
_doRemoval: ->
H.waitUntil 'nm.x', =>
hook1 = @searchFunction unsafeWindow.nej.e, 'nej.e', '.dataset;if'
hook2 = @searchFunction unsafeWindow.nm.x, 'nm.x', '.copyrightId=='
# 因为 nm.x.jC 后加载, 能保证 nej.e.bI 存在
H.waitUntil 'nm.x.' + hook2, ->
unsafeExec (bIsFrame, hook1, hook2)->
_bK = nej.e[hook1]
nej.e[hook1] = (z, name) ->
return 1 if name is 'copyright' or name is 'resCopyright'
_bK.apply this, arguments
nm.x[hook2] =-> false
# 完全忘了下面的是啥
#if bIsFrame and nm.m.c.xB::zB
# nm.m.c.xB::zB =-> true
, H.isFrame, hook1, hook2
, 7000, 500
searchFunction: (base, name, key) ->
for baseName, fn of base
if (fn && typeof fn == 'function')
fnStr = String(fn)
if fnStr.indexOf(key) != -1
H.info('Search %s, found: %s.%s', key, name, baseName);
return baseName
H.info('Search %s, found nothing.', key);
return null;
# 接收文件数据
regPlayer: ->
document.addEventListener H.scriptName, (e) =>
songObj = e.detail
@linkDownload
.attr
href: H.uri(@getUri(JSON.parse songObj.song), "#{songObj.name} [#{songObj.artist}].mp3")
title: '下载: ' + songObj.name
hookPlayer: ->
H.waitUntil 'nm.m.f', =>
playerHooks = null
for baseName, clsFn of unsafeWindow.nm.m.f
protoName = @searchFunction clsFn::, "nm.m.f.#{baseName}", '<em>00:00</em>'
if protoName
playerHooks = [baseName, protoName]
break;
unsafeExec (scriptName, playerHooks) ->
_bakPlayerUpdateUI = nm.m.f[playerHooks[0]]::[playerHooks[1]]
nm.m.f[playerHooks[0]]::[playerHooks[1]] = (songObj) ->
eveSongObj =
artist: songObj.artists.map((artist) -> artist.name).join '、'
name: songObj.name
song: JSON.stringify songObj
document.dispatchEvent new CustomEvent(scriptName, detail: eveSongObj);
_bakPlayerUpdateUI.apply this, arguments
return
, H.scriptName, playerHooks
return
return
hookPlayerFm: ->
H.waitUntil 'nm.m.fO', =>
hook = @searchFunction unsafeWindow.nm.m.fO::, 'nm.x', '.mp3Url,true'
@linkDownload = $ '<a>'
.prependTo '.opts.f-cb>.f-fr'
.addClass 'icon icon-next'
.html ' '
.css 'transform', 'rotate(90deg)'
unsafeExec (scriptName, hook) ->
_bakPlaySong = nm.m.fO::[hook];
nm.m.fO::[hook] = (songObj) ->
eveSongObj =
artist: songObj.artists.map((artist) -> artist.name).join '、'
name: songObj.name
song: JSON.stringify songObj
document.dispatchEvent new CustomEvent(scriptName, detail: eveSongObj);
_bakPlaySong.apply this, arguments
return
, H.scriptName, hook
return
onBody: ->
@_doRemoval()
return if H.isFrame
# 单曲下载
@linkDownload = $('<a>')
.addClass(H.defaultDlIcon)
.appendTo($ '.m-playbar .oper')
.attr
title: '播放音乐, 即刻解析'
.click (e)->
e.stopPropagation()
return
# 播放列表下载
@linkDownloadAll = $('<a>')
.addClass(H.defaultDlIcon)
.addClass('addall')
.text('全部下载')
.attr
title: '下载列表里的所有歌曲'
.click (e) =>
# 编译出来的代码量好大!
e.stopPropagation()
do (trackQueue = localStorage['track-queue'],
aria2 = new Aria2.BATCH(H.aria2, -> H.info arguments),
) =>
for i, track of JSON.parse trackQueue
aria2.add Aria2.fn.addUri, [@getUri track], H.buildAriaParam
out: "#{i}. #{track.name} [#{track.artists.map((artist) -> artist.name).join '、'}].mp3"
aria2.send yes
return
return
if H.config.dUriType is 2
H.captureAria @linkDownload
else
@linkDownloadAll.addClass('jx_hide')
H.waitUntil () -> $('.listhdc > .addall').length,
() =>
@linkDownloadAll
.insertBefore $('.m-playbar .listhdc .addall')
.after $('<a>').addClass 'line jx_dl_line'
return
, yes, 500
if location.pathname == '/demo/fm' then @hookPlayerFm() else @hookPlayer()
dfsHash: ( () ->
strToKeyCodes = (str) -> Array::slice.call(String(str).split '').map (e) -> e.charCodeAt()
# 还原:
# arr.map(function (e) { return String.fromCharCode(e) }).join('');
# 不能直接传 String.fromCharCode !! 参数2 的 index 会玩坏返回值
(dfsid) ->
key = [ 51, 103, 111, 56, 38, 36, 56, 42, 51, 42, 51, 104, 48, 107, 40, 50, 41, 50 ]
fids = strToKeyCodes(dfsid).map (fid, i) -> (fid ^ key[i % key.length]) & 0xFF
CryptoJS
.MD5(CryptoJS.lib.ByteArray(fids))
.toString(CryptoJS.enc.Base64)
.replace(/\//g, "_")
.replace(/\+/g, "-")
)()
getUri: (song) ->
dsfId = (song.hMusic || song.mMusic || song.lMusic).dfsId;
# 服务器 1 ~ 4; 但是貌似 1 ~ 2 的最稳定
randServer = Math.floor(Math.random() * 2) + 1
return "http://m#{randServer}.music.126.net/#{@dfsHash(dsfId)}/#{dsfId}.mp3";
| 3101 | ###
网易音乐下载解析 By <NAME>
尝试使用 Coffee Script 写
###
id: 'music.163',
name: '网易音乐下载解析',
host: 'music.163.com',
noSubHost: yes,
noFrame: no,
dl_icon: yes,
css: `<% ~com.163.music.dl.css %>`,
onStart: ->
@regPlayer()
# 优先使用 HTML5 播放器, 如果没有再考虑 Flash 支援
unsafeExec ->
fakePlatForm = navigator.platform + "--Fake-mac"
Object.defineProperty navigator, "platform",
get: -> fakePlatForm
set: -> null
window.GRestrictive = false;
_doRemoval: ->
H.waitUntil 'nm.x', =>
hook1 = @searchFunction unsafeWindow.nej.e, 'nej.e', '.dataset;if'
hook2 = @searchFunction unsafeWindow.nm.x, 'nm.x', '.copyrightId=='
# 因为 nm.x.jC 后加载, 能保证 nej.e.bI 存在
H.waitUntil 'nm.x.' + hook2, ->
unsafeExec (bIsFrame, hook1, hook2)->
_bK = nej.e[hook1]
nej.e[hook1] = (z, name) ->
return 1 if name is 'copyright' or name is 'resCopyright'
_bK.apply this, arguments
nm.x[hook2] =-> false
# 完全忘了下面的是啥
#if bIsFrame and nm.m.c.xB::zB
# nm.m.c.xB::zB =-> true
, H.isFrame, hook1, hook2
, 7000, 500
searchFunction: (base, name, key) ->
for baseName, fn of base
if (fn && typeof fn == 'function')
fnStr = String(fn)
if fnStr.indexOf(key) != -1
H.info('Search %s, found: %s.%s', key, name, baseName);
return baseName
H.info('Search %s, found nothing.', key);
return null;
# 接收文件数据
regPlayer: ->
document.addEventListener H.scriptName, (e) =>
songObj = e.detail
@linkDownload
.attr
href: H.uri(@getUri(JSON.parse songObj.song), "#{songObj.name} [#{songObj.artist}].mp3")
title: '下载: ' + songObj.name
hookPlayer: ->
H.waitUntil 'nm.m.f', =>
playerHooks = null
for baseName, clsFn of unsafeWindow.nm.m.f
protoName = @searchFunction clsFn::, "nm.m.f.#{baseName}", '<em>00:00</em>'
if protoName
playerHooks = [baseName, protoName]
break;
unsafeExec (scriptName, playerHooks) ->
_bakPlayerUpdateUI = nm.m.f[playerHooks[0]]::[playerHooks[1]]
nm.m.f[playerHooks[0]]::[playerHooks[1]] = (songObj) ->
eveSongObj =
artist: songObj.artists.map((artist) -> artist.name).join '、'
name: songObj.name
song: JSON.stringify songObj
document.dispatchEvent new CustomEvent(scriptName, detail: eveSongObj);
_bakPlayerUpdateUI.apply this, arguments
return
, H.scriptName, playerHooks
return
return
hookPlayerFm: ->
H.waitUntil 'nm.m.fO', =>
hook = @searchFunction unsafeWindow.nm.m.fO::, 'nm.x', '.mp3Url,true'
@linkDownload = $ '<a>'
.prependTo '.opts.f-cb>.f-fr'
.addClass 'icon icon-next'
.html ' '
.css 'transform', 'rotate(90deg)'
unsafeExec (scriptName, hook) ->
_bakPlaySong = nm.m.fO::[hook];
nm.m.fO::[hook] = (songObj) ->
eveSongObj =
artist: songObj.artists.map((artist) -> artist.name).join '、'
name: songObj.name
song: JSON.stringify songObj
document.dispatchEvent new CustomEvent(scriptName, detail: eveSongObj);
_bakPlaySong.apply this, arguments
return
, H.scriptName, hook
return
onBody: ->
@_doRemoval()
return if H.isFrame
# 单曲下载
@linkDownload = $('<a>')
.addClass(H.defaultDlIcon)
.appendTo($ '.m-playbar .oper')
.attr
title: '播放音乐, 即刻解析'
.click (e)->
e.stopPropagation()
return
# 播放列表下载
@linkDownloadAll = $('<a>')
.addClass(H.defaultDlIcon)
.addClass('addall')
.text('全部下载')
.attr
title: '下载列表里的所有歌曲'
.click (e) =>
# 编译出来的代码量好大!
e.stopPropagation()
do (trackQueue = localStorage['track-queue'],
aria2 = new Aria2.BATCH(H.aria2, -> H.info arguments),
) =>
for i, track of JSON.parse trackQueue
aria2.add Aria2.fn.addUri, [@getUri track], H.buildAriaParam
out: "#{i}. #{track.name} [#{track.artists.map((artist) -> artist.name).join '、'}].mp3"
aria2.send yes
return
return
if H.config.dUriType is 2
H.captureAria @linkDownload
else
@linkDownloadAll.addClass('jx_hide')
H.waitUntil () -> $('.listhdc > .addall').length,
() =>
@linkDownloadAll
.insertBefore $('.m-playbar .listhdc .addall')
.after $('<a>').addClass 'line jx_dl_line'
return
, yes, 500
if location.pathname == '/demo/fm' then @hookPlayerFm() else @hookPlayer()
dfsHash: ( () ->
strToKeyCodes = (str) -> Array::slice.call(String(str).split '').map (e) -> e.charCodeAt()
# 还原:
# arr.map(function (e) { return String.fromCharCode(e) }).join('');
# 不能直接传 String.fromCharCode !! 参数2 的 index 会玩坏返回值
(dfsid) ->
key = [<KEY> <KEY>0<KEY> 48, 107, 40, 50, 41, 50 ]
fids = strToKeyCodes(dfsid).map (fid, i) -> (fid ^ key[i % key.length]) & 0xFF
CryptoJS
.MD5(CryptoJS.lib.ByteArray(fids))
.toString(CryptoJS.enc.Base64)
.replace(/\//g, "_")
.replace(/\+/g, "-")
)()
getUri: (song) ->
dsfId = (song.hMusic || song.mMusic || song.lMusic).dfsId;
# 服务器 1 ~ 4; 但是貌似 1 ~ 2 的最稳定
randServer = Math.floor(Math.random() * 2) + 1
return "http://m#{randServer}.music.126.net/#{@dfsHash(dsfId)}/#{dsfId}.mp3";
| true | ###
网易音乐下载解析 By PI:NAME:<NAME>END_PI
尝试使用 Coffee Script 写
###
id: 'music.163',
name: '网易音乐下载解析',
host: 'music.163.com',
noSubHost: yes,
noFrame: no,
dl_icon: yes,
css: `<% ~com.163.music.dl.css %>`,
onStart: ->
@regPlayer()
# 优先使用 HTML5 播放器, 如果没有再考虑 Flash 支援
unsafeExec ->
fakePlatForm = navigator.platform + "--Fake-mac"
Object.defineProperty navigator, "platform",
get: -> fakePlatForm
set: -> null
window.GRestrictive = false;
_doRemoval: ->
H.waitUntil 'nm.x', =>
hook1 = @searchFunction unsafeWindow.nej.e, 'nej.e', '.dataset;if'
hook2 = @searchFunction unsafeWindow.nm.x, 'nm.x', '.copyrightId=='
# 因为 nm.x.jC 后加载, 能保证 nej.e.bI 存在
H.waitUntil 'nm.x.' + hook2, ->
unsafeExec (bIsFrame, hook1, hook2)->
_bK = nej.e[hook1]
nej.e[hook1] = (z, name) ->
return 1 if name is 'copyright' or name is 'resCopyright'
_bK.apply this, arguments
nm.x[hook2] =-> false
# 完全忘了下面的是啥
#if bIsFrame and nm.m.c.xB::zB
# nm.m.c.xB::zB =-> true
, H.isFrame, hook1, hook2
, 7000, 500
searchFunction: (base, name, key) ->
for baseName, fn of base
if (fn && typeof fn == 'function')
fnStr = String(fn)
if fnStr.indexOf(key) != -1
H.info('Search %s, found: %s.%s', key, name, baseName);
return baseName
H.info('Search %s, found nothing.', key);
return null;
# 接收文件数据
regPlayer: ->
document.addEventListener H.scriptName, (e) =>
songObj = e.detail
@linkDownload
.attr
href: H.uri(@getUri(JSON.parse songObj.song), "#{songObj.name} [#{songObj.artist}].mp3")
title: '下载: ' + songObj.name
hookPlayer: ->
H.waitUntil 'nm.m.f', =>
playerHooks = null
for baseName, clsFn of unsafeWindow.nm.m.f
protoName = @searchFunction clsFn::, "nm.m.f.#{baseName}", '<em>00:00</em>'
if protoName
playerHooks = [baseName, protoName]
break;
unsafeExec (scriptName, playerHooks) ->
_bakPlayerUpdateUI = nm.m.f[playerHooks[0]]::[playerHooks[1]]
nm.m.f[playerHooks[0]]::[playerHooks[1]] = (songObj) ->
eveSongObj =
artist: songObj.artists.map((artist) -> artist.name).join '、'
name: songObj.name
song: JSON.stringify songObj
document.dispatchEvent new CustomEvent(scriptName, detail: eveSongObj);
_bakPlayerUpdateUI.apply this, arguments
return
, H.scriptName, playerHooks
return
return
hookPlayerFm: ->
H.waitUntil 'nm.m.fO', =>
hook = @searchFunction unsafeWindow.nm.m.fO::, 'nm.x', '.mp3Url,true'
@linkDownload = $ '<a>'
.prependTo '.opts.f-cb>.f-fr'
.addClass 'icon icon-next'
.html ' '
.css 'transform', 'rotate(90deg)'
unsafeExec (scriptName, hook) ->
_bakPlaySong = nm.m.fO::[hook];
nm.m.fO::[hook] = (songObj) ->
eveSongObj =
artist: songObj.artists.map((artist) -> artist.name).join '、'
name: songObj.name
song: JSON.stringify songObj
document.dispatchEvent new CustomEvent(scriptName, detail: eveSongObj);
_bakPlaySong.apply this, arguments
return
, H.scriptName, hook
return
onBody: ->
@_doRemoval()
return if H.isFrame
# 单曲下载
@linkDownload = $('<a>')
.addClass(H.defaultDlIcon)
.appendTo($ '.m-playbar .oper')
.attr
title: '播放音乐, 即刻解析'
.click (e)->
e.stopPropagation()
return
# 播放列表下载
@linkDownloadAll = $('<a>')
.addClass(H.defaultDlIcon)
.addClass('addall')
.text('全部下载')
.attr
title: '下载列表里的所有歌曲'
.click (e) =>
# 编译出来的代码量好大!
e.stopPropagation()
do (trackQueue = localStorage['track-queue'],
aria2 = new Aria2.BATCH(H.aria2, -> H.info arguments),
) =>
for i, track of JSON.parse trackQueue
aria2.add Aria2.fn.addUri, [@getUri track], H.buildAriaParam
out: "#{i}. #{track.name} [#{track.artists.map((artist) -> artist.name).join '、'}].mp3"
aria2.send yes
return
return
if H.config.dUriType is 2
H.captureAria @linkDownload
else
@linkDownloadAll.addClass('jx_hide')
H.waitUntil () -> $('.listhdc > .addall').length,
() =>
@linkDownloadAll
.insertBefore $('.m-playbar .listhdc .addall')
.after $('<a>').addClass 'line jx_dl_line'
return
, yes, 500
if location.pathname == '/demo/fm' then @hookPlayerFm() else @hookPlayer()
dfsHash: ( () ->
strToKeyCodes = (str) -> Array::slice.call(String(str).split '').map (e) -> e.charCodeAt()
# 还原:
# arr.map(function (e) { return String.fromCharCode(e) }).join('');
# 不能直接传 String.fromCharCode !! 参数2 的 index 会玩坏返回值
(dfsid) ->
key = [PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI0PI:KEY:<KEY>END_PI 48, 107, 40, 50, 41, 50 ]
fids = strToKeyCodes(dfsid).map (fid, i) -> (fid ^ key[i % key.length]) & 0xFF
CryptoJS
.MD5(CryptoJS.lib.ByteArray(fids))
.toString(CryptoJS.enc.Base64)
.replace(/\//g, "_")
.replace(/\+/g, "-")
)()
getUri: (song) ->
dsfId = (song.hMusic || song.mMusic || song.lMusic).dfsId;
# 服务器 1 ~ 4; 但是貌似 1 ~ 2 的最稳定
randServer = Math.floor(Math.random() * 2) + 1
return "http://m#{randServer}.music.126.net/#{@dfsHash(dsfId)}/#{dsfId}.mp3";
|
[
{
"context": "{\n id: 1\n name: \"brian\"",
"end": 24,
"score": 0.9997234344482422,
"start": 19,
"tag": "NAME",
"value": "brian"
}
] | system-tests/projects/todos/tests/_fixtures/bad_coffee.coffee | justinforbes/cypress | 39,773 | {
id: 1
name: "brian" | 101896 | {
id: 1
name: "<NAME>" | true | {
id: 1
name: "PI:NAME:<NAME>END_PI" |
[
{
"context": " 'mailto': (test)->\n check test, 'mailto:mduerst@ifi.unizh.ch'\n\n 'news': (test)->\n check test, ",
"end": 1237,
"score": 0.9999290704727173,
"start": 1217,
"tag": "EMAIL",
"value": "mduerst@ifi.unizh.ch"
},
{
"context": "eck test, 'foo://example.com:8042/over/there?name=ferret#nose', true\n\n 'rfc 2396':\n\n 'ftp': (t",
"end": 1797,
"score": 0.8614172339439392,
"start": 1791,
"tag": "NAME",
"value": "ferret"
},
{
"context": " 'mailto': (test)->\n check test, 'mailto:mduerst@ifi.unizh.ch', true\n\n 'news': (test)->\n check ",
"end": 2234,
"score": 0.9999229907989502,
"start": 2214,
"tag": "EMAIL",
"value": "mduerst@ifi.unizh.ch"
}
] | test.old/test-URL.coffee | minodisk/muon | 1 | {muon} = require '../lib/muon.js'
{URL} = muon.net
url = require 'url'
check = (test, urlStr, parseQueryString = false)->
obj = url.parse(urlStr, parseQueryString)
if parseQueryString and obj.search is ''
delete obj.search
obj.origin = "#{obj.protocol}#{if obj.slashes then '//' else ''}#{if obj.auth? then obj.auth + '@' else ''}#{obj.host}"
test.deepEqual URL.parse(urlStr, parseQueryString), obj
test.done()
exports.url =
parse:
'parseQueryString=false':
'without slash': (test)->
check test, 'http://example.com'
'with slash': (test)->
check test, 'http://example.com/'
'node': (test)->
check test, 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash'
'rfc 3986': (test)->
check test, 'foo://example.com:8042/over/there?name=ferret#nose'
'rfc 2396':
'ftp': (test)->
check test, 'ftp://ftp.is.co.za/rfc/rfc1808.txt'
'gopher': (test)->
check test, 'gopher://spinaltap.micro.umn.edu/00/Weather/California/Los%20Angeles'
'http': (test)->
check test, 'http://www.math.uio.no/faq/compression-faq/part1.html'
'mailto': (test)->
check test, 'mailto:mduerst@ifi.unizh.ch'
'news': (test)->
check test, 'news:comp.infosystems.www.servers.unix'
'telnet': (test)->
check test, 'telnet://melvyl.ucop.edu/'
'parseQueryString=true':
'without slash': (test)->
check test, 'http://example.com', true
'with slash': (test)->
check test, 'http://example.com/', true
'node': (test)->
check test, 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash', true
'rfc 3986': (test)->
check test, 'foo://example.com:8042/over/there?name=ferret#nose', true
'rfc 2396':
'ftp': (test)->
check test, 'ftp://ftp.is.co.za/rfc/rfc1808.txt', true
'gopher': (test)->
check test, 'gopher://spinaltap.micro.umn.edu/00/Weather/California/Los%20Angeles', true
'http': (test)->
check test, 'http://www.math.uio.no/faq/compression-faq/part1.html', true
'mailto': (test)->
check test, 'mailto:mduerst@ifi.unizh.ch', true
'news': (test)->
check test, 'news:comp.infosystems.www.servers.unix', true
'telnet': (test)->
check test, 'telnet://melvyl.ucop.edu/', true
| 193801 | {muon} = require '../lib/muon.js'
{URL} = muon.net
url = require 'url'
check = (test, urlStr, parseQueryString = false)->
obj = url.parse(urlStr, parseQueryString)
if parseQueryString and obj.search is ''
delete obj.search
obj.origin = "#{obj.protocol}#{if obj.slashes then '//' else ''}#{if obj.auth? then obj.auth + '@' else ''}#{obj.host}"
test.deepEqual URL.parse(urlStr, parseQueryString), obj
test.done()
exports.url =
parse:
'parseQueryString=false':
'without slash': (test)->
check test, 'http://example.com'
'with slash': (test)->
check test, 'http://example.com/'
'node': (test)->
check test, 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash'
'rfc 3986': (test)->
check test, 'foo://example.com:8042/over/there?name=ferret#nose'
'rfc 2396':
'ftp': (test)->
check test, 'ftp://ftp.is.co.za/rfc/rfc1808.txt'
'gopher': (test)->
check test, 'gopher://spinaltap.micro.umn.edu/00/Weather/California/Los%20Angeles'
'http': (test)->
check test, 'http://www.math.uio.no/faq/compression-faq/part1.html'
'mailto': (test)->
check test, 'mailto:<EMAIL>'
'news': (test)->
check test, 'news:comp.infosystems.www.servers.unix'
'telnet': (test)->
check test, 'telnet://melvyl.ucop.edu/'
'parseQueryString=true':
'without slash': (test)->
check test, 'http://example.com', true
'with slash': (test)->
check test, 'http://example.com/', true
'node': (test)->
check test, 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash', true
'rfc 3986': (test)->
check test, 'foo://example.com:8042/over/there?name=<NAME>#nose', true
'rfc 2396':
'ftp': (test)->
check test, 'ftp://ftp.is.co.za/rfc/rfc1808.txt', true
'gopher': (test)->
check test, 'gopher://spinaltap.micro.umn.edu/00/Weather/California/Los%20Angeles', true
'http': (test)->
check test, 'http://www.math.uio.no/faq/compression-faq/part1.html', true
'mailto': (test)->
check test, 'mailto:<EMAIL>', true
'news': (test)->
check test, 'news:comp.infosystems.www.servers.unix', true
'telnet': (test)->
check test, 'telnet://melvyl.ucop.edu/', true
| true | {muon} = require '../lib/muon.js'
{URL} = muon.net
url = require 'url'
check = (test, urlStr, parseQueryString = false)->
obj = url.parse(urlStr, parseQueryString)
if parseQueryString and obj.search is ''
delete obj.search
obj.origin = "#{obj.protocol}#{if obj.slashes then '//' else ''}#{if obj.auth? then obj.auth + '@' else ''}#{obj.host}"
test.deepEqual URL.parse(urlStr, parseQueryString), obj
test.done()
exports.url =
parse:
'parseQueryString=false':
'without slash': (test)->
check test, 'http://example.com'
'with slash': (test)->
check test, 'http://example.com/'
'node': (test)->
check test, 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash'
'rfc 3986': (test)->
check test, 'foo://example.com:8042/over/there?name=ferret#nose'
'rfc 2396':
'ftp': (test)->
check test, 'ftp://ftp.is.co.za/rfc/rfc1808.txt'
'gopher': (test)->
check test, 'gopher://spinaltap.micro.umn.edu/00/Weather/California/Los%20Angeles'
'http': (test)->
check test, 'http://www.math.uio.no/faq/compression-faq/part1.html'
'mailto': (test)->
check test, 'mailto:PI:EMAIL:<EMAIL>END_PI'
'news': (test)->
check test, 'news:comp.infosystems.www.servers.unix'
'telnet': (test)->
check test, 'telnet://melvyl.ucop.edu/'
'parseQueryString=true':
'without slash': (test)->
check test, 'http://example.com', true
'with slash': (test)->
check test, 'http://example.com/', true
'node': (test)->
check test, 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash', true
'rfc 3986': (test)->
check test, 'foo://example.com:8042/over/there?name=PI:NAME:<NAME>END_PI#nose', true
'rfc 2396':
'ftp': (test)->
check test, 'ftp://ftp.is.co.za/rfc/rfc1808.txt', true
'gopher': (test)->
check test, 'gopher://spinaltap.micro.umn.edu/00/Weather/California/Los%20Angeles', true
'http': (test)->
check test, 'http://www.math.uio.no/faq/compression-faq/part1.html', true
'mailto': (test)->
check test, 'mailto:PI:EMAIL:<EMAIL>END_PI', true
'news': (test)->
check test, 'news:comp.infosystems.www.servers.unix', true
'telnet': (test)->
check test, 'telnet://melvyl.ucop.edu/', true
|
[
{
"context": "ngs.findOne({space: Session.get(\"spaceId\"), key: \"contacts_no_force_phone_users\"})\n\t\t\t\t\t\treturn setting?.values || []\n\t\t\t\toptiona",
"end": 288,
"score": 0.9699528813362122,
"start": 259,
"tag": "KEY",
"value": "contacts_no_force_phone_users"
}
] | creator/packages/steedos-creator/client/views/contacts_settings_no_force_phone_modal.coffee | yicone/steedos-platform | 42 | Template.contacts_settings_no_force_phone_modal.helpers
users_schema: ()->
fields =
users:
autoform:
type: 'selectuser'
multiple: true
defaultValue: ()->
setting = db.space_settings.findOne({space: Session.get("spaceId"), key: "contacts_no_force_phone_users"})
return setting?.values || []
optional: false
type: [ String ]
label: ''
return new SimpleSchema(fields)
Template.contacts_settings_no_force_phone_modal.events
'click .btn-save': (event, template)->
values = AutoForm.getFieldValue("users","contacts_settings_no_force_phone_users") || []
Meteor.call("set_space_settings", Session.get("spaceId"), "contacts_no_force_phone_users", values, false, ()->
Modal.hide(template);
toastr.success(t("saved_successfully"))
) | 161612 | Template.contacts_settings_no_force_phone_modal.helpers
users_schema: ()->
fields =
users:
autoform:
type: 'selectuser'
multiple: true
defaultValue: ()->
setting = db.space_settings.findOne({space: Session.get("spaceId"), key: "<KEY>"})
return setting?.values || []
optional: false
type: [ String ]
label: ''
return new SimpleSchema(fields)
Template.contacts_settings_no_force_phone_modal.events
'click .btn-save': (event, template)->
values = AutoForm.getFieldValue("users","contacts_settings_no_force_phone_users") || []
Meteor.call("set_space_settings", Session.get("spaceId"), "contacts_no_force_phone_users", values, false, ()->
Modal.hide(template);
toastr.success(t("saved_successfully"))
) | true | Template.contacts_settings_no_force_phone_modal.helpers
users_schema: ()->
fields =
users:
autoform:
type: 'selectuser'
multiple: true
defaultValue: ()->
setting = db.space_settings.findOne({space: Session.get("spaceId"), key: "PI:KEY:<KEY>END_PI"})
return setting?.values || []
optional: false
type: [ String ]
label: ''
return new SimpleSchema(fields)
Template.contacts_settings_no_force_phone_modal.events
'click .btn-save': (event, template)->
values = AutoForm.getFieldValue("users","contacts_settings_no_force_phone_users") || []
Meteor.call("set_space_settings", Session.get("spaceId"), "contacts_no_force_phone_users", values, false, ()->
Modal.hide(template);
toastr.success(t("saved_successfully"))
) |
[
{
"context": "y 500, ->\n counter = 3\n for key, i in ['zero', 'one', 'two']\n for time in [0...i]\n req = http",
"end": 738,
"score": 0.508097231388092,
"start": 735,
"tag": "KEY",
"value": "one"
},
{
"context": "->\n counter = 3\n for key, i in ['zero', 'one', 'two']\n for time in [0...i]\n req = http.get 'h",
"end": 745,
"score": 0.5770825743675232,
"start": 742,
"tag": "KEY",
"value": "two"
}
] | test/test-ping.coffee | Artear/pixel-ping | 0 | {parse} = require 'url'
http = require 'http'
querystring = require 'querystring'
{spawn} = require 'child_process'
server = http.createServer (req, res) ->
params = parse req.url, true
if params.path is '/endpoint'
req.on 'data', (chunk) ->
data = querystring.parse chunk.toString()
hits = JSON.parse data.json
if hits.one is 1 and hits.two is 2
console.log 'Test Succeeded'
else
console.log 'Test Failed ', hits
ping.kill 'SIGINT'
process.exit 0
res.end()
server.listen 6999, 'localhost'
ping = spawn 'node', ['bin/pixel-ping', 'test/config.json']
delay = (time, func) -> setTimeout func, time
delay 500, ->
counter = 3
for key, i in ['zero', 'one', 'two']
for time in [0...i]
req = http.get 'http://localhost:5999/pixel.gif?key=' + key, (resp) ->
counter -= 1
console.log 'sent ' + counter
if counter is 0
console.log 'all requests came back, forcing flush..'
ping.kill 'SIGUSR2'
req.on 'error', (e) ->
console.log 'ERROR', e
| 65641 | {parse} = require 'url'
http = require 'http'
querystring = require 'querystring'
{spawn} = require 'child_process'
server = http.createServer (req, res) ->
params = parse req.url, true
if params.path is '/endpoint'
req.on 'data', (chunk) ->
data = querystring.parse chunk.toString()
hits = JSON.parse data.json
if hits.one is 1 and hits.two is 2
console.log 'Test Succeeded'
else
console.log 'Test Failed ', hits
ping.kill 'SIGINT'
process.exit 0
res.end()
server.listen 6999, 'localhost'
ping = spawn 'node', ['bin/pixel-ping', 'test/config.json']
delay = (time, func) -> setTimeout func, time
delay 500, ->
counter = 3
for key, i in ['zero', '<KEY>', '<KEY>']
for time in [0...i]
req = http.get 'http://localhost:5999/pixel.gif?key=' + key, (resp) ->
counter -= 1
console.log 'sent ' + counter
if counter is 0
console.log 'all requests came back, forcing flush..'
ping.kill 'SIGUSR2'
req.on 'error', (e) ->
console.log 'ERROR', e
| true | {parse} = require 'url'
http = require 'http'
querystring = require 'querystring'
{spawn} = require 'child_process'
server = http.createServer (req, res) ->
params = parse req.url, true
if params.path is '/endpoint'
req.on 'data', (chunk) ->
data = querystring.parse chunk.toString()
hits = JSON.parse data.json
if hits.one is 1 and hits.two is 2
console.log 'Test Succeeded'
else
console.log 'Test Failed ', hits
ping.kill 'SIGINT'
process.exit 0
res.end()
server.listen 6999, 'localhost'
ping = spawn 'node', ['bin/pixel-ping', 'test/config.json']
delay = (time, func) -> setTimeout func, time
delay 500, ->
counter = 3
for key, i in ['zero', 'PI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI']
for time in [0...i]
req = http.get 'http://localhost:5999/pixel.gif?key=' + key, (resp) ->
counter -= 1
console.log 'sent ' + counter
if counter is 0
console.log 'all requests came back, forcing flush..'
ping.kill 'SIGUSR2'
req.on 'error', (e) ->
console.log 'ERROR', e
|
[
{
"context": "nd style from the model', ->\n model = { name: 'red' }\n @render 'div[style:color=name]', model\n ",
"end": 149,
"score": 0.6412132382392883,
"start": 146,
"tag": "NAME",
"value": "red"
}
] | test/integration/styles.spec.coffee | varvet/serenade.js | 5 | require './../spec_helper'
describe 'Styles', ->
beforeEach ->
@setupDom()
it 'get bound style from the model', ->
model = { name: 'red' }
@render 'div[style:color=name]', model
expect(@body.querySelector('div').style.color).to.eql('red')
it 'changes bound style as they are changed', ->
model = Serenade(name: "red")
@render 'div[style:color=name]', model
expect(@body.querySelector('div').style.color).to.eql('red')
model.name = 'blue'
expect(@body.querySelector('div').style.color).to.eql('blue')
| 94106 | require './../spec_helper'
describe 'Styles', ->
beforeEach ->
@setupDom()
it 'get bound style from the model', ->
model = { name: '<NAME>' }
@render 'div[style:color=name]', model
expect(@body.querySelector('div').style.color).to.eql('red')
it 'changes bound style as they are changed', ->
model = Serenade(name: "red")
@render 'div[style:color=name]', model
expect(@body.querySelector('div').style.color).to.eql('red')
model.name = 'blue'
expect(@body.querySelector('div').style.color).to.eql('blue')
| true | require './../spec_helper'
describe 'Styles', ->
beforeEach ->
@setupDom()
it 'get bound style from the model', ->
model = { name: 'PI:NAME:<NAME>END_PI' }
@render 'div[style:color=name]', model
expect(@body.querySelector('div').style.color).to.eql('red')
it 'changes bound style as they are changed', ->
model = Serenade(name: "red")
@render 'div[style:color=name]', model
expect(@body.querySelector('div').style.color).to.eql('red')
model.name = 'blue'
expect(@body.querySelector('div').style.color).to.eql('blue')
|
[
{
"context": "er',\n '$scope'\n ($scope) ->\n $scope.aa = \"gonzalo\"\n",
"end": 109,
"score": 0.6720887422561646,
"start": 105,
"tag": "NAME",
"value": "zalo"
}
] | app/modules/products/controllers/productsCtrl.coffee | gonzadocarmo/mataderos-distrib | 0 | angular.module('products').controller 'ProductsController',
'$scope'
($scope) ->
$scope.aa = "gonzalo"
| 24939 | angular.module('products').controller 'ProductsController',
'$scope'
($scope) ->
$scope.aa = "gon<NAME>"
| true | angular.module('products').controller 'ProductsController',
'$scope'
($scope) ->
$scope.aa = "gonPI:NAME:<NAME>END_PI"
|
[
{
"context": "# @author alteredq / http://alteredqualia.com/\n# @author aladjev.and",
"end": 18,
"score": 0.9853934645652771,
"start": 10,
"tag": "USERNAME",
"value": "alteredq"
},
{
"context": "hor alteredq / http://alteredqualia.com/\n# @author aladjev.andrew@gmail.com\n\nclass _Math\n\n # Clamp value to range <a, b>\n @",
"end": 81,
"score": 0.9999136328697205,
"start": 57,
"tag": "EMAIL",
"value": "aladjev.andrew@gmail.com"
}
] | source/javascripts/new_src/core/math.coffee | andrew-aladev/three.js | 0 | # @author alteredq / http://alteredqualia.com/
# @author aladjev.andrew@gmail.com
class _Math
# Clamp value to range <a, b>
@clamp: (x, a, b) ->
if (x < a) then a else (if (x > b) then b else x)
# Clamp value to range <a, inf)
@clampBottom: (x, a) ->
if x < a then a else x
# Linear mapping from range <a1, a2> to range <b1, b2>
@mapLinear: (x, a1, a2, b1, b2) ->
b1 + (x - a1) * (b2 - b1) / (a2 - a1)
# Random float from <0, 1> with 16 bits of randomness
# (standard Math.random() creates repetitive patterns when applied over larger space)
@random16: ->
(65280 * Math.random() + 255 * Math.random()) / 65535
# Random integer from <low, high> interval
@randInt: (low, high) ->
low + Math.floor(Math.random() * (high - low + 1))
# Random float from <low, high> interval
@randFloat: (low, high) ->
low + Math.random() * (high - low)
# Random float from <-range/2, range/2> interval
@randFloatSpread: (range) ->
range * (0.5 - Math.random())
@sign: (x) ->
if (x < 0) then -1 else (if (x > 0) then 1 else 0)
namespace "THREE", (exports) ->
exports.Math = _Math | 1114 | # @author alteredq / http://alteredqualia.com/
# @author <EMAIL>
class _Math
# Clamp value to range <a, b>
@clamp: (x, a, b) ->
if (x < a) then a else (if (x > b) then b else x)
# Clamp value to range <a, inf)
@clampBottom: (x, a) ->
if x < a then a else x
# Linear mapping from range <a1, a2> to range <b1, b2>
@mapLinear: (x, a1, a2, b1, b2) ->
b1 + (x - a1) * (b2 - b1) / (a2 - a1)
# Random float from <0, 1> with 16 bits of randomness
# (standard Math.random() creates repetitive patterns when applied over larger space)
@random16: ->
(65280 * Math.random() + 255 * Math.random()) / 65535
# Random integer from <low, high> interval
@randInt: (low, high) ->
low + Math.floor(Math.random() * (high - low + 1))
# Random float from <low, high> interval
@randFloat: (low, high) ->
low + Math.random() * (high - low)
# Random float from <-range/2, range/2> interval
@randFloatSpread: (range) ->
range * (0.5 - Math.random())
@sign: (x) ->
if (x < 0) then -1 else (if (x > 0) then 1 else 0)
namespace "THREE", (exports) ->
exports.Math = _Math | true | # @author alteredq / http://alteredqualia.com/
# @author PI:EMAIL:<EMAIL>END_PI
class _Math
# Clamp value to range <a, b>
@clamp: (x, a, b) ->
if (x < a) then a else (if (x > b) then b else x)
# Clamp value to range <a, inf)
@clampBottom: (x, a) ->
if x < a then a else x
# Linear mapping from range <a1, a2> to range <b1, b2>
@mapLinear: (x, a1, a2, b1, b2) ->
b1 + (x - a1) * (b2 - b1) / (a2 - a1)
# Random float from <0, 1> with 16 bits of randomness
# (standard Math.random() creates repetitive patterns when applied over larger space)
@random16: ->
(65280 * Math.random() + 255 * Math.random()) / 65535
# Random integer from <low, high> interval
@randInt: (low, high) ->
low + Math.floor(Math.random() * (high - low + 1))
# Random float from <low, high> interval
@randFloat: (low, high) ->
low + Math.random() * (high - low)
# Random float from <-range/2, range/2> interval
@randFloatSpread: (range) ->
range * (0.5 - Math.random())
@sign: (x) ->
if (x < 0) then -1 else (if (x > 0) then 1 else 0)
namespace "THREE", (exports) ->
exports.Math = _Math |
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9990046620368958,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-fs-sync-fd-leak.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# ensure that (read|write|append)FileSync() closes the file descriptor
ensureThrows = (cb) ->
got_exception = false
close_called = 0
try
cb()
catch e
assert.equal e.message, "BAM"
got_exception = true
assert.equal close_called, 1
assert.equal got_exception, true
return
common = require("../common")
assert = require("assert")
fs = require("fs")
fs.openSync = ->
42
fs.closeSync = (fd) ->
assert.equal fd, 42
close_called++
return
fs.readSync = ->
throw new Error("BAM")return
fs.writeSync = ->
throw new Error("BAM")return
fs.fstatSync = ->
throw new Error("BAM")return
ensureThrows ->
fs.readFileSync "dummy"
return
ensureThrows ->
fs.writeFileSync "dummy", "xxx"
return
ensureThrows ->
fs.appendFileSync "dummy", "xxx"
return
close_called = 0
| 23760 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# ensure that (read|write|append)FileSync() closes the file descriptor
ensureThrows = (cb) ->
got_exception = false
close_called = 0
try
cb()
catch e
assert.equal e.message, "BAM"
got_exception = true
assert.equal close_called, 1
assert.equal got_exception, true
return
common = require("../common")
assert = require("assert")
fs = require("fs")
fs.openSync = ->
42
fs.closeSync = (fd) ->
assert.equal fd, 42
close_called++
return
fs.readSync = ->
throw new Error("BAM")return
fs.writeSync = ->
throw new Error("BAM")return
fs.fstatSync = ->
throw new Error("BAM")return
ensureThrows ->
fs.readFileSync "dummy"
return
ensureThrows ->
fs.writeFileSync "dummy", "xxx"
return
ensureThrows ->
fs.appendFileSync "dummy", "xxx"
return
close_called = 0
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# ensure that (read|write|append)FileSync() closes the file descriptor
ensureThrows = (cb) ->
got_exception = false
close_called = 0
try
cb()
catch e
assert.equal e.message, "BAM"
got_exception = true
assert.equal close_called, 1
assert.equal got_exception, true
return
common = require("../common")
assert = require("assert")
fs = require("fs")
fs.openSync = ->
42
fs.closeSync = (fd) ->
assert.equal fd, 42
close_called++
return
fs.readSync = ->
throw new Error("BAM")return
fs.writeSync = ->
throw new Error("BAM")return
fs.fstatSync = ->
throw new Error("BAM")return
ensureThrows ->
fs.readFileSync "dummy"
return
ensureThrows ->
fs.writeFileSync "dummy", "xxx"
return
ensureThrows ->
fs.appendFileSync "dummy", "xxx"
return
close_called = 0
|
[
{
"context": "moduleInst = null\n\ntestListA = [\n\t{ id: 1, name: \"A\" },\n\t{ id: 2, name: \"B\" },\n\t{ id: 3, name: \"C\" },",
"end": 110,
"score": 0.8653497099876404,
"start": 109,
"tag": "NAME",
"value": "A"
},
{
"context": "ListA = [\n\t{ id: 1, name: \"A\" },\n\t{ id: 2, name: \"B\" },\n\t{ id: 3, name: \"C\" },\n\t{ id: 4, name: \"A\" }\n",
"end": 133,
"score": 0.8880358934402466,
"start": 132,
"tag": "NAME",
"value": "B"
},
{
"context": "e: \"A\" },\n\t{ id: 2, name: \"B\" },\n\t{ id: 3, name: \"C\" },\n\t{ id: 4, name: \"A\" }\n]\n\ntestListB = [\n\t{ id:",
"end": 156,
"score": 0.7283305525779724,
"start": 155,
"tag": "NAME",
"value": "C"
},
{
"context": "4, name: \"A\" }\n]\n\ntestListB = [\n\t{ id: 13, name: \"Foo\", age: 42 },\n\t{ id: 1337, name: \"Bar\", age: 666 }",
"end": 221,
"score": 0.9618421196937561,
"start": 218,
"tag": "NAME",
"value": "Foo"
},
{
"context": ": 13, name: \"Foo\", age: 42 },\n\t{ id: 1337, name: \"Bar\", age: 666 },\n\t{ id: 42, name: \"Fizz\", age: 23 },",
"end": 258,
"score": 0.8489867448806763,
"start": 255,
"tag": "NAME",
"value": "Bar"
},
{
"context": " 1337, name: \"Bar\", age: 666 },\n\t{ id: 42, name: \"Fizz\", age: 23 },\n\t{ id: 23, name: \"Bar\", age: 23 },\n\t",
"end": 295,
"score": 0.990486204624176,
"start": 291,
"tag": "NAME",
"value": "Fizz"
},
{
"context": "d: 42, name: \"Fizz\", age: 23 },\n\t{ id: 23, name: \"Bar\", age: 23 },\n\t{ id: 666, name: \"Buzz\", age: 13 }\n",
"end": 330,
"score": 0.8892889022827148,
"start": 327,
"tag": "NAME",
"value": "Bar"
},
{
"context": "d: 23, name: \"Bar\", age: 23 },\n\t{ id: 666, name: \"Buzz\", age: 13 }\n\t{ id: 7, name: \"Bar\", age: 23 },\n]\n\n",
"end": 367,
"score": 0.6851544380187988,
"start": 363,
"tag": "NAME",
"value": "Buzz"
},
{
"context": "id: 666, name: \"Buzz\", age: 13 }\n\t{ id: 7, name: \"Bar\", age: 23 },\n]\n\nclass Model\n\tconstructor: ( data ",
"end": 400,
"score": 0.8773117065429688,
"start": 397,
"tag": "NAME",
"value": "Bar"
},
{
"context": " ->\n\t\ttest(\n\t\t\tlargeList.sort( sorter[0] ),\n\t\t\t[ \"Abbott Trujillo\", \"Abigail Nunez\", \"Adams Holman\", \"Adela Hawkins",
"end": 2065,
"score": 0.9998319149017334,
"start": 2050,
"tag": "NAME",
"value": "Abbott Trujillo"
},
{
"context": "eList.sort( sorter[0] ),\n\t\t\t[ \"Abbott Trujillo\", \"Abigail Nunez\", \"Adams Holman\", \"Adela Hawkins\" ],\n\t\t\t\"name\"\n\t\t",
"end": 2082,
"score": 0.9998411536216736,
"start": 2069,
"tag": "NAME",
"value": "Abigail Nunez"
},
{
"context": "r[0] ),\n\t\t\t[ \"Abbott Trujillo\", \"Abigail Nunez\", \"Adams Holman\", \"Adela Hawkins\" ],\n\t\t\t\"name\"\n\t\t)\n\t\treturn\n\t\n\tit",
"end": 2098,
"score": 0.9997994303703308,
"start": 2086,
"tag": "NAME",
"value": "Adams Holman"
},
{
"context": "bott Trujillo\", \"Abigail Nunez\", \"Adams Holman\", \"Adela Hawkins\" ],\n\t\t\t\"name\"\n\t\t)\n\t\treturn\n\t\n\tit \"sort with fnGet",
"end": 2115,
"score": 0.999845027923584,
"start": 2102,
"tag": "NAME",
"value": "Adela Hawkins"
}
] | _src/test/main.coffee | mpneuried/sortcoll | 2 | should = require('should')
sortcoll = require( "../." )
_moduleInst = null
testListA = [
{ id: 1, name: "A" },
{ id: 2, name: "B" },
{ id: 3, name: "C" },
{ id: 4, name: "A" }
]
testListB = [
{ id: 13, name: "Foo", age: 42 },
{ id: 1337, name: "Bar", age: 666 },
{ id: 42, name: "Fizz", age: 23 },
{ id: 23, name: "Bar", age: 23 },
{ id: 666, name: "Buzz", age: 13 }
{ id: 7, name: "Bar", age: 23 },
]
class Model
constructor: ( data )->
@data = {}
for _k, _v of data
@data[ _k ] = _v
return
get: ( key )->
return @data[ key ]
largeList = require( "../testdata.json" )
testColl = []
for el in testListB
testColl.push new Model( el )
test = ( list, exp, key="id" )->
#console.log "RES", list, exp
for el, idx in list
if not exp[idx]?
break
if el.get?
el.get( key ).should.eql( exp[ idx ] )
else
el[ key ].should.eql( exp[ idx ] )
return
describe "----- sortcoll TESTS -----", ->
sorter = []
before ( done )->
fnGet = ( el, key )->
return el.get( key )
sorter.push sortcoll( [ "name", "id" ] )
sorter.push sortcoll( [ "name", "id" ], false )
sorter.push sortcoll( "id" )
sorter.push sortcoll( [ "name", "id" ], true, fnGet )
sorter.push sortcoll( [ "name", "id" ], { name: false, id: true } )
sorter.push sortcoll( [ "age", "name", "id" ], { name: false, id: true, age: false } )
sorter.push sortcoll( [ "age", "name", "id" ], { age: false, "?": true } )
sorter.push sortcoll( [ "age", "name", "id" ], { age: false, "?": true }, fnGet )
done()
return
# Implement tests cases here
it "by two keys", ->
test(
testListA.sort( sorter[0] )
[ 1,4,2,3 ]
)
return
# Implement tests cases here
it "forward false", ->
test(
testListA.sort( sorter[1] )
[ 3,2,4,1 ]
)
return
it "simple key", ->
test(
testListA.sort( sorter[2] )
[ 1,2,3,4 ]
)
return
it "reuse sorter", ->
test(
testListB.sort( sorter[0] ),
[ 7, 23, 1337, 666, 42, 13 ]
)
return
it "large dataset", ->
test(
largeList.sort( sorter[0] ),
[ "Abbott Trujillo", "Abigail Nunez", "Adams Holman", "Adela Hawkins" ],
"name"
)
return
it "sort with fnGet", ->
test(
testColl.sort( sorter[3] ),
[ 7, 23, 1337, 666, 42, 13 ]
)
return
it "sort with mixed forwards", ->
test(
testListB.sort( sorter[4] ),
[ 13, 42, 666, 7, 23, 1337]
)
return
it "sort with mixed forwards and 3 sort cols", ->
test(
testListB.sort( sorter[5] ),
[ 1337, 13, 42, 7, 23, 666 ]
)
return
it "sort with mixed fallback forward and 3 sort cols", ->
test(
testListB.sort( sorter[6] ),
[ 1337, 13, 7, 23, 42, 666 ]
)
return
it "sort with mixed fallback forward, 3 sort cols and the fnGet", ->
test(
testColl.sort( sorter[7] ),
[ 1337, 13, 7, 23, 42, 666 ]
)
return
return
| 225160 | should = require('should')
sortcoll = require( "../." )
_moduleInst = null
testListA = [
{ id: 1, name: "<NAME>" },
{ id: 2, name: "<NAME>" },
{ id: 3, name: "<NAME>" },
{ id: 4, name: "A" }
]
testListB = [
{ id: 13, name: "<NAME>", age: 42 },
{ id: 1337, name: "<NAME>", age: 666 },
{ id: 42, name: "<NAME>", age: 23 },
{ id: 23, name: "<NAME>", age: 23 },
{ id: 666, name: "<NAME>", age: 13 }
{ id: 7, name: "<NAME>", age: 23 },
]
class Model
constructor: ( data )->
@data = {}
for _k, _v of data
@data[ _k ] = _v
return
get: ( key )->
return @data[ key ]
largeList = require( "../testdata.json" )
testColl = []
for el in testListB
testColl.push new Model( el )
test = ( list, exp, key="id" )->
#console.log "RES", list, exp
for el, idx in list
if not exp[idx]?
break
if el.get?
el.get( key ).should.eql( exp[ idx ] )
else
el[ key ].should.eql( exp[ idx ] )
return
describe "----- sortcoll TESTS -----", ->
sorter = []
before ( done )->
fnGet = ( el, key )->
return el.get( key )
sorter.push sortcoll( [ "name", "id" ] )
sorter.push sortcoll( [ "name", "id" ], false )
sorter.push sortcoll( "id" )
sorter.push sortcoll( [ "name", "id" ], true, fnGet )
sorter.push sortcoll( [ "name", "id" ], { name: false, id: true } )
sorter.push sortcoll( [ "age", "name", "id" ], { name: false, id: true, age: false } )
sorter.push sortcoll( [ "age", "name", "id" ], { age: false, "?": true } )
sorter.push sortcoll( [ "age", "name", "id" ], { age: false, "?": true }, fnGet )
done()
return
# Implement tests cases here
it "by two keys", ->
test(
testListA.sort( sorter[0] )
[ 1,4,2,3 ]
)
return
# Implement tests cases here
it "forward false", ->
test(
testListA.sort( sorter[1] )
[ 3,2,4,1 ]
)
return
it "simple key", ->
test(
testListA.sort( sorter[2] )
[ 1,2,3,4 ]
)
return
it "reuse sorter", ->
test(
testListB.sort( sorter[0] ),
[ 7, 23, 1337, 666, 42, 13 ]
)
return
it "large dataset", ->
test(
largeList.sort( sorter[0] ),
[ "<NAME>", "<NAME>", "<NAME>", "<NAME>" ],
"name"
)
return
it "sort with fnGet", ->
test(
testColl.sort( sorter[3] ),
[ 7, 23, 1337, 666, 42, 13 ]
)
return
it "sort with mixed forwards", ->
test(
testListB.sort( sorter[4] ),
[ 13, 42, 666, 7, 23, 1337]
)
return
it "sort with mixed forwards and 3 sort cols", ->
test(
testListB.sort( sorter[5] ),
[ 1337, 13, 42, 7, 23, 666 ]
)
return
it "sort with mixed fallback forward and 3 sort cols", ->
test(
testListB.sort( sorter[6] ),
[ 1337, 13, 7, 23, 42, 666 ]
)
return
it "sort with mixed fallback forward, 3 sort cols and the fnGet", ->
test(
testColl.sort( sorter[7] ),
[ 1337, 13, 7, 23, 42, 666 ]
)
return
return
| true | should = require('should')
sortcoll = require( "../." )
_moduleInst = null
testListA = [
{ id: 1, name: "PI:NAME:<NAME>END_PI" },
{ id: 2, name: "PI:NAME:<NAME>END_PI" },
{ id: 3, name: "PI:NAME:<NAME>END_PI" },
{ id: 4, name: "A" }
]
testListB = [
{ id: 13, name: "PI:NAME:<NAME>END_PI", age: 42 },
{ id: 1337, name: "PI:NAME:<NAME>END_PI", age: 666 },
{ id: 42, name: "PI:NAME:<NAME>END_PI", age: 23 },
{ id: 23, name: "PI:NAME:<NAME>END_PI", age: 23 },
{ id: 666, name: "PI:NAME:<NAME>END_PI", age: 13 }
{ id: 7, name: "PI:NAME:<NAME>END_PI", age: 23 },
]
class Model
constructor: ( data )->
@data = {}
for _k, _v of data
@data[ _k ] = _v
return
get: ( key )->
return @data[ key ]
largeList = require( "../testdata.json" )
testColl = []
for el in testListB
testColl.push new Model( el )
test = ( list, exp, key="id" )->
#console.log "RES", list, exp
for el, idx in list
if not exp[idx]?
break
if el.get?
el.get( key ).should.eql( exp[ idx ] )
else
el[ key ].should.eql( exp[ idx ] )
return
describe "----- sortcoll TESTS -----", ->
sorter = []
before ( done )->
fnGet = ( el, key )->
return el.get( key )
sorter.push sortcoll( [ "name", "id" ] )
sorter.push sortcoll( [ "name", "id" ], false )
sorter.push sortcoll( "id" )
sorter.push sortcoll( [ "name", "id" ], true, fnGet )
sorter.push sortcoll( [ "name", "id" ], { name: false, id: true } )
sorter.push sortcoll( [ "age", "name", "id" ], { name: false, id: true, age: false } )
sorter.push sortcoll( [ "age", "name", "id" ], { age: false, "?": true } )
sorter.push sortcoll( [ "age", "name", "id" ], { age: false, "?": true }, fnGet )
done()
return
# Implement tests cases here
it "by two keys", ->
test(
testListA.sort( sorter[0] )
[ 1,4,2,3 ]
)
return
# Implement tests cases here
it "forward false", ->
test(
testListA.sort( sorter[1] )
[ 3,2,4,1 ]
)
return
it "simple key", ->
test(
testListA.sort( sorter[2] )
[ 1,2,3,4 ]
)
return
it "reuse sorter", ->
test(
testListB.sort( sorter[0] ),
[ 7, 23, 1337, 666, 42, 13 ]
)
return
it "large dataset", ->
test(
largeList.sort( sorter[0] ),
[ "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI" ],
"name"
)
return
it "sort with fnGet", ->
test(
testColl.sort( sorter[3] ),
[ 7, 23, 1337, 666, 42, 13 ]
)
return
it "sort with mixed forwards", ->
test(
testListB.sort( sorter[4] ),
[ 13, 42, 666, 7, 23, 1337]
)
return
it "sort with mixed forwards and 3 sort cols", ->
test(
testListB.sort( sorter[5] ),
[ 1337, 13, 42, 7, 23, 666 ]
)
return
it "sort with mixed fallback forward and 3 sort cols", ->
test(
testListB.sort( sorter[6] ),
[ 1337, 13, 7, 23, 42, 666 ]
)
return
it "sort with mixed fallback forward, 3 sort cols and the fnGet", ->
test(
testColl.sort( sorter[7] ),
[ 1337, 13, 7, 23, 42, 666 ]
)
return
return
|
[
{
"context": "[\n\tname: 'Testing'\n\tapiKey: 'ENV:TESTING_API_KEY'\n\tscripts:\n\t\t'WebS",
"end": 17,
"score": 0.9076297283172607,
"start": 10,
"tag": "NAME",
"value": "Testing"
}
] | testing.cson | shubs/api-store | 8 | [
name: 'Testing'
apiKey: 'ENV:TESTING_API_KEY'
scripts:
'WebSearch Tests.js': 'testing/WebSearch Tests.js'
] | 73281 | [
name: '<NAME>'
apiKey: 'ENV:TESTING_API_KEY'
scripts:
'WebSearch Tests.js': 'testing/WebSearch Tests.js'
] | true | [
name: 'PI:NAME:<NAME>END_PI'
apiKey: 'ENV:TESTING_API_KEY'
scripts:
'WebSearch Tests.js': 'testing/WebSearch Tests.js'
] |
[
{
"context": "###\n\nMasonJS\nAuthor: Drew Dahlman\nVersion: 2.0.3\nLicense: MIT\n\nCopyright (c) 2015 D",
"end": 33,
"score": 0.9998735189437866,
"start": 21,
"tag": "NAME",
"value": "Drew Dahlman"
},
{
"context": "an\nVersion: 2.0.3\nLicense: MIT\n\nCopyright (c) 2015 Drew Dahlman\n\nPermission is hereby granted, free of charge, to",
"end": 94,
"score": 0.99986332654953,
"start": 82,
"tag": "NAME",
"value": "Drew Dahlman"
},
{
"context": "---------------------------------------\n\t\t\t#\n\t\t\t#\tMason\n\t\t\t#\tDo the logic to place and fill out the m",
"end": 7029,
"score": 0.8855285048484802,
"start": 7028,
"tag": "NAME",
"value": "M"
},
{
"context": "--------------------------------------\n\t\t\t#\n\t\t\t#\tMason\n\t\t\t#\tDo the logic to place and fill out the matri",
"end": 7033,
"score": 0.4569651484489441,
"start": 7029,
"tag": "NAME",
"value": "ason"
}
] | src/mason.coffee | JackEasons/Mason | 429 | ###
MasonJS
Author: Drew Dahlman
Version: 2.0.3
License: MIT
Copyright (c) 2015 Drew Dahlman
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
(($) ->
$.fn.mason = (options, complete) ->
#------------------------------------------------------------------------------
#
# Default options
#
#------------------------------------------------------------------------------
defaults = {
itemSelector: ''
ratio: 0
sizes: []
columns: [
[0, 480, 1],
[480, 780, 2],
[780, 1080, 3],
[1080, 1320, 4],
[1320, 1680, 5]
]
promoted: []
filler: {
itemSelector: options.itemSelector
filler_class: 'mason_filler'
keepDataAndEvents: false
}
randomSizes: false
randomFillers: false
layout: 'none'
gutter: 0
debug: false
}
$self = null
#------------------------------------------------------------------------------
#
# Debug Elements
#
#------------------------------------------------------------------------------
debug_elements = {
container: $("<div id='debug'></div>")
block: "<div class='mason-debug' style='background-color: rgba(244, 67, 54, .5); float: left;'></div>"
}
#------------------------------------------------------------------------------
#
# Elements
#
#------------------------------------------------------------------------------
mason_clear = "<div class='mason_clear' style='clear:both; position:relative;'></div>"
#------------------------------------------------------------------------------
#
# Complete callback
# if the callback exists set up as options
#
#------------------------------------------------------------------------------
if complete
callback = {
complete: complete
}
#------------------------------------------------------------------------------
#
# Elements
#
#------------------------------------------------------------------------------
elements = {
block: {
height: 0
width: 0
}
matrix: []
startWidth: 0
}
#------------------------------------------------------------------------------
#
# MasonJS Core
#
#------------------------------------------------------------------------------
@each ->
settings = $.extend(defaults, options)
callbacks = $.extend(callback, complete)
# create reference to the jQuery object
$self = $(@)
#------------------------------------------------------------------------------
#
# Setup
# Do inital setup to get sizing
#
#------------------------------------------------------------------------------
setup = ->
# console.log $self.width() - getScrollbarWidth()
if settings.debug
console.log "SETUP"
#
# Check to see if a clear is in place yet or not
# This is used for measurement - VERY IMPORTANT
#
if $self.children(".mason_clear").length < 1
$self.append(mason_clear)
#
# Set the element block height
#
elements.block.height = Math.round(parseFloat(($self.width() / columnSize()) / settings.ratio)).toFixed(2)
#
# Set the element block width
#
elements.block.width = Math.round(parseFloat(($self.width() / columnSize()))).toFixed(2)
#
# Set Start Width
#
elements.startWidth = $self.width()
sizeElements()
#
# If the debug flag is on create an element and fill it to show the grid
#
if settings.debug
console.log "############## Running In Debug Mode ##############"
debug()
#------------------------------------------------------------------------------
#
# Size Elements
# Size and setup inital placement
#
#------------------------------------------------------------------------------
sizeElements = ->
#
# If there is only 1 column ( mobile ) size all elements
#
if columnSize() == 1
$block = $self.children("#{settings.itemSelector}")
$block.height(elements.block.height - (settings.gutter * 2))
$block.width(elements.block.width - (settings.gutter * 2))
$block.css
'margin': settings.gutter
#
# Complete Callback
#
if typeof callbacks.complete != "undefined"
callbacks.complete()
#
# More than 1 column do some math fool!
#
else
#
# Loop over each element, size, place and fill out the matrix
#
$self.children("#{settings.itemSelector}", ".#{settings.filler.filler_class}").each ->
$block = $(@)
#
# Check to see if block is promoted and if so promote it
#
p = 0
promoted = false
promoted_size = 0
while p < settings.promoted.length
if $block.hasClass(settings.promoted[p][0])
promoted = true
promoted_size = p
p++
if promoted
size = settings.promoted[promoted_size]
#
# Assign the size to the block element
#
$block.data('size', promoted_size)
$block.data('promoted', true)
#
# Calculate the height and width of the block
#
h = parseFloat((elements.block.height * size[2])).toFixed(0)
h = h - (settings.gutter * 2)
w = parseFloat((elements.block.width * size[1])).toFixed(0)
w = w - (settings.gutter * 2)
else
#
# Pick random number between 0 and the length of sizes
#
ran = Math.floor(Math.random() * settings.sizes.length)
size = settings.sizes[ran]
#
# Assign the size to the block element
#
$block.data('size', ran)
#
# Calculate the height and width of the block
#
h = parseFloat((elements.block.height * size[1])).toFixed(0)
h = h - (settings.gutter * 2)
w = parseFloat((elements.block.width * size[0])).toFixed(0)
w = w - (settings.gutter * 2)
$block.height(h + 'px')
$block.width(w + 'px')
$block.css
'margin': settings.gutter
mason()
#------------------------------------------------------------------------------
#
# Mason
# Do the logic to place and fill out the matrix
#
#------------------------------------------------------------------------------
mason = ->
#
# Set some default sizes and numbers
#
col = columnSize()
el_h = $self.height()
block_h = Math.round(el_h / elements.block.height)
elements.matrix = []
#
# Loop over blocks and fill out the matrix with booleans
# Defaults to false first then we will do logic to set true
# based on the position of the blocks.
#
r = 0
while r < block_h
# Create the row
elements.matrix[r] = []
c = 0
while c < col
# Create the columns
elements.matrix[r][c] = false
c++
r++
#
# Populate the matrix
#
$self.children("#{settings.itemSelector}").each ->
$block = $(@)
#
# Calculate position based around dimensions
# t - top
# l - left
# s - data size
#
l = Math.round($block.position().left / elements.block.width)
t = Math.round($block.position().top / elements.block.height)
s = parseFloat($block.data('size'))
#
# Get the element dimentions
# h - Height
# w - Width
# a - Area
#
if $block.data('promoted')
h = settings.promoted[s][2]
w = settings.promoted[s][1]
a = h * w
else
h = settings.sizes[s][1]
w = settings.sizes[s][0]
a = h * w
#
# Loop through the elements area and based on the size
# populate the matrix.
#
# NOTE: Star with rows then move to columns
#
r = 0
while r < a
bh = 0
while bh < h
bw = 0
elements.matrix[t + bh][l] = true
while bw < w
elements.matrix[t + bh][l + bw] = true
bw++
bh++
r++
layBricks()
#------------------------------------------------------------------------------
#
# Lay Bricks
# This is where mason fills in those gaps.
# If a filler has not been supplied Mason will use the current elements
#
#------------------------------------------------------------------------------
layBricks = ->
#
# r - Row index
# filler_index - The index of the filler object
#
r = 0
filler_total = $("#{settings.filler.itemSelector}").not(".#{settings.filler.filler_class}").length
filler_index = -1
# Loop over each row
while r < elements.matrix.length
# Loop over row columns
c = 0
while c < elements.matrix[r].length
# If the area is false in the matrix that means it is empty
# so we need to fill it.
if !elements.matrix[r][c]
#
# Calculate the height and width of the block
#
h = elements.block.height
w = elements.block.width
#
# Get the correct placement
#
x = ( r * h ) + settings.gutter
y = ( c * w ) + settings.gutter
#
# Adjust the height and width for the grid
#
h = h - settings.gutter * 2
w = w - settings.gutter * 2
#
# Check to see if a filler has been specified or random fillers are on
#
if settings.randomFillers
filler_index = Math.floor(Math.random() * filler_total)
else
if filler_index < filler_total
filler_index++
if filler_index == filler_total
filler_index = 0
#
# Assign filler
#
$filler = $("#{settings.filler.itemSelector}").not(".#{settings.filler.filler_class}").eq(filler_index).clone(settings.filler.keepDataAndEvents)
$filler.addClass(settings.filler.filler_class)
#
# Position the filler
#
$filler.css
position: 'absolute'
top: x + 'px'
left: y + 'px'
height: h + 'px'
width: w + 'px'
margin: '0px'
#
# Append filler
#
$filler.appendTo($self)
c++
r++
#
# Check start width and if different remeasure
#
if $self.width() < elements.startWidth
$(window, $self).trigger('resize')
else
#
# Complete Callback
#
if typeof callbacks.complete != "undefined"
callbacks.complete()
#------------------------------------------------------------------------------
#
# Column Size
# Determine the column size and count based on screen sizes and settings
#
#------------------------------------------------------------------------------
columnSize = ->
w = parseFloat($self.width())
cols = 0
colsCount = settings.columns.length - 1
#
# Determine the number of columns based on options
#
if w >= settings.columns[colsCount[1]]
cols = settings.columns[colsCount[2]]
else
i = 0
while i <= colsCount
if w > settings.columns[i][0] && settings.columns[i][1]
cols = settings.columns[i][2]
i++
return Math.floor(cols)
#------------------------------------------------------------------------------
#
# DEBUG
# Debug can be run by adding the 'debug' flag to true. This will draw out
# the area that mason understands it needs to fill.
#
#------------------------------------------------------------------------------
debug = ->
#
# Set some default sizes and numbers
#
$debug = $self.parent()
col = columnSize()
el_h = $self.height()
block_h = el_h / elements.block.height
# Copy over styles from the master grid
debug_elements.container.css
position: 'absolute'
top: '0'
left: '0'
height: $self.height()
width: $self.width()
#
# Loop over blocks and fill out the matrix with booleans
# Defaults to false first then we will do logic to set true
# based on the position of the blocks.
#
i = 0
while i < block_h
c = 0
while c < col
block = $(debug_elements.block)
# Size the blocks
block.css
height: elements.block.height - ( settings.gutter * 2 )
width: elements.block.width - ( settings.gutter * 2 )
margin: settings.gutter
debug_elements.container.append(block)
c++
i++
# Place clearfix
debug_elements.container.append(mason_clear)
# Place the container
$debug.prepend(debug_elements.container)
#------------------------------------------------------------------------------
#
# Resize
#
#------------------------------------------------------------------------------
if settings.layout == "fluid"
resize = null
$(window, $self).on 'resize', (event) =>
$(".#{settings.filler.filler_class}").remove()
elements.matrix = []
clearTimeout(resize)
resize = null
resize = setTimeout( =>
setup()
,0)
# setup()
#------------------------------------------------------------------------------
#
# Let 'er rip!
#
#------------------------------------------------------------------------------
setup()
return {
destroy: () ->
$(window, $self).off 'resize'
}
) jQuery
| 18052 | ###
MasonJS
Author: <NAME>
Version: 2.0.3
License: MIT
Copyright (c) 2015 <NAME>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
(($) ->
$.fn.mason = (options, complete) ->
#------------------------------------------------------------------------------
#
# Default options
#
#------------------------------------------------------------------------------
defaults = {
itemSelector: ''
ratio: 0
sizes: []
columns: [
[0, 480, 1],
[480, 780, 2],
[780, 1080, 3],
[1080, 1320, 4],
[1320, 1680, 5]
]
promoted: []
filler: {
itemSelector: options.itemSelector
filler_class: 'mason_filler'
keepDataAndEvents: false
}
randomSizes: false
randomFillers: false
layout: 'none'
gutter: 0
debug: false
}
$self = null
#------------------------------------------------------------------------------
#
# Debug Elements
#
#------------------------------------------------------------------------------
debug_elements = {
container: $("<div id='debug'></div>")
block: "<div class='mason-debug' style='background-color: rgba(244, 67, 54, .5); float: left;'></div>"
}
#------------------------------------------------------------------------------
#
# Elements
#
#------------------------------------------------------------------------------
mason_clear = "<div class='mason_clear' style='clear:both; position:relative;'></div>"
#------------------------------------------------------------------------------
#
# Complete callback
# if the callback exists set up as options
#
#------------------------------------------------------------------------------
if complete
callback = {
complete: complete
}
#------------------------------------------------------------------------------
#
# Elements
#
#------------------------------------------------------------------------------
elements = {
block: {
height: 0
width: 0
}
matrix: []
startWidth: 0
}
#------------------------------------------------------------------------------
#
# MasonJS Core
#
#------------------------------------------------------------------------------
@each ->
settings = $.extend(defaults, options)
callbacks = $.extend(callback, complete)
# create reference to the jQuery object
$self = $(@)
#------------------------------------------------------------------------------
#
# Setup
# Do inital setup to get sizing
#
#------------------------------------------------------------------------------
setup = ->
# console.log $self.width() - getScrollbarWidth()
if settings.debug
console.log "SETUP"
#
# Check to see if a clear is in place yet or not
# This is used for measurement - VERY IMPORTANT
#
if $self.children(".mason_clear").length < 1
$self.append(mason_clear)
#
# Set the element block height
#
elements.block.height = Math.round(parseFloat(($self.width() / columnSize()) / settings.ratio)).toFixed(2)
#
# Set the element block width
#
elements.block.width = Math.round(parseFloat(($self.width() / columnSize()))).toFixed(2)
#
# Set Start Width
#
elements.startWidth = $self.width()
sizeElements()
#
# If the debug flag is on create an element and fill it to show the grid
#
if settings.debug
console.log "############## Running In Debug Mode ##############"
debug()
#------------------------------------------------------------------------------
#
# Size Elements
# Size and setup inital placement
#
#------------------------------------------------------------------------------
sizeElements = ->
#
# If there is only 1 column ( mobile ) size all elements
#
if columnSize() == 1
$block = $self.children("#{settings.itemSelector}")
$block.height(elements.block.height - (settings.gutter * 2))
$block.width(elements.block.width - (settings.gutter * 2))
$block.css
'margin': settings.gutter
#
# Complete Callback
#
if typeof callbacks.complete != "undefined"
callbacks.complete()
#
# More than 1 column do some math fool!
#
else
#
# Loop over each element, size, place and fill out the matrix
#
$self.children("#{settings.itemSelector}", ".#{settings.filler.filler_class}").each ->
$block = $(@)
#
# Check to see if block is promoted and if so promote it
#
p = 0
promoted = false
promoted_size = 0
while p < settings.promoted.length
if $block.hasClass(settings.promoted[p][0])
promoted = true
promoted_size = p
p++
if promoted
size = settings.promoted[promoted_size]
#
# Assign the size to the block element
#
$block.data('size', promoted_size)
$block.data('promoted', true)
#
# Calculate the height and width of the block
#
h = parseFloat((elements.block.height * size[2])).toFixed(0)
h = h - (settings.gutter * 2)
w = parseFloat((elements.block.width * size[1])).toFixed(0)
w = w - (settings.gutter * 2)
else
#
# Pick random number between 0 and the length of sizes
#
ran = Math.floor(Math.random() * settings.sizes.length)
size = settings.sizes[ran]
#
# Assign the size to the block element
#
$block.data('size', ran)
#
# Calculate the height and width of the block
#
h = parseFloat((elements.block.height * size[1])).toFixed(0)
h = h - (settings.gutter * 2)
w = parseFloat((elements.block.width * size[0])).toFixed(0)
w = w - (settings.gutter * 2)
$block.height(h + 'px')
$block.width(w + 'px')
$block.css
'margin': settings.gutter
mason()
#------------------------------------------------------------------------------
#
# <NAME> <NAME>
# Do the logic to place and fill out the matrix
#
#------------------------------------------------------------------------------
mason = ->
#
# Set some default sizes and numbers
#
col = columnSize()
el_h = $self.height()
block_h = Math.round(el_h / elements.block.height)
elements.matrix = []
#
# Loop over blocks and fill out the matrix with booleans
# Defaults to false first then we will do logic to set true
# based on the position of the blocks.
#
r = 0
while r < block_h
# Create the row
elements.matrix[r] = []
c = 0
while c < col
# Create the columns
elements.matrix[r][c] = false
c++
r++
#
# Populate the matrix
#
$self.children("#{settings.itemSelector}").each ->
$block = $(@)
#
# Calculate position based around dimensions
# t - top
# l - left
# s - data size
#
l = Math.round($block.position().left / elements.block.width)
t = Math.round($block.position().top / elements.block.height)
s = parseFloat($block.data('size'))
#
# Get the element dimentions
# h - Height
# w - Width
# a - Area
#
if $block.data('promoted')
h = settings.promoted[s][2]
w = settings.promoted[s][1]
a = h * w
else
h = settings.sizes[s][1]
w = settings.sizes[s][0]
a = h * w
#
# Loop through the elements area and based on the size
# populate the matrix.
#
# NOTE: Star with rows then move to columns
#
r = 0
while r < a
bh = 0
while bh < h
bw = 0
elements.matrix[t + bh][l] = true
while bw < w
elements.matrix[t + bh][l + bw] = true
bw++
bh++
r++
layBricks()
#------------------------------------------------------------------------------
#
# Lay Bricks
# This is where mason fills in those gaps.
# If a filler has not been supplied Mason will use the current elements
#
#------------------------------------------------------------------------------
layBricks = ->
#
# r - Row index
# filler_index - The index of the filler object
#
r = 0
filler_total = $("#{settings.filler.itemSelector}").not(".#{settings.filler.filler_class}").length
filler_index = -1
# Loop over each row
while r < elements.matrix.length
# Loop over row columns
c = 0
while c < elements.matrix[r].length
# If the area is false in the matrix that means it is empty
# so we need to fill it.
if !elements.matrix[r][c]
#
# Calculate the height and width of the block
#
h = elements.block.height
w = elements.block.width
#
# Get the correct placement
#
x = ( r * h ) + settings.gutter
y = ( c * w ) + settings.gutter
#
# Adjust the height and width for the grid
#
h = h - settings.gutter * 2
w = w - settings.gutter * 2
#
# Check to see if a filler has been specified or random fillers are on
#
if settings.randomFillers
filler_index = Math.floor(Math.random() * filler_total)
else
if filler_index < filler_total
filler_index++
if filler_index == filler_total
filler_index = 0
#
# Assign filler
#
$filler = $("#{settings.filler.itemSelector}").not(".#{settings.filler.filler_class}").eq(filler_index).clone(settings.filler.keepDataAndEvents)
$filler.addClass(settings.filler.filler_class)
#
# Position the filler
#
$filler.css
position: 'absolute'
top: x + 'px'
left: y + 'px'
height: h + 'px'
width: w + 'px'
margin: '0px'
#
# Append filler
#
$filler.appendTo($self)
c++
r++
#
# Check start width and if different remeasure
#
if $self.width() < elements.startWidth
$(window, $self).trigger('resize')
else
#
# Complete Callback
#
if typeof callbacks.complete != "undefined"
callbacks.complete()
#------------------------------------------------------------------------------
#
# Column Size
# Determine the column size and count based on screen sizes and settings
#
#------------------------------------------------------------------------------
columnSize = ->
w = parseFloat($self.width())
cols = 0
colsCount = settings.columns.length - 1
#
# Determine the number of columns based on options
#
if w >= settings.columns[colsCount[1]]
cols = settings.columns[colsCount[2]]
else
i = 0
while i <= colsCount
if w > settings.columns[i][0] && settings.columns[i][1]
cols = settings.columns[i][2]
i++
return Math.floor(cols)
#------------------------------------------------------------------------------
#
# DEBUG
# Debug can be run by adding the 'debug' flag to true. This will draw out
# the area that mason understands it needs to fill.
#
#------------------------------------------------------------------------------
debug = ->
#
# Set some default sizes and numbers
#
$debug = $self.parent()
col = columnSize()
el_h = $self.height()
block_h = el_h / elements.block.height
# Copy over styles from the master grid
debug_elements.container.css
position: 'absolute'
top: '0'
left: '0'
height: $self.height()
width: $self.width()
#
# Loop over blocks and fill out the matrix with booleans
# Defaults to false first then we will do logic to set true
# based on the position of the blocks.
#
i = 0
while i < block_h
c = 0
while c < col
block = $(debug_elements.block)
# Size the blocks
block.css
height: elements.block.height - ( settings.gutter * 2 )
width: elements.block.width - ( settings.gutter * 2 )
margin: settings.gutter
debug_elements.container.append(block)
c++
i++
# Place clearfix
debug_elements.container.append(mason_clear)
# Place the container
$debug.prepend(debug_elements.container)
#------------------------------------------------------------------------------
#
# Resize
#
#------------------------------------------------------------------------------
if settings.layout == "fluid"
resize = null
$(window, $self).on 'resize', (event) =>
$(".#{settings.filler.filler_class}").remove()
elements.matrix = []
clearTimeout(resize)
resize = null
resize = setTimeout( =>
setup()
,0)
# setup()
#------------------------------------------------------------------------------
#
# Let 'er rip!
#
#------------------------------------------------------------------------------
setup()
return {
destroy: () ->
$(window, $self).off 'resize'
}
) jQuery
| true | ###
MasonJS
Author: PI:NAME:<NAME>END_PI
Version: 2.0.3
License: MIT
Copyright (c) 2015 PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
(($) ->
$.fn.mason = (options, complete) ->
#------------------------------------------------------------------------------
#
# Default options
#
#------------------------------------------------------------------------------
defaults = {
itemSelector: ''
ratio: 0
sizes: []
columns: [
[0, 480, 1],
[480, 780, 2],
[780, 1080, 3],
[1080, 1320, 4],
[1320, 1680, 5]
]
promoted: []
filler: {
itemSelector: options.itemSelector
filler_class: 'mason_filler'
keepDataAndEvents: false
}
randomSizes: false
randomFillers: false
layout: 'none'
gutter: 0
debug: false
}
$self = null
#------------------------------------------------------------------------------
#
# Debug Elements
#
#------------------------------------------------------------------------------
debug_elements = {
container: $("<div id='debug'></div>")
block: "<div class='mason-debug' style='background-color: rgba(244, 67, 54, .5); float: left;'></div>"
}
#------------------------------------------------------------------------------
#
# Elements
#
#------------------------------------------------------------------------------
mason_clear = "<div class='mason_clear' style='clear:both; position:relative;'></div>"
#------------------------------------------------------------------------------
#
# Complete callback
# if the callback exists set up as options
#
#------------------------------------------------------------------------------
if complete
callback = {
complete: complete
}
#------------------------------------------------------------------------------
#
# Elements
#
#------------------------------------------------------------------------------
elements = {
block: {
height: 0
width: 0
}
matrix: []
startWidth: 0
}
#------------------------------------------------------------------------------
#
# MasonJS Core
#
#------------------------------------------------------------------------------
@each ->
settings = $.extend(defaults, options)
callbacks = $.extend(callback, complete)
# create reference to the jQuery object
$self = $(@)
#------------------------------------------------------------------------------
#
# Setup
# Do inital setup to get sizing
#
#------------------------------------------------------------------------------
setup = ->
# console.log $self.width() - getScrollbarWidth()
if settings.debug
console.log "SETUP"
#
# Check to see if a clear is in place yet or not
# This is used for measurement - VERY IMPORTANT
#
if $self.children(".mason_clear").length < 1
$self.append(mason_clear)
#
# Set the element block height
#
elements.block.height = Math.round(parseFloat(($self.width() / columnSize()) / settings.ratio)).toFixed(2)
#
# Set the element block width
#
elements.block.width = Math.round(parseFloat(($self.width() / columnSize()))).toFixed(2)
#
# Set Start Width
#
elements.startWidth = $self.width()
sizeElements()
#
# If the debug flag is on create an element and fill it to show the grid
#
if settings.debug
console.log "############## Running In Debug Mode ##############"
debug()
#------------------------------------------------------------------------------
#
# Size Elements
# Size and setup inital placement
#
#------------------------------------------------------------------------------
sizeElements = ->
#
# If there is only 1 column ( mobile ) size all elements
#
if columnSize() == 1
$block = $self.children("#{settings.itemSelector}")
$block.height(elements.block.height - (settings.gutter * 2))
$block.width(elements.block.width - (settings.gutter * 2))
$block.css
'margin': settings.gutter
#
# Complete Callback
#
if typeof callbacks.complete != "undefined"
callbacks.complete()
#
# More than 1 column do some math fool!
#
else
#
# Loop over each element, size, place and fill out the matrix
#
$self.children("#{settings.itemSelector}", ".#{settings.filler.filler_class}").each ->
$block = $(@)
#
# Check to see if block is promoted and if so promote it
#
p = 0
promoted = false
promoted_size = 0
while p < settings.promoted.length
if $block.hasClass(settings.promoted[p][0])
promoted = true
promoted_size = p
p++
if promoted
size = settings.promoted[promoted_size]
#
# Assign the size to the block element
#
$block.data('size', promoted_size)
$block.data('promoted', true)
#
# Calculate the height and width of the block
#
h = parseFloat((elements.block.height * size[2])).toFixed(0)
h = h - (settings.gutter * 2)
w = parseFloat((elements.block.width * size[1])).toFixed(0)
w = w - (settings.gutter * 2)
else
#
# Pick random number between 0 and the length of sizes
#
ran = Math.floor(Math.random() * settings.sizes.length)
size = settings.sizes[ran]
#
# Assign the size to the block element
#
$block.data('size', ran)
#
# Calculate the height and width of the block
#
h = parseFloat((elements.block.height * size[1])).toFixed(0)
h = h - (settings.gutter * 2)
w = parseFloat((elements.block.width * size[0])).toFixed(0)
w = w - (settings.gutter * 2)
$block.height(h + 'px')
$block.width(w + 'px')
$block.css
'margin': settings.gutter
mason()
#------------------------------------------------------------------------------
#
# PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI
# Do the logic to place and fill out the matrix
#
#------------------------------------------------------------------------------
mason = ->
#
# Set some default sizes and numbers
#
col = columnSize()
el_h = $self.height()
block_h = Math.round(el_h / elements.block.height)
elements.matrix = []
#
# Loop over blocks and fill out the matrix with booleans
# Defaults to false first then we will do logic to set true
# based on the position of the blocks.
#
r = 0
while r < block_h
# Create the row
elements.matrix[r] = []
c = 0
while c < col
# Create the columns
elements.matrix[r][c] = false
c++
r++
#
# Populate the matrix
#
$self.children("#{settings.itemSelector}").each ->
$block = $(@)
#
# Calculate position based around dimensions
# t - top
# l - left
# s - data size
#
l = Math.round($block.position().left / elements.block.width)
t = Math.round($block.position().top / elements.block.height)
s = parseFloat($block.data('size'))
#
# Get the element dimentions
# h - Height
# w - Width
# a - Area
#
if $block.data('promoted')
h = settings.promoted[s][2]
w = settings.promoted[s][1]
a = h * w
else
h = settings.sizes[s][1]
w = settings.sizes[s][0]
a = h * w
#
# Loop through the elements area and based on the size
# populate the matrix.
#
# NOTE: Star with rows then move to columns
#
r = 0
while r < a
bh = 0
while bh < h
bw = 0
elements.matrix[t + bh][l] = true
while bw < w
elements.matrix[t + bh][l + bw] = true
bw++
bh++
r++
layBricks()
#------------------------------------------------------------------------------
#
# Lay Bricks
# This is where mason fills in those gaps.
# If a filler has not been supplied Mason will use the current elements
#
#------------------------------------------------------------------------------
layBricks = ->
#
# r - Row index
# filler_index - The index of the filler object
#
r = 0
filler_total = $("#{settings.filler.itemSelector}").not(".#{settings.filler.filler_class}").length
filler_index = -1
# Loop over each row
while r < elements.matrix.length
# Loop over row columns
c = 0
while c < elements.matrix[r].length
# If the area is false in the matrix that means it is empty
# so we need to fill it.
if !elements.matrix[r][c]
#
# Calculate the height and width of the block
#
h = elements.block.height
w = elements.block.width
#
# Get the correct placement
#
x = ( r * h ) + settings.gutter
y = ( c * w ) + settings.gutter
#
# Adjust the height and width for the grid
#
h = h - settings.gutter * 2
w = w - settings.gutter * 2
#
# Check to see if a filler has been specified or random fillers are on
#
if settings.randomFillers
filler_index = Math.floor(Math.random() * filler_total)
else
if filler_index < filler_total
filler_index++
if filler_index == filler_total
filler_index = 0
#
# Assign filler
#
$filler = $("#{settings.filler.itemSelector}").not(".#{settings.filler.filler_class}").eq(filler_index).clone(settings.filler.keepDataAndEvents)
$filler.addClass(settings.filler.filler_class)
#
# Position the filler
#
$filler.css
position: 'absolute'
top: x + 'px'
left: y + 'px'
height: h + 'px'
width: w + 'px'
margin: '0px'
#
# Append filler
#
$filler.appendTo($self)
c++
r++
#
# Check start width and if different remeasure
#
if $self.width() < elements.startWidth
$(window, $self).trigger('resize')
else
#
# Complete Callback
#
if typeof callbacks.complete != "undefined"
callbacks.complete()
#------------------------------------------------------------------------------
#
# Column Size
# Determine the column size and count based on screen sizes and settings
#
#------------------------------------------------------------------------------
columnSize = ->
w = parseFloat($self.width())
cols = 0
colsCount = settings.columns.length - 1
#
# Determine the number of columns based on options
#
if w >= settings.columns[colsCount[1]]
cols = settings.columns[colsCount[2]]
else
i = 0
while i <= colsCount
if w > settings.columns[i][0] && settings.columns[i][1]
cols = settings.columns[i][2]
i++
return Math.floor(cols)
#------------------------------------------------------------------------------
#
# DEBUG
# Debug can be run by adding the 'debug' flag to true. This will draw out
# the area that mason understands it needs to fill.
#
#------------------------------------------------------------------------------
debug = ->
#
# Set some default sizes and numbers
#
$debug = $self.parent()
col = columnSize()
el_h = $self.height()
block_h = el_h / elements.block.height
# Copy over styles from the master grid
debug_elements.container.css
position: 'absolute'
top: '0'
left: '0'
height: $self.height()
width: $self.width()
#
# Loop over blocks and fill out the matrix with booleans
# Defaults to false first then we will do logic to set true
# based on the position of the blocks.
#
i = 0
while i < block_h
c = 0
while c < col
block = $(debug_elements.block)
# Size the blocks
block.css
height: elements.block.height - ( settings.gutter * 2 )
width: elements.block.width - ( settings.gutter * 2 )
margin: settings.gutter
debug_elements.container.append(block)
c++
i++
# Place clearfix
debug_elements.container.append(mason_clear)
# Place the container
$debug.prepend(debug_elements.container)
#------------------------------------------------------------------------------
#
# Resize
#
#------------------------------------------------------------------------------
if settings.layout == "fluid"
resize = null
$(window, $self).on 'resize', (event) =>
$(".#{settings.filler.filler_class}").remove()
elements.matrix = []
clearTimeout(resize)
resize = null
resize = setTimeout( =>
setup()
,0)
# setup()
#------------------------------------------------------------------------------
#
# Let 'er rip!
#
#------------------------------------------------------------------------------
setup()
return {
destroy: () ->
$(window, $self).off 'resize'
}
) jQuery
|
[
{
"context": "er.connect\n user: config.username\n password: config.password\n host: config.smtp.host\n ssl: config.smtp.s",
"end": 430,
"score": 0.9992981553077698,
"start": 415,
"tag": "PASSWORD",
"value": "config.password"
},
{
"context": "il}>\"\n to: \"#{config.name} <#{config.email}>, 'dan sell' <dan.s.sell@gmail.com>\"\n subject: \"Testing No",
"end": 669,
"score": 0.9974796772003174,
"start": 661,
"tag": "NAME",
"value": "dan sell"
},
{
"context": "o: \"#{config.name} <#{config.email}>, 'dan sell' <dan.s.sell@gmail.com>\"\n subject: \"Testing Node.js email capabilitie",
"end": 692,
"score": 0.9999310970306396,
"start": 672,
"tag": "EMAIL",
"value": "dan.s.sell@gmail.com"
}
] | src/email/send.coffee | Michaellino/Angular | 0 | # This script will send an image as an email attachment to the
# user himself. The receiving part of this is in read.coffee
# Install EmailJS with `npm install emailjs`
email = require "emailjs"
# You need a config file with your email settings
fs = require "fs"
config = JSON.parse fs.readFileSync "#{process.cwd()}/config.json", "utf-8"
server = email.server.connect
user: config.username
password: config.password
host: config.smtp.host
ssl: config.smtp.ssl
message = email.message.create
text: "This is test of the OpenRecess mail server"
from: "#{config.name} <#{config.email}>"
to: "#{config.name} <#{config.email}>, 'dan sell' <dan.s.sell@gmail.com>"
subject: "Testing Node.js email capabilities for OpenRecess"
# message.attach "reading.png", "image/png", "reading-image.png"
server.send message, (err, message) ->
return console.error err if err
console.log "Message sent with id #{message['header']['message-id']}" | 2209 | # This script will send an image as an email attachment to the
# user himself. The receiving part of this is in read.coffee
# Install EmailJS with `npm install emailjs`
email = require "emailjs"
# You need a config file with your email settings
fs = require "fs"
config = JSON.parse fs.readFileSync "#{process.cwd()}/config.json", "utf-8"
server = email.server.connect
user: config.username
password: <PASSWORD>
host: config.smtp.host
ssl: config.smtp.ssl
message = email.message.create
text: "This is test of the OpenRecess mail server"
from: "#{config.name} <#{config.email}>"
to: "#{config.name} <#{config.email}>, '<NAME>' <<EMAIL>>"
subject: "Testing Node.js email capabilities for OpenRecess"
# message.attach "reading.png", "image/png", "reading-image.png"
server.send message, (err, message) ->
return console.error err if err
console.log "Message sent with id #{message['header']['message-id']}" | true | # This script will send an image as an email attachment to the
# user himself. The receiving part of this is in read.coffee
# Install EmailJS with `npm install emailjs`
email = require "emailjs"
# You need a config file with your email settings
fs = require "fs"
config = JSON.parse fs.readFileSync "#{process.cwd()}/config.json", "utf-8"
server = email.server.connect
user: config.username
password: PI:PASSWORD:<PASSWORD>END_PI
host: config.smtp.host
ssl: config.smtp.ssl
message = email.message.create
text: "This is test of the OpenRecess mail server"
from: "#{config.name} <#{config.email}>"
to: "#{config.name} <#{config.email}>, 'PI:NAME:<NAME>END_PI' <PI:EMAIL:<EMAIL>END_PI>"
subject: "Testing Node.js email capabilities for OpenRecess"
# message.attach "reading.png", "image/png", "reading-image.png"
server.send message, (err, message) ->
return console.error err if err
console.log "Message sent with id #{message['header']['message-id']}" |
[
{
"context": "mon')\n\ntest 'sparse-test', [\n {\n Name: 'Bob'\n Location: 'Sweden'\n Age: 14\n }",
"end": 74,
"score": 0.9998664855957031,
"start": 71,
"tag": "NAME",
"value": "Bob"
},
{
"context": "weden'\n Age: 14\n }\n {\n Name: 'Alice'\n }\n]\n",
"end": 151,
"score": 0.9998681545257568,
"start": 146,
"tag": "NAME",
"value": "Alice"
}
] | test/sparse_test.coffee | SBeyeMHP/node-xlsx-writestream | 42 | test = require('./common')
test 'sparse-test', [
{
Name: 'Bob'
Location: 'Sweden'
Age: 14
}
{
Name: 'Alice'
}
]
| 31449 | test = require('./common')
test 'sparse-test', [
{
Name: '<NAME>'
Location: 'Sweden'
Age: 14
}
{
Name: '<NAME>'
}
]
| true | test = require('./common')
test 'sparse-test', [
{
Name: 'PI:NAME:<NAME>END_PI'
Location: 'Sweden'
Age: 14
}
{
Name: 'PI:NAME:<NAME>END_PI'
}
]
|
[
{
"context": "###\n# Copyright 2013, 2014, 2015 Simon Lydell\n# X11 (“MIT”) Licensed. (See LICENSE.)\n###\n\nautop",
"end": 45,
"score": 0.9998388290405273,
"start": 33,
"tag": "NAME",
"value": "Simon Lydell"
}
] | node_modules/autoprefixer-brunch/src/index.coffee | sheriffderek/basic-style | 0 | ###
# Copyright 2013, 2014, 2015 Simon Lydell
# X11 (“MIT”) Licensed. (See LICENSE.)
###
autoprefixer = require "autoprefixer-core"
module.exports = class Autoprefixer
brunchPlugin: yes
type: "stylesheet"
extension: "css"
defaultEnv: "*"
constructor: (@config)->
options = @config.plugins.autoprefixer ? {}
if "options" of options
console.warn "`config.plugins.autoprefixer.options` is deprecated. Put
the options directly in `config.plugins.autoprefixer` instead"
oldOptions = options.options
options = {browsers: options.browsers}
for own key, value of oldOptions
options[key] = value
@compiler = autoprefixer(options)
optimize: ({data, path, map}, callback)->
try
result = @compiler.process(data,
from: path, to: path
map:
prev: map.toJSON()
annotation: false
sourcesContent: false
)
catch error
return callback error
callback null, {data: result.css, map: result.map.toJSON()}
| 194830 | ###
# Copyright 2013, 2014, 2015 <NAME>
# X11 (“MIT”) Licensed. (See LICENSE.)
###
autoprefixer = require "autoprefixer-core"
module.exports = class Autoprefixer
brunchPlugin: yes
type: "stylesheet"
extension: "css"
defaultEnv: "*"
constructor: (@config)->
options = @config.plugins.autoprefixer ? {}
if "options" of options
console.warn "`config.plugins.autoprefixer.options` is deprecated. Put
the options directly in `config.plugins.autoprefixer` instead"
oldOptions = options.options
options = {browsers: options.browsers}
for own key, value of oldOptions
options[key] = value
@compiler = autoprefixer(options)
optimize: ({data, path, map}, callback)->
try
result = @compiler.process(data,
from: path, to: path
map:
prev: map.toJSON()
annotation: false
sourcesContent: false
)
catch error
return callback error
callback null, {data: result.css, map: result.map.toJSON()}
| true | ###
# Copyright 2013, 2014, 2015 PI:NAME:<NAME>END_PI
# X11 (“MIT”) Licensed. (See LICENSE.)
###
autoprefixer = require "autoprefixer-core"
module.exports = class Autoprefixer
brunchPlugin: yes
type: "stylesheet"
extension: "css"
defaultEnv: "*"
constructor: (@config)->
options = @config.plugins.autoprefixer ? {}
if "options" of options
console.warn "`config.plugins.autoprefixer.options` is deprecated. Put
the options directly in `config.plugins.autoprefixer` instead"
oldOptions = options.options
options = {browsers: options.browsers}
for own key, value of oldOptions
options[key] = value
@compiler = autoprefixer(options)
optimize: ({data, path, map}, callback)->
try
result = @compiler.process(data,
from: path, to: path
map:
prev: map.toJSON()
annotation: false
sourcesContent: false
)
catch error
return callback error
callback null, {data: result.css, map: result.map.toJSON()}
|
[
{
"context": " id = Accounts.createUser\n username: \"admin\"\n email: \"jho.xray@gmail.com\"\n pass",
"end": 628,
"score": 0.9472655057907104,
"start": 623,
"tag": "USERNAME",
"value": "admin"
},
{
"context": "ateUser\n username: \"admin\"\n email: \"jho.xray@gmail.com\"\n password: \"password\"\n securityPro",
"end": 664,
"score": 0.9999268054962158,
"start": 646,
"tag": "EMAIL",
"value": "jho.xray@gmail.com"
},
{
"context": " email: \"jho.xray@gmail.com\"\n password: \"password\"\n securityProfile:\n globalRole: \"",
"end": 693,
"score": 0.9992958307266235,
"start": 685,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "e: \"admin\"\n profile:\n firstName: \"Jay\"\n lastName: \"Ho\"\n tl.info(\"Admin us",
"end": 792,
"score": 0.9990750551223755,
"start": 789,
"tag": "NAME",
"value": "Jay"
},
{
"context": ":\n firstName: \"Jay\"\n lastName: \"Ho\"\n tl.info(\"Admin user created with id: \" + i",
"end": 817,
"score": 0.9860208630561829,
"start": 815,
"tag": "NAME",
"value": "Ho"
},
{
"context": " constructor: ->\n @name = 'Sunny'\n super\n \"\"\"\n ",
"end": 2354,
"score": 0.9991961717605591,
"start": 2349,
"tag": "NAME",
"value": "Sunny"
}
] | main.coffee | superstringsoftware/meteorology | 0 | tl = TLog.getLogger()
@Posts = new Meteor.Collection 'posts'
@allowAdmin = (uid)-> if Meteor.users.findOne(uid)?.securityProfile?.globalRole is "admin" then true else false
if Meteor.isServer
au = Meteor.users.find({"securityProfile.globalRole": "admin"}).count()
Posts.allow
insert: allowAdmin
update: allowAdmin
remove: allowAdmin
tl.info "Found " + au + " admin users"
# accounts setup for initial admin user
# removal rights on the logs
TLog.allowRemove allowAdmin
if au < 1
tl.warn("No admin users found, creating default...")
try
id = Accounts.createUser
username: "admin"
email: "jho.xray@gmail.com"
password: "password"
securityProfile:
globalRole: "admin"
profile:
firstName: "Jay"
lastName: "Ho"
tl.info("Admin user created with id: " + id)
catch err
tl.error("Admin account creation failed with error " + err.name + ", message: " + err.message + "<br/>\n" + err.stack)
# Publishing
Meteor.publish 'allPosts',->
tl.debug "Publishing All Posts for user #{@userId}"
Posts.find {}, {sort: {createdAt: -1}}
# publishing roles
Meteor.publish "userData", ->
tl.debug "Publishing user info for user #{@userId}"
Meteor.users.find {_id: @userId}, {fields: {securityProfile: 1}}
# if no posts exist, add some.
if Posts.find().count() is 0
posts = [
title: "Meteor Source Maps have arrived!"
tagline: "...and you will love it"
body: [
content: "You can now map to your CoffeScript source files from the browser."
type: "text"
,
content: "Aint't it cool?"
type: "text"
]
,
title: "Bootstrap 3 Goes Mobile First!"
tagline: "testing preformatted text"
body: [
content: "With Bootstrap 3, <p>mobile devices</p> will load <i>only</i> necessary Styles and Content."
type: "html"
,
content: """
Some preformatted markdown!
---------------------------
This is how we roll:
* one
* two
* three
"""
type: "markdown"
,
content: """
class Horse extends Animal
constructor: ->
@name = 'Sunny'
super
"""
type: "coffeescript"
]
]
for postData in posts
Posts.insert
title: postData.title
tagline: postData.tagline
body: postData.body
createdAt: new Date
if Meteor.isClient
Router.configure
layout: "layout"
notFoundTemplate: "notFound"
loadingTemplate: "loading"
Observatory.logCollection()
#Observatory.logTemplates()
CommonController.subscribe 'allPosts'
CommonController.subscribe 'userData'
Session.set 'codemirrorTypes', ['htmlmixed','markdown','javascript','coffeescript','css','xml']
Handlebars.registerHelper "getSession", (name)-> Session.get name
Handlebars.registerHelper "formatDate", (timestamp)-> timestamp?.toDateString()
| 77783 | tl = TLog.getLogger()
@Posts = new Meteor.Collection 'posts'
@allowAdmin = (uid)-> if Meteor.users.findOne(uid)?.securityProfile?.globalRole is "admin" then true else false
if Meteor.isServer
au = Meteor.users.find({"securityProfile.globalRole": "admin"}).count()
Posts.allow
insert: allowAdmin
update: allowAdmin
remove: allowAdmin
tl.info "Found " + au + " admin users"
# accounts setup for initial admin user
# removal rights on the logs
TLog.allowRemove allowAdmin
if au < 1
tl.warn("No admin users found, creating default...")
try
id = Accounts.createUser
username: "admin"
email: "<EMAIL>"
password: "<PASSWORD>"
securityProfile:
globalRole: "admin"
profile:
firstName: "<NAME>"
lastName: "<NAME>"
tl.info("Admin user created with id: " + id)
catch err
tl.error("Admin account creation failed with error " + err.name + ", message: " + err.message + "<br/>\n" + err.stack)
# Publishing
Meteor.publish 'allPosts',->
tl.debug "Publishing All Posts for user #{@userId}"
Posts.find {}, {sort: {createdAt: -1}}
# publishing roles
Meteor.publish "userData", ->
tl.debug "Publishing user info for user #{@userId}"
Meteor.users.find {_id: @userId}, {fields: {securityProfile: 1}}
# if no posts exist, add some.
if Posts.find().count() is 0
posts = [
title: "Meteor Source Maps have arrived!"
tagline: "...and you will love it"
body: [
content: "You can now map to your CoffeScript source files from the browser."
type: "text"
,
content: "Aint't it cool?"
type: "text"
]
,
title: "Bootstrap 3 Goes Mobile First!"
tagline: "testing preformatted text"
body: [
content: "With Bootstrap 3, <p>mobile devices</p> will load <i>only</i> necessary Styles and Content."
type: "html"
,
content: """
Some preformatted markdown!
---------------------------
This is how we roll:
* one
* two
* three
"""
type: "markdown"
,
content: """
class Horse extends Animal
constructor: ->
@name = '<NAME>'
super
"""
type: "coffeescript"
]
]
for postData in posts
Posts.insert
title: postData.title
tagline: postData.tagline
body: postData.body
createdAt: new Date
if Meteor.isClient
Router.configure
layout: "layout"
notFoundTemplate: "notFound"
loadingTemplate: "loading"
Observatory.logCollection()
#Observatory.logTemplates()
CommonController.subscribe 'allPosts'
CommonController.subscribe 'userData'
Session.set 'codemirrorTypes', ['htmlmixed','markdown','javascript','coffeescript','css','xml']
Handlebars.registerHelper "getSession", (name)-> Session.get name
Handlebars.registerHelper "formatDate", (timestamp)-> timestamp?.toDateString()
| true | tl = TLog.getLogger()
@Posts = new Meteor.Collection 'posts'
@allowAdmin = (uid)-> if Meteor.users.findOne(uid)?.securityProfile?.globalRole is "admin" then true else false
if Meteor.isServer
au = Meteor.users.find({"securityProfile.globalRole": "admin"}).count()
Posts.allow
insert: allowAdmin
update: allowAdmin
remove: allowAdmin
tl.info "Found " + au + " admin users"
# accounts setup for initial admin user
# removal rights on the logs
TLog.allowRemove allowAdmin
if au < 1
tl.warn("No admin users found, creating default...")
try
id = Accounts.createUser
username: "admin"
email: "PI:EMAIL:<EMAIL>END_PI"
password: "PI:PASSWORD:<PASSWORD>END_PI"
securityProfile:
globalRole: "admin"
profile:
firstName: "PI:NAME:<NAME>END_PI"
lastName: "PI:NAME:<NAME>END_PI"
tl.info("Admin user created with id: " + id)
catch err
tl.error("Admin account creation failed with error " + err.name + ", message: " + err.message + "<br/>\n" + err.stack)
# Publishing
Meteor.publish 'allPosts',->
tl.debug "Publishing All Posts for user #{@userId}"
Posts.find {}, {sort: {createdAt: -1}}
# publishing roles
Meteor.publish "userData", ->
tl.debug "Publishing user info for user #{@userId}"
Meteor.users.find {_id: @userId}, {fields: {securityProfile: 1}}
# if no posts exist, add some.
if Posts.find().count() is 0
posts = [
title: "Meteor Source Maps have arrived!"
tagline: "...and you will love it"
body: [
content: "You can now map to your CoffeScript source files from the browser."
type: "text"
,
content: "Aint't it cool?"
type: "text"
]
,
title: "Bootstrap 3 Goes Mobile First!"
tagline: "testing preformatted text"
body: [
content: "With Bootstrap 3, <p>mobile devices</p> will load <i>only</i> necessary Styles and Content."
type: "html"
,
content: """
Some preformatted markdown!
---------------------------
This is how we roll:
* one
* two
* three
"""
type: "markdown"
,
content: """
class Horse extends Animal
constructor: ->
@name = 'PI:NAME:<NAME>END_PI'
super
"""
type: "coffeescript"
]
]
for postData in posts
Posts.insert
title: postData.title
tagline: postData.tagline
body: postData.body
createdAt: new Date
if Meteor.isClient
Router.configure
layout: "layout"
notFoundTemplate: "notFound"
loadingTemplate: "loading"
Observatory.logCollection()
#Observatory.logTemplates()
CommonController.subscribe 'allPosts'
CommonController.subscribe 'userData'
Session.set 'codemirrorTypes', ['htmlmixed','markdown','javascript','coffeescript','css','xml']
Handlebars.registerHelper "getSession", (name)-> Session.get name
Handlebars.registerHelper "formatDate", (timestamp)-> timestamp?.toDateString()
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9994263052940369,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-domain-multi.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Tests of multiple domains happening at once.
common = require("../common")
assert = require("assert")
domain = require("domain")
events = require("events")
caughtA = false
caughtB = false
caughtC = false
a = domain.create()
a.enter() # this will be our "root" domain
a.on "error", (er) ->
caughtA = true
console.log "This should not happen"
throw erreturn
http = require("http")
# child domain of a.
# treat these EE objects as if they are a part of the b domain
# so, an 'error' event on them propagates to the domain, rather
# than being thrown.
# res.writeHead(500), res.destroy, etc.
# XXX this bind should not be necessary.
# the write cb behavior in http/net should use an
# event so that it picks up the domain handling.
server = http.createServer((req, res) ->
b = domain.create()
a.add b
b.add req
b.add res
b.on "error", (er) ->
caughtB = true
console.error "Error encountered", er
if res
res.writeHead 500
res.end "An error occurred"
server.close()
return
res.write "HELLO\n", b.bind(->
throw new Error("this kills domain B, not A")return
)
return
).listen(common.PORT)
c = domain.create()
req = http.get(
host: "localhost"
port: common.PORT
)
# add the request to the C domain
c.add req
req.on "response", (res) ->
console.error "got response"
# add the response object to the C domain
c.add res
res.pipe process.stdout
return
c.on "error", (er) ->
caughtC = true
console.error "Error on c", er.message
return
process.on "exit", ->
assert.equal caughtA, false
assert.equal caughtB, true
assert.equal caughtC, true
console.log "ok - Errors went where they were supposed to go"
return
| 7697 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Tests of multiple domains happening at once.
common = require("../common")
assert = require("assert")
domain = require("domain")
events = require("events")
caughtA = false
caughtB = false
caughtC = false
a = domain.create()
a.enter() # this will be our "root" domain
a.on "error", (er) ->
caughtA = true
console.log "This should not happen"
throw erreturn
http = require("http")
# child domain of a.
# treat these EE objects as if they are a part of the b domain
# so, an 'error' event on them propagates to the domain, rather
# than being thrown.
# res.writeHead(500), res.destroy, etc.
# XXX this bind should not be necessary.
# the write cb behavior in http/net should use an
# event so that it picks up the domain handling.
server = http.createServer((req, res) ->
b = domain.create()
a.add b
b.add req
b.add res
b.on "error", (er) ->
caughtB = true
console.error "Error encountered", er
if res
res.writeHead 500
res.end "An error occurred"
server.close()
return
res.write "HELLO\n", b.bind(->
throw new Error("this kills domain B, not A")return
)
return
).listen(common.PORT)
c = domain.create()
req = http.get(
host: "localhost"
port: common.PORT
)
# add the request to the C domain
c.add req
req.on "response", (res) ->
console.error "got response"
# add the response object to the C domain
c.add res
res.pipe process.stdout
return
c.on "error", (er) ->
caughtC = true
console.error "Error on c", er.message
return
process.on "exit", ->
assert.equal caughtA, false
assert.equal caughtB, true
assert.equal caughtC, true
console.log "ok - Errors went where they were supposed to go"
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Tests of multiple domains happening at once.
common = require("../common")
assert = require("assert")
domain = require("domain")
events = require("events")
caughtA = false
caughtB = false
caughtC = false
a = domain.create()
a.enter() # this will be our "root" domain
a.on "error", (er) ->
caughtA = true
console.log "This should not happen"
throw erreturn
http = require("http")
# child domain of a.
# treat these EE objects as if they are a part of the b domain
# so, an 'error' event on them propagates to the domain, rather
# than being thrown.
# res.writeHead(500), res.destroy, etc.
# XXX this bind should not be necessary.
# the write cb behavior in http/net should use an
# event so that it picks up the domain handling.
server = http.createServer((req, res) ->
b = domain.create()
a.add b
b.add req
b.add res
b.on "error", (er) ->
caughtB = true
console.error "Error encountered", er
if res
res.writeHead 500
res.end "An error occurred"
server.close()
return
res.write "HELLO\n", b.bind(->
throw new Error("this kills domain B, not A")return
)
return
).listen(common.PORT)
c = domain.create()
req = http.get(
host: "localhost"
port: common.PORT
)
# add the request to the C domain
c.add req
req.on "response", (res) ->
console.error "got response"
# add the response object to the C domain
c.add res
res.pipe process.stdout
return
c.on "error", (er) ->
caughtC = true
console.error "Error on c", er.message
return
process.on "exit", ->
assert.equal caughtA, false
assert.equal caughtB, true
assert.equal caughtC, true
console.log "ok - Errors went where they were supposed to go"
return
|
[
{
"context": " ->\n repos =\n getOriginURL: -> 'git@some-url.com:some/project'\n\n integration.handleReposito",
"end": 581,
"score": 0.9820050597190857,
"start": 565,
"tag": "EMAIL",
"value": "git@some-url.com"
},
{
"context": "s)\n\n repos =\n getOriginURL: -> 'git@some-url.com:some/project.git'\n\n integration.handleRepo",
"end": 915,
"score": 0.9916706681251526,
"start": 899,
"tag": "EMAIL",
"value": "git@some-url.com"
},
{
"context": " ->\n repos =\n getOriginURL: -> 'git@some-url.com:SenSiTiVe/ProJecT'\n\n integration.handleRep",
"end": 3475,
"score": 0.9489001631736755,
"start": 3459,
"tag": "EMAIL",
"value": "git@some-url.com"
},
{
"context": " =\n getOriginURL: -> 'git@some-url.com:SenSiTiVe/ProJecT'\n\n integration.handleRepository pr",
"end": 3485,
"score": 0.9528452754020691,
"start": 3476,
"tag": "USERNAME",
"value": "SenSiTiVe"
}
] | spec/gitlab-integration-spec.coffee | blakawk/gitlab-integration | 22 | nock = require 'nock'
describe 'GitLab Integration', ->
integration = null
project = null
beforeEach ->
project =
getPath: -> '/some/project'
integration = require '../lib/gitlab-integration'
integration.gitlab =
jasmine.createSpyObj 'gitlab', [
'watch',
]
integration.view = jasmine.createSpyObj 'view', [
'onProjectChange',
]
integration.projects = {}
it 'correctly handles Git URL', ->
repos =
getOriginURL: -> 'git@some-url.com:some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'git@some-url.com:some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
it 'correctly handles HTTP URL', ->
repos =
getOriginURL: -> 'http://some-url.com/some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'https://some-url.com/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'https://test@some-url.com/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
it 'correctly handles non-standard port', ->
repos =
getOriginURL: -> 'ssh://git@some-url.com:1234/some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'http://some-url.com:1234/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com:1234', 'some/project', repos)
repos =
getOriginURL: -> 'https://test@some-url.com:1234/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com:1234', 'some/project', repos)
it 'correctly ignores case for projects name', ->
repos =
getOriginURL: -> 'git@some-url.com:SenSiTiVe/ProJecT'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('sensitive/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'sensitive/project', repos)
| 68220 | nock = require 'nock'
describe 'GitLab Integration', ->
integration = null
project = null
beforeEach ->
project =
getPath: -> '/some/project'
integration = require '../lib/gitlab-integration'
integration.gitlab =
jasmine.createSpyObj 'gitlab', [
'watch',
]
integration.view = jasmine.createSpyObj 'view', [
'onProjectChange',
]
integration.projects = {}
it 'correctly handles Git URL', ->
repos =
getOriginURL: -> '<EMAIL>:some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> '<EMAIL>:some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
it 'correctly handles HTTP URL', ->
repos =
getOriginURL: -> 'http://some-url.com/some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'https://some-url.com/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'https://test@some-url.com/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
it 'correctly handles non-standard port', ->
repos =
getOriginURL: -> 'ssh://git@some-url.com:1234/some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'http://some-url.com:1234/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com:1234', 'some/project', repos)
repos =
getOriginURL: -> 'https://test@some-url.com:1234/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com:1234', 'some/project', repos)
it 'correctly ignores case for projects name', ->
repos =
getOriginURL: -> '<EMAIL>:SenSiTiVe/ProJecT'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('sensitive/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'sensitive/project', repos)
| true | nock = require 'nock'
describe 'GitLab Integration', ->
integration = null
project = null
beforeEach ->
project =
getPath: -> '/some/project'
integration = require '../lib/gitlab-integration'
integration.gitlab =
jasmine.createSpyObj 'gitlab', [
'watch',
]
integration.view = jasmine.createSpyObj 'view', [
'onProjectChange',
]
integration.projects = {}
it 'correctly handles Git URL', ->
repos =
getOriginURL: -> 'PI:EMAIL:<EMAIL>END_PI:some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'PI:EMAIL:<EMAIL>END_PI:some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
it 'correctly handles HTTP URL', ->
repos =
getOriginURL: -> 'http://some-url.com/some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'https://some-url.com/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'https://test@some-url.com/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
it 'correctly handles non-standard port', ->
repos =
getOriginURL: -> 'ssh://git@some-url.com:1234/some/project'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'some/project', repos)
repos =
getOriginURL: -> 'http://some-url.com:1234/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com:1234', 'some/project', repos)
repos =
getOriginURL: -> 'https://test@some-url.com:1234/some/project.git'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('some/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com:1234', 'some/project', repos)
it 'correctly ignores case for projects name', ->
repos =
getOriginURL: -> 'PI:EMAIL:<EMAIL>END_PI:SenSiTiVe/ProJecT'
integration.handleRepository project, repos
expect(integration.projects['/some/project'])
.toBe('sensitive/project')
expect(integration.gitlab.watch)
.toHaveBeenCalledWith('some-url.com', 'sensitive/project', repos)
|
[
{
"context": "oBe 'http://1.2.3.4'\n expect buildUrl 'http', '1:2::3:4'\n .toBe 'http://[1:2::3:4]'\n it 'should h",
"end": 1284,
"score": 0.9994689226150513,
"start": 1278,
"tag": "IP_ADDRESS",
"value": "1:2::3"
},
{
"context": " buildUrl 'http', '1:2::3:4'\n .toBe 'http://[1:2::3:4]'\n it 'should handle bogus', ->\n expect bui",
"end": 1315,
"score": 0.9990140795707703,
"start": 1309,
"tag": "IP_ADDRESS",
"value": "1:2::3"
},
{
"context": "it 'parses IPv6', ->\n expect Parse.address '[1:2::a:f]:4444'\n .toEqual { host: '1:2::a:f', por",
"end": 2696,
"score": 0.9739167094230652,
"start": 2690,
"tag": "IP_ADDRESS",
"value": "1:2::a"
},
{
"context": "dress '[1:2::a:f]:4444'\n .toEqual { host: '1:2::a:f', port: 4444 }\n expect Parse.address '[1:2",
"end": 2738,
"score": 0.9990113973617554,
"start": 2732,
"tag": "IP_ADDRESS",
"value": "1:2::a"
},
{
"context": "::a:f', port: 4444 }\n expect Parse.address '[1:2::a:f]'\n .toBeNull()\n expect Parse.addres",
"end": 2791,
"score": 0.897299587726593,
"start": 2785,
"tag": "IP_ADDRESS",
"value": "1:2::a"
},
{
"context": "\n .toBeNull()\n expect Parse.address '[1:2::a:f]:0x1111'\n .toBeNull()\n expect Parse",
"end": 2851,
"score": 0.9638658761978149,
"start": 2845,
"tag": "IP_ADDRESS",
"value": "1:2::a"
},
{
"context": " .toBeNull()\n expect Parse.address '[1:2::a:f]:-4444'\n .toBeNull()\n expect Parse.",
"end": 2918,
"score": 0.9724998474121094,
"start": 2914,
"tag": "IP_ADDRESS",
"value": "2::a"
},
{
"context": "\n .toBeNull()\n expect Parse.address '[1:2::a:f]:65536'\n .toBeNull()\n expect Parse.",
"end": 2984,
"score": 0.9456287622451782,
"start": 2978,
"tag": "IP_ADDRESS",
"value": "1:2::a"
},
{
"context": "\n .toBeNull()\n expect Parse.address '[1:2::ffff:1.2.3.4]:4444'\n .toEqual { host: '1:2::fff",
"end": 3053,
"score": 0.7000082731246948,
"start": 3044,
"tag": "IP_ADDRESS",
"value": "1:2::ffff"
},
{
"context": ":2::ffff:1.2.3.4]:4444'\n .toEqual { host: '1:2::ffff:1.2.3.4', port: 4444 }\n\n describe 'ipFromSDP', -",
"end": 3104,
"score": 0.8723855018615723,
"start": 3095,
"tag": "IP_ADDRESS",
"value": "1:2::ffff"
},
{
"context": "=0\n o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5\n s=SDP Seminar\n i=A Seminar o",
"end": 3315,
"score": 0.9995619058609009,
"start": 3305,
"tag": "IP_ADDRESS",
"value": "10.47.16.5"
},
{
"context": "p://www.example.com/seminars/sdp.pdf\n e=j.doe@example.com (Jane Doe)\n c=IN IP4 224.2.17.12/127\n ",
"end": 3483,
"score": 0.9998419880867004,
"start": 3466,
"tag": "EMAIL",
"value": "j.doe@example.com"
},
{
"context": "/seminars/sdp.pdf\n e=j.doe@example.com (Jane Doe)\n c=IN IP4 224.2.17.12/127\n t",
"end": 3493,
"score": 0.999843418598175,
"start": 3485,
"tag": "NAME",
"value": "Jane Doe"
},
{
"context": "=j.doe@example.com (Jane Doe)\n c=IN IP4 224.2.17.12/127\n t=2873397496 2873404696\n ",
"end": 3526,
"score": 0.9996996521949768,
"start": 3515,
"tag": "IP_ADDRESS",
"value": "224.2.17.12"
},
{
"context": "9 h263-1998/90000\n \"\"\"\n expected: '224.2.17.12'\n ,\n # Missing c= line\n sdp: \"\"\"\n ",
"end": 3741,
"score": 0.9996656775474548,
"start": 3730,
"tag": "IP_ADDRESS",
"value": "224.2.17.12"
},
{
"context": "=0\n o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5\n s=SDP Seminar\n i=A Seminar o",
"end": 3860,
"score": 0.9995967745780945,
"start": 3850,
"tag": "IP_ADDRESS",
"value": "10.47.16.5"
},
{
"context": "p://www.example.com/seminars/sdp.pdf\n e=j.doe@example.com (Jane Doe)\n t=2873397496 2873404696\n ",
"end": 4028,
"score": 0.9998716115951538,
"start": 4011,
"tag": "EMAIL",
"value": "j.doe@example.com"
},
{
"context": "/seminars/sdp.pdf\n e=j.doe@example.com (Jane Doe)\n t=2873397496 2873404696\n a=",
"end": 4038,
"score": 0.9998409748077393,
"start": 4030,
"tag": "NAME",
"value": "Jane Doe"
},
{
"context": "Single line, IP address only\n sdp: \"c=IN IP4 224.2.1.1\\n\"\n expected: '224.2.1.1'\n ,\n # Same",
"end": 4321,
"score": 0.9995390772819519,
"start": 4312,
"tag": "IP_ADDRESS",
"value": "224.2.1.1"
},
{
"context": " sdp: \"c=IN IP4 224.2.1.1\\n\"\n expected: '224.2.1.1'\n ,\n # Same, with TTL\n sdp: \"c=IN IP",
"end": 4351,
"score": 0.9996588826179504,
"start": 4342,
"tag": "IP_ADDRESS",
"value": "224.2.1.1"
},
{
"context": " ,\n # Same, with TTL\n sdp: \"c=IN IP4 224.2.1.1/127\\n\"\n expected: '224.2.1.1'\n ,\n # ",
"end": 4412,
"score": 0.9996805191040039,
"start": 4403,
"tag": "IP_ADDRESS",
"value": "224.2.1.1"
},
{
"context": " sdp: \"c=IN IP4 224.2.1.1/127\\n\"\n expected: '224.2.1.1'\n ,\n # Same, with TTL and multicast addre",
"end": 4446,
"score": 0.9996737241744995,
"start": 4437,
"tag": "IP_ADDRESS",
"value": "224.2.1.1"
},
{
"context": " TTL and multicast addresses\n sdp: \"c=IN IP4 224.2.1.1/127/3\\n\"\n expected: '224.2.1.1'\n ,\n ",
"end": 4531,
"score": 0.9996268153190613,
"start": 4522,
"tag": "IP_ADDRESS",
"value": "224.2.1.1"
},
{
"context": "dp: \"c=IN IP4 224.2.1.1/127/3\\n\"\n expected: '224.2.1.1'\n ,\n # IPv6, address only\n sdp: \"c=I",
"end": 4567,
"score": 0.9996916651725769,
"start": 4558,
"tag": "IP_ADDRESS",
"value": "224.2.1.1"
},
{
"context": " ,\n # IPv6, address only\n sdp: \"c=IN IP6 FF15::101\\n\"\n expected: 'ff15::101'\n ,\n # Same",
"end": 4632,
"score": 0.9779644012451172,
"start": 4623,
"tag": "IP_ADDRESS",
"value": "FF15::101"
},
{
"context": " sdp: \"c=IN IP6 FF15::101\\n\"\n expected: 'ff15::101'\n ,\n # Same, with multicast addresses\n ",
"end": 4662,
"score": 0.9986714720726013,
"start": 4653,
"tag": "IP_ADDRESS",
"value": "ff15::101"
},
{
"context": "ame, with multicast addresses\n sdp: \"c=IN IP6 FF15::101/3\\n\"\n expected: 'ff15::101'\n ,\n # Mu",
"end": 4739,
"score": 0.9255005717277527,
"start": 4730,
"tag": "IP_ADDRESS",
"value": "FF15::101"
},
{
"context": " sdp: \"c=IN IP6 FF15::101/3\\n\"\n expected: 'ff15::101'\n ,\n # Multiple c= lines\n sdp: \"\"\"\n ",
"end": 4771,
"score": 0.9991247057914734,
"start": 4762,
"tag": "IP_ADDRESS",
"value": "ff15::101"
},
{
"context": " v=0\n o=- 7860378660295630295 2 IN IP4 127.0.0.1\n s=-\n t=0 0\n a=grou",
"end": 5058,
"score": 0.9990180134773254,
"start": 5049,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " a=candidate:3581707038 1 udp 2122260223 192.168.0.1 54653 typ host generation 0 network-id 1 network-",
"end": 5290,
"score": 0.9997110366821289,
"start": 5279,
"tag": "IP_ADDRESS",
"value": "192.168.0.1"
},
{
"context": " a=candidate:2617212910 1 tcp 1518280447 192.168.0.1 59673 typ host tcptype passive generation 0 netwo",
"end": 5410,
"score": 0.9997047185897827,
"start": 5399,
"tag": "IP_ADDRESS",
"value": "192.168.0.1"
},
{
"context": "19 1 udp 1686052607 1.2.3.4 54653 typ srflx raddr 192.168.0.1 rport 54653 generation 0 network-id 1 network-cos",
"end": 5576,
"score": 0.9997119903564453,
"start": 5565,
"tag": "IP_ADDRESS",
"value": "192.168.0.1"
},
{
"context": "\n a=ice-ufrag:IBdf\n a=ice-pwd:G3lTrrC9gmhQx481AowtkhYz\n a=fingerprint:sha-256 53:F8:84:D9:3C:1",
"end": 5704,
"score": 0.9975214004516602,
"start": 5680,
"tag": "PASSWORD",
"value": "G3lTrrC9gmhQx481AowtkhYz"
},
{
"context": "gmhQx481AowtkhYz\n a=fingerprint:sha-256 53:F8:84:D9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n ",
"end": 5794,
"score": 0.8797488808631897,
"start": 5738,
"tag": "IP_ADDRESS",
"value": "53:F8:84:D9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17"
},
{
"context": "4:D9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=",
"end": 5797,
"score": 0.7404502630233765,
"start": 5795,
"tag": "IP_ADDRESS",
"value": "41"
},
{
"context": "9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=set",
"end": 5800,
"score": 0.7366467118263245,
"start": 5798,
"tag": "IP_ADDRESS",
"value": "06"
},
{
"context": "C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=setup:",
"end": 5803,
"score": 0.7227845191955566,
"start": 5801,
"tag": "IP_ADDRESS",
"value": "F9"
},
{
"context": "F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=setup:act",
"end": 5806,
"score": 0.8287947773933411,
"start": 5804,
"tag": "IP_ADDRESS",
"value": "9C"
},
{
"context": "0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=setup:actpass\n ",
"end": 5812,
"score": 0.6912768483161926,
"start": 5807,
"tag": "IP_ADDRESS",
"value": "10:D8"
},
{
"context": ":D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=setup:actpass\n ",
"end": 5815,
"score": 0.7653364539146423,
"start": 5814,
"tag": "IP_ADDRESS",
"value": "8"
},
{
"context": ":3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=setup:actpass\n ",
"end": 5818,
"score": 0.6645402908325195,
"start": 5817,
"tag": "IP_ADDRESS",
"value": "A"
},
{
"context": ":65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1\n a=setup:actpass\n ",
"end": 5821,
"score": 0.5613031387329102,
"start": 5820,
"tag": "IP_ADDRESS",
"value": "8"
},
{
"context": "r within IPv4\n sdp: \"\"\"\n c=IN IP4 224.2z.1.1\n \"\"\"\n expected: undefined\n ,\n ",
"end": 6064,
"score": 0.6358760595321655,
"start": 6055,
"tag": "IP_ADDRESS",
"value": "24.2z.1.1"
},
{
"context": "ter within IPv6\n sdp: \"\"\"\n c=IN IP6 ff15:g::101\n \"\"\"\n expected: undefined\n ,\n ",
"end": 6197,
"score": 0.9924992322921753,
"start": 6186,
"tag": "IP_ADDRESS",
"value": "ff15:g::101"
}
] | proxy/spec/util.spec.coffee | ahf/snowflake | 1 | ###
jasmine tests for Snowflake utils
###
describe 'BuildUrl', ->
it 'should parse just protocol and host', ->
expect(buildUrl('http', 'example.com')).toBe 'http://example.com'
it 'should handle different ports', ->
expect buildUrl 'http', 'example.com', 80
.toBe 'http://example.com'
expect buildUrl 'http', 'example.com', 81
.toBe 'http://example.com:81'
expect buildUrl 'http', 'example.com', 443
.toBe 'http://example.com:443'
expect buildUrl 'http', 'example.com', 444
.toBe 'http://example.com:444'
it 'should handle paths', ->
expect buildUrl 'http', 'example.com', 80, '/'
.toBe 'http://example.com/'
expect buildUrl 'http', 'example.com', 80,'/test?k=%#v'
.toBe 'http://example.com/test%3Fk%3D%25%23v'
expect buildUrl 'http', 'example.com', 80, '/test'
.toBe 'http://example.com/test'
it 'should handle params', ->
expect buildUrl 'http', 'example.com', 80, '/test', [['k', '%#v']]
.toBe 'http://example.com/test?k=%25%23v'
expect buildUrl 'http', 'example.com', 80, '/test', [['a', 'b'], ['c', 'd']]
.toBe 'http://example.com/test?a=b&c=d'
it 'should handle ips', ->
expect buildUrl 'http', '1.2.3.4'
.toBe 'http://1.2.3.4'
expect buildUrl 'http', '1:2::3:4'
.toBe 'http://[1:2::3:4]'
it 'should handle bogus', ->
expect buildUrl 'http', 'bog][us'
.toBe 'http://bog%5D%5Bus'
expect buildUrl 'http', 'bog:u]s'
.toBe 'http://bog%3Au%5Ds'
describe 'Parse', ->
describe 'cookie', ->
it 'parses correctly', ->
expect Parse.cookie ''
.toEqual {}
expect Parse.cookie 'a=b'
.toEqual { a: 'b' }
expect Parse.cookie 'a=b=c'
.toEqual { a: 'b=c' }
expect Parse.cookie 'a=b; c=d'
.toEqual { a: 'b', c: 'd' }
expect Parse.cookie 'a=b ; c=d'
.toEqual { a: 'b', c: 'd' }
expect Parse.cookie 'a= b'
.toEqual { a: 'b' }
expect Parse.cookie 'a='
.toEqual { a: '' }
expect Parse.cookie 'key'
.toBeNull()
expect Parse.cookie 'key=%26%20'
.toEqual { key: '& ' }
expect Parse.cookie 'a=\'\''
.toEqual { a: '\'\'' }
describe 'address', ->
it 'parses IPv4', ->
expect Parse.address ''
.toBeNull()
expect Parse.address '3.3.3.3:4444'
.toEqual { host: '3.3.3.3', port: 4444 }
expect Parse.address '3.3.3.3'
.toBeNull()
expect Parse.address '3.3.3.3:0x1111'
.toBeNull()
expect Parse.address '3.3.3.3:-4444'
.toBeNull()
expect Parse.address '3.3.3.3:65536'
.toBeNull()
it 'parses IPv6', ->
expect Parse.address '[1:2::a:f]:4444'
.toEqual { host: '1:2::a:f', port: 4444 }
expect Parse.address '[1:2::a:f]'
.toBeNull()
expect Parse.address '[1:2::a:f]:0x1111'
.toBeNull()
expect Parse.address '[1:2::a:f]:-4444'
.toBeNull()
expect Parse.address '[1:2::a:f]:65536'
.toBeNull()
expect Parse.address '[1:2::ffff:1.2.3.4]:4444'
.toEqual { host: '1:2::ffff:1.2.3.4', port: 4444 }
describe 'ipFromSDP', ->
testCases = [
# https://tools.ietf.org/html/rfc4566#section-5
sdp: """
v=0
o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
s=SDP Seminar
i=A Seminar on the session description protocol
u=http://www.example.com/seminars/sdp.pdf
e=j.doe@example.com (Jane Doe)
c=IN IP4 224.2.17.12/127
t=2873397496 2873404696
a=recvonly
m=audio 49170 RTP/AVP 0
m=video 51372 RTP/AVP 99
a=rtpmap:99 h263-1998/90000
"""
expected: '224.2.17.12'
,
# Missing c= line
sdp: """
v=0
o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
s=SDP Seminar
i=A Seminar on the session description protocol
u=http://www.example.com/seminars/sdp.pdf
e=j.doe@example.com (Jane Doe)
t=2873397496 2873404696
a=recvonly
m=audio 49170 RTP/AVP 0
m=video 51372 RTP/AVP 99
a=rtpmap:99 h263-1998/90000
"""
expected: undefined
,
# Single line, IP address only
sdp: "c=IN IP4 224.2.1.1\n"
expected: '224.2.1.1'
,
# Same, with TTL
sdp: "c=IN IP4 224.2.1.1/127\n"
expected: '224.2.1.1'
,
# Same, with TTL and multicast addresses
sdp: "c=IN IP4 224.2.1.1/127/3\n"
expected: '224.2.1.1'
,
# IPv6, address only
sdp: "c=IN IP6 FF15::101\n"
expected: 'ff15::101'
,
# Same, with multicast addresses
sdp: "c=IN IP6 FF15::101/3\n"
expected: 'ff15::101'
,
# Multiple c= lines
sdp: """
c=IN IP4 1.2.3.4
c=IN IP4 5.6.7.8
"""
expected: '1.2.3.4'
,
# Modified from SDP sent by snowflake-client.
sdp: """
v=0
o=- 7860378660295630295 2 IN IP4 127.0.0.1
s=-
t=0 0
a=group:BUNDLE data
a=msid-semantic: WMS
m=application 54653 DTLS/SCTP 5000
c=IN IP4 1.2.3.4
a=candidate:3581707038 1 udp 2122260223 192.168.0.1 54653 typ host generation 0 network-id 1 network-cost 50
a=candidate:2617212910 1 tcp 1518280447 192.168.0.1 59673 typ host tcptype passive generation 0 network-id 1 network-cost 50
a=candidate:2082671819 1 udp 1686052607 1.2.3.4 54653 typ srflx raddr 192.168.0.1 rport 54653 generation 0 network-id 1 network-cost 50
a=ice-ufrag:IBdf
a=ice-pwd:G3lTrrC9gmhQx481AowtkhYz
a=fingerprint:sha-256 53:F8:84:D9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1
a=setup:actpass
a=mid:data
a=sctpmap:5000 webrtc-datachannel 1024
"""
expected: '1.2.3.4'
,
# Improper character within IPv4
sdp: """
c=IN IP4 224.2z.1.1
"""
expected: undefined
,
# Improper character within IPv6
sdp: """
c=IN IP6 ff15:g::101
"""
expected: undefined
,
# Bogus "IP7" addrtype
sdp: "c=IN IP7 1.2.3.4\n"
expected: undefined
]
it 'parses SDP', ->
for test in testCases
# https://tools.ietf.org/html/rfc4566#section-5: "The sequence # CRLF
# (0x0d0a) is used to end a record, although parsers SHOULD be tolerant
# and also accept records terminated with a single newline character."
# We represent the test cases with LF line endings for convenience, and
# test them both that way and with CRLF line endings.
expect(Parse.ipFromSDP(test.sdp)?.toLowerCase()).toEqual(test.expected)
expect(Parse.ipFromSDP(test.sdp.replace(/\n/, "\r\n"))?.toLowerCase()).toEqual(test.expected)
describe 'query string', ->
it 'should parse correctly', ->
expect Query.parse ''
.toEqual {}
expect Query.parse 'a=b'
.toEqual { a: 'b' }
expect Query.parse 'a=b=c'
.toEqual { a: 'b=c' }
expect Query.parse 'a=b&c=d'
.toEqual { a: 'b', c: 'd' }
expect Query.parse 'client=&relay=1.2.3.4%3A9001'
.toEqual { client: '', relay: '1.2.3.4:9001' }
expect Query.parse 'a=b%26c=d'
.toEqual { a: 'b&c=d' }
expect Query.parse 'a%3db=d'
.toEqual { 'a=b': 'd' }
expect Query.parse 'a=b+c%20d'
.toEqual { 'a': 'b c d' }
expect Query.parse 'a=b+c%2bd'
.toEqual { 'a': 'b c+d' }
expect Query.parse 'a+b=c'
.toEqual { 'a b': 'c' }
expect Query.parse 'a=b+c+d'
.toEqual { a: 'b c d' }
it 'uses the first appearance of duplicate key', ->
expect Query.parse 'a=b&c=d&a=e'
.toEqual { a: 'b', c: 'd' }
expect Query.parse 'a'
.toEqual { a: '' }
expect Query.parse '=b'
.toEqual { '': 'b' }
expect Query.parse '&a=b'
.toEqual { '': '', a: 'b' }
expect Query.parse 'a=b&'
.toEqual { a: 'b', '':'' }
expect Query.parse 'a=b&&c=d'
.toEqual { a: 'b', '':'', c: 'd' }
describe 'Params', ->
describe 'bool', ->
getBool = (query) ->
Params.getBool (Query.parse query), 'param', false
it 'parses correctly', ->
expect(getBool 'param=true').toBe true
expect(getBool 'param').toBe true
expect(getBool 'param=').toBe true
expect(getBool 'param=1').toBe true
expect(getBool 'param=0').toBe false
expect(getBool 'param=false').toBe false
expect(getBool 'param=unexpected').toBeNull()
expect(getBool 'pram=true').toBe false
describe 'address', ->
DEFAULT = { host: '1.1.1.1', port: 2222 }
getAddress = (query) ->
Params.getAddress query, 'addr', DEFAULT
it 'parses correctly', ->
expect(getAddress {}).toEqual DEFAULT
expect getAddress { addr: '3.3.3.3:4444' }
.toEqual { host: '3.3.3.3', port: 4444 }
expect getAddress { x: '3.3.3.3:4444' }
.toEqual DEFAULT
expect getAddress { addr: '---' }
.toBeNull()
| 102642 | ###
jasmine tests for Snowflake utils
###
describe 'BuildUrl', ->
it 'should parse just protocol and host', ->
expect(buildUrl('http', 'example.com')).toBe 'http://example.com'
it 'should handle different ports', ->
expect buildUrl 'http', 'example.com', 80
.toBe 'http://example.com'
expect buildUrl 'http', 'example.com', 81
.toBe 'http://example.com:81'
expect buildUrl 'http', 'example.com', 443
.toBe 'http://example.com:443'
expect buildUrl 'http', 'example.com', 444
.toBe 'http://example.com:444'
it 'should handle paths', ->
expect buildUrl 'http', 'example.com', 80, '/'
.toBe 'http://example.com/'
expect buildUrl 'http', 'example.com', 80,'/test?k=%#v'
.toBe 'http://example.com/test%3Fk%3D%25%23v'
expect buildUrl 'http', 'example.com', 80, '/test'
.toBe 'http://example.com/test'
it 'should handle params', ->
expect buildUrl 'http', 'example.com', 80, '/test', [['k', '%#v']]
.toBe 'http://example.com/test?k=%25%23v'
expect buildUrl 'http', 'example.com', 80, '/test', [['a', 'b'], ['c', 'd']]
.toBe 'http://example.com/test?a=b&c=d'
it 'should handle ips', ->
expect buildUrl 'http', '1.2.3.4'
.toBe 'http://1.2.3.4'
expect buildUrl 'http', 'fdf8:f53e:61e4::18:4'
.toBe 'http://[fdf8:f53e:61e4::18:4]'
it 'should handle bogus', ->
expect buildUrl 'http', 'bog][us'
.toBe 'http://bog%5D%5Bus'
expect buildUrl 'http', 'bog:u]s'
.toBe 'http://bog%3Au%5Ds'
describe 'Parse', ->
describe 'cookie', ->
it 'parses correctly', ->
expect Parse.cookie ''
.toEqual {}
expect Parse.cookie 'a=b'
.toEqual { a: 'b' }
expect Parse.cookie 'a=b=c'
.toEqual { a: 'b=c' }
expect Parse.cookie 'a=b; c=d'
.toEqual { a: 'b', c: 'd' }
expect Parse.cookie 'a=b ; c=d'
.toEqual { a: 'b', c: 'd' }
expect Parse.cookie 'a= b'
.toEqual { a: 'b' }
expect Parse.cookie 'a='
.toEqual { a: '' }
expect Parse.cookie 'key'
.toBeNull()
expect Parse.cookie 'key=%26%20'
.toEqual { key: '& ' }
expect Parse.cookie 'a=\'\''
.toEqual { a: '\'\'' }
describe 'address', ->
it 'parses IPv4', ->
expect Parse.address ''
.toBeNull()
expect Parse.address '3.3.3.3:4444'
.toEqual { host: '3.3.3.3', port: 4444 }
expect Parse.address '3.3.3.3'
.toBeNull()
expect Parse.address '3.3.3.3:0x1111'
.toBeNull()
expect Parse.address '3.3.3.3:-4444'
.toBeNull()
expect Parse.address '3.3.3.3:65536'
.toBeNull()
it 'parses IPv6', ->
expect Parse.address '[fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:f]:4444'
.toEqual { host: 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:f', port: 4444 }
expect Parse.address '[fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:f]'
.toBeNull()
expect Parse.address '[fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:f]:0x1111'
.toBeNull()
expect Parse.address '[1:fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:f]:-4444'
.toBeNull()
expect Parse.address '[fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:f]:65536'
.toBeNull()
expect Parse.address '[fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:1.2.3.4]:4444'
.toEqual { host: 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:1.2.3.4', port: 4444 }
describe 'ipFromSDP', ->
testCases = [
# https://tools.ietf.org/html/rfc4566#section-5
sdp: """
v=0
o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
s=SDP Seminar
i=A Seminar on the session description protocol
u=http://www.example.com/seminars/sdp.pdf
e=<EMAIL> (<NAME>)
c=IN IP4 172.16.58.3/127
t=2873397496 2873404696
a=recvonly
m=audio 49170 RTP/AVP 0
m=video 51372 RTP/AVP 99
a=rtpmap:99 h263-1998/90000
"""
expected: '172.16.58.3'
,
# Missing c= line
sdp: """
v=0
o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
s=SDP Seminar
i=A Seminar on the session description protocol
u=http://www.example.com/seminars/sdp.pdf
e=<EMAIL> (<NAME>)
t=2873397496 2873404696
a=recvonly
m=audio 49170 RTP/AVP 0
m=video 51372 RTP/AVP 99
a=rtpmap:99 h263-1998/90000
"""
expected: undefined
,
# Single line, IP address only
sdp: "c=IN IP4 172.16.17.32\n"
expected: '172.16.17.32'
,
# Same, with TTL
sdp: "c=IN IP4 172.16.17.32/127\n"
expected: '172.16.17.32'
,
# Same, with TTL and multicast addresses
sdp: "c=IN IP4 172.16.17.32/127/3\n"
expected: '172.16.17.32'
,
# IPv6, address only
sdp: "c=IN IP6 fdf8:f53e:61e4::18\n"
expected: 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b'
,
# Same, with multicast addresses
sdp: "c=IN IP6 fdf8:f53e:61e4::18/3\n"
expected: 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b'
,
# Multiple c= lines
sdp: """
c=IN IP4 1.2.3.4
c=IN IP4 5.6.7.8
"""
expected: '1.2.3.4'
,
# Modified from SDP sent by snowflake-client.
sdp: """
v=0
o=- 7860378660295630295 2 IN IP4 127.0.0.1
s=-
t=0 0
a=group:BUNDLE data
a=msid-semantic: WMS
m=application 54653 DTLS/SCTP 5000
c=IN IP4 1.2.3.4
a=candidate:3581707038 1 udp 2122260223 192.168.0.1 54653 typ host generation 0 network-id 1 network-cost 50
a=candidate:2617212910 1 tcp 1518280447 192.168.0.1 59673 typ host tcptype passive generation 0 network-id 1 network-cost 50
a=candidate:2082671819 1 udp 1686052607 1.2.3.4 54653 typ srflx raddr 192.168.0.1 rport 54653 generation 0 network-id 1 network-cost 50
a=ice-ufrag:IBdf
a=ice-pwd:<PASSWORD>
a=fingerprint:sha-256 53:F8:84:D9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1
a=setup:actpass
a=mid:data
a=sctpmap:5000 webrtc-datachannel 1024
"""
expected: '1.2.3.4'
,
# Improper character within IPv4
sdp: """
c=IN IP4 224.2z.1.1
"""
expected: undefined
,
# Improper character within IPv6
sdp: """
c=IN IP6 ff15:g::101
"""
expected: undefined
,
# Bogus "IP7" addrtype
sdp: "c=IN IP7 1.2.3.4\n"
expected: undefined
]
it 'parses SDP', ->
for test in testCases
# https://tools.ietf.org/html/rfc4566#section-5: "The sequence # CRLF
# (0x0d0a) is used to end a record, although parsers SHOULD be tolerant
# and also accept records terminated with a single newline character."
# We represent the test cases with LF line endings for convenience, and
# test them both that way and with CRLF line endings.
expect(Parse.ipFromSDP(test.sdp)?.toLowerCase()).toEqual(test.expected)
expect(Parse.ipFromSDP(test.sdp.replace(/\n/, "\r\n"))?.toLowerCase()).toEqual(test.expected)
describe 'query string', ->
it 'should parse correctly', ->
expect Query.parse ''
.toEqual {}
expect Query.parse 'a=b'
.toEqual { a: 'b' }
expect Query.parse 'a=b=c'
.toEqual { a: 'b=c' }
expect Query.parse 'a=b&c=d'
.toEqual { a: 'b', c: 'd' }
expect Query.parse 'client=&relay=1.2.3.4%3A9001'
.toEqual { client: '', relay: '1.2.3.4:9001' }
expect Query.parse 'a=b%26c=d'
.toEqual { a: 'b&c=d' }
expect Query.parse 'a%3db=d'
.toEqual { 'a=b': 'd' }
expect Query.parse 'a=b+c%20d'
.toEqual { 'a': 'b c d' }
expect Query.parse 'a=b+c%2bd'
.toEqual { 'a': 'b c+d' }
expect Query.parse 'a+b=c'
.toEqual { 'a b': 'c' }
expect Query.parse 'a=b+c+d'
.toEqual { a: 'b c d' }
it 'uses the first appearance of duplicate key', ->
expect Query.parse 'a=b&c=d&a=e'
.toEqual { a: 'b', c: 'd' }
expect Query.parse 'a'
.toEqual { a: '' }
expect Query.parse '=b'
.toEqual { '': 'b' }
expect Query.parse '&a=b'
.toEqual { '': '', a: 'b' }
expect Query.parse 'a=b&'
.toEqual { a: 'b', '':'' }
expect Query.parse 'a=b&&c=d'
.toEqual { a: 'b', '':'', c: 'd' }
describe 'Params', ->
describe 'bool', ->
getBool = (query) ->
Params.getBool (Query.parse query), 'param', false
it 'parses correctly', ->
expect(getBool 'param=true').toBe true
expect(getBool 'param').toBe true
expect(getBool 'param=').toBe true
expect(getBool 'param=1').toBe true
expect(getBool 'param=0').toBe false
expect(getBool 'param=false').toBe false
expect(getBool 'param=unexpected').toBeNull()
expect(getBool 'pram=true').toBe false
describe 'address', ->
DEFAULT = { host: '1.1.1.1', port: 2222 }
getAddress = (query) ->
Params.getAddress query, 'addr', DEFAULT
it 'parses correctly', ->
expect(getAddress {}).toEqual DEFAULT
expect getAddress { addr: '3.3.3.3:4444' }
.toEqual { host: '3.3.3.3', port: 4444 }
expect getAddress { x: '3.3.3.3:4444' }
.toEqual DEFAULT
expect getAddress { addr: '---' }
.toBeNull()
| true | ###
jasmine tests for Snowflake utils
###
describe 'BuildUrl', ->
it 'should parse just protocol and host', ->
expect(buildUrl('http', 'example.com')).toBe 'http://example.com'
it 'should handle different ports', ->
expect buildUrl 'http', 'example.com', 80
.toBe 'http://example.com'
expect buildUrl 'http', 'example.com', 81
.toBe 'http://example.com:81'
expect buildUrl 'http', 'example.com', 443
.toBe 'http://example.com:443'
expect buildUrl 'http', 'example.com', 444
.toBe 'http://example.com:444'
it 'should handle paths', ->
expect buildUrl 'http', 'example.com', 80, '/'
.toBe 'http://example.com/'
expect buildUrl 'http', 'example.com', 80,'/test?k=%#v'
.toBe 'http://example.com/test%3Fk%3D%25%23v'
expect buildUrl 'http', 'example.com', 80, '/test'
.toBe 'http://example.com/test'
it 'should handle params', ->
expect buildUrl 'http', 'example.com', 80, '/test', [['k', '%#v']]
.toBe 'http://example.com/test?k=%25%23v'
expect buildUrl 'http', 'example.com', 80, '/test', [['a', 'b'], ['c', 'd']]
.toBe 'http://example.com/test?a=b&c=d'
it 'should handle ips', ->
expect buildUrl 'http', '1.2.3.4'
.toBe 'http://1.2.3.4'
expect buildUrl 'http', 'PI:IP_ADDRESS:fdf8:f53e:61e4::18END_PI:4'
.toBe 'http://[PI:IP_ADDRESS:fdf8:f53e:61e4::18END_PI:4]'
it 'should handle bogus', ->
expect buildUrl 'http', 'bog][us'
.toBe 'http://bog%5D%5Bus'
expect buildUrl 'http', 'bog:u]s'
.toBe 'http://bog%3Au%5Ds'
describe 'Parse', ->
describe 'cookie', ->
it 'parses correctly', ->
expect Parse.cookie ''
.toEqual {}
expect Parse.cookie 'a=b'
.toEqual { a: 'b' }
expect Parse.cookie 'a=b=c'
.toEqual { a: 'b=c' }
expect Parse.cookie 'a=b; c=d'
.toEqual { a: 'b', c: 'd' }
expect Parse.cookie 'a=b ; c=d'
.toEqual { a: 'b', c: 'd' }
expect Parse.cookie 'a= b'
.toEqual { a: 'b' }
expect Parse.cookie 'a='
.toEqual { a: '' }
expect Parse.cookie 'key'
.toBeNull()
expect Parse.cookie 'key=%26%20'
.toEqual { key: '& ' }
expect Parse.cookie 'a=\'\''
.toEqual { a: '\'\'' }
describe 'address', ->
it 'parses IPv4', ->
expect Parse.address ''
.toBeNull()
expect Parse.address '3.3.3.3:4444'
.toEqual { host: '3.3.3.3', port: 4444 }
expect Parse.address '3.3.3.3'
.toBeNull()
expect Parse.address '3.3.3.3:0x1111'
.toBeNull()
expect Parse.address '3.3.3.3:-4444'
.toBeNull()
expect Parse.address '3.3.3.3:65536'
.toBeNull()
it 'parses IPv6', ->
expect Parse.address '[PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI:f]:4444'
.toEqual { host: 'PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI:f', port: 4444 }
expect Parse.address '[PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI:f]'
.toBeNull()
expect Parse.address '[PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI:f]:0x1111'
.toBeNull()
expect Parse.address '[1:PI:IP_ADDRESS:fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3bEND_PI:f]:-4444'
.toBeNull()
expect Parse.address '[PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI:f]:65536'
.toBeNull()
expect Parse.address '[PI:IP_ADDRESS:fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3bEND_PI:1.2.3.4]:4444'
.toEqual { host: 'PI:IP_ADDRESS:fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3bEND_PI:1.2.3.4', port: 4444 }
describe 'ipFromSDP', ->
testCases = [
# https://tools.ietf.org/html/rfc4566#section-5
sdp: """
v=0
o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
s=SDP Seminar
i=A Seminar on the session description protocol
u=http://www.example.com/seminars/sdp.pdf
e=PI:EMAIL:<EMAIL>END_PI (PI:NAME:<NAME>END_PI)
c=IN IP4 PI:IP_ADDRESS:172.16.58.3END_PI/127
t=2873397496 2873404696
a=recvonly
m=audio 49170 RTP/AVP 0
m=video 51372 RTP/AVP 99
a=rtpmap:99 h263-1998/90000
"""
expected: 'PI:IP_ADDRESS:172.16.58.3END_PI'
,
# Missing c= line
sdp: """
v=0
o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5
s=SDP Seminar
i=A Seminar on the session description protocol
u=http://www.example.com/seminars/sdp.pdf
e=PI:EMAIL:<EMAIL>END_PI (PI:NAME:<NAME>END_PI)
t=2873397496 2873404696
a=recvonly
m=audio 49170 RTP/AVP 0
m=video 51372 RTP/AVP 99
a=rtpmap:99 h263-1998/90000
"""
expected: undefined
,
# Single line, IP address only
sdp: "c=IN IP4 PI:IP_ADDRESS:172.16.17.32END_PI\n"
expected: 'PI:IP_ADDRESS:172.16.17.32END_PI'
,
# Same, with TTL
sdp: "c=IN IP4 PI:IP_ADDRESS:172.16.17.32END_PI/127\n"
expected: 'PI:IP_ADDRESS:172.16.17.32END_PI'
,
# Same, with TTL and multicast addresses
sdp: "c=IN IP4 PI:IP_ADDRESS:172.16.17.32END_PI/127/3\n"
expected: 'PI:IP_ADDRESS:172.16.17.32END_PI'
,
# IPv6, address only
sdp: "c=IN IP6 PI:IP_ADDRESS:fdf8:f53e:61e4::18END_PI\n"
expected: 'PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI'
,
# Same, with multicast addresses
sdp: "c=IN IP6 PI:IP_ADDRESS:fdf8:f53e:61e4::18END_PI/3\n"
expected: 'PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI'
,
# Multiple c= lines
sdp: """
c=IN IP4 1.2.3.4
c=IN IP4 5.6.7.8
"""
expected: '1.2.3.4'
,
# Modified from SDP sent by snowflake-client.
sdp: """
v=0
o=- 7860378660295630295 2 IN IP4 127.0.0.1
s=-
t=0 0
a=group:BUNDLE data
a=msid-semantic: WMS
m=application 54653 DTLS/SCTP 5000
c=IN IP4 1.2.3.4
a=candidate:3581707038 1 udp 2122260223 192.168.0.1 54653 typ host generation 0 network-id 1 network-cost 50
a=candidate:2617212910 1 tcp 1518280447 192.168.0.1 59673 typ host tcptype passive generation 0 network-id 1 network-cost 50
a=candidate:2082671819 1 udp 1686052607 1.2.3.4 54653 typ srflx raddr 192.168.0.1 rport 54653 generation 0 network-id 1 network-cost 50
a=ice-ufrag:IBdf
a=ice-pwd:PI:PASSWORD:<PASSWORD>END_PI
a=fingerprint:sha-256 53:F8:84:D9:3C:1F:A0:44:AA:D6:3C:65:80:D3:CB:6F:23:90:17:41:06:F9:9C:10:D8:48:4A:A8:B6:FA:14:A1
a=setup:actpass
a=mid:data
a=sctpmap:5000 webrtc-datachannel 1024
"""
expected: '1.2.3.4'
,
# Improper character within IPv4
sdp: """
c=IN IP4 224.2z.1.1
"""
expected: undefined
,
# Improper character within IPv6
sdp: """
c=IN IP6 ff15:g::101
"""
expected: undefined
,
# Bogus "IP7" addrtype
sdp: "c=IN IP7 1.2.3.4\n"
expected: undefined
]
it 'parses SDP', ->
for test in testCases
# https://tools.ietf.org/html/rfc4566#section-5: "The sequence # CRLF
# (0x0d0a) is used to end a record, although parsers SHOULD be tolerant
# and also accept records terminated with a single newline character."
# We represent the test cases with LF line endings for convenience, and
# test them both that way and with CRLF line endings.
expect(Parse.ipFromSDP(test.sdp)?.toLowerCase()).toEqual(test.expected)
expect(Parse.ipFromSDP(test.sdp.replace(/\n/, "\r\n"))?.toLowerCase()).toEqual(test.expected)
describe 'query string', ->
it 'should parse correctly', ->
expect Query.parse ''
.toEqual {}
expect Query.parse 'a=b'
.toEqual { a: 'b' }
expect Query.parse 'a=b=c'
.toEqual { a: 'b=c' }
expect Query.parse 'a=b&c=d'
.toEqual { a: 'b', c: 'd' }
expect Query.parse 'client=&relay=1.2.3.4%3A9001'
.toEqual { client: '', relay: '1.2.3.4:9001' }
expect Query.parse 'a=b%26c=d'
.toEqual { a: 'b&c=d' }
expect Query.parse 'a%3db=d'
.toEqual { 'a=b': 'd' }
expect Query.parse 'a=b+c%20d'
.toEqual { 'a': 'b c d' }
expect Query.parse 'a=b+c%2bd'
.toEqual { 'a': 'b c+d' }
expect Query.parse 'a+b=c'
.toEqual { 'a b': 'c' }
expect Query.parse 'a=b+c+d'
.toEqual { a: 'b c d' }
it 'uses the first appearance of duplicate key', ->
expect Query.parse 'a=b&c=d&a=e'
.toEqual { a: 'b', c: 'd' }
expect Query.parse 'a'
.toEqual { a: '' }
expect Query.parse '=b'
.toEqual { '': 'b' }
expect Query.parse '&a=b'
.toEqual { '': '', a: 'b' }
expect Query.parse 'a=b&'
.toEqual { a: 'b', '':'' }
expect Query.parse 'a=b&&c=d'
.toEqual { a: 'b', '':'', c: 'd' }
describe 'Params', ->
describe 'bool', ->
getBool = (query) ->
Params.getBool (Query.parse query), 'param', false
it 'parses correctly', ->
expect(getBool 'param=true').toBe true
expect(getBool 'param').toBe true
expect(getBool 'param=').toBe true
expect(getBool 'param=1').toBe true
expect(getBool 'param=0').toBe false
expect(getBool 'param=false').toBe false
expect(getBool 'param=unexpected').toBeNull()
expect(getBool 'pram=true').toBe false
describe 'address', ->
DEFAULT = { host: '1.1.1.1', port: 2222 }
getAddress = (query) ->
Params.getAddress query, 'addr', DEFAULT
it 'parses correctly', ->
expect(getAddress {}).toEqual DEFAULT
expect getAddress { addr: '3.3.3.3:4444' }
.toEqual { host: '3.3.3.3', port: 4444 }
expect getAddress { x: '3.3.3.3:4444' }
.toEqual DEFAULT
expect getAddress { addr: '---' }
.toBeNull()
|
[
{
"context": "for lazy loading images\r\n\r\nCopyright (c) 2007-2013 Mika Tuupola\r\n\r\nLicensed under the MIT license:\r\nhttp://www.op",
"end": 106,
"score": 0.9998944401741028,
"start": 94,
"tag": "NAME",
"value": "Mika Tuupola"
}
] | dev/coffee/main/inclusion-jquery.lazy.coffee | ADHDboy/testwev | 27 | ###
$Lazy load
Lazy Load - jQuery plugin for lazy loading images
Copyright (c) 2007-2013 Mika Tuupola
Licensed under the MIT license:
http://www.opensource.org/licenses/mit-license.php
Project home:
http://www.appelsiini.net/projects/lazyload
Version: 1.9.3
###
(($, window, document, undefined_) ->
$window = $(window)
$.fn.lazyload = (options) ->
update = ->
counter = 0
elements.each ->
$this = $(this)
return if settings.skip_invisible and not $this.is(":visible")
if $.abovethetop(this, settings) or $.leftofbegin(this, settings)
# Nothing.
else if not $.belowthefold(this, settings) and not $.rightoffold(this, settings)
$this.trigger "appear"
# if we found an image we'll load, reset the counter
counter = 0
else
false if ++counter > settings.failure_limit
elements = this
$container = undefined
settings =
threshold: 0
failure_limit: 0
event: "scroll"
effect: "show"
container: window
data_attribute: "src"
skip_invisible: true
appear: null
load: null
placeholder: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsQAAA7EAZUrDhsAAAANSURBVBhXYzh8+PB/AAffA0nNPuCLAAAAAElFTkSuQmCC"
if options
# Maintain BC for a couple of versions.
if `undefined` isnt options.failurelimit
options.failure_limit = options.failurelimit
delete options.failurelimit
if `undefined` isnt options.effectspeed
options.effect_speed = options.effectspeed
delete options.effectspeed
$.extend settings, options
# Cache container as jQuery as object.
$container = (if (settings.container is `undefined` or settings.container is window) then $window else $(settings.container))
# Fire one scroll event per scroll. Not one scroll event per image.
if 0 is settings.event.indexOf("scroll")
$container.bind settings.event, ->
update()
@each ->
self = this
$self = $(self)
self.loaded = false
# If no src attribute given use data:uri.
$self.attr "src", settings.placeholder if $self.is("img") if $self.attr("src") is `undefined` or $self.attr("src") is false
# When appear is triggered load original image.
$self.one "appear", ->
unless @loaded
if settings.appear
elements_left = elements.length
settings.appear.call self, elements_left, settings
# Remove image from array so it is not looped next time.
$("<img />").bind("load", ->
original = $self.attr("data-" + settings.data_attribute)
$self.hide()
if $self.is("img")
$self.attr "src", original
else
$self.css "background-image", "url('" + original + "')"
$self[settings.effect] settings.effect_speed
self.loaded = true
temp = $.grep(elements, (element) ->
not element.loaded
)
elements = $(temp)
if settings.load
elements_left = elements.length
settings.load.call self, elements_left, settings
).attr "src", $self.attr("data-" + settings.data_attribute)
# When wanted event is triggered load original image
# by triggering appear.
if 0 isnt settings.event.indexOf("scroll")
$self.bind settings.event, ->
$self.trigger "appear" unless self.loaded
# Check if something appears when window is resized.
$window.bind "resize", ->
update()
# With IOS5 force loading images when navigating with back button.
# Non optimal workaround.
if (/(?:iphone|ipod|ipad).*os 5/g).test(navigator.appVersion)
$window.bind "pageshow", (event) ->
if event.originalEvent and event.originalEvent.persisted
elements.each ->
$(this).trigger "appear"
# Force initial check if images should appear.
$(document).ready ->
update()
this
# Convenience methods in jQuery namespace.
# Use as $.belowthefold(element, {threshold : 100, container : window})
$.belowthefold = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = ((if window.innerHeight then window.innerHeight else $window.height())) + $window.scrollTop()
else
fold = $(settings.container).offset().top + $(settings.container).height()
fold <= $(element).offset().top - settings.threshold
$.rightoffold = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.width() + $window.scrollLeft()
else
fold = $(settings.container).offset().left + $(settings.container).width()
fold <= $(element).offset().left - settings.threshold
$.abovethetop = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.scrollTop()
else
fold = $(settings.container).offset().top
fold >= $(element).offset().top + settings.threshold + $(element).height()
$.leftofbegin = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.scrollLeft()
else
fold = $(settings.container).offset().left
fold >= $(element).offset().left + settings.threshold + $(element).width()
$.inviewport = (element, settings) ->
not $.rightoffold(element, settings) and not $.leftofbegin(element, settings) and not $.belowthefold(element, settings) and not $.abovethetop(element, settings)
# Custom selectors for your convenience.
# Use as $("img:below-the-fold").something() or
# $("img").filter(":below-the-fold").something() which is faster
$.extend $.expr[":"],
"below-the-fold": (a) ->
$.belowthefold a,
threshold: 0
"above-the-top": (a) ->
not $.belowthefold(a,
threshold: 0
)
"right-of-screen": (a) ->
$.rightoffold a,
threshold: 0
"left-of-screen": (a) ->
not $.rightoffold(a,
threshold: 0
)
"in-viewport": (a) ->
$.inviewport a,
threshold: 0
# Maintain BC for couple of versions.
"above-the-fold": (a) ->
not $.belowthefold(a,
threshold: 0
)
"right-of-fold": (a) ->
$.rightoffold a,
threshold: 0
"left-of-fold": (a) ->
not $.rightoffold(a,
threshold: 0
)
) jQuery, window, document | 93669 | ###
$Lazy load
Lazy Load - jQuery plugin for lazy loading images
Copyright (c) 2007-2013 <NAME>
Licensed under the MIT license:
http://www.opensource.org/licenses/mit-license.php
Project home:
http://www.appelsiini.net/projects/lazyload
Version: 1.9.3
###
(($, window, document, undefined_) ->
$window = $(window)
$.fn.lazyload = (options) ->
update = ->
counter = 0
elements.each ->
$this = $(this)
return if settings.skip_invisible and not $this.is(":visible")
if $.abovethetop(this, settings) or $.leftofbegin(this, settings)
# Nothing.
else if not $.belowthefold(this, settings) and not $.rightoffold(this, settings)
$this.trigger "appear"
# if we found an image we'll load, reset the counter
counter = 0
else
false if ++counter > settings.failure_limit
elements = this
$container = undefined
settings =
threshold: 0
failure_limit: 0
event: "scroll"
effect: "show"
container: window
data_attribute: "src"
skip_invisible: true
appear: null
load: null
placeholder: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsQAAA7EAZUrDhsAAAANSURBVBhXYzh8+PB/AAffA0nNPuCLAAAAAElFTkSuQmCC"
if options
# Maintain BC for a couple of versions.
if `undefined` isnt options.failurelimit
options.failure_limit = options.failurelimit
delete options.failurelimit
if `undefined` isnt options.effectspeed
options.effect_speed = options.effectspeed
delete options.effectspeed
$.extend settings, options
# Cache container as jQuery as object.
$container = (if (settings.container is `undefined` or settings.container is window) then $window else $(settings.container))
# Fire one scroll event per scroll. Not one scroll event per image.
if 0 is settings.event.indexOf("scroll")
$container.bind settings.event, ->
update()
@each ->
self = this
$self = $(self)
self.loaded = false
# If no src attribute given use data:uri.
$self.attr "src", settings.placeholder if $self.is("img") if $self.attr("src") is `undefined` or $self.attr("src") is false
# When appear is triggered load original image.
$self.one "appear", ->
unless @loaded
if settings.appear
elements_left = elements.length
settings.appear.call self, elements_left, settings
# Remove image from array so it is not looped next time.
$("<img />").bind("load", ->
original = $self.attr("data-" + settings.data_attribute)
$self.hide()
if $self.is("img")
$self.attr "src", original
else
$self.css "background-image", "url('" + original + "')"
$self[settings.effect] settings.effect_speed
self.loaded = true
temp = $.grep(elements, (element) ->
not element.loaded
)
elements = $(temp)
if settings.load
elements_left = elements.length
settings.load.call self, elements_left, settings
).attr "src", $self.attr("data-" + settings.data_attribute)
# When wanted event is triggered load original image
# by triggering appear.
if 0 isnt settings.event.indexOf("scroll")
$self.bind settings.event, ->
$self.trigger "appear" unless self.loaded
# Check if something appears when window is resized.
$window.bind "resize", ->
update()
# With IOS5 force loading images when navigating with back button.
# Non optimal workaround.
if (/(?:iphone|ipod|ipad).*os 5/g).test(navigator.appVersion)
$window.bind "pageshow", (event) ->
if event.originalEvent and event.originalEvent.persisted
elements.each ->
$(this).trigger "appear"
# Force initial check if images should appear.
$(document).ready ->
update()
this
# Convenience methods in jQuery namespace.
# Use as $.belowthefold(element, {threshold : 100, container : window})
$.belowthefold = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = ((if window.innerHeight then window.innerHeight else $window.height())) + $window.scrollTop()
else
fold = $(settings.container).offset().top + $(settings.container).height()
fold <= $(element).offset().top - settings.threshold
$.rightoffold = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.width() + $window.scrollLeft()
else
fold = $(settings.container).offset().left + $(settings.container).width()
fold <= $(element).offset().left - settings.threshold
$.abovethetop = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.scrollTop()
else
fold = $(settings.container).offset().top
fold >= $(element).offset().top + settings.threshold + $(element).height()
$.leftofbegin = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.scrollLeft()
else
fold = $(settings.container).offset().left
fold >= $(element).offset().left + settings.threshold + $(element).width()
$.inviewport = (element, settings) ->
not $.rightoffold(element, settings) and not $.leftofbegin(element, settings) and not $.belowthefold(element, settings) and not $.abovethetop(element, settings)
# Custom selectors for your convenience.
# Use as $("img:below-the-fold").something() or
# $("img").filter(":below-the-fold").something() which is faster
$.extend $.expr[":"],
"below-the-fold": (a) ->
$.belowthefold a,
threshold: 0
"above-the-top": (a) ->
not $.belowthefold(a,
threshold: 0
)
"right-of-screen": (a) ->
$.rightoffold a,
threshold: 0
"left-of-screen": (a) ->
not $.rightoffold(a,
threshold: 0
)
"in-viewport": (a) ->
$.inviewport a,
threshold: 0
# Maintain BC for couple of versions.
"above-the-fold": (a) ->
not $.belowthefold(a,
threshold: 0
)
"right-of-fold": (a) ->
$.rightoffold a,
threshold: 0
"left-of-fold": (a) ->
not $.rightoffold(a,
threshold: 0
)
) jQuery, window, document | true | ###
$Lazy load
Lazy Load - jQuery plugin for lazy loading images
Copyright (c) 2007-2013 PI:NAME:<NAME>END_PI
Licensed under the MIT license:
http://www.opensource.org/licenses/mit-license.php
Project home:
http://www.appelsiini.net/projects/lazyload
Version: 1.9.3
###
(($, window, document, undefined_) ->
$window = $(window)
$.fn.lazyload = (options) ->
update = ->
counter = 0
elements.each ->
$this = $(this)
return if settings.skip_invisible and not $this.is(":visible")
if $.abovethetop(this, settings) or $.leftofbegin(this, settings)
# Nothing.
else if not $.belowthefold(this, settings) and not $.rightoffold(this, settings)
$this.trigger "appear"
# if we found an image we'll load, reset the counter
counter = 0
else
false if ++counter > settings.failure_limit
elements = this
$container = undefined
settings =
threshold: 0
failure_limit: 0
event: "scroll"
effect: "show"
container: window
data_attribute: "src"
skip_invisible: true
appear: null
load: null
placeholder: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsQAAA7EAZUrDhsAAAANSURBVBhXYzh8+PB/AAffA0nNPuCLAAAAAElFTkSuQmCC"
if options
# Maintain BC for a couple of versions.
if `undefined` isnt options.failurelimit
options.failure_limit = options.failurelimit
delete options.failurelimit
if `undefined` isnt options.effectspeed
options.effect_speed = options.effectspeed
delete options.effectspeed
$.extend settings, options
# Cache container as jQuery as object.
$container = (if (settings.container is `undefined` or settings.container is window) then $window else $(settings.container))
# Fire one scroll event per scroll. Not one scroll event per image.
if 0 is settings.event.indexOf("scroll")
$container.bind settings.event, ->
update()
@each ->
self = this
$self = $(self)
self.loaded = false
# If no src attribute given use data:uri.
$self.attr "src", settings.placeholder if $self.is("img") if $self.attr("src") is `undefined` or $self.attr("src") is false
# When appear is triggered load original image.
$self.one "appear", ->
unless @loaded
if settings.appear
elements_left = elements.length
settings.appear.call self, elements_left, settings
# Remove image from array so it is not looped next time.
$("<img />").bind("load", ->
original = $self.attr("data-" + settings.data_attribute)
$self.hide()
if $self.is("img")
$self.attr "src", original
else
$self.css "background-image", "url('" + original + "')"
$self[settings.effect] settings.effect_speed
self.loaded = true
temp = $.grep(elements, (element) ->
not element.loaded
)
elements = $(temp)
if settings.load
elements_left = elements.length
settings.load.call self, elements_left, settings
).attr "src", $self.attr("data-" + settings.data_attribute)
# When wanted event is triggered load original image
# by triggering appear.
if 0 isnt settings.event.indexOf("scroll")
$self.bind settings.event, ->
$self.trigger "appear" unless self.loaded
# Check if something appears when window is resized.
$window.bind "resize", ->
update()
# With IOS5 force loading images when navigating with back button.
# Non optimal workaround.
if (/(?:iphone|ipod|ipad).*os 5/g).test(navigator.appVersion)
$window.bind "pageshow", (event) ->
if event.originalEvent and event.originalEvent.persisted
elements.each ->
$(this).trigger "appear"
# Force initial check if images should appear.
$(document).ready ->
update()
this
# Convenience methods in jQuery namespace.
# Use as $.belowthefold(element, {threshold : 100, container : window})
$.belowthefold = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = ((if window.innerHeight then window.innerHeight else $window.height())) + $window.scrollTop()
else
fold = $(settings.container).offset().top + $(settings.container).height()
fold <= $(element).offset().top - settings.threshold
$.rightoffold = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.width() + $window.scrollLeft()
else
fold = $(settings.container).offset().left + $(settings.container).width()
fold <= $(element).offset().left - settings.threshold
$.abovethetop = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.scrollTop()
else
fold = $(settings.container).offset().top
fold >= $(element).offset().top + settings.threshold + $(element).height()
$.leftofbegin = (element, settings) ->
fold = undefined
if settings.container is `undefined` or settings.container is window
fold = $window.scrollLeft()
else
fold = $(settings.container).offset().left
fold >= $(element).offset().left + settings.threshold + $(element).width()
$.inviewport = (element, settings) ->
not $.rightoffold(element, settings) and not $.leftofbegin(element, settings) and not $.belowthefold(element, settings) and not $.abovethetop(element, settings)
# Custom selectors for your convenience.
# Use as $("img:below-the-fold").something() or
# $("img").filter(":below-the-fold").something() which is faster
$.extend $.expr[":"],
"below-the-fold": (a) ->
$.belowthefold a,
threshold: 0
"above-the-top": (a) ->
not $.belowthefold(a,
threshold: 0
)
"right-of-screen": (a) ->
$.rightoffold a,
threshold: 0
"left-of-screen": (a) ->
not $.rightoffold(a,
threshold: 0
)
"in-viewport": (a) ->
$.inviewport a,
threshold: 0
# Maintain BC for couple of versions.
"above-the-fold": (a) ->
not $.belowthefold(a,
threshold: 0
)
"right-of-fold": (a) ->
$.rightoffold a,
threshold: 0
"left-of-fold": (a) ->
not $.rightoffold(a,
threshold: 0
)
) jQuery, window, document |
[
{
"context": "e in conjunction with the scss mixin\n#\n# @author Olivier Bossel <olivier.bossel@gmail.com>\n# @created 20.01.16\n#",
"end": 184,
"score": 0.9998512864112854,
"start": 170,
"tag": "NAME",
"value": "Olivier Bossel"
},
{
"context": "with the scss mixin\n#\n# @author Olivier Bossel <olivier.bossel@gmail.com>\n# @created 20.01.16\n# @updated 20.01.16\n# @ver",
"end": 210,
"score": 0.9999327659606934,
"start": 186,
"tag": "EMAIL",
"value": "olivier.bossel@gmail.com"
}
] | node_modules/sugarcss/coffee/sugar-DOMNodeInserted.coffee | hagsey/nlpt2 | 0 | ###
# Sugar-domnodeinserted.js
#
# This little js file allow you to detect when an element has been inserted in the page in conjunction with the scss mixin
#
# @author Olivier Bossel <olivier.bossel@gmail.com>
# @created 20.01.16
# @updated 20.01.16
# @version 1.0.0
###
((factory) ->
if typeof define == 'function' and define.amd
# AMD. Register as an anonymous module.
define [ ], factory
else if typeof exports == 'object'
# Node/CommonJS
factory()
else
# Browser globals
factory()
return
) () ->
window.SugarDOMNodeInserted =
# track if already inited
_inited : false
# enabled
enabled : true
###
Init
###
init : () ->
# update inited state
@_inited = true
# wait until the dom is loaded
if document.readyState == 'interactive' then @_init()
else document.addEventListener 'DOMContentLoaded', (e) => @_init()
###
Internal init
###
_init : ->
# do nothing if not enabled
return if not @enabled
# listen animations start
document.addEventListener("animationstart", @_onAnimationStart, false);
document.addEventListener("MSAnimationStart", @_onAnimationStart, false);
document.addEventListener("webkitAnimationStart", @_onAnimationStart, false);
###
On animation start
###
_onAnimationStart : (e) ->
if e.animationName == 's-DOMNodeInserted'
e.target.dispatchEvent(new CustomEvent('DOMNodeInserted', {
bubbles : true,
cancelable : true
}));
# init the filter
SugarDOMNodeInserted.init()
# return the Sugar object
SugarDOMNodeInserted | 178147 | ###
# Sugar-domnodeinserted.js
#
# This little js file allow you to detect when an element has been inserted in the page in conjunction with the scss mixin
#
# @author <NAME> <<EMAIL>>
# @created 20.01.16
# @updated 20.01.16
# @version 1.0.0
###
((factory) ->
if typeof define == 'function' and define.amd
# AMD. Register as an anonymous module.
define [ ], factory
else if typeof exports == 'object'
# Node/CommonJS
factory()
else
# Browser globals
factory()
return
) () ->
window.SugarDOMNodeInserted =
# track if already inited
_inited : false
# enabled
enabled : true
###
Init
###
init : () ->
# update inited state
@_inited = true
# wait until the dom is loaded
if document.readyState == 'interactive' then @_init()
else document.addEventListener 'DOMContentLoaded', (e) => @_init()
###
Internal init
###
_init : ->
# do nothing if not enabled
return if not @enabled
# listen animations start
document.addEventListener("animationstart", @_onAnimationStart, false);
document.addEventListener("MSAnimationStart", @_onAnimationStart, false);
document.addEventListener("webkitAnimationStart", @_onAnimationStart, false);
###
On animation start
###
_onAnimationStart : (e) ->
if e.animationName == 's-DOMNodeInserted'
e.target.dispatchEvent(new CustomEvent('DOMNodeInserted', {
bubbles : true,
cancelable : true
}));
# init the filter
SugarDOMNodeInserted.init()
# return the Sugar object
SugarDOMNodeInserted | true | ###
# Sugar-domnodeinserted.js
#
# This little js file allow you to detect when an element has been inserted in the page in conjunction with the scss mixin
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @created 20.01.16
# @updated 20.01.16
# @version 1.0.0
###
((factory) ->
if typeof define == 'function' and define.amd
# AMD. Register as an anonymous module.
define [ ], factory
else if typeof exports == 'object'
# Node/CommonJS
factory()
else
# Browser globals
factory()
return
) () ->
window.SugarDOMNodeInserted =
# track if already inited
_inited : false
# enabled
enabled : true
###
Init
###
init : () ->
# update inited state
@_inited = true
# wait until the dom is loaded
if document.readyState == 'interactive' then @_init()
else document.addEventListener 'DOMContentLoaded', (e) => @_init()
###
Internal init
###
_init : ->
# do nothing if not enabled
return if not @enabled
# listen animations start
document.addEventListener("animationstart", @_onAnimationStart, false);
document.addEventListener("MSAnimationStart", @_onAnimationStart, false);
document.addEventListener("webkitAnimationStart", @_onAnimationStart, false);
###
On animation start
###
_onAnimationStart : (e) ->
if e.animationName == 's-DOMNodeInserted'
e.target.dispatchEvent(new CustomEvent('DOMNodeInserted', {
bubbles : true,
cancelable : true
}));
# init the filter
SugarDOMNodeInserted.init()
# return the Sugar object
SugarDOMNodeInserted |
[
{
"context": "log error\n\nif Meteor.isServer\n\tS3.config =\n\t\tkey:\"yourkey\"\n\t\tsecret:\"yoursecret\"\n\t\tbucket:\"yourbucket\"\n\t\t# ",
"end": 526,
"score": 0.5707787275314331,
"start": 519,
"tag": "KEY",
"value": "yourkey"
},
{
"context": "r.isServer\n\tS3.config =\n\t\tkey:\"yourkey\"\n\t\tsecret:\"yoursecret\"\n\t\tbucket:\"yourbucket\"\n\t\t# region:\"us-standard\" #",
"end": 548,
"score": 0.9889526963233948,
"start": 538,
"tag": "KEY",
"value": "yoursecret"
}
] | example/basic/basic.coffee | jeanfredrik/meteor-s3 | 219 | if Meteor.isClient
Template.basic.helpers
"files": -> S3.collection.find()
Template.basic.events
"click button.upload": (event) ->
S3.upload
files:$("input.file_bag")[0].files
(error,result) ->
if error
console.log "Unable to upload"
else
console.log result
"click button.delete": (event) ->
S3.delete @relative_url, (error,res) =>
if not error
console.log res
S3.collection.remove @_id
else
console.log error
if Meteor.isServer
S3.config =
key:"yourkey"
secret:"yoursecret"
bucket:"yourbucket"
# region:"us-standard" #default
| 200990 | if Meteor.isClient
Template.basic.helpers
"files": -> S3.collection.find()
Template.basic.events
"click button.upload": (event) ->
S3.upload
files:$("input.file_bag")[0].files
(error,result) ->
if error
console.log "Unable to upload"
else
console.log result
"click button.delete": (event) ->
S3.delete @relative_url, (error,res) =>
if not error
console.log res
S3.collection.remove @_id
else
console.log error
if Meteor.isServer
S3.config =
key:"<KEY>"
secret:"<KEY>"
bucket:"yourbucket"
# region:"us-standard" #default
| true | if Meteor.isClient
Template.basic.helpers
"files": -> S3.collection.find()
Template.basic.events
"click button.upload": (event) ->
S3.upload
files:$("input.file_bag")[0].files
(error,result) ->
if error
console.log "Unable to upload"
else
console.log result
"click button.delete": (event) ->
S3.delete @relative_url, (error,res) =>
if not error
console.log res
S3.collection.remove @_id
else
console.log error
if Meteor.isServer
S3.config =
key:"PI:KEY:<KEY>END_PI"
secret:"PI:KEY:<KEY>END_PI"
bucket:"yourbucket"
# region:"us-standard" #default
|
[
{
"context": "analytics.graphs\n\n margin = 20\n keyHeight = 20\n xAxisHeight = 20\n yAxisWidth = 40\n cont",
"end": 10178,
"score": 0.6249411106109619,
"start": 10176,
"tag": "KEY",
"value": "20"
}
] | app/views/editor/campaign/CampaignLevelView.coffee | cihatislamdede/codecombat | 4,858 | require('app/styles/editor/campaign/campaign-level-view.sass')
CocoView = require 'views/core/CocoView'
Level = require 'models/Level'
LevelSession = require 'models/LevelSession'
ModelModal = require 'views/modal/ModelModal'
User = require 'models/User'
utils = require 'core/utils'
module.exports = class CampaignLevelView extends CocoView
id: 'campaign-level-view'
template: require 'templates/editor/campaign/campaign-level-view'
events:
'change .line-graph-checkbox': 'updateGraphCheckbox'
'click .close': 'onClickClose'
'click #reload-button': 'onClickReloadButton'
'dblclick .recent-session': 'onDblClickRecentSession'
'mouseenter .graph-point': 'onMouseEnterPoint'
'mouseleave .graph-point': 'onMouseLeavePoint'
'click .replay-button': 'onClickReplay'
'click #recent-button': 'onClickRecentButton'
limit: 100
constructor: (options, @level) ->
super(options)
@fullLevel = new Level _id: @level.id
@fullLevel.fetch()
@listenToOnce @fullLevel, 'sync', => @render?()
@levelSlug = @level.get('slug')
@getAnalytics()
getRenderData: ->
c = super()
c.level = if @fullLevel.loaded then @fullLevel else @level
c.analytics = @analytics
c
afterRender: ->
super()
$("#input-startday").datepicker dateFormat: "yy-mm-dd"
$("#input-endday").datepicker dateFormat: "yy-mm-dd"
# TODO: Why does this have to be called from afterRender() instead of getRenderData()?
@updateAnalyticsGraphs()
updateGraphCheckbox: (e) ->
lineID = $(e.target).data('lineid')
checked = $(e.target).prop('checked')
for graph in @analytics.graphs
for line in graph.lines
if line.lineID is lineID
line.enabled = checked
return @render()
onClickClose: ->
@$el.addClass('hidden')
@trigger 'hidden'
onClickReloadButton: () =>
startDay = $('#input-startday').val()
endDay = $('#input-endday').val()
@getAnalytics startDay, endDay
onDblClickRecentSession: (e) ->
# Admin view of players' code
return unless me.isAdmin()
row = $(e.target).parent()
player = new User _id: row.data 'player-id'
session = new LevelSession _id: row.data 'session-id'
@openModalView new ModelModal models: [session, player]
onMouseEnterPoint: (e) ->
pointID = $(e.target).data('pointid')
container = @$el.find(".graph-point-info-container[data-pointid=#{pointID}]").show()
margin = 20
width = container.outerWidth()
height = container.outerHeight()
container.css('left', e.offsetX - width / 2)
container.css('top', e.offsetY - height - margin)
onMouseLeavePoint: (e) ->
pointID = $(e.target).data('pointid')
@$el.find(".graph-point-info-container[data-pointid=#{pointID}]").hide()
onClickReplay: (e) ->
sessionID = $(e.target).closest('tr').data 'session-id'
session = _.find @analytics.recentSessions.data, _id: sessionID
url = "/play/level/#{@level.get('slug')}?session=#{sessionID}&observing=true"
if session.isForClassroom
url += '&course=560f1a9f22961295f9427742'
window.open url, '_blank'
onClickRecentButton: (event) ->
event.preventDefault()
@limit = @$('#input-session-num').val()
@analytics.recentSessions = {data: [], loading: true}
@render() # Hide old session data while we fetch new sessions
@getRecentSessions @makeFinishDataFetch(@analytics.recentSessions)
makeFinishDataFetch: (data) =>
return =>
return if @destroyed
@updateAnalyticsGraphData()
data.loading = false
@render()
updateAnalyticsGraphData: ->
# console.log 'updateAnalyticsGraphData'
# Build graphs based on available @analytics data
# Currently only one graph
@analytics.graphs = [graphID: 'level-completions', lines: []]
# TODO: Where should this metadata live?
# TODO: lineIDs assumed to be unique across graphs
completionLineID = 'level-completions'
playtimeLineID = 'level-playtime'
helpsLineID = 'helps-clicked'
videosLineID = 'help-videos'
lineMetadata = {}
lineMetadata[completionLineID] =
description: 'Level Completion (%)'
color: 'red'
lineMetadata[playtimeLineID] =
description: 'Average Playtime (s)'
color: 'green'
lineMetadata[helpsLineID] =
description: 'Help click rate (%)'
color: 'blue'
lineMetadata[videosLineID] =
description: 'Help video rate (%)'
color: 'purple'
# Use this days aggregate to fill in missing days from the analytics data
days = {}
days["#{day.created[0..3]}-#{day.created[4..5]}-#{day.created[6..7]}"] = true for day in @analytics.levelCompletions.data if @analytics?.levelCompletions?.data?
days[day.created] = true for day in @analytics.levelPlaytimes.data if @analytics?.levelPlaytimes?.data?
days["#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"] = true for day in @analytics.levelHelps.data if @analytics?.levelHelps?.data?
days = Object.keys(days).sort (a, b) -> if a < b then -1 else 1
if days.length > 0
currentIndex = 0
currentDay = days[currentIndex]
currentDate = new Date(currentDay + "T00:00:00.000Z")
lastDay = days[days.length - 1]
while currentDay isnt lastDay
days.splice currentIndex, 0, currentDay if days[currentIndex] isnt currentDay
currentIndex++
currentDate.setUTCDate(currentDate.getUTCDate() + 1)
currentDay = currentDate.toISOString().substr(0, 10)
# Update level completion graph data
dayStartedMap = {}
if @analytics?.levelCompletions?.data?.length > 0
# Build line data
levelPoints = []
for day, i in @analytics.levelCompletions.data
dayStartedMap[day.created] = day.started
rate = parseFloat(day.rate)
levelPoints.push
x: i
y: rate
started: day.started
day: "#{day.created[0..3]}-#{day.created[4..5]}-#{day.created[6..7]}"
pointID: "#{completionLineID}#{i}"
values: ["Started: #{day.started}", "Finished: #{day.finished}", "Completion rate: #{rate.toFixed(2)}%"]
# Ensure points for each day
for day, i in days
if levelPoints.length <= i or levelPoints[i].day isnt day
levelPoints.splice i, 0,
y: 0.0
day: day
values: []
levelPoints[i].x = i
levelPoints[i].pointID = "#{completionLineID}#{i}"
@analytics.graphs[0].lines.push
lineID: completionLineID
enabled: true
points: levelPoints
description: lineMetadata[completionLineID].description
lineColor: lineMetadata[completionLineID].color
min: 0
max: 100.0
# Update average playtime graph data
if @analytics?.levelPlaytimes?.data?.length > 0
# Build line data
playtimePoints = []
for day, i in @analytics.levelPlaytimes.data
avg = parseFloat(day.average)
playtimePoints.push
x: i
y: avg
day: day.created
pointID: "#{playtimeLineID}#{i}"
values: ["Average playtime: #{avg.toFixed(2)}s"]
# Ensure points for each day
for day, i in days
if playtimePoints.length <= i or playtimePoints[i].day isnt day
playtimePoints.splice i, 0,
y: 0.0
day: day
values: []
playtimePoints[i].x = i
playtimePoints[i].pointID = "#{playtimeLineID}#{i}"
@analytics.graphs[0].lines.push
lineID: playtimeLineID
enabled: true
points: playtimePoints
description: lineMetadata[playtimeLineID].description
lineColor: lineMetadata[playtimeLineID].color
min: 0
max: d3.max(playtimePoints, (d) -> d.y)
# Update help graph data
if @analytics?.levelHelps?.data?.length > 0
# Build line data
helpPoints = []
videoPoints = []
for day, i in @analytics.levelHelps.data
helpCount = day.alertHelps + day.paletteHelps
started = dayStartedMap[day.day] ? 0
clickRate = if started > 0 then helpCount / started * 100 else 0
videoRate = day.videoStarts / helpCount * 100
helpPoints.push
x: i
y: clickRate
day: "#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"
pointID: "#{helpsLineID}#{i}"
values: ["Helps clicked: #{helpCount}", "Helps click clickRate: #{clickRate.toFixed(2)}%"]
videoPoints.push
x: i
y: videoRate
day: "#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"
pointID: "#{videosLineID}#{i}"
values: ["Help videos started: #{day.videoStarts}", "Help videos start rate: #{videoRate.toFixed(2)}%"]
# Ensure points for each day
for day, i in days
if helpPoints.length <= i or helpPoints[i].day isnt day
helpPoints.splice i, 0,
y: 0.0
day: day
values: []
helpPoints[i].x = i
helpPoints[i].pointID = "#{helpsLineID}#{i}"
if videoPoints.length <= i or videoPoints[i].day isnt day
videoPoints.splice i, 0,
y: 0.0
day: day
values: []
videoPoints[i].x = i
videoPoints[i].pointID = "#{videosLineID}#{i}"
if d3.max(helpPoints, (d) -> d.y) > 0
@analytics.graphs[0].lines.push
lineID: helpsLineID
enabled: true
points: helpPoints
description: lineMetadata[helpsLineID].description
lineColor: lineMetadata[helpsLineID].color
min: 0
max: 100.0
if d3.max(videoPoints, (d) -> d.y) > 0
@analytics.graphs[0].lines.push
lineID: videosLineID
enabled: true
points: videoPoints
description: lineMetadata[videosLineID].description
lineColor: lineMetadata[videosLineID].color
min: 0
max: 100.0
updateAnalyticsGraphs: ->
# Build d3 graphs
return unless @analytics?.graphs?.length > 0
containerSelector = '.line-graph-container'
# console.log 'updateAnalyticsGraphs', containerSelector, @analytics.graphs
margin = 20
keyHeight = 20
xAxisHeight = 20
yAxisWidth = 40
containerWidth = $(containerSelector).width()
containerHeight = $(containerSelector).height()
for graph in @analytics.graphs
graphLineCount = _.reduce graph.lines, ((sum, item) -> if item.enabled then sum + 1 else sum), 0
svg = d3.select(containerSelector).append("svg")
.attr("width", containerWidth)
.attr("height", containerHeight)
width = containerWidth - margin * 2 - yAxisWidth * graphLineCount
height = containerHeight - margin * 2 - xAxisHeight - keyHeight * graphLineCount
currentLine = 0
for line in graph.lines
continue unless line.enabled
xRange = d3.scale.linear().range([0, width]).domain([d3.min(line.points, (d) -> d.x), d3.max(line.points, (d) -> d.x)])
yRange = d3.scale.linear().range([height, 0]).domain([line.min, line.max])
# x-Axis and guideline once
if currentLine is 0
startDay = new Date(line.points[0].day)
endDay = new Date(line.points[line.points.length - 1].day)
xAxisRange = d3.time.scale()
.domain([startDay, endDay])
.range([0, width])
xAxis = d3.svg.axis()
.scale(xAxisRange)
svg.append("g")
.attr("class", "x axis")
.call(xAxis)
.selectAll("text")
.attr("dy", ".35em")
.attr("transform", "translate(" + (margin + yAxisWidth * (graphLineCount - 1)) + "," + (height + margin) + ")")
.style("text-anchor", "start")
# Horizontal guidelines
svg.selectAll(".line")
.data([10, 30, 50, 70, 90])
.enter()
.append("line")
.attr("x1", margin + yAxisWidth * graphLineCount)
.attr("y1", (d) -> margin + yRange(d))
.attr("x2", margin + yAxisWidth * graphLineCount + width)
.attr("y2", (d) -> margin + yRange(d))
.attr("stroke", line.lineColor)
.style("opacity", "0.5")
# y-Axis
yAxisRange = d3.scale.linear().range([height, 0]).domain([line.min, line.max])
yAxis = d3.svg.axis()
.scale(yRange)
.orient("left")
svg.append("g")
.attr("class", "y axis")
.attr("transform", "translate(" + (margin + yAxisWidth * currentLine) + "," + margin + ")")
.style("color", line.lineColor)
.call(yAxis)
.selectAll("text")
.attr("y", 0)
.attr("x", 0)
.attr("fill", line.lineColor)
.style("text-anchor", "start")
# Key
svg.append("line")
.attr("x1", margin)
.attr("y1", margin + height + xAxisHeight + keyHeight * currentLine + keyHeight / 2)
.attr("x2", margin + 40)
.attr("y2", margin + height + xAxisHeight + keyHeight * currentLine + keyHeight / 2)
.attr("stroke", line.lineColor)
.attr("class", "key-line")
svg.append("text")
.attr("x", margin + 40 + 10)
.attr("y", margin + height + xAxisHeight + keyHeight * currentLine + (keyHeight + 10) / 2)
.attr("fill", line.lineColor)
.attr("class", "key-text")
.text(line.description)
# Path and points
svg.selectAll(".circle")
.data(line.points)
.enter()
.append("circle")
.attr("transform", "translate(" + (margin + yAxisWidth * graphLineCount) + "," + margin + ")")
.attr("cx", (d) -> xRange(d.x))
.attr("cy", (d) -> yRange(d.y))
.attr("r", (d) -> if d.started then Math.max(3, Math.min(10, Math.log(parseInt(d.started)))) + 2 else 6)
.attr("fill", line.lineColor)
.attr("stroke-width", 1)
.attr("class", "graph-point")
.attr("data-pointid", (d) -> "#{line.lineID}#{d.x}")
d3line = d3.svg.line()
.x((d) -> xRange(d.x))
.y((d) -> yRange(d.y))
.interpolate("linear")
svg.append("path")
.attr("d", d3line(line.points))
.attr("transform", "translate(" + (margin + yAxisWidth * graphLineCount) + "," + margin + ")")
.style("stroke-width", 1)
.style("stroke", line.lineColor)
.style("fill", "none")
currentLine++
getAnalytics: (startDay, endDay) =>
# Analytics APIs use 2 different day formats
if startDay?
startDayDashed = startDay
startDay = startDay.replace(/-/g, '')
else
startDay = utils.getUTCDay -14
startDayDashed = "#{startDay[0..3]}-#{startDay[4..5]}-#{startDay[6..7]}"
if endDay?
endDayDashed = endDay
endDay = endDay.replace(/-/g, '')
else
endDay = utils.getUTCDay -1
endDayDashed = "#{endDay[0..3]}-#{endDay[4..5]}-#{endDay[6..7]}"
# Initialize
@analytics =
startDay: startDayDashed
endDay: endDayDashed
commonProblems: {data: [], loading: true}
levelCompletions: {data: [], loading: true}
levelHelps: {data: [], loading: true}
levelPlaytimes: {data: [], loading: true}
recentSessions: {data: [], loading: true}
graphs: []
@render() # Hide old analytics data while we fetch new data
@getCommonLevelProblems startDayDashed, endDayDashed, @makeFinishDataFetch(@analytics.commonProblems)
@getLevelCompletions startDay, endDay, @makeFinishDataFetch(@analytics.levelCompletions)
@getLevelHelps startDay, endDay, @makeFinishDataFetch(@analytics.levelHelps)
@getLevelPlaytimes startDayDashed, endDayDashed, @makeFinishDataFetch(@analytics.levelPlaytimes)
@getRecentSessions @makeFinishDataFetch(@analytics.recentSessions)
getCommonLevelProblems: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getCommonLevelProblems', data
@analytics.commonProblems.data = data
doneCallback()
request = @supermodel.addRequestResource 'common_problems', {
url: '/db/user.code.problem/-/common_problems'
data: {startDay: startDay, endDay: endDay, slug: @levelSlug}
method: 'POST'
success: success
}, 0
request.load()
getLevelCompletions: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelCompletions', data
data.sort (a, b) -> if a.created < b.created then -1 else 1
mapFn = (item) ->
item.rate = if item.started > 0 then item.finished / item.started * 100 else 0
item
@analytics.levelCompletions.data = _.map data, mapFn, @
doneCallback()
request = @supermodel.addRequestResource 'level_completions', {
url: '/db/analytics_perday/-/level_completions'
data: {startDay: startDay, endDay: endDay, slug: @levelSlug}
method: 'POST'
success: success
}, 0
request.load()
getLevelHelps: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelHelps', data
@analytics.levelHelps.data = data.sort (a, b) -> if a.day < b.day then -1 else 1
doneCallback()
request = @supermodel.addRequestResource 'level_helps', {
url: '/db/analytics_perday/-/level_helps'
data: {startDay: startDay, endDay: endDay, slugs: [@levelSlug]}
method: 'POST'
success: success
}, 0
request.load()
getLevelPlaytimes: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelPlaytimes', data
@analytics.levelPlaytimes.data = data.sort (a, b) -> if a.created < b.created then -1 else 1
doneCallback()
request = @supermodel.addRequestResource 'playtime_averages', {
url: '/db/level/-/playtime_averages'
data: {startDay: startDay, endDay: endDay, slugs: [@levelSlug]}
method: 'POST'
success: success
}, 0
request.load()
getRecentSessions: (doneCallback) ->
# limit = 100
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getRecentSessions', data
@analytics.recentSessions.data = data
doneCallback()
request = @supermodel.addRequestResource 'level_sessions_recent', {
url: "/db/level.session/-/recent"
data: {slug: @levelSlug, limit: @limit}
method: 'POST'
success: success
}, 0
request.load()
| 14661 | require('app/styles/editor/campaign/campaign-level-view.sass')
CocoView = require 'views/core/CocoView'
Level = require 'models/Level'
LevelSession = require 'models/LevelSession'
ModelModal = require 'views/modal/ModelModal'
User = require 'models/User'
utils = require 'core/utils'
module.exports = class CampaignLevelView extends CocoView
id: 'campaign-level-view'
template: require 'templates/editor/campaign/campaign-level-view'
events:
'change .line-graph-checkbox': 'updateGraphCheckbox'
'click .close': 'onClickClose'
'click #reload-button': 'onClickReloadButton'
'dblclick .recent-session': 'onDblClickRecentSession'
'mouseenter .graph-point': 'onMouseEnterPoint'
'mouseleave .graph-point': 'onMouseLeavePoint'
'click .replay-button': 'onClickReplay'
'click #recent-button': 'onClickRecentButton'
limit: 100
constructor: (options, @level) ->
super(options)
@fullLevel = new Level _id: @level.id
@fullLevel.fetch()
@listenToOnce @fullLevel, 'sync', => @render?()
@levelSlug = @level.get('slug')
@getAnalytics()
getRenderData: ->
c = super()
c.level = if @fullLevel.loaded then @fullLevel else @level
c.analytics = @analytics
c
afterRender: ->
super()
$("#input-startday").datepicker dateFormat: "yy-mm-dd"
$("#input-endday").datepicker dateFormat: "yy-mm-dd"
# TODO: Why does this have to be called from afterRender() instead of getRenderData()?
@updateAnalyticsGraphs()
updateGraphCheckbox: (e) ->
lineID = $(e.target).data('lineid')
checked = $(e.target).prop('checked')
for graph in @analytics.graphs
for line in graph.lines
if line.lineID is lineID
line.enabled = checked
return @render()
onClickClose: ->
@$el.addClass('hidden')
@trigger 'hidden'
onClickReloadButton: () =>
startDay = $('#input-startday').val()
endDay = $('#input-endday').val()
@getAnalytics startDay, endDay
onDblClickRecentSession: (e) ->
# Admin view of players' code
return unless me.isAdmin()
row = $(e.target).parent()
player = new User _id: row.data 'player-id'
session = new LevelSession _id: row.data 'session-id'
@openModalView new ModelModal models: [session, player]
onMouseEnterPoint: (e) ->
pointID = $(e.target).data('pointid')
container = @$el.find(".graph-point-info-container[data-pointid=#{pointID}]").show()
margin = 20
width = container.outerWidth()
height = container.outerHeight()
container.css('left', e.offsetX - width / 2)
container.css('top', e.offsetY - height - margin)
onMouseLeavePoint: (e) ->
pointID = $(e.target).data('pointid')
@$el.find(".graph-point-info-container[data-pointid=#{pointID}]").hide()
onClickReplay: (e) ->
sessionID = $(e.target).closest('tr').data 'session-id'
session = _.find @analytics.recentSessions.data, _id: sessionID
url = "/play/level/#{@level.get('slug')}?session=#{sessionID}&observing=true"
if session.isForClassroom
url += '&course=560f1a9f22961295f9427742'
window.open url, '_blank'
onClickRecentButton: (event) ->
event.preventDefault()
@limit = @$('#input-session-num').val()
@analytics.recentSessions = {data: [], loading: true}
@render() # Hide old session data while we fetch new sessions
@getRecentSessions @makeFinishDataFetch(@analytics.recentSessions)
makeFinishDataFetch: (data) =>
return =>
return if @destroyed
@updateAnalyticsGraphData()
data.loading = false
@render()
updateAnalyticsGraphData: ->
# console.log 'updateAnalyticsGraphData'
# Build graphs based on available @analytics data
# Currently only one graph
@analytics.graphs = [graphID: 'level-completions', lines: []]
# TODO: Where should this metadata live?
# TODO: lineIDs assumed to be unique across graphs
completionLineID = 'level-completions'
playtimeLineID = 'level-playtime'
helpsLineID = 'helps-clicked'
videosLineID = 'help-videos'
lineMetadata = {}
lineMetadata[completionLineID] =
description: 'Level Completion (%)'
color: 'red'
lineMetadata[playtimeLineID] =
description: 'Average Playtime (s)'
color: 'green'
lineMetadata[helpsLineID] =
description: 'Help click rate (%)'
color: 'blue'
lineMetadata[videosLineID] =
description: 'Help video rate (%)'
color: 'purple'
# Use this days aggregate to fill in missing days from the analytics data
days = {}
days["#{day.created[0..3]}-#{day.created[4..5]}-#{day.created[6..7]}"] = true for day in @analytics.levelCompletions.data if @analytics?.levelCompletions?.data?
days[day.created] = true for day in @analytics.levelPlaytimes.data if @analytics?.levelPlaytimes?.data?
days["#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"] = true for day in @analytics.levelHelps.data if @analytics?.levelHelps?.data?
days = Object.keys(days).sort (a, b) -> if a < b then -1 else 1
if days.length > 0
currentIndex = 0
currentDay = days[currentIndex]
currentDate = new Date(currentDay + "T00:00:00.000Z")
lastDay = days[days.length - 1]
while currentDay isnt lastDay
days.splice currentIndex, 0, currentDay if days[currentIndex] isnt currentDay
currentIndex++
currentDate.setUTCDate(currentDate.getUTCDate() + 1)
currentDay = currentDate.toISOString().substr(0, 10)
# Update level completion graph data
dayStartedMap = {}
if @analytics?.levelCompletions?.data?.length > 0
# Build line data
levelPoints = []
for day, i in @analytics.levelCompletions.data
dayStartedMap[day.created] = day.started
rate = parseFloat(day.rate)
levelPoints.push
x: i
y: rate
started: day.started
day: "#{day.created[0..3]}-#{day.created[4..5]}-#{day.created[6..7]}"
pointID: "#{completionLineID}#{i}"
values: ["Started: #{day.started}", "Finished: #{day.finished}", "Completion rate: #{rate.toFixed(2)}%"]
# Ensure points for each day
for day, i in days
if levelPoints.length <= i or levelPoints[i].day isnt day
levelPoints.splice i, 0,
y: 0.0
day: day
values: []
levelPoints[i].x = i
levelPoints[i].pointID = "#{completionLineID}#{i}"
@analytics.graphs[0].lines.push
lineID: completionLineID
enabled: true
points: levelPoints
description: lineMetadata[completionLineID].description
lineColor: lineMetadata[completionLineID].color
min: 0
max: 100.0
# Update average playtime graph data
if @analytics?.levelPlaytimes?.data?.length > 0
# Build line data
playtimePoints = []
for day, i in @analytics.levelPlaytimes.data
avg = parseFloat(day.average)
playtimePoints.push
x: i
y: avg
day: day.created
pointID: "#{playtimeLineID}#{i}"
values: ["Average playtime: #{avg.toFixed(2)}s"]
# Ensure points for each day
for day, i in days
if playtimePoints.length <= i or playtimePoints[i].day isnt day
playtimePoints.splice i, 0,
y: 0.0
day: day
values: []
playtimePoints[i].x = i
playtimePoints[i].pointID = "#{playtimeLineID}#{i}"
@analytics.graphs[0].lines.push
lineID: playtimeLineID
enabled: true
points: playtimePoints
description: lineMetadata[playtimeLineID].description
lineColor: lineMetadata[playtimeLineID].color
min: 0
max: d3.max(playtimePoints, (d) -> d.y)
# Update help graph data
if @analytics?.levelHelps?.data?.length > 0
# Build line data
helpPoints = []
videoPoints = []
for day, i in @analytics.levelHelps.data
helpCount = day.alertHelps + day.paletteHelps
started = dayStartedMap[day.day] ? 0
clickRate = if started > 0 then helpCount / started * 100 else 0
videoRate = day.videoStarts / helpCount * 100
helpPoints.push
x: i
y: clickRate
day: "#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"
pointID: "#{helpsLineID}#{i}"
values: ["Helps clicked: #{helpCount}", "Helps click clickRate: #{clickRate.toFixed(2)}%"]
videoPoints.push
x: i
y: videoRate
day: "#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"
pointID: "#{videosLineID}#{i}"
values: ["Help videos started: #{day.videoStarts}", "Help videos start rate: #{videoRate.toFixed(2)}%"]
# Ensure points for each day
for day, i in days
if helpPoints.length <= i or helpPoints[i].day isnt day
helpPoints.splice i, 0,
y: 0.0
day: day
values: []
helpPoints[i].x = i
helpPoints[i].pointID = "#{helpsLineID}#{i}"
if videoPoints.length <= i or videoPoints[i].day isnt day
videoPoints.splice i, 0,
y: 0.0
day: day
values: []
videoPoints[i].x = i
videoPoints[i].pointID = "#{videosLineID}#{i}"
if d3.max(helpPoints, (d) -> d.y) > 0
@analytics.graphs[0].lines.push
lineID: helpsLineID
enabled: true
points: helpPoints
description: lineMetadata[helpsLineID].description
lineColor: lineMetadata[helpsLineID].color
min: 0
max: 100.0
if d3.max(videoPoints, (d) -> d.y) > 0
@analytics.graphs[0].lines.push
lineID: videosLineID
enabled: true
points: videoPoints
description: lineMetadata[videosLineID].description
lineColor: lineMetadata[videosLineID].color
min: 0
max: 100.0
updateAnalyticsGraphs: ->
# Build d3 graphs
return unless @analytics?.graphs?.length > 0
containerSelector = '.line-graph-container'
# console.log 'updateAnalyticsGraphs', containerSelector, @analytics.graphs
margin = 20
keyHeight = <KEY>
xAxisHeight = 20
yAxisWidth = 40
containerWidth = $(containerSelector).width()
containerHeight = $(containerSelector).height()
for graph in @analytics.graphs
graphLineCount = _.reduce graph.lines, ((sum, item) -> if item.enabled then sum + 1 else sum), 0
svg = d3.select(containerSelector).append("svg")
.attr("width", containerWidth)
.attr("height", containerHeight)
width = containerWidth - margin * 2 - yAxisWidth * graphLineCount
height = containerHeight - margin * 2 - xAxisHeight - keyHeight * graphLineCount
currentLine = 0
for line in graph.lines
continue unless line.enabled
xRange = d3.scale.linear().range([0, width]).domain([d3.min(line.points, (d) -> d.x), d3.max(line.points, (d) -> d.x)])
yRange = d3.scale.linear().range([height, 0]).domain([line.min, line.max])
# x-Axis and guideline once
if currentLine is 0
startDay = new Date(line.points[0].day)
endDay = new Date(line.points[line.points.length - 1].day)
xAxisRange = d3.time.scale()
.domain([startDay, endDay])
.range([0, width])
xAxis = d3.svg.axis()
.scale(xAxisRange)
svg.append("g")
.attr("class", "x axis")
.call(xAxis)
.selectAll("text")
.attr("dy", ".35em")
.attr("transform", "translate(" + (margin + yAxisWidth * (graphLineCount - 1)) + "," + (height + margin) + ")")
.style("text-anchor", "start")
# Horizontal guidelines
svg.selectAll(".line")
.data([10, 30, 50, 70, 90])
.enter()
.append("line")
.attr("x1", margin + yAxisWidth * graphLineCount)
.attr("y1", (d) -> margin + yRange(d))
.attr("x2", margin + yAxisWidth * graphLineCount + width)
.attr("y2", (d) -> margin + yRange(d))
.attr("stroke", line.lineColor)
.style("opacity", "0.5")
# y-Axis
yAxisRange = d3.scale.linear().range([height, 0]).domain([line.min, line.max])
yAxis = d3.svg.axis()
.scale(yRange)
.orient("left")
svg.append("g")
.attr("class", "y axis")
.attr("transform", "translate(" + (margin + yAxisWidth * currentLine) + "," + margin + ")")
.style("color", line.lineColor)
.call(yAxis)
.selectAll("text")
.attr("y", 0)
.attr("x", 0)
.attr("fill", line.lineColor)
.style("text-anchor", "start")
# Key
svg.append("line")
.attr("x1", margin)
.attr("y1", margin + height + xAxisHeight + keyHeight * currentLine + keyHeight / 2)
.attr("x2", margin + 40)
.attr("y2", margin + height + xAxisHeight + keyHeight * currentLine + keyHeight / 2)
.attr("stroke", line.lineColor)
.attr("class", "key-line")
svg.append("text")
.attr("x", margin + 40 + 10)
.attr("y", margin + height + xAxisHeight + keyHeight * currentLine + (keyHeight + 10) / 2)
.attr("fill", line.lineColor)
.attr("class", "key-text")
.text(line.description)
# Path and points
svg.selectAll(".circle")
.data(line.points)
.enter()
.append("circle")
.attr("transform", "translate(" + (margin + yAxisWidth * graphLineCount) + "," + margin + ")")
.attr("cx", (d) -> xRange(d.x))
.attr("cy", (d) -> yRange(d.y))
.attr("r", (d) -> if d.started then Math.max(3, Math.min(10, Math.log(parseInt(d.started)))) + 2 else 6)
.attr("fill", line.lineColor)
.attr("stroke-width", 1)
.attr("class", "graph-point")
.attr("data-pointid", (d) -> "#{line.lineID}#{d.x}")
d3line = d3.svg.line()
.x((d) -> xRange(d.x))
.y((d) -> yRange(d.y))
.interpolate("linear")
svg.append("path")
.attr("d", d3line(line.points))
.attr("transform", "translate(" + (margin + yAxisWidth * graphLineCount) + "," + margin + ")")
.style("stroke-width", 1)
.style("stroke", line.lineColor)
.style("fill", "none")
currentLine++
getAnalytics: (startDay, endDay) =>
# Analytics APIs use 2 different day formats
if startDay?
startDayDashed = startDay
startDay = startDay.replace(/-/g, '')
else
startDay = utils.getUTCDay -14
startDayDashed = "#{startDay[0..3]}-#{startDay[4..5]}-#{startDay[6..7]}"
if endDay?
endDayDashed = endDay
endDay = endDay.replace(/-/g, '')
else
endDay = utils.getUTCDay -1
endDayDashed = "#{endDay[0..3]}-#{endDay[4..5]}-#{endDay[6..7]}"
# Initialize
@analytics =
startDay: startDayDashed
endDay: endDayDashed
commonProblems: {data: [], loading: true}
levelCompletions: {data: [], loading: true}
levelHelps: {data: [], loading: true}
levelPlaytimes: {data: [], loading: true}
recentSessions: {data: [], loading: true}
graphs: []
@render() # Hide old analytics data while we fetch new data
@getCommonLevelProblems startDayDashed, endDayDashed, @makeFinishDataFetch(@analytics.commonProblems)
@getLevelCompletions startDay, endDay, @makeFinishDataFetch(@analytics.levelCompletions)
@getLevelHelps startDay, endDay, @makeFinishDataFetch(@analytics.levelHelps)
@getLevelPlaytimes startDayDashed, endDayDashed, @makeFinishDataFetch(@analytics.levelPlaytimes)
@getRecentSessions @makeFinishDataFetch(@analytics.recentSessions)
getCommonLevelProblems: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getCommonLevelProblems', data
@analytics.commonProblems.data = data
doneCallback()
request = @supermodel.addRequestResource 'common_problems', {
url: '/db/user.code.problem/-/common_problems'
data: {startDay: startDay, endDay: endDay, slug: @levelSlug}
method: 'POST'
success: success
}, 0
request.load()
getLevelCompletions: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelCompletions', data
data.sort (a, b) -> if a.created < b.created then -1 else 1
mapFn = (item) ->
item.rate = if item.started > 0 then item.finished / item.started * 100 else 0
item
@analytics.levelCompletions.data = _.map data, mapFn, @
doneCallback()
request = @supermodel.addRequestResource 'level_completions', {
url: '/db/analytics_perday/-/level_completions'
data: {startDay: startDay, endDay: endDay, slug: @levelSlug}
method: 'POST'
success: success
}, 0
request.load()
getLevelHelps: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelHelps', data
@analytics.levelHelps.data = data.sort (a, b) -> if a.day < b.day then -1 else 1
doneCallback()
request = @supermodel.addRequestResource 'level_helps', {
url: '/db/analytics_perday/-/level_helps'
data: {startDay: startDay, endDay: endDay, slugs: [@levelSlug]}
method: 'POST'
success: success
}, 0
request.load()
getLevelPlaytimes: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelPlaytimes', data
@analytics.levelPlaytimes.data = data.sort (a, b) -> if a.created < b.created then -1 else 1
doneCallback()
request = @supermodel.addRequestResource 'playtime_averages', {
url: '/db/level/-/playtime_averages'
data: {startDay: startDay, endDay: endDay, slugs: [@levelSlug]}
method: 'POST'
success: success
}, 0
request.load()
getRecentSessions: (doneCallback) ->
# limit = 100
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getRecentSessions', data
@analytics.recentSessions.data = data
doneCallback()
request = @supermodel.addRequestResource 'level_sessions_recent', {
url: "/db/level.session/-/recent"
data: {slug: @levelSlug, limit: @limit}
method: 'POST'
success: success
}, 0
request.load()
| true | require('app/styles/editor/campaign/campaign-level-view.sass')
CocoView = require 'views/core/CocoView'
Level = require 'models/Level'
LevelSession = require 'models/LevelSession'
ModelModal = require 'views/modal/ModelModal'
User = require 'models/User'
utils = require 'core/utils'
module.exports = class CampaignLevelView extends CocoView
id: 'campaign-level-view'
template: require 'templates/editor/campaign/campaign-level-view'
events:
'change .line-graph-checkbox': 'updateGraphCheckbox'
'click .close': 'onClickClose'
'click #reload-button': 'onClickReloadButton'
'dblclick .recent-session': 'onDblClickRecentSession'
'mouseenter .graph-point': 'onMouseEnterPoint'
'mouseleave .graph-point': 'onMouseLeavePoint'
'click .replay-button': 'onClickReplay'
'click #recent-button': 'onClickRecentButton'
limit: 100
constructor: (options, @level) ->
super(options)
@fullLevel = new Level _id: @level.id
@fullLevel.fetch()
@listenToOnce @fullLevel, 'sync', => @render?()
@levelSlug = @level.get('slug')
@getAnalytics()
getRenderData: ->
c = super()
c.level = if @fullLevel.loaded then @fullLevel else @level
c.analytics = @analytics
c
afterRender: ->
super()
$("#input-startday").datepicker dateFormat: "yy-mm-dd"
$("#input-endday").datepicker dateFormat: "yy-mm-dd"
# TODO: Why does this have to be called from afterRender() instead of getRenderData()?
@updateAnalyticsGraphs()
updateGraphCheckbox: (e) ->
lineID = $(e.target).data('lineid')
checked = $(e.target).prop('checked')
for graph in @analytics.graphs
for line in graph.lines
if line.lineID is lineID
line.enabled = checked
return @render()
onClickClose: ->
@$el.addClass('hidden')
@trigger 'hidden'
onClickReloadButton: () =>
startDay = $('#input-startday').val()
endDay = $('#input-endday').val()
@getAnalytics startDay, endDay
onDblClickRecentSession: (e) ->
# Admin view of players' code
return unless me.isAdmin()
row = $(e.target).parent()
player = new User _id: row.data 'player-id'
session = new LevelSession _id: row.data 'session-id'
@openModalView new ModelModal models: [session, player]
onMouseEnterPoint: (e) ->
pointID = $(e.target).data('pointid')
container = @$el.find(".graph-point-info-container[data-pointid=#{pointID}]").show()
margin = 20
width = container.outerWidth()
height = container.outerHeight()
container.css('left', e.offsetX - width / 2)
container.css('top', e.offsetY - height - margin)
onMouseLeavePoint: (e) ->
pointID = $(e.target).data('pointid')
@$el.find(".graph-point-info-container[data-pointid=#{pointID}]").hide()
onClickReplay: (e) ->
sessionID = $(e.target).closest('tr').data 'session-id'
session = _.find @analytics.recentSessions.data, _id: sessionID
url = "/play/level/#{@level.get('slug')}?session=#{sessionID}&observing=true"
if session.isForClassroom
url += '&course=560f1a9f22961295f9427742'
window.open url, '_blank'
onClickRecentButton: (event) ->
event.preventDefault()
@limit = @$('#input-session-num').val()
@analytics.recentSessions = {data: [], loading: true}
@render() # Hide old session data while we fetch new sessions
@getRecentSessions @makeFinishDataFetch(@analytics.recentSessions)
makeFinishDataFetch: (data) =>
return =>
return if @destroyed
@updateAnalyticsGraphData()
data.loading = false
@render()
updateAnalyticsGraphData: ->
# console.log 'updateAnalyticsGraphData'
# Build graphs based on available @analytics data
# Currently only one graph
@analytics.graphs = [graphID: 'level-completions', lines: []]
# TODO: Where should this metadata live?
# TODO: lineIDs assumed to be unique across graphs
completionLineID = 'level-completions'
playtimeLineID = 'level-playtime'
helpsLineID = 'helps-clicked'
videosLineID = 'help-videos'
lineMetadata = {}
lineMetadata[completionLineID] =
description: 'Level Completion (%)'
color: 'red'
lineMetadata[playtimeLineID] =
description: 'Average Playtime (s)'
color: 'green'
lineMetadata[helpsLineID] =
description: 'Help click rate (%)'
color: 'blue'
lineMetadata[videosLineID] =
description: 'Help video rate (%)'
color: 'purple'
# Use this days aggregate to fill in missing days from the analytics data
days = {}
days["#{day.created[0..3]}-#{day.created[4..5]}-#{day.created[6..7]}"] = true for day in @analytics.levelCompletions.data if @analytics?.levelCompletions?.data?
days[day.created] = true for day in @analytics.levelPlaytimes.data if @analytics?.levelPlaytimes?.data?
days["#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"] = true for day in @analytics.levelHelps.data if @analytics?.levelHelps?.data?
days = Object.keys(days).sort (a, b) -> if a < b then -1 else 1
if days.length > 0
currentIndex = 0
currentDay = days[currentIndex]
currentDate = new Date(currentDay + "T00:00:00.000Z")
lastDay = days[days.length - 1]
while currentDay isnt lastDay
days.splice currentIndex, 0, currentDay if days[currentIndex] isnt currentDay
currentIndex++
currentDate.setUTCDate(currentDate.getUTCDate() + 1)
currentDay = currentDate.toISOString().substr(0, 10)
# Update level completion graph data
dayStartedMap = {}
if @analytics?.levelCompletions?.data?.length > 0
# Build line data
levelPoints = []
for day, i in @analytics.levelCompletions.data
dayStartedMap[day.created] = day.started
rate = parseFloat(day.rate)
levelPoints.push
x: i
y: rate
started: day.started
day: "#{day.created[0..3]}-#{day.created[4..5]}-#{day.created[6..7]}"
pointID: "#{completionLineID}#{i}"
values: ["Started: #{day.started}", "Finished: #{day.finished}", "Completion rate: #{rate.toFixed(2)}%"]
# Ensure points for each day
for day, i in days
if levelPoints.length <= i or levelPoints[i].day isnt day
levelPoints.splice i, 0,
y: 0.0
day: day
values: []
levelPoints[i].x = i
levelPoints[i].pointID = "#{completionLineID}#{i}"
@analytics.graphs[0].lines.push
lineID: completionLineID
enabled: true
points: levelPoints
description: lineMetadata[completionLineID].description
lineColor: lineMetadata[completionLineID].color
min: 0
max: 100.0
# Update average playtime graph data
if @analytics?.levelPlaytimes?.data?.length > 0
# Build line data
playtimePoints = []
for day, i in @analytics.levelPlaytimes.data
avg = parseFloat(day.average)
playtimePoints.push
x: i
y: avg
day: day.created
pointID: "#{playtimeLineID}#{i}"
values: ["Average playtime: #{avg.toFixed(2)}s"]
# Ensure points for each day
for day, i in days
if playtimePoints.length <= i or playtimePoints[i].day isnt day
playtimePoints.splice i, 0,
y: 0.0
day: day
values: []
playtimePoints[i].x = i
playtimePoints[i].pointID = "#{playtimeLineID}#{i}"
@analytics.graphs[0].lines.push
lineID: playtimeLineID
enabled: true
points: playtimePoints
description: lineMetadata[playtimeLineID].description
lineColor: lineMetadata[playtimeLineID].color
min: 0
max: d3.max(playtimePoints, (d) -> d.y)
# Update help graph data
if @analytics?.levelHelps?.data?.length > 0
# Build line data
helpPoints = []
videoPoints = []
for day, i in @analytics.levelHelps.data
helpCount = day.alertHelps + day.paletteHelps
started = dayStartedMap[day.day] ? 0
clickRate = if started > 0 then helpCount / started * 100 else 0
videoRate = day.videoStarts / helpCount * 100
helpPoints.push
x: i
y: clickRate
day: "#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"
pointID: "#{helpsLineID}#{i}"
values: ["Helps clicked: #{helpCount}", "Helps click clickRate: #{clickRate.toFixed(2)}%"]
videoPoints.push
x: i
y: videoRate
day: "#{day.day[0..3]}-#{day.day[4..5]}-#{day.day[6..7]}"
pointID: "#{videosLineID}#{i}"
values: ["Help videos started: #{day.videoStarts}", "Help videos start rate: #{videoRate.toFixed(2)}%"]
# Ensure points for each day
for day, i in days
if helpPoints.length <= i or helpPoints[i].day isnt day
helpPoints.splice i, 0,
y: 0.0
day: day
values: []
helpPoints[i].x = i
helpPoints[i].pointID = "#{helpsLineID}#{i}"
if videoPoints.length <= i or videoPoints[i].day isnt day
videoPoints.splice i, 0,
y: 0.0
day: day
values: []
videoPoints[i].x = i
videoPoints[i].pointID = "#{videosLineID}#{i}"
if d3.max(helpPoints, (d) -> d.y) > 0
@analytics.graphs[0].lines.push
lineID: helpsLineID
enabled: true
points: helpPoints
description: lineMetadata[helpsLineID].description
lineColor: lineMetadata[helpsLineID].color
min: 0
max: 100.0
if d3.max(videoPoints, (d) -> d.y) > 0
@analytics.graphs[0].lines.push
lineID: videosLineID
enabled: true
points: videoPoints
description: lineMetadata[videosLineID].description
lineColor: lineMetadata[videosLineID].color
min: 0
max: 100.0
updateAnalyticsGraphs: ->
# Build d3 graphs
return unless @analytics?.graphs?.length > 0
containerSelector = '.line-graph-container'
# console.log 'updateAnalyticsGraphs', containerSelector, @analytics.graphs
margin = 20
    keyHeight = 20
xAxisHeight = 20
yAxisWidth = 40
containerWidth = $(containerSelector).width()
containerHeight = $(containerSelector).height()
for graph in @analytics.graphs
graphLineCount = _.reduce graph.lines, ((sum, item) -> if item.enabled then sum + 1 else sum), 0
svg = d3.select(containerSelector).append("svg")
.attr("width", containerWidth)
.attr("height", containerHeight)
width = containerWidth - margin * 2 - yAxisWidth * graphLineCount
height = containerHeight - margin * 2 - xAxisHeight - keyHeight * graphLineCount
currentLine = 0
for line in graph.lines
continue unless line.enabled
xRange = d3.scale.linear().range([0, width]).domain([d3.min(line.points, (d) -> d.x), d3.max(line.points, (d) -> d.x)])
yRange = d3.scale.linear().range([height, 0]).domain([line.min, line.max])
# x-Axis and guideline once
if currentLine is 0
startDay = new Date(line.points[0].day)
endDay = new Date(line.points[line.points.length - 1].day)
xAxisRange = d3.time.scale()
.domain([startDay, endDay])
.range([0, width])
xAxis = d3.svg.axis()
.scale(xAxisRange)
svg.append("g")
.attr("class", "x axis")
.call(xAxis)
.selectAll("text")
.attr("dy", ".35em")
.attr("transform", "translate(" + (margin + yAxisWidth * (graphLineCount - 1)) + "," + (height + margin) + ")")
.style("text-anchor", "start")
# Horizontal guidelines
svg.selectAll(".line")
.data([10, 30, 50, 70, 90])
.enter()
.append("line")
.attr("x1", margin + yAxisWidth * graphLineCount)
.attr("y1", (d) -> margin + yRange(d))
.attr("x2", margin + yAxisWidth * graphLineCount + width)
.attr("y2", (d) -> margin + yRange(d))
.attr("stroke", line.lineColor)
.style("opacity", "0.5")
# y-Axis
yAxisRange = d3.scale.linear().range([height, 0]).domain([line.min, line.max])
yAxis = d3.svg.axis()
.scale(yRange)
.orient("left")
svg.append("g")
.attr("class", "y axis")
.attr("transform", "translate(" + (margin + yAxisWidth * currentLine) + "," + margin + ")")
.style("color", line.lineColor)
.call(yAxis)
.selectAll("text")
.attr("y", 0)
.attr("x", 0)
.attr("fill", line.lineColor)
.style("text-anchor", "start")
# Key
svg.append("line")
.attr("x1", margin)
.attr("y1", margin + height + xAxisHeight + keyHeight * currentLine + keyHeight / 2)
.attr("x2", margin + 40)
.attr("y2", margin + height + xAxisHeight + keyHeight * currentLine + keyHeight / 2)
.attr("stroke", line.lineColor)
.attr("class", "key-line")
svg.append("text")
.attr("x", margin + 40 + 10)
.attr("y", margin + height + xAxisHeight + keyHeight * currentLine + (keyHeight + 10) / 2)
.attr("fill", line.lineColor)
.attr("class", "key-text")
.text(line.description)
# Path and points
svg.selectAll(".circle")
.data(line.points)
.enter()
.append("circle")
.attr("transform", "translate(" + (margin + yAxisWidth * graphLineCount) + "," + margin + ")")
.attr("cx", (d) -> xRange(d.x))
.attr("cy", (d) -> yRange(d.y))
.attr("r", (d) -> if d.started then Math.max(3, Math.min(10, Math.log(parseInt(d.started)))) + 2 else 6)
.attr("fill", line.lineColor)
.attr("stroke-width", 1)
.attr("class", "graph-point")
.attr("data-pointid", (d) -> "#{line.lineID}#{d.x}")
d3line = d3.svg.line()
.x((d) -> xRange(d.x))
.y((d) -> yRange(d.y))
.interpolate("linear")
svg.append("path")
.attr("d", d3line(line.points))
.attr("transform", "translate(" + (margin + yAxisWidth * graphLineCount) + "," + margin + ")")
.style("stroke-width", 1)
.style("stroke", line.lineColor)
.style("fill", "none")
currentLine++
getAnalytics: (startDay, endDay) =>
# Analytics APIs use 2 different day formats
if startDay?
startDayDashed = startDay
startDay = startDay.replace(/-/g, '')
else
startDay = utils.getUTCDay -14
startDayDashed = "#{startDay[0..3]}-#{startDay[4..5]}-#{startDay[6..7]}"
if endDay?
endDayDashed = endDay
endDay = endDay.replace(/-/g, '')
else
endDay = utils.getUTCDay -1
endDayDashed = "#{endDay[0..3]}-#{endDay[4..5]}-#{endDay[6..7]}"
# Initialize
@analytics =
startDay: startDayDashed
endDay: endDayDashed
commonProblems: {data: [], loading: true}
levelCompletions: {data: [], loading: true}
levelHelps: {data: [], loading: true}
levelPlaytimes: {data: [], loading: true}
recentSessions: {data: [], loading: true}
graphs: []
@render() # Hide old analytics data while we fetch new data
@getCommonLevelProblems startDayDashed, endDayDashed, @makeFinishDataFetch(@analytics.commonProblems)
@getLevelCompletions startDay, endDay, @makeFinishDataFetch(@analytics.levelCompletions)
@getLevelHelps startDay, endDay, @makeFinishDataFetch(@analytics.levelHelps)
@getLevelPlaytimes startDayDashed, endDayDashed, @makeFinishDataFetch(@analytics.levelPlaytimes)
@getRecentSessions @makeFinishDataFetch(@analytics.recentSessions)
getCommonLevelProblems: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getCommonLevelProblems', data
@analytics.commonProblems.data = data
doneCallback()
request = @supermodel.addRequestResource 'common_problems', {
url: '/db/user.code.problem/-/common_problems'
data: {startDay: startDay, endDay: endDay, slug: @levelSlug}
method: 'POST'
success: success
}, 0
request.load()
getLevelCompletions: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelCompletions', data
data.sort (a, b) -> if a.created < b.created then -1 else 1
mapFn = (item) ->
item.rate = if item.started > 0 then item.finished / item.started * 100 else 0
item
@analytics.levelCompletions.data = _.map data, mapFn, @
doneCallback()
request = @supermodel.addRequestResource 'level_completions', {
url: '/db/analytics_perday/-/level_completions'
data: {startDay: startDay, endDay: endDay, slug: @levelSlug}
method: 'POST'
success: success
}, 0
request.load()
getLevelHelps: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelHelps', data
@analytics.levelHelps.data = data.sort (a, b) -> if a.day < b.day then -1 else 1
doneCallback()
request = @supermodel.addRequestResource 'level_helps', {
url: '/db/analytics_perday/-/level_helps'
data: {startDay: startDay, endDay: endDay, slugs: [@levelSlug]}
method: 'POST'
success: success
}, 0
request.load()
getLevelPlaytimes: (startDay, endDay, doneCallback) ->
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getLevelPlaytimes', data
@analytics.levelPlaytimes.data = data.sort (a, b) -> if a.created < b.created then -1 else 1
doneCallback()
request = @supermodel.addRequestResource 'playtime_averages', {
url: '/db/level/-/playtime_averages'
data: {startDay: startDay, endDay: endDay, slugs: [@levelSlug]}
method: 'POST'
success: success
}, 0
request.load()
getRecentSessions: (doneCallback) ->
# limit = 100
success = (data) =>
return doneCallback() if @destroyed
# console.log 'getRecentSessions', data
@analytics.recentSessions.data = data
doneCallback()
request = @supermodel.addRequestResource 'level_sessions_recent', {
url: "/db/level.session/-/recent"
data: {slug: @levelSlug, limit: @limit}
method: 'POST'
success: success
}, 0
request.load()
|
[
{
"context": ", 'sync'\n\n @currentUser = new CurrentUser(id: \"user_id\", email: \"a@b.c\")\n sd.API_URL = \"http://localh",
"end": 606,
"score": 0.8440365195274353,
"start": 599,
"tag": "USERNAME",
"value": "user_id"
},
{
"context": "rentUser = new CurrentUser(id: \"user_id\", email: \"a@b.c\")\n sd.API_URL = \"http://localhost:5000/__api\"\n",
"end": 622,
"score": 0.8089593648910522,
"start": 617,
"tag": "EMAIL",
"value": "a@b.c"
}
] | src/desktop/test/models/artwork_collection.coffee | kanaabe/force | 1 | sinon = require 'sinon'
should = require 'should'
Backbone = require 'backbone'
Artwork = require '../../models/artwork'
Artworks = require '../../collections/artworks'
ArtworkCollection = require '../../models/artwork_collection'
CurrentUser = require '../../models/current_user'
benv = require 'benv'
sd = require('sharify').data
_ = require 'underscore'
{ fabricate } = require 'antigravity'
describe 'ArtworkCollection', ->
before (done) ->
benv.setup =>
done()
after -> benv.teardown()
beforeEach ->
sinon.stub Backbone, 'sync'
@currentUser = new CurrentUser(id: "user_id", email: "a@b.c")
sd.API_URL = "http://localhost:5000/__api"
sd.NODE_ENV = 'test'
@artworkCollection = new ArtworkCollection(userId: @currentUser.get('id'))
@artworks = new Artworks
@artworks.add [
new Artwork { id: 'foo', title: 'Foo' }
new Artwork { id: 'bar', title: 'Bar' }
]
@artworkCollection.get('artworks').add @artworks.models
@artworkCollection.addRepoArtworks @artworks
afterEach ->
Backbone.sync.restore()
describe '#saveArtwork', ->
it 'adds artwork to the saved artworks collection', ->
artwork = new Artwork { id: 'baz', title: 'Baz' }
len = @artworkCollection.get('artworks').length
@artworkCollection.saveArtwork artwork.get('id')
@artworkCollection.isSaved(artwork).should.be.true()
@artworkCollection.get('artworks').length.should.equal len + 1
it 'makes an API request to sync the action', ->
artwork = new Artwork { id: 'baz', title: 'Baz' }
@artworkCollection.saveArtwork artwork.get('id')
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/baz'
it 'can trigger add events for a specific artwork', (done) ->
specificArtworkAddedCalls = 0
artwork = new Artwork({ id: 'baz', title: 'Baz' })
@artworkCollection.on "add:#{artwork.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.saveArtwork artwork.get('id')
_.defer -> _.defer ->
specificArtworkAddedCalls.should.equal 1
done()
it 'can accept a silent option to prevent event triggers', (done) ->
artworkAddedCalls = 0
specificArtworkAddedCalls = 0
artwork = new Artwork({ id: 'baz', title: 'Baz' })
@artworkCollection.on 'add', -> artworkAddedCalls += 1
@artworkCollection.on "add:#{artwork.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.saveArtwork artwork.get('id'), { silent: true }
_.defer -> _.defer ->
artworkAddedCalls.should.equal 0
specificArtworkAddedCalls.should.equal 0
done()
it 'calls the success callback', ->
successCb = sinon.stub()
artwork = new Artwork { id: 'baz', title: 'Baz' }
@artworkCollection.saveArtwork artwork.get('id'), { success: successCb }
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/baz'
Backbone.sync.args[0][2].success { foo: 'bar' }
successCb.called.should.be.ok()
describe '#unsaveArtwork', ->
it 'removes artwork from the saved artworks artworkCollection', ->
artwork = @artworkCollection.get('artworks').first()
len = @artworkCollection.get('artworks').length
@artworkCollection.unsaveArtwork artwork.get('id')
@artworkCollection.isSaved(artwork).should.be.false()
@artworkCollection.get('artworks').length.should.equal len - 1
it 'makes an API request to sync the action', ->
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.unsaveArtwork artwork.get('id')
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/foo'
it 'can trigger remove events for a specific artwork', (done) ->
specificArtworkRemovedCalls = 0
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.on "remove:#{artwork.get('id')}", -> specificArtworkRemovedCalls += 1
@artworkCollection.unsaveArtwork artwork.get('id')
setTimeout ->
specificArtworkRemovedCalls.should.equal 1
done()
, 100
it 'can accept a silent option to prevent event triggers', (done) ->
artworkRemovedCalls = 0
specificArtworkRemovedCalls = 0
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.on 'remove', -> artworkRemovedCalls += 1
@artworkCollection.on "remove:#{artwork.get('id')}", -> specificArtworkRemovedCalls += 1
@artworkCollection.unsaveArtwork artwork.get('id'), { silent: true }
setTimeout ->
artworkRemovedCalls.should.equal 0
specificArtworkRemovedCalls.should.equal 0
done()
, 100
it 'calls the success callback', ->
successCb = sinon.stub()
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.unsaveArtwork artwork.get('id'), { success: successCb }
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/foo'
Backbone.sync.args[0][2].success { foo: 'bar' }
successCb.called.should.be.ok()
describe 'isSaved', ->
it 'determines if an artwork is in the user\'s saved artworks artworkCollection', ->
unsavedArtwork = new Artwork({ id: 'baz', title: 'Baz' })
savedArtwork = @artworkCollection.get('artworks').first()
@artworkCollection.isSaved(unsavedArtwork).should.be.false()
@artworkCollection.isSaved(savedArtwork).should.be.true()
describe '#broadcastSaved', ->
it 'triggers an artwork specific add for all artworks in the artworkCollection', (done) ->
specificArtworkAddedCalls = 0
a1 = @artworkCollection.get('artworks').at 0
a2 = @artworkCollection.get('artworks').at 1
@artworkCollection.on "add:#{a1.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.on "add:#{a2.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.broadcastSaved()
setTimeout ->
specificArtworkAddedCalls.should.equal 2
done()
, 100
describe '#artworkIdsToSync', ->
it 'returns all artwork ids that need a server check to determine if saved', ->
@artworkCollection.addRepoArtworks new Artworks([
new Artwork { id: 'moo', title: 'Moo' }
new Artwork { id: 'gar', title: 'Gar' }
])
@artworkCollection.artworkIdsToSync()[0].should.equal 'moo'
@artworkCollection.artworkIdsToSync()[1].should.equal 'gar'
@artworkCollection.get('artworks').add new Artwork({ id: 'moo', title: 'Moo' })
@artworkCollection.artworkIdsToSync()[0].should.equal 'gar'
@artworkCollection.artworkIdsToSync().length.should.equal 1
describe '#syncSavedArtworks', ->
xit 'requests the difference between this artworkCollection and the application artworks repository to determine what\'s saved', ->
@artworkCollection.addRepoArtworks new Artworks([
new Artwork { id: 'moo', title: 'Moo' }
new Artwork { id: 'boo', title: 'Boo' }
new Artwork { id: 'gar', title: 'Gar' }
])
url = @artworkCollection.url
response = [200, {"Content-Type": "application/json"}, '{[ { "id": "boo", "title": "Boo" } ]}']
server.respondWith("GET", "#{url}?artworks[]=moo&artworks[]=boo&artworks[]=gar", response)
@artworkCollection.syncSavedArtworks()
server.respond()
@artworkCollection.get('artworks').get('boo').should.be.true()
@artworkCollection.get('artworks').get('moo').should.be.false()
xit 'cleans up when all saves are fetched', ->
@artworkCollection.syncSavedArtworks()
@artworkCollection.allFetched.should.be.false()
@artworkCollection.syncSavedArtworks()
@artworkCollection.allFetched.should.be.true()
@artworkCollection.unsavedCache.length.should.equal 0
@artworkCollection.pendingRequests.length.should.equal 0
@artworkCollection.completedRequests.length.should.equal 0
describe '#processRequests', ->
xit 'makes multiple requests determined by @requestSlugMax', ->
@artworkCollection.artworkIdsToSync = sinon.stub().returns(['moo', 'foo', 'bar'])
@artworkCollection.syncSavedArtworks()
@artworkCollection.requestSlugMax = 2
| 208046 | sinon = require 'sinon'
should = require 'should'
Backbone = require 'backbone'
Artwork = require '../../models/artwork'
Artworks = require '../../collections/artworks'
ArtworkCollection = require '../../models/artwork_collection'
CurrentUser = require '../../models/current_user'
benv = require 'benv'
sd = require('sharify').data
_ = require 'underscore'
{ fabricate } = require 'antigravity'
describe 'ArtworkCollection', ->
before (done) ->
benv.setup =>
done()
after -> benv.teardown()
beforeEach ->
sinon.stub Backbone, 'sync'
@currentUser = new CurrentUser(id: "user_id", email: "<EMAIL>")
sd.API_URL = "http://localhost:5000/__api"
sd.NODE_ENV = 'test'
@artworkCollection = new ArtworkCollection(userId: @currentUser.get('id'))
@artworks = new Artworks
@artworks.add [
new Artwork { id: 'foo', title: 'Foo' }
new Artwork { id: 'bar', title: 'Bar' }
]
@artworkCollection.get('artworks').add @artworks.models
@artworkCollection.addRepoArtworks @artworks
afterEach ->
Backbone.sync.restore()
describe '#saveArtwork', ->
it 'adds artwork to the saved artworks collection', ->
artwork = new Artwork { id: 'baz', title: 'Baz' }
len = @artworkCollection.get('artworks').length
@artworkCollection.saveArtwork artwork.get('id')
@artworkCollection.isSaved(artwork).should.be.true()
@artworkCollection.get('artworks').length.should.equal len + 1
it 'makes an API request to sync the action', ->
artwork = new Artwork { id: 'baz', title: 'Baz' }
@artworkCollection.saveArtwork artwork.get('id')
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/baz'
it 'can trigger add events for a specific artwork', (done) ->
specificArtworkAddedCalls = 0
artwork = new Artwork({ id: 'baz', title: 'Baz' })
@artworkCollection.on "add:#{artwork.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.saveArtwork artwork.get('id')
_.defer -> _.defer ->
specificArtworkAddedCalls.should.equal 1
done()
it 'can accept a silent option to prevent event triggers', (done) ->
artworkAddedCalls = 0
specificArtworkAddedCalls = 0
artwork = new Artwork({ id: 'baz', title: 'Baz' })
@artworkCollection.on 'add', -> artworkAddedCalls += 1
@artworkCollection.on "add:#{artwork.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.saveArtwork artwork.get('id'), { silent: true }
_.defer -> _.defer ->
artworkAddedCalls.should.equal 0
specificArtworkAddedCalls.should.equal 0
done()
it 'calls the success callback', ->
successCb = sinon.stub()
artwork = new Artwork { id: 'baz', title: 'Baz' }
@artworkCollection.saveArtwork artwork.get('id'), { success: successCb }
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/baz'
Backbone.sync.args[0][2].success { foo: 'bar' }
successCb.called.should.be.ok()
describe '#unsaveArtwork', ->
it 'removes artwork from the saved artworks artworkCollection', ->
artwork = @artworkCollection.get('artworks').first()
len = @artworkCollection.get('artworks').length
@artworkCollection.unsaveArtwork artwork.get('id')
@artworkCollection.isSaved(artwork).should.be.false()
@artworkCollection.get('artworks').length.should.equal len - 1
it 'makes an API request to sync the action', ->
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.unsaveArtwork artwork.get('id')
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/foo'
it 'can trigger remove events for a specific artwork', (done) ->
specificArtworkRemovedCalls = 0
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.on "remove:#{artwork.get('id')}", -> specificArtworkRemovedCalls += 1
@artworkCollection.unsaveArtwork artwork.get('id')
setTimeout ->
specificArtworkRemovedCalls.should.equal 1
done()
, 100
it 'can accept a silent option to prevent event triggers', (done) ->
artworkRemovedCalls = 0
specificArtworkRemovedCalls = 0
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.on 'remove', -> artworkRemovedCalls += 1
@artworkCollection.on "remove:#{artwork.get('id')}", -> specificArtworkRemovedCalls += 1
@artworkCollection.unsaveArtwork artwork.get('id'), { silent: true }
setTimeout ->
artworkRemovedCalls.should.equal 0
specificArtworkRemovedCalls.should.equal 0
done()
, 100
it 'calls the success callback', ->
successCb = sinon.stub()
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.unsaveArtwork artwork.get('id'), { success: successCb }
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/foo'
Backbone.sync.args[0][2].success { foo: 'bar' }
successCb.called.should.be.ok()
describe 'isSaved', ->
it 'determines if an artwork is in the user\'s saved artworks artworkCollection', ->
unsavedArtwork = new Artwork({ id: 'baz', title: 'Baz' })
savedArtwork = @artworkCollection.get('artworks').first()
@artworkCollection.isSaved(unsavedArtwork).should.be.false()
@artworkCollection.isSaved(savedArtwork).should.be.true()
describe '#broadcastSaved', ->
it 'triggers an artwork specific add for all artworks in the artworkCollection', (done) ->
specificArtworkAddedCalls = 0
a1 = @artworkCollection.get('artworks').at 0
a2 = @artworkCollection.get('artworks').at 1
@artworkCollection.on "add:#{a1.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.on "add:#{a2.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.broadcastSaved()
setTimeout ->
specificArtworkAddedCalls.should.equal 2
done()
, 100
describe '#artworkIdsToSync', ->
it 'returns all artwork ids that need a server check to determine if saved', ->
@artworkCollection.addRepoArtworks new Artworks([
new Artwork { id: 'moo', title: 'Moo' }
new Artwork { id: 'gar', title: 'Gar' }
])
@artworkCollection.artworkIdsToSync()[0].should.equal 'moo'
@artworkCollection.artworkIdsToSync()[1].should.equal 'gar'
@artworkCollection.get('artworks').add new Artwork({ id: 'moo', title: 'Moo' })
@artworkCollection.artworkIdsToSync()[0].should.equal 'gar'
@artworkCollection.artworkIdsToSync().length.should.equal 1
describe '#syncSavedArtworks', ->
xit 'requests the difference between this artworkCollection and the application artworks repository to determine what\'s saved', ->
@artworkCollection.addRepoArtworks new Artworks([
new Artwork { id: 'moo', title: 'Moo' }
new Artwork { id: 'boo', title: 'Boo' }
new Artwork { id: 'gar', title: 'Gar' }
])
url = @artworkCollection.url
response = [200, {"Content-Type": "application/json"}, '{[ { "id": "boo", "title": "Boo" } ]}']
server.respondWith("GET", "#{url}?artworks[]=moo&artworks[]=boo&artworks[]=gar", response)
@artworkCollection.syncSavedArtworks()
server.respond()
@artworkCollection.get('artworks').get('boo').should.be.true()
@artworkCollection.get('artworks').get('moo').should.be.false()
xit 'cleans up when all saves are fetched', ->
@artworkCollection.syncSavedArtworks()
@artworkCollection.allFetched.should.be.false()
@artworkCollection.syncSavedArtworks()
@artworkCollection.allFetched.should.be.true()
@artworkCollection.unsavedCache.length.should.equal 0
@artworkCollection.pendingRequests.length.should.equal 0
@artworkCollection.completedRequests.length.should.equal 0
describe '#processRequests', ->
xit 'makes multiple requests determined by @requestSlugMax', ->
@artworkCollection.artworkIdsToSync = sinon.stub().returns(['moo', 'foo', 'bar'])
@artworkCollection.syncSavedArtworks()
@artworkCollection.requestSlugMax = 2
| true | sinon = require 'sinon'
should = require 'should'
Backbone = require 'backbone'
Artwork = require '../../models/artwork'
Artworks = require '../../collections/artworks'
ArtworkCollection = require '../../models/artwork_collection'
CurrentUser = require '../../models/current_user'
benv = require 'benv'
sd = require('sharify').data
_ = require 'underscore'
{ fabricate } = require 'antigravity'
describe 'ArtworkCollection', ->
before (done) ->
benv.setup =>
done()
after -> benv.teardown()
beforeEach ->
sinon.stub Backbone, 'sync'
@currentUser = new CurrentUser(id: "user_id", email: "PI:EMAIL:<EMAIL>END_PI")
sd.API_URL = "http://localhost:5000/__api"
sd.NODE_ENV = 'test'
@artworkCollection = new ArtworkCollection(userId: @currentUser.get('id'))
@artworks = new Artworks
@artworks.add [
new Artwork { id: 'foo', title: 'Foo' }
new Artwork { id: 'bar', title: 'Bar' }
]
@artworkCollection.get('artworks').add @artworks.models
@artworkCollection.addRepoArtworks @artworks
afterEach ->
Backbone.sync.restore()
describe '#saveArtwork', ->
it 'adds artwork to the saved artworks collection', ->
artwork = new Artwork { id: 'baz', title: 'Baz' }
len = @artworkCollection.get('artworks').length
@artworkCollection.saveArtwork artwork.get('id')
@artworkCollection.isSaved(artwork).should.be.true()
@artworkCollection.get('artworks').length.should.equal len + 1
it 'makes an API request to sync the action', ->
artwork = new Artwork { id: 'baz', title: 'Baz' }
@artworkCollection.saveArtwork artwork.get('id')
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/baz'
it 'can trigger add events for a specific artwork', (done) ->
specificArtworkAddedCalls = 0
artwork = new Artwork({ id: 'baz', title: 'Baz' })
@artworkCollection.on "add:#{artwork.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.saveArtwork artwork.get('id')
_.defer -> _.defer ->
specificArtworkAddedCalls.should.equal 1
done()
it 'can accept a silent option to prevent event triggers', (done) ->
artworkAddedCalls = 0
specificArtworkAddedCalls = 0
artwork = new Artwork({ id: 'baz', title: 'Baz' })
@artworkCollection.on 'add', -> artworkAddedCalls += 1
@artworkCollection.on "add:#{artwork.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.saveArtwork artwork.get('id'), { silent: true }
_.defer -> _.defer ->
artworkAddedCalls.should.equal 0
specificArtworkAddedCalls.should.equal 0
done()
it 'calls the success callback', ->
successCb = sinon.stub()
artwork = new Artwork { id: 'baz', title: 'Baz' }
@artworkCollection.saveArtwork artwork.get('id'), { success: successCb }
Backbone.sync.args[0][0].should.equal 'create'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/baz'
Backbone.sync.args[0][2].success { foo: 'bar' }
successCb.called.should.be.ok()
describe '#unsaveArtwork', ->
it 'removes artwork from the saved artworks artworkCollection', ->
artwork = @artworkCollection.get('artworks').first()
len = @artworkCollection.get('artworks').length
@artworkCollection.unsaveArtwork artwork.get('id')
@artworkCollection.isSaved(artwork).should.be.false()
@artworkCollection.get('artworks').length.should.equal len - 1
it 'makes an API request to sync the action', ->
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.unsaveArtwork artwork.get('id')
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/foo'
it 'can trigger remove events for a specific artwork', (done) ->
specificArtworkRemovedCalls = 0
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.on "remove:#{artwork.get('id')}", -> specificArtworkRemovedCalls += 1
@artworkCollection.unsaveArtwork artwork.get('id')
setTimeout ->
specificArtworkRemovedCalls.should.equal 1
done()
, 100
it 'can accept a silent option to prevent event triggers', (done) ->
artworkRemovedCalls = 0
specificArtworkRemovedCalls = 0
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.on 'remove', -> artworkRemovedCalls += 1
@artworkCollection.on "remove:#{artwork.get('id')}", -> specificArtworkRemovedCalls += 1
@artworkCollection.unsaveArtwork artwork.get('id'), { silent: true }
setTimeout ->
artworkRemovedCalls.should.equal 0
specificArtworkRemovedCalls.should.equal 0
done()
, 100
it 'calls the success callback', ->
successCb = sinon.stub()
artwork = @artworkCollection.get('artworks').first()
@artworkCollection.unsaveArtwork artwork.get('id'), { success: successCb }
Backbone.sync.args[0][0].should.equal 'delete'
Backbone.sync.args[0][1].url.should.containEql '/api/v1/collection/saved-artwork/artwork/foo'
Backbone.sync.args[0][2].success { foo: 'bar' }
successCb.called.should.be.ok()
describe 'isSaved', ->
it 'determines if an artwork is in the user\'s saved artworks artworkCollection', ->
unsavedArtwork = new Artwork({ id: 'baz', title: 'Baz' })
savedArtwork = @artworkCollection.get('artworks').first()
@artworkCollection.isSaved(unsavedArtwork).should.be.false()
@artworkCollection.isSaved(savedArtwork).should.be.true()
describe '#broadcastSaved', ->
it 'triggers an artwork specific add for all artworks in the artworkCollection', (done) ->
specificArtworkAddedCalls = 0
a1 = @artworkCollection.get('artworks').at 0
a2 = @artworkCollection.get('artworks').at 1
@artworkCollection.on "add:#{a1.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.on "add:#{a2.get('id')}", -> specificArtworkAddedCalls += 1
@artworkCollection.broadcastSaved()
setTimeout ->
specificArtworkAddedCalls.should.equal 2
done()
, 100
describe '#artworkIdsToSync', ->
it 'returns all artwork ids that need a server check to determine if saved', ->
@artworkCollection.addRepoArtworks new Artworks([
new Artwork { id: 'moo', title: 'Moo' }
new Artwork { id: 'gar', title: 'Gar' }
])
@artworkCollection.artworkIdsToSync()[0].should.equal 'moo'
@artworkCollection.artworkIdsToSync()[1].should.equal 'gar'
@artworkCollection.get('artworks').add new Artwork({ id: 'moo', title: 'Moo' })
@artworkCollection.artworkIdsToSync()[0].should.equal 'gar'
@artworkCollection.artworkIdsToSync().length.should.equal 1
describe '#syncSavedArtworks', ->
xit 'requests the difference between this artworkCollection and the application artworks repository to determine what\'s saved', ->
@artworkCollection.addRepoArtworks new Artworks([
new Artwork { id: 'moo', title: 'Moo' }
new Artwork { id: 'boo', title: 'Boo' }
new Artwork { id: 'gar', title: 'Gar' }
])
url = @artworkCollection.url
response = [200, {"Content-Type": "application/json"}, '{[ { "id": "boo", "title": "Boo" } ]}']
server.respondWith("GET", "#{url}?artworks[]=moo&artworks[]=boo&artworks[]=gar", response)
@artworkCollection.syncSavedArtworks()
server.respond()
@artworkCollection.get('artworks').get('boo').should.be.true()
@artworkCollection.get('artworks').get('moo').should.be.false()
xit 'cleans up when all saves are fetched', ->
@artworkCollection.syncSavedArtworks()
@artworkCollection.allFetched.should.be.false()
@artworkCollection.syncSavedArtworks()
@artworkCollection.allFetched.should.be.true()
@artworkCollection.unsavedCache.length.should.equal 0
@artworkCollection.pendingRequests.length.should.equal 0
@artworkCollection.completedRequests.length.should.equal 0
describe '#processRequests', ->
xit 'makes multiple requests determined by @requestSlugMax', ->
@artworkCollection.artworkIdsToSync = sinon.stub().returns(['moo', 'foo', 'bar'])
@artworkCollection.syncSavedArtworks()
@artworkCollection.requestSlugMax = 2
|
[
{
"context": "name:\"\n \"sync password\" : \"Password:\"\n \"sync help\" : \"Are ",
"end": 9048,
"score": 0.9977520108222961,
"start": 9040,
"tag": "PASSWORD",
"value": "Password"
}
] | client/app/locales/en.coffee | Noctisae/cozy-series | 0 | module.exports =
"default calendar name" : "my calendar"
"Add" : "Add"
"event" : "Event"
"create event" : "Event creation"
"edit event" : "Event edition"
"edit" : "Edit"
"create" : "Create"
"creation" : "Creation"
"invite" : "Invite"
"close" : "Close"
"delete" : "Delete"
"change color" : "Change color"
"rename" : "Rename"
"export" : "Export"
"remove" : "Remove event"
"duplicate" : "Duplicate event"
"Place" : "Place"
'all day' : 'all day'
'All day' : 'All day'
"description" : "Description"
"date" : "date"
"Day" : "Day"
"days" : "days"
"Edit" : "Edit"
"Email" : "Email"
"Import" : "Import"
"Export" : "Export"
"show" : "Show"
"hide" : "Hide"
"List" : "List"
"list" : "list"
"Calendar" : "Calendar"
"calendar" : "Calendar"
"Sync" : "Sync"
"ie: 9:00 important meeting" : "ie: 9:00 important meeting"
"Month" : "Month"
"Popup" : "Popup"
"Switch to List" : "Switch to List"
"Switch to Calendar" : "Switch to Calendar"
"time" : "time"
"Today" : "Today"
'today' : 'today'
"What should I remind you ?" : "What should I remind you?"
"select an icalendar file" : "Select an icalendar file"
"import your icalendar file" : "import your icalendar file"
"confirm import" : "confirm import"
"cancel" : "cancel"
"Create" : "Create"
"Events to import" : "Events to import"
"Create Event" : "Create Event"
"From [hours:minutes]" : "From [hours:minutes]"
"To [hours:minutes]" : "To [hours:minutes]"
"To [date]" : "To [date]"
"Description" : "Description"
"days after" : "days after"
"days later" : "days later"
"Week" : "Week"
"Display" : "Notification"
"DISPLAY" : "Notification"
"EMAIL" : "E-mail"
"BOTH" : "E-mail & Notification"
"display previous events" : "Display previous events"
"display next events" : "Display next events"
"event" : "Event"
"are you sure" : "Are you sure?"
"confirm delete calendar" : "You are about to delete all the events related to %{calendarName}. Are you sure?"
"confirm delete selected calendars" : "You are about to delete all the selected calendars. Are you sure?"
"advanced" : "More details"
"enter email" : "Enter email"
"ON" : "on"
"OFF" : "off"
"no description" : "No description"
"add calendar" : "Add calendar"
"new calendar" : "New calendar"
"multiple actions" : "Multiple actions"
# RRULE related
"recurrence" : "Recurrence"
"recurrence rule" : "Recurrence rules"
"make reccurent" : "Make recurrent"
"repeat every" : "Repeat every"
"no recurrence" : "No recurrence"
"repeat on" : "Repeat on"
"repeat on date" : "Repeat on dates"
"repeat on weekday" : "Repeat on weekday"
"repeat until" : "Repeat until"
"after" : "After"
"repeat" : "Repeat"
"forever" : "Forever"
"occurences" : "occurences"
"every" : "Every"
'minutes' : 'minutes'
'minute ' : 'minute'
'minute' : 'minute'
'hours' : 'hours'
'hour' : 'hour'
"days" : "days"
"day" : "day"
"weeks" : "weeks"
"week" : "week"
"months" : "months"
"month" : "month"
"years" : "years"
"year" : "year"
"until" : "until"
"for" : "for"
"on" : "on"
"on the" : "on the"
"th" : "th"
"nd" : "nd"
"rd" : "rd"
"st" : "st"
"last" : "last"
"and" : "and"
"times" : "times"
"weekday" : "weekday"
# MODAL
"summary" : "Summary"
"start" : "Start"
"end" : "End"
"tags" : "Tags"
"add tags" : "Add tags"
"change" : "Change"
"change to" : "Change to"
"change calendar" : "Change calendar"
"save changes" : "Save changes"
"save changes and invite guests" : "Save changes and invite guests"
"guests" : "Guests"
"cancel Invitation" : "Cancel the invitation"
"From" : "From"
"To" : "To"
"All day, until" : "All day, until"
"All one day" : "All day"
'Reminders before the event' : 'Reminders before the event'
"reminder" : "Reminder"
# CONFIRM MODAL
'send mails question' : 'Send a notification email to:'
'modal send mails' : 'Send a notification'
'yes' : 'Yes'
'no' : 'No'
# ERRORS
"no summary" : "A summary must be set."
"start after end" : "The start date is after the end date."
"invalid start date" : "The start date is invalid."
"invalid end date" : "The end date is invalid."
"invalid trigg date" : "The date is invalid."
"invalid action" : "The action is invalid."
"server error occured" : "A server error occured."
# SYNC
"synchronization" : "Synchronization"
"mobile sync" : "Mobile Sync (CalDAV)"
"link imported events with calendar": "Link events to import with following calendar:"
"import an ical file" : "To import an ICal file into your cozy calendar, first click on this button to preload it:"
"download a copy of your calendar" : "Select one calendar and then click on the export button, to download a copy if the calendar as an ICal file, :"
"icalendar export" : "ICalendar Export"
"icalendar import" : "ICalendar Import"
"to sync your cal with" : "To synchronize your calendar with your devices, you must follow two steps"
"sync headline with data" : "To synchronize your calendar, use the following information:"
"sync url" : "URL:"
"sync login" : "Username:"
"sync password" : "Password:"
"sync help" : "Are you lost? Follow the"
"sync help link" : "step-by-step guide!"
"install the sync module" : "Install the Sync module from the Cozy App Store"
"connect to it and follow" : "Connect to it and follow the instructions related to CalDAV."
"some event fail to save" : "An event was not saved (an error occured)."
"imported events": "Amount of imported events"
"import finished": "Your import is now finished"
"import error occured for": "Import error occured for following elements:"
"export your calendar": "Export your calendar"
'please select existing calendar' : 'Please select an existing calendar.'
# DATE
"January" : "January"
"February" : "February"
"March" : "March"
"April" : "April"
"May" : "May"
"June" : "June"
"July" : "July"
"August" : "August"
"September" : "September"
"October" : "October"
"November" : "November"
"December" : "December"
"January" : "January"
"February" : "February"
'Jan' : 'Jan'
'Feb' : 'Feb'
'Mar' : 'Mar'
'Apr' : 'Apr'
'Jun' : 'Jun'
'Jul' : 'Jul'
'Aug' : 'Aug'
'Sep' : 'Sep'
'Oct' : 'Oct'
'Nov' : 'Nov'
'Dec' : 'Dec'
'calendar exist error': 'A calendar named "New Calendar" already exists.'
# Emails
'email date format': 'MMMM Do YYYY, h:mm a'
'email date format allday': 'MMMM Do YYYY, [all day long]'
'email invitation title': 'Invitation to "%{description}"'
'email invitation content': """
Hello, I would like to invite you to the following event:
%{description} %{place}
on %{date}
Would you be there?
Yes
%{url}?status=ACCEPTED&key=%{key}
No
%{url}?status=DECLINED&key=%{key}
"""
'email update title': "Event \"%{description}\" has changed"
'email update content': """
An event you were invited to has changed:
%{description} %{place}
On %{date}
I'm still going
%{url}?status=ACCEPTED&key=%{key}
I'm not going anymore
%{url}?status=DECLINED&key=%{key}
"""
'email delete title': 'This event has been canceled: %{description}'
'email delete content': """
This event has been canceled:
%{description} %{place}
On %{date}
"""
| 117398 | module.exports =
"default calendar name" : "my calendar"
"Add" : "Add"
"event" : "Event"
"create event" : "Event creation"
"edit event" : "Event edition"
"edit" : "Edit"
"create" : "Create"
"creation" : "Creation"
"invite" : "Invite"
"close" : "Close"
"delete" : "Delete"
"change color" : "Change color"
"rename" : "Rename"
"export" : "Export"
"remove" : "Remove event"
"duplicate" : "Duplicate event"
"Place" : "Place"
'all day' : 'all day'
'All day' : 'All day'
"description" : "Description"
"date" : "date"
"Day" : "Day"
"days" : "days"
"Edit" : "Edit"
"Email" : "Email"
"Import" : "Import"
"Export" : "Export"
"show" : "Show"
"hide" : "Hide"
"List" : "List"
"list" : "list"
"Calendar" : "Calendar"
"calendar" : "Calendar"
"Sync" : "Sync"
"ie: 9:00 important meeting" : "ie: 9:00 important meeting"
"Month" : "Month"
"Popup" : "Popup"
"Switch to List" : "Switch to List"
"Switch to Calendar" : "Switch to Calendar"
"time" : "time"
"Today" : "Today"
'today' : 'today'
"What should I remind you ?" : "What should I remind you?"
"select an icalendar file" : "Select an icalendar file"
"import your icalendar file" : "import your icalendar file"
"confirm import" : "confirm import"
"cancel" : "cancel"
"Create" : "Create"
"Events to import" : "Events to import"
"Create Event" : "Create Event"
"From [hours:minutes]" : "From [hours:minutes]"
"To [hours:minutes]" : "To [hours:minutes]"
"To [date]" : "To [date]"
"Description" : "Description"
"days after" : "days after"
"days later" : "days later"
"Week" : "Week"
"Display" : "Notification"
"DISPLAY" : "Notification"
"EMAIL" : "E-mail"
"BOTH" : "E-mail & Notification"
"display previous events" : "Display previous events"
"display next events" : "Display next events"
"event" : "Event"
"are you sure" : "Are you sure?"
"confirm delete calendar" : "You are about to delete all the events related to %{calendarName}. Are you sure?"
"confirm delete selected calendars" : "You are about to delete all the selected calendars. Are you sure?"
"advanced" : "More details"
"enter email" : "Enter email"
"ON" : "on"
"OFF" : "off"
"no description" : "No description"
"add calendar" : "Add calendar"
"new calendar" : "New calendar"
"multiple actions" : "Multiple actions"
# RRULE related
"recurrence" : "Recurrence"
"recurrence rule" : "Recurrence rules"
"make reccurent" : "Make recurrent"
"repeat every" : "Repeat every"
"no recurrence" : "No recurrence"
"repeat on" : "Repeat on"
"repeat on date" : "Repeat on dates"
"repeat on weekday" : "Repeat on weekday"
"repeat until" : "Repeat until"
"after" : "After"
"repeat" : "Repeat"
"forever" : "Forever"
"occurences" : "occurences"
"every" : "Every"
'minutes' : 'minutes'
'minute ' : 'minute'
'minute' : 'minute'
'hours' : 'hours'
'hour' : 'hour'
"days" : "days"
"day" : "day"
"weeks" : "weeks"
"week" : "week"
"months" : "months"
"month" : "month"
"years" : "years"
"year" : "year"
"until" : "until"
"for" : "for"
"on" : "on"
"on the" : "on the"
"th" : "th"
"nd" : "nd"
"rd" : "rd"
"st" : "st"
"last" : "last"
"and" : "and"
"times" : "times"
"weekday" : "weekday"
# MODAL
"summary" : "Summary"
"start" : "Start"
"end" : "End"
"tags" : "Tags"
"add tags" : "Add tags"
"change" : "Change"
"change to" : "Change to"
"change calendar" : "Change calendar"
"save changes" : "Save changes"
"save changes and invite guests" : "Save changes and invite guests"
"guests" : "Guests"
"cancel Invitation" : "Cancel the invitation"
"From" : "From"
"To" : "To"
"All day, until" : "All day, until"
"All one day" : "All day"
'Reminders before the event' : 'Reminders before the event'
"reminder" : "Reminder"
# CONFIRM MODAL
'send mails question' : 'Send a notification email to:'
'modal send mails' : 'Send a notification'
'yes' : 'Yes'
'no' : 'No'
# ERRORS
"no summary" : "A summary must be set."
"start after end" : "The start date is after the end date."
"invalid start date" : "The start date is invalid."
"invalid end date" : "The end date is invalid."
"invalid trigg date" : "The date is invalid."
"invalid action" : "The action is invalid."
"server error occured" : "A server error occured."
# SYNC
"synchronization" : "Synchronization"
"mobile sync" : "Mobile Sync (CalDAV)"
"link imported events with calendar": "Link events to import with following calendar:"
"import an ical file" : "To import an ICal file into your cozy calendar, first click on this button to preload it:"
"download a copy of your calendar" : "Select one calendar and then click on the export button, to download a copy if the calendar as an ICal file, :"
"icalendar export" : "ICalendar Export"
"icalendar import" : "ICalendar Import"
"to sync your cal with" : "To synchronize your calendar with your devices, you must follow two steps"
"sync headline with data" : "To synchronize your calendar, use the following information:"
"sync url" : "URL:"
"sync login" : "Username:"
"sync password" : "<PASSWORD>:"
"sync help" : "Are you lost? Follow the"
"sync help link" : "step-by-step guide!"
"install the sync module" : "Install the Sync module from the Cozy App Store"
"connect to it and follow" : "Connect to it and follow the instructions related to CalDAV."
"some event fail to save" : "An event was not saved (an error occured)."
"imported events": "Amount of imported events"
"import finished": "Your import is now finished"
"import error occured for": "Import error occured for following elements:"
"export your calendar": "Export your calendar"
'please select existing calendar' : 'Please select an existing calendar.'
# DATE
"January" : "January"
"February" : "February"
"March" : "March"
"April" : "April"
"May" : "May"
"June" : "June"
"July" : "July"
"August" : "August"
"September" : "September"
"October" : "October"
"November" : "November"
"December" : "December"
"January" : "January"
"February" : "February"
'Jan' : 'Jan'
'Feb' : 'Feb'
'Mar' : 'Mar'
'Apr' : 'Apr'
'Jun' : 'Jun'
'Jul' : 'Jul'
'Aug' : 'Aug'
'Sep' : 'Sep'
'Oct' : 'Oct'
'Nov' : 'Nov'
'Dec' : 'Dec'
'calendar exist error': 'A calendar named "New Calendar" already exists.'
# Emails
'email date format': 'MMMM Do YYYY, h:mm a'
'email date format allday': 'MMMM Do YYYY, [all day long]'
'email invitation title': 'Invitation to "%{description}"'
'email invitation content': """
Hello, I would like to invite you to the following event:
%{description} %{place}
on %{date}
Would you be there?
Yes
%{url}?status=ACCEPTED&key=%{key}
No
%{url}?status=DECLINED&key=%{key}
"""
'email update title': "Event \"%{description}\" has changed"
'email update content': """
An event you were invited to has changed:
%{description} %{place}
On %{date}
I'm still going
%{url}?status=ACCEPTED&key=%{key}
I'm not going anymore
%{url}?status=DECLINED&key=%{key}
"""
'email delete title': 'This event has been canceled: %{description}'
'email delete content': """
This event has been canceled:
%{description} %{place}
On %{date}
"""
| true | module.exports =
"default calendar name" : "my calendar"
"Add" : "Add"
"event" : "Event"
"create event" : "Event creation"
"edit event" : "Event edition"
"edit" : "Edit"
"create" : "Create"
"creation" : "Creation"
"invite" : "Invite"
"close" : "Close"
"delete" : "Delete"
"change color" : "Change color"
"rename" : "Rename"
"export" : "Export"
"remove" : "Remove event"
"duplicate" : "Duplicate event"
"Place" : "Place"
'all day' : 'all day'
'All day' : 'All day'
"description" : "Description"
"date" : "date"
"Day" : "Day"
"days" : "days"
"Edit" : "Edit"
"Email" : "Email"
"Import" : "Import"
"Export" : "Export"
"show" : "Show"
"hide" : "Hide"
"List" : "List"
"list" : "list"
"Calendar" : "Calendar"
"calendar" : "Calendar"
"Sync" : "Sync"
"ie: 9:00 important meeting" : "ie: 9:00 important meeting"
"Month" : "Month"
"Popup" : "Popup"
"Switch to List" : "Switch to List"
"Switch to Calendar" : "Switch to Calendar"
"time" : "time"
"Today" : "Today"
'today' : 'today'
"What should I remind you ?" : "What should I remind you?"
"select an icalendar file" : "Select an icalendar file"
"import your icalendar file" : "import your icalendar file"
"confirm import" : "confirm import"
"cancel" : "cancel"
"Create" : "Create"
"Events to import" : "Events to import"
"Create Event" : "Create Event"
"From [hours:minutes]" : "From [hours:minutes]"
"To [hours:minutes]" : "To [hours:minutes]"
"To [date]" : "To [date]"
"Description" : "Description"
"days after" : "days after"
"days later" : "days later"
"Week" : "Week"
"Display" : "Notification"
"DISPLAY" : "Notification"
"EMAIL" : "E-mail"
"BOTH" : "E-mail & Notification"
"display previous events" : "Display previous events"
"display next events" : "Display next events"
"event" : "Event"
"are you sure" : "Are you sure?"
"confirm delete calendar" : "You are about to delete all the events related to %{calendarName}. Are you sure?"
"confirm delete selected calendars" : "You are about to delete all the selected calendars. Are you sure?"
"advanced" : "More details"
"enter email" : "Enter email"
"ON" : "on"
"OFF" : "off"
"no description" : "No description"
"add calendar" : "Add calendar"
"new calendar" : "New calendar"
"multiple actions" : "Multiple actions"
# RRULE related
"recurrence" : "Recurrence"
"recurrence rule" : "Recurrence rules"
"make reccurent" : "Make recurrent"
"repeat every" : "Repeat every"
"no recurrence" : "No recurrence"
"repeat on" : "Repeat on"
"repeat on date" : "Repeat on dates"
"repeat on weekday" : "Repeat on weekday"
"repeat until" : "Repeat until"
"after" : "After"
"repeat" : "Repeat"
"forever" : "Forever"
"occurences" : "occurences"
"every" : "Every"
'minutes' : 'minutes'
'minute ' : 'minute'
'minute' : 'minute'
'hours' : 'hours'
'hour' : 'hour'
"days" : "days"
"day" : "day"
"weeks" : "weeks"
"week" : "week"
"months" : "months"
"month" : "month"
"years" : "years"
"year" : "year"
"until" : "until"
"for" : "for"
"on" : "on"
"on the" : "on the"
"th" : "th"
"nd" : "nd"
"rd" : "rd"
"st" : "st"
"last" : "last"
"and" : "and"
"times" : "times"
"weekday" : "weekday"
# MODAL
"summary" : "Summary"
"start" : "Start"
"end" : "End"
"tags" : "Tags"
"add tags" : "Add tags"
"change" : "Change"
"change to" : "Change to"
"change calendar" : "Change calendar"
"save changes" : "Save changes"
"save changes and invite guests" : "Save changes and invite guests"
"guests" : "Guests"
"cancel Invitation" : "Cancel the invitation"
"From" : "From"
"To" : "To"
"All day, until" : "All day, until"
"All one day" : "All day"
'Reminders before the event' : 'Reminders before the event'
"reminder" : "Reminder"
# CONFIRM MODAL
'send mails question' : 'Send a notification email to:'
'modal send mails' : 'Send a notification'
'yes' : 'Yes'
'no' : 'No'
# ERRORS
"no summary" : "A summary must be set."
"start after end" : "The start date is after the end date."
"invalid start date" : "The start date is invalid."
"invalid end date" : "The end date is invalid."
"invalid trigg date" : "The date is invalid."
"invalid action" : "The action is invalid."
"server error occured" : "A server error occured."
# SYNC
"synchronization" : "Synchronization"
"mobile sync" : "Mobile Sync (CalDAV)"
"link imported events with calendar": "Link events to import with following calendar:"
"import an ical file" : "To import an ICal file into your cozy calendar, first click on this button to preload it:"
"download a copy of your calendar" : "Select one calendar and then click on the export button, to download a copy if the calendar as an ICal file, :"
"icalendar export" : "ICalendar Export"
"icalendar import" : "ICalendar Import"
"to sync your cal with" : "To synchronize your calendar with your devices, you must follow two steps"
"sync headline with data" : "To synchronize your calendar, use the following information:"
"sync url" : "URL:"
"sync login" : "Username:"
"sync password" : "PI:PASSWORD:<PASSWORD>END_PI:"
"sync help" : "Are you lost? Follow the"
"sync help link" : "step-by-step guide!"
"install the sync module" : "Install the Sync module from the Cozy App Store"
"connect to it and follow" : "Connect to it and follow the instructions related to CalDAV."
"some event fail to save" : "An event was not saved (an error occured)."
"imported events": "Amount of imported events"
"import finished": "Your import is now finished"
"import error occured for": "Import error occured for following elements:"
"export your calendar": "Export your calendar"
'please select existing calendar' : 'Please select an existing calendar.'
# DATE
"January" : "January"
"February" : "February"
"March" : "March"
"April" : "April"
"May" : "May"
"June" : "June"
"July" : "July"
"August" : "August"
"September" : "September"
"October" : "October"
"November" : "November"
"December" : "December"
"January" : "January"
"February" : "February"
'Jan' : 'Jan'
'Feb' : 'Feb'
'Mar' : 'Mar'
'Apr' : 'Apr'
'Jun' : 'Jun'
'Jul' : 'Jul'
'Aug' : 'Aug'
'Sep' : 'Sep'
'Oct' : 'Oct'
'Nov' : 'Nov'
'Dec' : 'Dec'
'calendar exist error': 'A calendar named "New Calendar" already exists.'
# Emails
'email date format': 'MMMM Do YYYY, h:mm a'
'email date format allday': 'MMMM Do YYYY, [all day long]'
'email invitation title': 'Invitation to "%{description}"'
'email invitation content': """
Hello, I would like to invite you to the following event:
%{description} %{place}
on %{date}
Would you be there?
Yes
%{url}?status=ACCEPTED&key=%{key}
No
%{url}?status=DECLINED&key=%{key}
"""
'email update title': "Event \"%{description}\" has changed"
'email update content': """
An event you were invited to has changed:
%{description} %{place}
On %{date}
I'm still going
%{url}?status=ACCEPTED&key=%{key}
I'm not going anymore
%{url}?status=DECLINED&key=%{key}
"""
'email delete title': 'This event has been canceled: %{description}'
'email delete content': """
This event has been canceled:
%{description} %{place}
On %{date}
"""
|
[
{
"context": "\n beforeEach ->\n player = new Player(\"Jin\")\n oldState = new GameState\n oldSta",
"end": 536,
"score": 0.9982751607894897,
"start": 533,
"tag": "NAME",
"value": "Jin"
},
{
"context": "ull\n\n beforeEach ->\n player = new Player(\"Zell\")\n state = new GameState\n\n describe \"when",
"end": 1304,
"score": 0.9981250166893005,
"start": 1300,
"tag": "NAME",
"value": "Zell"
},
{
"context": "ull\n\n beforeEach ->\n player = new Player(\"Squall\")\n state = new GameState\n spyOn(state, ",
"end": 2644,
"score": 0.9424309730529785,
"start": 2638,
"tag": "USERNAME",
"value": "Squall"
},
{
"context": "ll\n\n beforeEach ->\n player1 = new Player(\"Squall\")\n player2 = new Player(\"Winter\")\n stat",
"end": 3719,
"score": 0.9653022885322571,
"start": 3713,
"tag": "USERNAME",
"value": "Squall"
},
{
"context": " new Player(\"Squall\")\n player2 = new Player(\"Winter\")\n state = new GameState\n\n describe \"ther",
"end": 3756,
"score": 0.999493420124054,
"start": 3750,
"tag": "NAME",
"value": "Winter"
}
] | spec/javascripts/game_state_spec.js.coffee | fcbajao/connect-4 | 0 | $ = require("jquery")
GameState = require("game_state")
Player = require("player")
describe "GameState", ->
state = null
describe "constructor", ->
describe "previous state was not given", ->
beforeEach ->
state = new GameState
it "sets grid with empty columns (x's contains empty y's)", ->
for n in [0..6]
expect(state.grid[n].length).toEqual(0)
describe "previous state was given", ->
player = null
oldState = null
beforeEach ->
player = new Player("Jin")
oldState = new GameState
oldState.grid[0][0] = player
oldState.grid[0][1] = player
oldState.grid[0][2] = player
oldState.lastMove = {x: 0, player: player}
state = new GameState(oldState)
it "copies the grid from the previous state", ->
expect(state.grid[0][0]).toEqual(oldState.grid[0][0])
expect(state.grid[0][1]).toEqual(oldState.grid[0][1])
expect(state.grid[0][2]).toEqual(oldState.grid[0][2])
it "copies the lastMove from the previous state", ->
expect(state.lastMove.x).toEqual(oldState.lastMove.x)
expect(state.lastMove.player).toEqual(oldState.lastMove.player)
describe "#isOver", ->
player = null
beforeEach ->
player = new Player("Zell")
state = new GameState
describe "when there's a winner", ->
beforeEach ->
spyOn(state, "getAvailableMoves").and.returnValue([0])
spyOn(state, "getWinner").and.returnValue(player)
it "returns true", ->
expect(state.isOver()).toBe(true)
describe "when there are no more moves left", ->
beforeEach ->
spyOn(state, "getAvailableMoves").and.returnValue([])
spyOn(state, "getWinner").and.returnValue(null)
it "returns true", ->
expect(state.isOver()).toBe(true)
describe "#getAvailableMoves", ->
beforeEach ->
state = new GameState
state.grid[0][0] = true
state.grid[0][1] = true
state.grid[0][2] = true
state.grid[0][3] = true
state.grid[0][4] = true
state.grid[2][0] = true
state.grid[2][1] = true
state.grid[2][2] = true
state.grid[2][3] = true
state.grid[2][4] = true
state.grid[2][5] = true
state.grid[3][0] = true
state.grid[3][1] = true
state.grid[3][2] = true
state.grid[3][3] = true
it "returns an array of available columns to make a move on", ->
cols = state.getAvailableMoves()
expect(cols).toEqual([0, 1, 3, 4, 5, 6])
describe "#makeMove", ->
player = null
beforeEach ->
player = new Player("Squall")
state = new GameState
spyOn(state, "checkWinner")
describe "and the column is empty", ->
beforeEach ->
state.makeMove(0, player)
it "sets player on the slot", ->
expect(state.grid[0][0]).toEqual(player)
it "calls #checkWinner with the latest x and y", ->
expect(state.checkWinner).toHaveBeenCalledWith(0, 0)
it "sets lastMove", ->
expect(state.getLastMove()).toEqual({x: 0, y: 0, player: player})
describe "and the column is not empty", ->
beforeEach ->
state.grid[2][0] = player
state.makeMove(2, player)
it "sets player on the top available slot of the column", ->
expect(state.grid[2][1]).toEqual(player)
it "calls #checkWinner with the latest x and y", ->
expect(state.checkWinner).toHaveBeenCalledWith(2, 1)
it "sets lastMove", ->
expect(state.getLastMove()).toEqual({x: 2, y: 1, player: player})
describe "#checkWinner", ->
player1 = null
player2 = null
beforeEach ->
player1 = new Player("Squall")
player2 = new Player("Winter")
state = new GameState
describe "there's a winner", ->
describe "diagonal backward '/' victory", ->
beforeEach ->
state.grid[0][1] = player1
state.grid[1][2] = player1
state.grid[2][3] = player1
state.grid[3][4] = player1
state.grid[4][5] = player1
state.checkWinner(1, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "diagonal forward '\' victory", ->
beforeEach ->
state.grid[0][4] = player1
state.grid[1][3] = player1
state.grid[2][2] = player1
state.grid[3][1] = player1
state.grid[4][0] = player1
state.checkWinner(2, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "horizontal victory", ->
beforeEach ->
state.grid[2][2] = player1
state.grid[3][2] = player1
state.grid[4][2] = player1
state.grid[5][2] = player1
state.grid[6][2] = player1
state.checkWinner(4, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "vertical victory", ->
beforeEach ->
state.grid[0][5] = player1
state.grid[0][4] = player1
state.grid[0][3] = player1
state.grid[0][2] = player1
state.checkWinner(0, 5)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "no winner yet", ->
describe "broken diagonal backward '/'", ->
beforeEach ->
state.grid[0][1] = player1
state.grid[1][2] = player1
state.grid[2][3] = player2
state.grid[3][4] = player1
state.grid[4][5] = player1
state.checkWinner(1, 2)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken diagonal forward '\'", ->
beforeEach ->
state.grid[0][4] = player1
state.grid[1][3] = player1
state.grid[2][2] = player2
state.grid[3][1] = player1
state.grid[4][0] = player1
state.checkWinner(4, 0)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken horizontal", ->
beforeEach ->
state.grid[2][2] = player1
state.grid[3][2] = player1
state.grid[4][2] = player2
state.grid[5][2] = player1
state.grid[6][2] = player1
state.checkWinner(2, 2)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken vertical", ->
beforeEach ->
state.grid[0][5] = player1
state.grid[0][4] = player2
state.grid[0][3] = player1
state.grid[0][2] = player1
state.checkWinner(0 ,5)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
| 180152 | $ = require("jquery")
GameState = require("game_state")
Player = require("player")
describe "GameState", ->
state = null
describe "constructor", ->
describe "previous state was not given", ->
beforeEach ->
state = new GameState
it "sets grid with empty columns (x's contains empty y's)", ->
for n in [0..6]
expect(state.grid[n].length).toEqual(0)
describe "previous state was given", ->
player = null
oldState = null
beforeEach ->
player = new Player("<NAME>")
oldState = new GameState
oldState.grid[0][0] = player
oldState.grid[0][1] = player
oldState.grid[0][2] = player
oldState.lastMove = {x: 0, player: player}
state = new GameState(oldState)
it "copies the grid from the previous state", ->
expect(state.grid[0][0]).toEqual(oldState.grid[0][0])
expect(state.grid[0][1]).toEqual(oldState.grid[0][1])
expect(state.grid[0][2]).toEqual(oldState.grid[0][2])
it "copies the lastMove from the previous state", ->
expect(state.lastMove.x).toEqual(oldState.lastMove.x)
expect(state.lastMove.player).toEqual(oldState.lastMove.player)
describe "#isOver", ->
player = null
beforeEach ->
player = new Player("<NAME>")
state = new GameState
describe "when there's a winner", ->
beforeEach ->
spyOn(state, "getAvailableMoves").and.returnValue([0])
spyOn(state, "getWinner").and.returnValue(player)
it "returns true", ->
expect(state.isOver()).toBe(true)
describe "when there are no more moves left", ->
beforeEach ->
spyOn(state, "getAvailableMoves").and.returnValue([])
spyOn(state, "getWinner").and.returnValue(null)
it "returns true", ->
expect(state.isOver()).toBe(true)
describe "#getAvailableMoves", ->
beforeEach ->
state = new GameState
state.grid[0][0] = true
state.grid[0][1] = true
state.grid[0][2] = true
state.grid[0][3] = true
state.grid[0][4] = true
state.grid[2][0] = true
state.grid[2][1] = true
state.grid[2][2] = true
state.grid[2][3] = true
state.grid[2][4] = true
state.grid[2][5] = true
state.grid[3][0] = true
state.grid[3][1] = true
state.grid[3][2] = true
state.grid[3][3] = true
it "returns an array of available columns to make a move on", ->
cols = state.getAvailableMoves()
expect(cols).toEqual([0, 1, 3, 4, 5, 6])
describe "#makeMove", ->
player = null
beforeEach ->
player = new Player("Squall")
state = new GameState
spyOn(state, "checkWinner")
describe "and the column is empty", ->
beforeEach ->
state.makeMove(0, player)
it "sets player on the slot", ->
expect(state.grid[0][0]).toEqual(player)
it "calls #checkWinner with the latest x and y", ->
expect(state.checkWinner).toHaveBeenCalledWith(0, 0)
it "sets lastMove", ->
expect(state.getLastMove()).toEqual({x: 0, y: 0, player: player})
describe "and the column is not empty", ->
beforeEach ->
state.grid[2][0] = player
state.makeMove(2, player)
it "sets player on the top available slot of the column", ->
expect(state.grid[2][1]).toEqual(player)
it "calls #checkWinner with the latest x and y", ->
expect(state.checkWinner).toHaveBeenCalledWith(2, 1)
it "sets lastMove", ->
expect(state.getLastMove()).toEqual({x: 2, y: 1, player: player})
describe "#checkWinner", ->
player1 = null
player2 = null
beforeEach ->
player1 = new Player("Squall")
player2 = new Player("<NAME>")
state = new GameState
describe "there's a winner", ->
describe "diagonal backward '/' victory", ->
beforeEach ->
state.grid[0][1] = player1
state.grid[1][2] = player1
state.grid[2][3] = player1
state.grid[3][4] = player1
state.grid[4][5] = player1
state.checkWinner(1, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "diagonal forward '\' victory", ->
beforeEach ->
state.grid[0][4] = player1
state.grid[1][3] = player1
state.grid[2][2] = player1
state.grid[3][1] = player1
state.grid[4][0] = player1
state.checkWinner(2, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "horizontal victory", ->
beforeEach ->
state.grid[2][2] = player1
state.grid[3][2] = player1
state.grid[4][2] = player1
state.grid[5][2] = player1
state.grid[6][2] = player1
state.checkWinner(4, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "vertical victory", ->
beforeEach ->
state.grid[0][5] = player1
state.grid[0][4] = player1
state.grid[0][3] = player1
state.grid[0][2] = player1
state.checkWinner(0, 5)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "no winner yet", ->
describe "broken diagonal backward '/'", ->
beforeEach ->
state.grid[0][1] = player1
state.grid[1][2] = player1
state.grid[2][3] = player2
state.grid[3][4] = player1
state.grid[4][5] = player1
state.checkWinner(1, 2)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken diagonal forward '\'", ->
beforeEach ->
state.grid[0][4] = player1
state.grid[1][3] = player1
state.grid[2][2] = player2
state.grid[3][1] = player1
state.grid[4][0] = player1
state.checkWinner(4, 0)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken horizontal", ->
beforeEach ->
state.grid[2][2] = player1
state.grid[3][2] = player1
state.grid[4][2] = player2
state.grid[5][2] = player1
state.grid[6][2] = player1
state.checkWinner(2, 2)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken vertical", ->
beforeEach ->
state.grid[0][5] = player1
state.grid[0][4] = player2
state.grid[0][3] = player1
state.grid[0][2] = player1
state.checkWinner(0 ,5)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
| true | $ = require("jquery")
GameState = require("game_state")
Player = require("player")
describe "GameState", ->
state = null
describe "constructor", ->
describe "previous state was not given", ->
beforeEach ->
state = new GameState
it "sets grid with empty columns (x's contains empty y's)", ->
for n in [0..6]
expect(state.grid[n].length).toEqual(0)
describe "previous state was given", ->
player = null
oldState = null
beforeEach ->
player = new Player("PI:NAME:<NAME>END_PI")
oldState = new GameState
oldState.grid[0][0] = player
oldState.grid[0][1] = player
oldState.grid[0][2] = player
oldState.lastMove = {x: 0, player: player}
state = new GameState(oldState)
it "copies the grid from the previous state", ->
expect(state.grid[0][0]).toEqual(oldState.grid[0][0])
expect(state.grid[0][1]).toEqual(oldState.grid[0][1])
expect(state.grid[0][2]).toEqual(oldState.grid[0][2])
it "copies the lastMove from the previous state", ->
expect(state.lastMove.x).toEqual(oldState.lastMove.x)
expect(state.lastMove.player).toEqual(oldState.lastMove.player)
describe "#isOver", ->
player = null
beforeEach ->
player = new Player("PI:NAME:<NAME>END_PI")
state = new GameState
describe "when there's a winner", ->
beforeEach ->
spyOn(state, "getAvailableMoves").and.returnValue([0])
spyOn(state, "getWinner").and.returnValue(player)
it "returns true", ->
expect(state.isOver()).toBe(true)
describe "when there are no more moves left", ->
beforeEach ->
spyOn(state, "getAvailableMoves").and.returnValue([])
spyOn(state, "getWinner").and.returnValue(null)
it "returns true", ->
expect(state.isOver()).toBe(true)
describe "#getAvailableMoves", ->
beforeEach ->
state = new GameState
state.grid[0][0] = true
state.grid[0][1] = true
state.grid[0][2] = true
state.grid[0][3] = true
state.grid[0][4] = true
state.grid[2][0] = true
state.grid[2][1] = true
state.grid[2][2] = true
state.grid[2][3] = true
state.grid[2][4] = true
state.grid[2][5] = true
state.grid[3][0] = true
state.grid[3][1] = true
state.grid[3][2] = true
state.grid[3][3] = true
it "returns an array of available columns to make a move on", ->
cols = state.getAvailableMoves()
expect(cols).toEqual([0, 1, 3, 4, 5, 6])
describe "#makeMove", ->
player = null
beforeEach ->
player = new Player("Squall")
state = new GameState
spyOn(state, "checkWinner")
describe "and the column is empty", ->
beforeEach ->
state.makeMove(0, player)
it "sets player on the slot", ->
expect(state.grid[0][0]).toEqual(player)
it "calls #checkWinner with the latest x and y", ->
expect(state.checkWinner).toHaveBeenCalledWith(0, 0)
it "sets lastMove", ->
expect(state.getLastMove()).toEqual({x: 0, y: 0, player: player})
describe "and the column is not empty", ->
beforeEach ->
state.grid[2][0] = player
state.makeMove(2, player)
it "sets player on the top available slot of the column", ->
expect(state.grid[2][1]).toEqual(player)
it "calls #checkWinner with the latest x and y", ->
expect(state.checkWinner).toHaveBeenCalledWith(2, 1)
it "sets lastMove", ->
expect(state.getLastMove()).toEqual({x: 2, y: 1, player: player})
describe "#checkWinner", ->
player1 = null
player2 = null
beforeEach ->
player1 = new Player("Squall")
player2 = new Player("PI:NAME:<NAME>END_PI")
state = new GameState
describe "there's a winner", ->
describe "diagonal backward '/' victory", ->
beforeEach ->
state.grid[0][1] = player1
state.grid[1][2] = player1
state.grid[2][3] = player1
state.grid[3][4] = player1
state.grid[4][5] = player1
state.checkWinner(1, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "diagonal forward '\' victory", ->
beforeEach ->
state.grid[0][4] = player1
state.grid[1][3] = player1
state.grid[2][2] = player1
state.grid[3][1] = player1
state.grid[4][0] = player1
state.checkWinner(2, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "horizontal victory", ->
beforeEach ->
state.grid[2][2] = player1
state.grid[3][2] = player1
state.grid[4][2] = player1
state.grid[5][2] = player1
state.grid[6][2] = player1
state.checkWinner(4, 2)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "vertical victory", ->
beforeEach ->
state.grid[0][5] = player1
state.grid[0][4] = player1
state.grid[0][3] = player1
state.grid[0][2] = player1
state.checkWinner(0, 5)
it "sets winner", ->
expect(state.getWinner()).toEqual(player1)
describe "no winner yet", ->
describe "broken diagonal backward '/'", ->
beforeEach ->
state.grid[0][1] = player1
state.grid[1][2] = player1
state.grid[2][3] = player2
state.grid[3][4] = player1
state.grid[4][5] = player1
state.checkWinner(1, 2)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken diagonal forward '\'", ->
beforeEach ->
state.grid[0][4] = player1
state.grid[1][3] = player1
state.grid[2][2] = player2
state.grid[3][1] = player1
state.grid[4][0] = player1
state.checkWinner(4, 0)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken horizontal", ->
beforeEach ->
state.grid[2][2] = player1
state.grid[3][2] = player1
state.grid[4][2] = player2
state.grid[5][2] = player1
state.grid[6][2] = player1
state.checkWinner(2, 2)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
describe "broken vertical", ->
beforeEach ->
state.grid[0][5] = player1
state.grid[0][4] = player2
state.grid[0][3] = player1
state.grid[0][2] = player1
state.checkWinner(0 ,5)
it "sets no winner", ->
expect(state.getWinner()).toBeUndefined()
|
[
{
"context": "###\n backbone-sql.js 0.5.7\n Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-sql\n Lice",
"end": 57,
"score": 0.9991560578346252,
"start": 49,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": " Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-sql\n License: MIT (http://www.opensourc",
"end": 87,
"score": 0.9997164607048035,
"start": 79,
"tag": "USERNAME",
"value": "vidigami"
}
] | src/connection.coffee | michaelBenin/backbone-sql | 1 | ###
backbone-sql.js 0.5.7
Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-sql
License: MIT (http://www.opensource.org/licenses/mit-license.php)
###
_ = require 'underscore'
Knex = require 'knex'
ConnectionPool = require 'backbone-orm/lib/connection_pool'
DatabaseUrl = require 'backbone-orm/lib/database_url'
PROTOCOLS =
'mysql:': 'mysql', 'mysql2:': 'mysql'
'postgres:': 'postgres', 'pg:': 'postgres'
'sqlite:': 'sqlite3', 'sqlite3:': 'sqlite3'
class KnexConnection
constructor: (@knex) ->
destroy: -> # TODO: look for a way to close knex
module.exports = class Connection
constructor: (full_url) ->
database_url = new DatabaseUrl(full_url)
@url = database_url.format({exclude_table: true, exclude_query: true}) # pool the raw endpoint without the table
return if @knex_connection = ConnectionPool.get(@url) # found in pool
throw "Unrecognized sql variant: #{full_url} for protocol: #{database_url.protocol}" unless protocol = PROTOCOLS[database_url.protocol]
if protocol is 'sqlite3'
connection_info = {filename: database_url.host or ':memory:'}
else
connection_info = _.extend({host: database_url.hostname, database: database_url.database, charset: 'utf8'}, database_url.parseAuth() or {})
knex = Knex.initialize({client: protocol, connection: connection_info})
ConnectionPool.set(@url, @knex_connection = new KnexConnection(knex))
knex: -> return @knex_connection?.knex
| 90281 | ###
backbone-sql.js 0.5.7
Copyright (c) 2013 <NAME> - https://github.com/vidigami/backbone-sql
License: MIT (http://www.opensource.org/licenses/mit-license.php)
###
_ = require 'underscore'
Knex = require 'knex'
ConnectionPool = require 'backbone-orm/lib/connection_pool'
DatabaseUrl = require 'backbone-orm/lib/database_url'
PROTOCOLS =
'mysql:': 'mysql', 'mysql2:': 'mysql'
'postgres:': 'postgres', 'pg:': 'postgres'
'sqlite:': 'sqlite3', 'sqlite3:': 'sqlite3'
class KnexConnection
constructor: (@knex) ->
destroy: -> # TODO: look for a way to close knex
module.exports = class Connection
constructor: (full_url) ->
database_url = new DatabaseUrl(full_url)
@url = database_url.format({exclude_table: true, exclude_query: true}) # pool the raw endpoint without the table
return if @knex_connection = ConnectionPool.get(@url) # found in pool
throw "Unrecognized sql variant: #{full_url} for protocol: #{database_url.protocol}" unless protocol = PROTOCOLS[database_url.protocol]
if protocol is 'sqlite3'
connection_info = {filename: database_url.host or ':memory:'}
else
connection_info = _.extend({host: database_url.hostname, database: database_url.database, charset: 'utf8'}, database_url.parseAuth() or {})
knex = Knex.initialize({client: protocol, connection: connection_info})
ConnectionPool.set(@url, @knex_connection = new KnexConnection(knex))
knex: -> return @knex_connection?.knex
| true | ###
backbone-sql.js 0.5.7
Copyright (c) 2013 PI:NAME:<NAME>END_PI - https://github.com/vidigami/backbone-sql
License: MIT (http://www.opensource.org/licenses/mit-license.php)
###
_ = require 'underscore'
Knex = require 'knex'
ConnectionPool = require 'backbone-orm/lib/connection_pool'
DatabaseUrl = require 'backbone-orm/lib/database_url'
PROTOCOLS =
'mysql:': 'mysql', 'mysql2:': 'mysql'
'postgres:': 'postgres', 'pg:': 'postgres'
'sqlite:': 'sqlite3', 'sqlite3:': 'sqlite3'
class KnexConnection
constructor: (@knex) ->
destroy: -> # TODO: look for a way to close knex
module.exports = class Connection
constructor: (full_url) ->
database_url = new DatabaseUrl(full_url)
@url = database_url.format({exclude_table: true, exclude_query: true}) # pool the raw endpoint without the table
return if @knex_connection = ConnectionPool.get(@url) # found in pool
throw "Unrecognized sql variant: #{full_url} for protocol: #{database_url.protocol}" unless protocol = PROTOCOLS[database_url.protocol]
if protocol is 'sqlite3'
connection_info = {filename: database_url.host or ':memory:'}
else
connection_info = _.extend({host: database_url.hostname, database: database_url.database, charset: 'utf8'}, database_url.parseAuth() or {})
knex = Knex.initialize({client: protocol, connection: connection_info})
ConnectionPool.set(@url, @knex_connection = new KnexConnection(knex))
knex: -> return @knex_connection?.knex
|
[
{
"context": ",\n id: 'mapbox.streets',\n accessToken: 'pk.eyJ1IjoicmVwc2FqamoiLCJhIjoiY2phZTRyaHloMXNieDMzcjF2czdxZmFwMSJ9.Xd4rx5sAMSeK60eO_Qq2yA'\n }).addTo(window.monster_map);\n\n L.marker([\n ",
"end": 1678,
"score": 0.9845934510231018,
"start": 1588,
"tag": "KEY",
"value": "pk.eyJ1IjoicmVwc2FqamoiLCJhIjoiY2phZTRyaHloMXNieDMzcjF2czdxZmFwMSJ9.Xd4rx5sAMSeK60eO_Qq2yA"
}
] | app/assets/javascripts/monster_details.coffee | sillevl/ludejo | 0 | $(".monsters.show").ready ->
monster = {
id: $("#monster_detail_map").data("monster-id"),
image: $("#monster_detail_map").data("monster-image"),
longitude: $("#monster_detail_map").data("monster-latitude"),
latitude: $("#monster_detail_map").data("monster-longitude")
}
window.monster_map = L.map('monster_detail_map').setView([monster.longitude, monster.latitude], 15);
onlocationfound = (e) ->
window.my_location = {
latitude: e.latitude
longitude: e.longitude
latlng: e.latlng
}
monster_location = L.latLng(monster.longitude, monster.latitude)
L.circleMarker(e.latlng, {color: "#FF0000", fillOpacity: 1}).addTo(window.monster_map)
distance = e.latlng.distanceTo(monster_location)
$("#distance_to_monster").text(Math.round(distance / 100, 2)/10)
latlongs = [e.latlng, monster_location]
polyline = L.polyline(latlongs, {color: "#FF0000", weight: 2}).addTo(window.monster_map);
if distance > 100
$('#feed_me').addClass 'disabled'
$('#feed_me').removeClass 'pulse'
$('feed_me_text').hide
else
window.monster_map.locate()
window.monster_map.on('locationfound',onlocationfound)
L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token={accessToken}', {
attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, Imagery © <a href="http://mapbox.com">Mapbox</a>',
maxZoom: 18,
id: 'mapbox.streets',
accessToken: 'pk.eyJ1IjoicmVwc2FqamoiLCJhIjoiY2phZTRyaHloMXNieDMzcjF2czdxZmFwMSJ9.Xd4rx5sAMSeK60eO_Qq2yA'
}).addTo(window.monster_map);
L.marker([
monster.longitude,
monster.latitude],
{icon: L.icon(
iconUrl: "/assets/monsters/" + monster.image
iconSize: [ 48, 48]
iconAnchor: [ 24, 24 ])
}
).addTo(window.monster_map);
| 8616 | $(".monsters.show").ready ->
monster = {
id: $("#monster_detail_map").data("monster-id"),
image: $("#monster_detail_map").data("monster-image"),
longitude: $("#monster_detail_map").data("monster-latitude"),
latitude: $("#monster_detail_map").data("monster-longitude")
}
window.monster_map = L.map('monster_detail_map').setView([monster.longitude, monster.latitude], 15);
onlocationfound = (e) ->
window.my_location = {
latitude: e.latitude
longitude: e.longitude
latlng: e.latlng
}
monster_location = L.latLng(monster.longitude, monster.latitude)
L.circleMarker(e.latlng, {color: "#FF0000", fillOpacity: 1}).addTo(window.monster_map)
distance = e.latlng.distanceTo(monster_location)
$("#distance_to_monster").text(Math.round(distance / 100, 2)/10)
latlongs = [e.latlng, monster_location]
polyline = L.polyline(latlongs, {color: "#FF0000", weight: 2}).addTo(window.monster_map);
if distance > 100
$('#feed_me').addClass 'disabled'
$('#feed_me').removeClass 'pulse'
$('feed_me_text').hide
else
window.monster_map.locate()
window.monster_map.on('locationfound',onlocationfound)
L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token={accessToken}', {
attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, Imagery © <a href="http://mapbox.com">Mapbox</a>',
maxZoom: 18,
id: 'mapbox.streets',
accessToken: '<KEY>'
}).addTo(window.monster_map);
L.marker([
monster.longitude,
monster.latitude],
{icon: L.icon(
iconUrl: "/assets/monsters/" + monster.image
iconSize: [ 48, 48]
iconAnchor: [ 24, 24 ])
}
).addTo(window.monster_map);
| true | $(".monsters.show").ready ->
monster = {
id: $("#monster_detail_map").data("monster-id"),
image: $("#monster_detail_map").data("monster-image"),
longitude: $("#monster_detail_map").data("monster-latitude"),
latitude: $("#monster_detail_map").data("monster-longitude")
}
window.monster_map = L.map('monster_detail_map').setView([monster.longitude, monster.latitude], 15);
onlocationfound = (e) ->
window.my_location = {
latitude: e.latitude
longitude: e.longitude
latlng: e.latlng
}
monster_location = L.latLng(monster.longitude, monster.latitude)
L.circleMarker(e.latlng, {color: "#FF0000", fillOpacity: 1}).addTo(window.monster_map)
distance = e.latlng.distanceTo(monster_location)
$("#distance_to_monster").text(Math.round(distance / 100, 2)/10)
latlongs = [e.latlng, monster_location]
polyline = L.polyline(latlongs, {color: "#FF0000", weight: 2}).addTo(window.monster_map);
if distance > 100
$('#feed_me').addClass 'disabled'
$('#feed_me').removeClass 'pulse'
$('feed_me_text').hide
else
window.monster_map.locate()
window.monster_map.on('locationfound',onlocationfound)
L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token={accessToken}', {
attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, Imagery © <a href="http://mapbox.com">Mapbox</a>',
maxZoom: 18,
id: 'mapbox.streets',
accessToken: 'PI:KEY:<KEY>END_PI'
}).addTo(window.monster_map);
L.marker([
monster.longitude,
monster.latitude],
{icon: L.icon(
iconUrl: "/assets/monsters/" + monster.image
iconSize: [ 48, 48]
iconAnchor: [ 24, 24 ])
}
).addTo(window.monster_map);
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9955520629882812,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/hotCodeReplace.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Tools for implementing hot code replace.
#
# This module provides methods for snapshotting a React render tree.
#
# Note: this module depends on the following internal React v0.13.3 properties:
#
# - ReactComponent._reactInternalInstance
# either a ReactCompositeComponentWrapper or a ReactDOMComponent
#
# - ReactCompositeComponentWrapper.getPublicInstance()
# returns a ReactComponent
#
# - ReactCompositeComponentWrapper._renderedComponent
# either a ReactCompositeComponentWrapper or a ReactDOMComponent
#
# - ReactDOMComponent._renderedChildren
# either null or an object mapping child keys to children.
# each child is either a ReactCompositeComponentWrapper or a ReactDOMComponent
Assert = require 'assert'
Async = require 'async'
Imm = require 'immutable'
load = (win) =>
takeSnapshot = (rootComponent) =>
return extractState rootComponent._reactInternalInstance
restoreSnapshot = (rootComponent, snapshot) =>
errors = injectState rootComponent._reactInternalInstance, snapshot
if errors.size > 0
console.error "#{errors.size} error(s) occurred while restoring state snapshot:"
errors.forEach (err) =>
console.error "HCR restoration error: #{err.toString()}"
masterError = new Error("snapshot restoration partially failed")
masterError.causes = errors
throw masterError
extractState = (root) =>
# Is this a composite component?
if root._renderedComponent
return Imm.Map {
type: 'composite'
value: root.getPublicInstance().state
next: extractState root._renderedComponent
}
# OK, we reached the raw DOM component, now we can iterate its children
return Imm.Map {
type: 'dom'
children: Imm.Map(root._renderedChildren or {})
.map (renderedChild) =>
return extractState renderedChild
}
injectState = (root, state) =>
switch state.get('type')
when 'composite'
unless root._renderedComponent?
return Imm.List([
new Error "expected composite but found DOM node"
])
if state.get('value')?
root.getPublicInstance().setState state.get('value')
return injectState root._renderedComponent, state.get('next')
when 'dom'
if root._renderedComponent
return Imm.List([
new Error "expected DOM node but found composite"
])
childErrors = state.get('children').entrySeq().flatMap ([childKey, childState]) =>
renderedChild = root._renderedChildren[childKey]
unless renderedChild
return Imm.List([
new Error "missing child with key #{JSON.stringify childKey}"
])
return injectState renderedChild, childState
.toList()
expectedChildCount = state.get('children').size
actualChildCount = Object.keys(root._renderedChildren or {}).length
if expectedChildCount isnt actualChildCount
return childErrors.push new Error(
"expected #{expectedChildCount} children but found #{actualChildCount}"
)
return childErrors
else
throw new Error "unknown state node type: #{state.get('type')}"
return {
takeSnapshot
restoreSnapshot
}
module.exports = {load}
| 24997 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Tools for implementing hot code replace.
#
# This module provides methods for snapshotting a React render tree.
#
# Note: this module depends on the following internal React v0.13.3 properties:
#
# - ReactComponent._reactInternalInstance
# either a ReactCompositeComponentWrapper or a ReactDOMComponent
#
# - ReactCompositeComponentWrapper.getPublicInstance()
# returns a ReactComponent
#
# - ReactCompositeComponentWrapper._renderedComponent
# either a ReactCompositeComponentWrapper or a ReactDOMComponent
#
# - ReactDOMComponent._renderedChildren
# either null or an object mapping child keys to children.
# each child is either a ReactCompositeComponentWrapper or a ReactDOMComponent
Assert = require 'assert'
Async = require 'async'
Imm = require 'immutable'
load = (win) =>
takeSnapshot = (rootComponent) =>
return extractState rootComponent._reactInternalInstance
restoreSnapshot = (rootComponent, snapshot) =>
errors = injectState rootComponent._reactInternalInstance, snapshot
if errors.size > 0
console.error "#{errors.size} error(s) occurred while restoring state snapshot:"
errors.forEach (err) =>
console.error "HCR restoration error: #{err.toString()}"
masterError = new Error("snapshot restoration partially failed")
masterError.causes = errors
throw masterError
extractState = (root) =>
# Is this a composite component?
if root._renderedComponent
return Imm.Map {
type: 'composite'
value: root.getPublicInstance().state
next: extractState root._renderedComponent
}
# OK, we reached the raw DOM component, now we can iterate its children
return Imm.Map {
type: 'dom'
children: Imm.Map(root._renderedChildren or {})
.map (renderedChild) =>
return extractState renderedChild
}
injectState = (root, state) =>
switch state.get('type')
when 'composite'
unless root._renderedComponent?
return Imm.List([
new Error "expected composite but found DOM node"
])
if state.get('value')?
root.getPublicInstance().setState state.get('value')
return injectState root._renderedComponent, state.get('next')
when 'dom'
if root._renderedComponent
return Imm.List([
new Error "expected DOM node but found composite"
])
childErrors = state.get('children').entrySeq().flatMap ([childKey, childState]) =>
renderedChild = root._renderedChildren[childKey]
unless renderedChild
return Imm.List([
new Error "missing child with key #{JSON.stringify childKey}"
])
return injectState renderedChild, childState
.toList()
expectedChildCount = state.get('children').size
actualChildCount = Object.keys(root._renderedChildren or {}).length
if expectedChildCount isnt actualChildCount
return childErrors.push new Error(
"expected #{expectedChildCount} children but found #{actualChildCount}"
)
return childErrors
else
throw new Error "unknown state node type: #{state.get('type')}"
return {
takeSnapshot
restoreSnapshot
}
module.exports = {load}
| true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Tools for implementing hot code replace.
#
# This module provides methods for snapshotting a React render tree.
#
# Note: this module depends on the following internal React v0.13.3 properties:
#
# - ReactComponent._reactInternalInstance
# either a ReactCompositeComponentWrapper or a ReactDOMComponent
#
# - ReactCompositeComponentWrapper.getPublicInstance()
# returns a ReactComponent
#
# - ReactCompositeComponentWrapper._renderedComponent
# either a ReactCompositeComponentWrapper or a ReactDOMComponent
#
# - ReactDOMComponent._renderedChildren
# either null or an object mapping child keys to children.
# each child is either a ReactCompositeComponentWrapper or a ReactDOMComponent
Assert = require 'assert'
Async = require 'async'
Imm = require 'immutable'
load = (win) =>
takeSnapshot = (rootComponent) =>
return extractState rootComponent._reactInternalInstance
restoreSnapshot = (rootComponent, snapshot) =>
errors = injectState rootComponent._reactInternalInstance, snapshot
if errors.size > 0
console.error "#{errors.size} error(s) occurred while restoring state snapshot:"
errors.forEach (err) =>
console.error "HCR restoration error: #{err.toString()}"
masterError = new Error("snapshot restoration partially failed")
masterError.causes = errors
throw masterError
extractState = (root) =>
# Is this a composite component?
if root._renderedComponent
return Imm.Map {
type: 'composite'
value: root.getPublicInstance().state
next: extractState root._renderedComponent
}
# OK, we reached the raw DOM component, now we can iterate its children
return Imm.Map {
type: 'dom'
children: Imm.Map(root._renderedChildren or {})
.map (renderedChild) =>
return extractState renderedChild
}
injectState = (root, state) =>
switch state.get('type')
when 'composite'
unless root._renderedComponent?
return Imm.List([
new Error "expected composite but found DOM node"
])
if state.get('value')?
root.getPublicInstance().setState state.get('value')
return injectState root._renderedComponent, state.get('next')
when 'dom'
if root._renderedComponent
return Imm.List([
new Error "expected DOM node but found composite"
])
childErrors = state.get('children').entrySeq().flatMap ([childKey, childState]) =>
renderedChild = root._renderedChildren[childKey]
unless renderedChild
return Imm.List([
new Error "missing child with key #{JSON.stringify childKey}"
])
return injectState renderedChild, childState
.toList()
expectedChildCount = state.get('children').size
actualChildCount = Object.keys(root._renderedChildren or {}).length
if expectedChildCount isnt actualChildCount
return childErrors.push new Error(
"expected #{expectedChildCount} children but found #{actualChildCount}"
)
return childErrors
else
throw new Error "unknown state node type: #{state.get('type')}"
return {
takeSnapshot
restoreSnapshot
}
module.exports = {load}
|
[
{
"context": "to']\n choices:\n aa:\n label: 'Aardvark'\n description: 'Basically a long-nose ra",
"end": 2045,
"score": 0.5770446062088013,
"start": 2037,
"tag": "NAME",
"value": "Aardvark"
}
] | app/classifier/mock-data.coffee | camallen/Panoptes-Front-End | 0 | apiClient = require '../api/client'
# This is just a blank image for testing drawing tools.
BLANK_IMAGE = ['data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgAQMAAAA',
'PH06nAAAABlBMVEXMzMyWlpYU2uzLAAAAPUlEQVR4nO3BAQ0AAADCoPdPbQ43oAAAAAAAAAAAAA',
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgzwCX4AAB9Dl2RwAAAABJRU5ErkJggg=='].join ''
workflow = apiClient.type('workflows').create
id: 'MOCK_WORKFLOW_FOR_CLASSIFIER'
first_task: 'crop'
tasks:
crop:
type: 'crop'
instruction: 'Drag out a box around the face.'
help: 'The face is the thing with the nose.'
next: 'survey'
survey:
type: 'survey'
required: true
characteristicsOrder: ['pa', 'co']
characteristics:
pa:
label: 'Pattern'
valuesOrder: ['so', 'sp', 'st', 'ba']
values:
so:
label: 'Solid'
image: '//placehold.it/64.png?text=Solid'
sp:
label: 'Spots'
image: '//placehold.it/64.png?text=Spots'
st:
label: 'Stripes'
image: '//placehold.it/64.png?text=Stripes'
ba:
label: 'Bands'
image: '//placehold.it/64.png?text=Bands'
co:
label: 'Color'
valuesOrder: ['wh', 'ta', 're', 'br', 'bl', 'gr']
values:
wh:
label: 'White'
image: '//placehold.it/64.png?text=White'
ta:
label: 'Tan'
image: '//placehold.it/64.png?text=Tan'
re:
label: 'Red'
image: '//placehold.it/64.png?text=Red'
br:
label: 'Brown'
image: '//placehold.it/64.png?text=Brown'
bl:
label: 'Black'
image: '//placehold.it/64.png?text=Black'
gr:
label: 'Green'
image: '//placehold.it/64.png?text=Green'
choicesOrder: ['aa', 'ar', 'to']
choices:
aa:
label: 'Aardvark'
description: 'Basically a long-nose rabbit'
images: [
'//placehold.it/320x240.png?text=Aardvark 1'
'//placehold.it/320x240.png?text=Aardvark 2'
]
characteristics:
pa: ['so']
co: ['ta', 'br']
confusionsOrder: ['ar']
confusions:
ar: 'They both start with “A”, so _some_ **dummies** get these two mixed up.'
ar:
label: 'Armadillo'
description: 'A little rolly dude'
images: [
'//placehold.it/320x240.png?text=Armadillo 1'
'//placehold.it/320x240.png?text=Armadillo 2'
]
characteristics:
pa: ['so', 'st']
co: ['ta', 'br']
confusionsOrder: []
confusions: {}
to:
label: 'Tortoise'
description: 'Little green house with legs'
images: [
'//placehold.it/320x240.png?text=Tortoise 1'
'//placehold.it/320x240.png?text=Tortoise 2'
]
characteristics:
pa: ['so']
co: ['gr']
confusionsOrder: []
confusions: {}
questionsOrder: ['ho', 'be', 'in', 'hr']
questions:
ho:
required: true
multiple: false
label: 'How many?'
answersOrder: ['one', 'two', 'many']
answers:
one:
label: '1'
two:
label: '2'
many:
label: '3+'
be:
required: true
multiple: true
label: 'Any activity?'
answersOrder: ['mo', 'ea', 'in']
answers:
mo:
label: 'Moving'
ea:
label: 'Eating'
in:
label: 'Interacting'
in:
required: false
label: 'Any injuries?'
answersOrder: ['y', 'n']
answers:
y:
label: 'Yep'
n:
label: 'Nope'
hr:
required: false
multiple: true
label: 'Horns toggle'
answersOrder: ['y']
answers:
y:
label: 'Present'
images: {}
# next: 'draw'
next: 'draw'
draw:
type: 'drawing'
required: true
instruction: 'Draw something.'
help: '''
Do this:
* Pick a tool
* Draw something
'''
tools: [
{
type: 'point'
label: 'Point'
color: 'red'
details: [{
type: 'single'
required: true
question: 'Cool?'
answers: [
{label: 'Yeah'}
{label: 'Nah'}
]
}, {
type: 'multiple'
question: 'Cool stuff?'
answers: [
{label: 'Ice'}
{label: 'Snow'}
]
}]
}
{type: 'line', label: 'Line', color: 'yellow', details: []}
{type: 'rectangle', label: 'Rectangle', color: 'lime', details: []}
{type: 'polygon', label: 'Polygon', color: 'cyan', details: []}
{type: 'circle', label: 'Circle', color: 'blue', details: []}
{type: 'ellipse', label: 'Ellipse', color: 'magenta', details: []}
]
next: 'cool'
cool:
type: 'single'
question: 'Is this cool?'
answers: [
{label: 'Yeah', next: 'features'}
{label: 'Nah', next: null}
]
features:
type: 'multiple'
question: 'What cool features are present?'
answers: [
{label: 'Cold water'}
{label: 'Snow'}
{label: 'Ice'}
{label: 'Sunglasses'}
]
subject = apiClient.type('subjects').create
id: 'MOCK_SUBJECT_FOR_CLASSIFIER'
locations: [
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/1' else BLANK_IMAGE}
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/2' else BLANK_IMAGE}
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/3' else BLANK_IMAGE}
]
metadata:
'Capture date': '5 Feb, 2015'
'Region': 'Chicago, IL'
expert_classification_data:
annotations: [{
task: 'draw'
value: [{
tool: 0
x: 50
y: 50
frame: 0
}, {
tool: 0
x: 150
y: 50
frame: 0
}]
}, {
task: 'cool'
value: 0
}, {
task: 'features'
value: [0, 2]
}]
classification = apiClient.type('classifications').create
annotations: []
metadata: {}
links:
project: 'NO_PROJECT'
workflow: workflow.id
subjects: [subject.id]
_workflow: workflow # TEMP
_subjects: [subject] # TEMP
module.exports = {workflow, subject, classification}
window.mockClassifierData = module.exports
| 192749 | apiClient = require '../api/client'
# This is just a blank image for testing drawing tools.
BLANK_IMAGE = ['data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgAQMAAAA',
'PH06nAAAABlBMVEXMzMyWlpYU2uzLAAAAPUlEQVR4nO3BAQ0AAADCoPdPbQ43oAAAAAAAAAAAAA',
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgzwCX4AAB9Dl2RwAAAABJRU5ErkJggg=='].join ''
workflow = apiClient.type('workflows').create
id: 'MOCK_WORKFLOW_FOR_CLASSIFIER'
first_task: 'crop'
tasks:
crop:
type: 'crop'
instruction: 'Drag out a box around the face.'
help: 'The face is the thing with the nose.'
next: 'survey'
survey:
type: 'survey'
required: true
characteristicsOrder: ['pa', 'co']
characteristics:
pa:
label: 'Pattern'
valuesOrder: ['so', 'sp', 'st', 'ba']
values:
so:
label: 'Solid'
image: '//placehold.it/64.png?text=Solid'
sp:
label: 'Spots'
image: '//placehold.it/64.png?text=Spots'
st:
label: 'Stripes'
image: '//placehold.it/64.png?text=Stripes'
ba:
label: 'Bands'
image: '//placehold.it/64.png?text=Bands'
co:
label: 'Color'
valuesOrder: ['wh', 'ta', 're', 'br', 'bl', 'gr']
values:
wh:
label: 'White'
image: '//placehold.it/64.png?text=White'
ta:
label: 'Tan'
image: '//placehold.it/64.png?text=Tan'
re:
label: 'Red'
image: '//placehold.it/64.png?text=Red'
br:
label: 'Brown'
image: '//placehold.it/64.png?text=Brown'
bl:
label: 'Black'
image: '//placehold.it/64.png?text=Black'
gr:
label: 'Green'
image: '//placehold.it/64.png?text=Green'
choicesOrder: ['aa', 'ar', 'to']
choices:
aa:
label: '<NAME>'
description: 'Basically a long-nose rabbit'
images: [
'//placehold.it/320x240.png?text=Aardvark 1'
'//placehold.it/320x240.png?text=Aardvark 2'
]
characteristics:
pa: ['so']
co: ['ta', 'br']
confusionsOrder: ['ar']
confusions:
ar: 'They both start with “A”, so _some_ **dummies** get these two mixed up.'
ar:
label: 'Armadillo'
description: 'A little rolly dude'
images: [
'//placehold.it/320x240.png?text=Armadillo 1'
'//placehold.it/320x240.png?text=Armadillo 2'
]
characteristics:
pa: ['so', 'st']
co: ['ta', 'br']
confusionsOrder: []
confusions: {}
to:
label: 'Tortoise'
description: 'Little green house with legs'
images: [
'//placehold.it/320x240.png?text=Tortoise 1'
'//placehold.it/320x240.png?text=Tortoise 2'
]
characteristics:
pa: ['so']
co: ['gr']
confusionsOrder: []
confusions: {}
questionsOrder: ['ho', 'be', 'in', 'hr']
questions:
ho:
required: true
multiple: false
label: 'How many?'
answersOrder: ['one', 'two', 'many']
answers:
one:
label: '1'
two:
label: '2'
many:
label: '3+'
be:
required: true
multiple: true
label: 'Any activity?'
answersOrder: ['mo', 'ea', 'in']
answers:
mo:
label: 'Moving'
ea:
label: 'Eating'
in:
label: 'Interacting'
in:
required: false
label: 'Any injuries?'
answersOrder: ['y', 'n']
answers:
y:
label: 'Yep'
n:
label: 'Nope'
hr:
required: false
multiple: true
label: 'Horns toggle'
answersOrder: ['y']
answers:
y:
label: 'Present'
images: {}
# next: 'draw'
next: 'draw'
draw:
type: 'drawing'
required: true
instruction: 'Draw something.'
help: '''
Do this:
* Pick a tool
* Draw something
'''
tools: [
{
type: 'point'
label: 'Point'
color: 'red'
details: [{
type: 'single'
required: true
question: 'Cool?'
answers: [
{label: 'Yeah'}
{label: 'Nah'}
]
}, {
type: 'multiple'
question: 'Cool stuff?'
answers: [
{label: 'Ice'}
{label: 'Snow'}
]
}]
}
{type: 'line', label: 'Line', color: 'yellow', details: []}
{type: 'rectangle', label: 'Rectangle', color: 'lime', details: []}
{type: 'polygon', label: 'Polygon', color: 'cyan', details: []}
{type: 'circle', label: 'Circle', color: 'blue', details: []}
{type: 'ellipse', label: 'Ellipse', color: 'magenta', details: []}
]
next: 'cool'
cool:
type: 'single'
question: 'Is this cool?'
answers: [
{label: 'Yeah', next: 'features'}
{label: 'Nah', next: null}
]
features:
type: 'multiple'
question: 'What cool features are present?'
answers: [
{label: 'Cold water'}
{label: 'Snow'}
{label: 'Ice'}
{label: 'Sunglasses'}
]
subject = apiClient.type('subjects').create
id: 'MOCK_SUBJECT_FOR_CLASSIFIER'
locations: [
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/1' else BLANK_IMAGE}
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/2' else BLANK_IMAGE}
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/3' else BLANK_IMAGE}
]
metadata:
'Capture date': '5 Feb, 2015'
'Region': 'Chicago, IL'
expert_classification_data:
annotations: [{
task: 'draw'
value: [{
tool: 0
x: 50
y: 50
frame: 0
}, {
tool: 0
x: 150
y: 50
frame: 0
}]
}, {
task: 'cool'
value: 0
}, {
task: 'features'
value: [0, 2]
}]
classification = apiClient.type('classifications').create
annotations: []
metadata: {}
links:
project: 'NO_PROJECT'
workflow: workflow.id
subjects: [subject.id]
_workflow: workflow # TEMP
_subjects: [subject] # TEMP
module.exports = {workflow, subject, classification}
window.mockClassifierData = module.exports
| true | apiClient = require '../api/client'
# This is just a blank image for testing drawing tools.
BLANK_IMAGE = ['data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgAQMAAAA',
'PH06nAAAABlBMVEXMzMyWlpYU2uzLAAAAPUlEQVR4nO3BAQ0AAADCoPdPbQ43oAAAAAAAAAAAAA',
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgzwCX4AAB9Dl2RwAAAABJRU5ErkJggg=='].join ''
workflow = apiClient.type('workflows').create
id: 'MOCK_WORKFLOW_FOR_CLASSIFIER'
first_task: 'crop'
tasks:
crop:
type: 'crop'
instruction: 'Drag out a box around the face.'
help: 'The face is the thing with the nose.'
next: 'survey'
survey:
type: 'survey'
required: true
characteristicsOrder: ['pa', 'co']
characteristics:
pa:
label: 'Pattern'
valuesOrder: ['so', 'sp', 'st', 'ba']
values:
so:
label: 'Solid'
image: '//placehold.it/64.png?text=Solid'
sp:
label: 'Spots'
image: '//placehold.it/64.png?text=Spots'
st:
label: 'Stripes'
image: '//placehold.it/64.png?text=Stripes'
ba:
label: 'Bands'
image: '//placehold.it/64.png?text=Bands'
co:
label: 'Color'
valuesOrder: ['wh', 'ta', 're', 'br', 'bl', 'gr']
values:
wh:
label: 'White'
image: '//placehold.it/64.png?text=White'
ta:
label: 'Tan'
image: '//placehold.it/64.png?text=Tan'
re:
label: 'Red'
image: '//placehold.it/64.png?text=Red'
br:
label: 'Brown'
image: '//placehold.it/64.png?text=Brown'
bl:
label: 'Black'
image: '//placehold.it/64.png?text=Black'
gr:
label: 'Green'
image: '//placehold.it/64.png?text=Green'
choicesOrder: ['aa', 'ar', 'to']
choices:
aa:
label: 'PI:NAME:<NAME>END_PI'
description: 'Basically a long-nose rabbit'
images: [
'//placehold.it/320x240.png?text=Aardvark 1'
'//placehold.it/320x240.png?text=Aardvark 2'
]
characteristics:
pa: ['so']
co: ['ta', 'br']
confusionsOrder: ['ar']
confusions:
ar: 'They both start with “A”, so _some_ **dummies** get these two mixed up.'
ar:
label: 'Armadillo'
description: 'A little rolly dude'
images: [
'//placehold.it/320x240.png?text=Armadillo 1'
'//placehold.it/320x240.png?text=Armadillo 2'
]
characteristics:
pa: ['so', 'st']
co: ['ta', 'br']
confusionsOrder: []
confusions: {}
to:
label: 'Tortoise'
description: 'Little green house with legs'
images: [
'//placehold.it/320x240.png?text=Tortoise 1'
'//placehold.it/320x240.png?text=Tortoise 2'
]
characteristics:
pa: ['so']
co: ['gr']
confusionsOrder: []
confusions: {}
questionsOrder: ['ho', 'be', 'in', 'hr']
questions:
ho:
required: true
multiple: false
label: 'How many?'
answersOrder: ['one', 'two', 'many']
answers:
one:
label: '1'
two:
label: '2'
many:
label: '3+'
be:
required: true
multiple: true
label: 'Any activity?'
answersOrder: ['mo', 'ea', 'in']
answers:
mo:
label: 'Moving'
ea:
label: 'Eating'
in:
label: 'Interacting'
in:
required: false
label: 'Any injuries?'
answersOrder: ['y', 'n']
answers:
y:
label: 'Yep'
n:
label: 'Nope'
hr:
required: false
multiple: true
label: 'Horns toggle'
answersOrder: ['y']
answers:
y:
label: 'Present'
images: {}
# next: 'draw'
next: 'draw'
draw:
type: 'drawing'
required: true
instruction: 'Draw something.'
help: '''
Do this:
* Pick a tool
* Draw something
'''
tools: [
{
type: 'point'
label: 'Point'
color: 'red'
details: [{
type: 'single'
required: true
question: 'Cool?'
answers: [
{label: 'Yeah'}
{label: 'Nah'}
]
}, {
type: 'multiple'
question: 'Cool stuff?'
answers: [
{label: 'Ice'}
{label: 'Snow'}
]
}]
}
{type: 'line', label: 'Line', color: 'yellow', details: []}
{type: 'rectangle', label: 'Rectangle', color: 'lime', details: []}
{type: 'polygon', label: 'Polygon', color: 'cyan', details: []}
{type: 'circle', label: 'Circle', color: 'blue', details: []}
{type: 'ellipse', label: 'Ellipse', color: 'magenta', details: []}
]
next: 'cool'
cool:
type: 'single'
question: 'Is this cool?'
answers: [
{label: 'Yeah', next: 'features'}
{label: 'Nah', next: null}
]
features:
type: 'multiple'
question: 'What cool features are present?'
answers: [
{label: 'Cold water'}
{label: 'Snow'}
{label: 'Ice'}
{label: 'Sunglasses'}
]
subject = apiClient.type('subjects').create
id: 'MOCK_SUBJECT_FOR_CLASSIFIER'
locations: [
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/1' else BLANK_IMAGE}
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/2' else BLANK_IMAGE}
{'image/jpeg': if navigator.onLine then 'http://lorempixel.com/320/240/animals/3' else BLANK_IMAGE}
]
metadata:
'Capture date': '5 Feb, 2015'
'Region': 'Chicago, IL'
expert_classification_data:
annotations: [{
task: 'draw'
value: [{
tool: 0
x: 50
y: 50
frame: 0
}, {
tool: 0
x: 150
y: 50
frame: 0
}]
}, {
task: 'cool'
value: 0
}, {
task: 'features'
value: [0, 2]
}]
classification = apiClient.type('classifications').create
annotations: []
metadata: {}
links:
project: 'NO_PROJECT'
workflow: workflow.id
subjects: [subject.id]
_workflow: workflow # TEMP
_subjects: [subject] # TEMP
module.exports = {workflow, subject, classification}
window.mockClassifierData = module.exports
|
[
{
"context": "'\n type: 'string'\n default: '127.0.0.1'\n\n port:\n title: 'TCP port'\n ",
"end": 801,
"score": 0.9997719526290894,
"start": 792,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "fault: 830\n\n username:\n title: 'Username'\n type: 'string'\n default: '",
"end": 947,
"score": 0.44531115889549255,
"start": 939,
"tag": "NAME",
"value": "Username"
},
{
"context": "'\n type: 'string'\n default: 'netconf'\n\n password:\n title: 'Password'",
"end": 1004,
"score": 0.9184151291847229,
"start": 997,
"tag": "USERNAME",
"value": "netconf"
}
] | lib/netconf.coffee | nokia/atom-netconf | 15 | ###
netconf.coffee
Copyright (c) 2016 Nokia
Note:
This file is part of the netconf package for the ATOM Text Editor.
Licensed under the MIT license
See LICENSE.md delivered with this project for more information.
###
{CompositeDisposable} = require 'atom'
ncclient = require './ncclient'
NetconfToolbar = require './toolbar-netconf'
NavigationToolbar = require './toolbar-navigation'
CandidateToolbar = require './toolbar-candidate'
NokiaToolbar = require './toolbar-nokia'
StatusbarNetconf = require './statusbar-netconf'
XmlToolbar = require './toolbar-xml'
module.exports =
config:
server:
title: 'Netconf Server'
type: 'object'
order: 1
properties:
host:
title: 'Hostname'
type: 'string'
default: '127.0.0.1'
port:
title: 'TCP port'
type: 'integer'
default: 830
username:
title: 'Username'
type: 'string'
default: 'netconf'
password:
title: 'Password'
type: 'string'
default: ''
description: "Password of the user. If empty, password authentication
will be disabled."
keysfile:
title: 'Private Key File'
type: 'string'
default: ''
description: "Complete path of the file containing private keys
for SSHv2 authentication. On Linux and MacOS X you could use
~/.ssh2/id_rsa to access the registered private keys of the user.
If empty, key authentication will be disabled."
timeout:
title: 'Netconf Timeout'
type: 'integer'
default: 300
description : "Time to wait after an <rpc> message has been send,
to receive an appropriate <rpc-reply> with the same message-id.
Timeout is defined in seconds, default is 300 (5min).
Please be aware that <get-config> on larger configurations might
take time, so it is not recommended to use short timers."
base11:
title: 'base:1.1 (enable chunked-framing)'
type : 'boolean'
default : true
behavior:
title: 'Look and Feel'
type: 'object'
order: 2
properties:
audio :
title: 'Enable Audio'
type : 'boolean'
default : true
displayBanner :
title: 'Display SSH Banner/Greeting'
type : 'boolean'
default : true
description : "Display SSH Banner/Greeting as Notification"
sampleHello :
title: 'Sample Hello'
type : 'boolean'
default : true
description : "Sample hello messages from netconf server into new
XML file. Option to be enabled to check netconf capabilities
announced."
sampleError :
title: 'Sample Error'
type : 'boolean'
default : false
description : "Sample rpc-reply error messages from netconf server
into new XML file."
resultUntitled :
title: 'Result: Untitled'
type : 'boolean'
default : false
description : 'Store Results always in new XML file buffer. If
option is not set, filenames are build from netconf msg-id.'
resultFocus :
title: 'Result: Focus'
type : 'boolean'
default : false
description : 'Focus on the Pane/Editor which was used to store
the XML result. If option is unchecked, focus will be restored
after operation.'
resultReadOnly :
title: 'Result: Read-Only'
type : 'boolean'
default : false
description : 'Text Editor for XML results will be blocked for
editing.'
enableTooltips :
title: 'Enable Tooltips'
type : 'boolean'
default : true
splitPane:
title: 'Display Results'
type: 'string'
default: 'down'
enum: ['default', 'right', 'down', 'left']
xmlProcessor:
title: 'XML Result Post Processing'
type: 'string'
default: 'prettify'
enum: ['raw', 'minify', 'prettify']
xmlFoldLevel:
title: 'XML Result Folding'
type: 'integer'
default: 3
description : "To be used in combination with prettify!"
transactional:
title: 'Transactional Options'
type: 'object'
order: 3
properties:
diffMod :
title: 'Show Differences as Added, Removed or Modified'
type: 'boolean'
default: true
description: "When enabled compare shows compact results. In case
remove/added chunks are attached to each other, only the
added part from candidate is display and marked as 'modified'.
By disabling this option, both chunks are displayed and marked
accordingly as 'added' and 'removed'."
diffWords :
title: 'Compare Differences Word by Word'
type: 'boolean'
default: true
description: "Enable differences on words level for compare running
vs candidate. When disabled comparision is on line level only."
debug:
title: 'Debug Options'
type: 'object'
order: 4
properties:
netconf:
title: 'Enable Debug for Package'
type : 'boolean'
default : false
_ncclient:
title: 'Enable Debug for Netconf Module'
type : 'boolean'
default : false
_ssh:
title: 'Enable Debug for SSH Module'
type : 'boolean'
default : false
environment:
title: 'Environment'
type: 'object'
order: 5
properties:
workspace:
title: 'Private Workspace'
type: 'string'
default: 'workspace_atom_netconf'
activate: (state) ->
# console.debug 'netconf::activate()'
@subscriptions = new CompositeDisposable
@client = new ncclient
@status = new StatusbarNetconf
@whatis = undefined
@toolbars = []
@toolbars.push new NetconfToolbar
@toolbars.push new XmlToolbar
@toolbars.push new NavigationToolbar
@toolbars.push new CandidateToolbar
@toolbars.push new NokiaToolbar
consumeStatusBar: (atomStatusBar) ->
# console.debug 'netconf::consumeStatusBar()'
@status.initialize(atomStatusBar)
@status.register(@client)
# --- register toolbars ---------------------------------------------------
@toolbars.forEach (toolbar) =>
toolbar.initialize(atomStatusBar)
toolbar.register(@client)
toolbar.register(@status)
# --- enable toolbars, icons and tooltips ---------------------------------
@tooltips atom.config.get 'atom-netconf.behavior.enableTooltips'
@updateUI atom.workspace.getActiveTextEditor()
# --- register events -----------------------------------------------------
atom.config.observe 'atom-netconf.behavior.enableTooltips', @tooltips.bind(this)
atom.workspace.onDidChangeActivePaneItem @updateUI.bind(this)
# --- register commands ---------------------------------------------------
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:sendrpc': => @toolbars[0].do_rpc_call()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:connect': => @toolbars[0].do_connect()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:disconnect': => @toolbars[0].do_disconnect()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:smart_select': => @toolbars[1].do_smart_select()
tooltips: (option) ->
# console.debug 'netconf::tooltips()'
if option
if @whatis == undefined
@whatis = new CompositeDisposable
@status.tooltips(@whatis)
@toolbars.forEach (toolbar) => toolbar.tooltips(@whatis)
else
if @whatis != undefined
@whatis.dispose()
@whatis = undefined
updateUI: (editor)->
# console.debug 'netconf::updateUI()'
@toolbars.forEach (toolbar) => toolbar.updateUI(editor)
deactivate: ->
# console.debug 'netconf::deactivate()'
@subscriptions.dispose()
serialize: ->
# console.debug 'netconf::serialize()'
| 114535 | ###
netconf.coffee
Copyright (c) 2016 Nokia
Note:
This file is part of the netconf package for the ATOM Text Editor.
Licensed under the MIT license
See LICENSE.md delivered with this project for more information.
###
{CompositeDisposable} = require 'atom'
ncclient = require './ncclient'
NetconfToolbar = require './toolbar-netconf'
NavigationToolbar = require './toolbar-navigation'
CandidateToolbar = require './toolbar-candidate'
NokiaToolbar = require './toolbar-nokia'
StatusbarNetconf = require './statusbar-netconf'
XmlToolbar = require './toolbar-xml'
module.exports =
config:
server:
title: 'Netconf Server'
type: 'object'
order: 1
properties:
host:
title: 'Hostname'
type: 'string'
default: '127.0.0.1'
port:
title: 'TCP port'
type: 'integer'
default: 830
username:
title: '<NAME>'
type: 'string'
default: 'netconf'
password:
title: 'Password'
type: 'string'
default: ''
description: "Password of the user. If empty, password authentication
will be disabled."
keysfile:
title: 'Private Key File'
type: 'string'
default: ''
description: "Complete path of the file containing private keys
for SSHv2 authentication. On Linux and MacOS X you could use
~/.ssh2/id_rsa to access the registered private keys of the user.
If empty, key authentication will be disabled."
timeout:
title: 'Netconf Timeout'
type: 'integer'
default: 300
description : "Time to wait after an <rpc> message has been send,
to receive an appropriate <rpc-reply> with the same message-id.
Timeout is defined in seconds, default is 300 (5min).
Please be aware that <get-config> on larger configurations might
take time, so it is not recommended to use short timers."
base11:
title: 'base:1.1 (enable chunked-framing)'
type : 'boolean'
default : true
behavior:
title: 'Look and Feel'
type: 'object'
order: 2
properties:
audio :
title: 'Enable Audio'
type : 'boolean'
default : true
displayBanner :
title: 'Display SSH Banner/Greeting'
type : 'boolean'
default : true
description : "Display SSH Banner/Greeting as Notification"
sampleHello :
title: 'Sample Hello'
type : 'boolean'
default : true
description : "Sample hello messages from netconf server into new
XML file. Option to be enabled to check netconf capabilities
announced."
sampleError :
title: 'Sample Error'
type : 'boolean'
default : false
description : "Sample rpc-reply error messages from netconf server
into new XML file."
resultUntitled :
title: 'Result: Untitled'
type : 'boolean'
default : false
description : 'Store Results always in new XML file buffer. If
option is not set, filenames are build from netconf msg-id.'
resultFocus :
title: 'Result: Focus'
type : 'boolean'
default : false
description : 'Focus on the Pane/Editor which was used to store
the XML result. If option is unchecked, focus will be restored
after operation.'
resultReadOnly :
title: 'Result: Read-Only'
type : 'boolean'
default : false
description : 'Text Editor for XML results will be blocked for
editing.'
enableTooltips :
title: 'Enable Tooltips'
type : 'boolean'
default : true
splitPane:
title: 'Display Results'
type: 'string'
default: 'down'
enum: ['default', 'right', 'down', 'left']
xmlProcessor:
title: 'XML Result Post Processing'
type: 'string'
default: 'prettify'
enum: ['raw', 'minify', 'prettify']
xmlFoldLevel:
title: 'XML Result Folding'
type: 'integer'
default: 3
description : "To be used in combination with prettify!"
transactional:
title: 'Transactional Options'
type: 'object'
order: 3
properties:
diffMod :
title: 'Show Differences as Added, Removed or Modified'
type: 'boolean'
default: true
description: "When enabled compare shows compact results. In case
remove/added chunks are attached to each other, only the
added part from candidate is display and marked as 'modified'.
By disabling this option, both chunks are displayed and marked
accordingly as 'added' and 'removed'."
diffWords :
title: 'Compare Differences Word by Word'
type: 'boolean'
default: true
description: "Enable differences on words level for compare running
vs candidate. When disabled comparision is on line level only."
debug:
title: 'Debug Options'
type: 'object'
order: 4
properties:
netconf:
title: 'Enable Debug for Package'
type : 'boolean'
default : false
_ncclient:
title: 'Enable Debug for Netconf Module'
type : 'boolean'
default : false
_ssh:
title: 'Enable Debug for SSH Module'
type : 'boolean'
default : false
environment:
title: 'Environment'
type: 'object'
order: 5
properties:
workspace:
title: 'Private Workspace'
type: 'string'
default: 'workspace_atom_netconf'
activate: (state) ->
# console.debug 'netconf::activate()'
@subscriptions = new CompositeDisposable
@client = new ncclient
@status = new StatusbarNetconf
@whatis = undefined
@toolbars = []
@toolbars.push new NetconfToolbar
@toolbars.push new XmlToolbar
@toolbars.push new NavigationToolbar
@toolbars.push new CandidateToolbar
@toolbars.push new NokiaToolbar
consumeStatusBar: (atomStatusBar) ->
# console.debug 'netconf::consumeStatusBar()'
@status.initialize(atomStatusBar)
@status.register(@client)
# --- register toolbars ---------------------------------------------------
@toolbars.forEach (toolbar) =>
toolbar.initialize(atomStatusBar)
toolbar.register(@client)
toolbar.register(@status)
# --- enable toolbars, icons and tooltips ---------------------------------
@tooltips atom.config.get 'atom-netconf.behavior.enableTooltips'
@updateUI atom.workspace.getActiveTextEditor()
# --- register events -----------------------------------------------------
atom.config.observe 'atom-netconf.behavior.enableTooltips', @tooltips.bind(this)
atom.workspace.onDidChangeActivePaneItem @updateUI.bind(this)
# --- register commands ---------------------------------------------------
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:sendrpc': => @toolbars[0].do_rpc_call()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:connect': => @toolbars[0].do_connect()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:disconnect': => @toolbars[0].do_disconnect()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:smart_select': => @toolbars[1].do_smart_select()
tooltips: (option) ->
# console.debug 'netconf::tooltips()'
if option
if @whatis == undefined
@whatis = new CompositeDisposable
@status.tooltips(@whatis)
@toolbars.forEach (toolbar) => toolbar.tooltips(@whatis)
else
if @whatis != undefined
@whatis.dispose()
@whatis = undefined
updateUI: (editor)->
# console.debug 'netconf::updateUI()'
@toolbars.forEach (toolbar) => toolbar.updateUI(editor)
deactivate: ->
# console.debug 'netconf::deactivate()'
@subscriptions.dispose()
serialize: ->
# console.debug 'netconf::serialize()'
| true | ###
netconf.coffee
Copyright (c) 2016 Nokia
Note:
This file is part of the netconf package for the ATOM Text Editor.
Licensed under the MIT license
See LICENSE.md delivered with this project for more information.
###
{CompositeDisposable} = require 'atom'
ncclient = require './ncclient'
NetconfToolbar = require './toolbar-netconf'
NavigationToolbar = require './toolbar-navigation'
CandidateToolbar = require './toolbar-candidate'
NokiaToolbar = require './toolbar-nokia'
StatusbarNetconf = require './statusbar-netconf'
XmlToolbar = require './toolbar-xml'
module.exports =
config:
server:
title: 'Netconf Server'
type: 'object'
order: 1
properties:
host:
title: 'Hostname'
type: 'string'
default: '127.0.0.1'
port:
title: 'TCP port'
type: 'integer'
default: 830
username:
title: 'PI:NAME:<NAME>END_PI'
type: 'string'
default: 'netconf'
password:
title: 'Password'
type: 'string'
default: ''
description: "Password of the user. If empty, password authentication
will be disabled."
keysfile:
title: 'Private Key File'
type: 'string'
default: ''
description: "Complete path of the file containing private keys
for SSHv2 authentication. On Linux and MacOS X you could use
~/.ssh2/id_rsa to access the registered private keys of the user.
If empty, key authentication will be disabled."
timeout:
title: 'Netconf Timeout'
type: 'integer'
default: 300
description : "Time to wait after an <rpc> message has been send,
to receive an appropriate <rpc-reply> with the same message-id.
Timeout is defined in seconds, default is 300 (5min).
Please be aware that <get-config> on larger configurations might
take time, so it is not recommended to use short timers."
base11:
title: 'base:1.1 (enable chunked-framing)'
type : 'boolean'
default : true
behavior:
title: 'Look and Feel'
type: 'object'
order: 2
properties:
audio :
title: 'Enable Audio'
type : 'boolean'
default : true
displayBanner :
title: 'Display SSH Banner/Greeting'
type : 'boolean'
default : true
description : "Display SSH Banner/Greeting as Notification"
sampleHello :
title: 'Sample Hello'
type : 'boolean'
default : true
description : "Sample hello messages from netconf server into new
XML file. Option to be enabled to check netconf capabilities
announced."
sampleError :
title: 'Sample Error'
type : 'boolean'
default : false
description : "Sample rpc-reply error messages from netconf server
into new XML file."
resultUntitled :
title: 'Result: Untitled'
type : 'boolean'
default : false
description : 'Store Results always in new XML file buffer. If
option is not set, filenames are build from netconf msg-id.'
resultFocus :
title: 'Result: Focus'
type : 'boolean'
default : false
description : 'Focus on the Pane/Editor which was used to store
the XML result. If option is unchecked, focus will be restored
after operation.'
resultReadOnly :
title: 'Result: Read-Only'
type : 'boolean'
default : false
description : 'Text Editor for XML results will be blocked for
editing.'
enableTooltips :
title: 'Enable Tooltips'
type : 'boolean'
default : true
splitPane:
title: 'Display Results'
type: 'string'
default: 'down'
enum: ['default', 'right', 'down', 'left']
xmlProcessor:
title: 'XML Result Post Processing'
type: 'string'
default: 'prettify'
enum: ['raw', 'minify', 'prettify']
xmlFoldLevel:
title: 'XML Result Folding'
type: 'integer'
default: 3
description : "To be used in combination with prettify!"
transactional:
title: 'Transactional Options'
type: 'object'
order: 3
properties:
diffMod :
title: 'Show Differences as Added, Removed or Modified'
type: 'boolean'
default: true
description: "When enabled compare shows compact results. In case
remove/added chunks are attached to each other, only the
added part from candidate is display and marked as 'modified'.
By disabling this option, both chunks are displayed and marked
accordingly as 'added' and 'removed'."
diffWords :
title: 'Compare Differences Word by Word'
type: 'boolean'
default: true
description: "Enable differences on words level for compare running
vs candidate. When disabled comparision is on line level only."
debug:
title: 'Debug Options'
type: 'object'
order: 4
properties:
netconf:
title: 'Enable Debug for Package'
type : 'boolean'
default : false
_ncclient:
title: 'Enable Debug for Netconf Module'
type : 'boolean'
default : false
_ssh:
title: 'Enable Debug for SSH Module'
type : 'boolean'
default : false
environment:
title: 'Environment'
type: 'object'
order: 5
properties:
workspace:
title: 'Private Workspace'
type: 'string'
default: 'workspace_atom_netconf'
activate: (state) ->
# console.debug 'netconf::activate()'
@subscriptions = new CompositeDisposable
@client = new ncclient
@status = new StatusbarNetconf
@whatis = undefined
@toolbars = []
@toolbars.push new NetconfToolbar
@toolbars.push new XmlToolbar
@toolbars.push new NavigationToolbar
@toolbars.push new CandidateToolbar
@toolbars.push new NokiaToolbar
consumeStatusBar: (atomStatusBar) ->
# console.debug 'netconf::consumeStatusBar()'
@status.initialize(atomStatusBar)
@status.register(@client)
# --- register toolbars ---------------------------------------------------
@toolbars.forEach (toolbar) =>
toolbar.initialize(atomStatusBar)
toolbar.register(@client)
toolbar.register(@status)
# --- enable toolbars, icons and tooltips ---------------------------------
@tooltips atom.config.get 'atom-netconf.behavior.enableTooltips'
@updateUI atom.workspace.getActiveTextEditor()
# --- register events -----------------------------------------------------
atom.config.observe 'atom-netconf.behavior.enableTooltips', @tooltips.bind(this)
atom.workspace.onDidChangeActivePaneItem @updateUI.bind(this)
# --- register commands ---------------------------------------------------
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:sendrpc': => @toolbars[0].do_rpc_call()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:connect': => @toolbars[0].do_connect()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:disconnect': => @toolbars[0].do_disconnect()
@subscriptions.add atom.commands.add 'atom-workspace', 'netconf:smart_select': => @toolbars[1].do_smart_select()
tooltips: (option) ->
# console.debug 'netconf::tooltips()'
if option
if @whatis == undefined
@whatis = new CompositeDisposable
@status.tooltips(@whatis)
@toolbars.forEach (toolbar) => toolbar.tooltips(@whatis)
else
if @whatis != undefined
@whatis.dispose()
@whatis = undefined
updateUI: (editor)->
# console.debug 'netconf::updateUI()'
@toolbars.forEach (toolbar) => toolbar.updateUI(editor)
deactivate: ->
# console.debug 'netconf::deactivate()'
@subscriptions.dispose()
serialize: ->
# console.debug 'netconf::serialize()'
|
[
{
"context": "\njQuery Waypoints - v2.0.5\nCopyright (c) 2011-2014 Caleb Troughton\nLicensed under the MIT license.\nhttps://github.co",
"end": 70,
"score": 0.9998724460601807,
"start": 55,
"tag": "NAME",
"value": "Caleb Troughton"
},
{
"context": "icensed under the MIT license.\nhttps://github.com/imakewebthings/jquery-waypoints/blob/master/licenses.txt\n###\n((r",
"end": 136,
"score": 0.91156405210495,
"start": 122,
"tag": "USERNAME",
"value": "imakewebthings"
},
{
"context": "nter = 1\n contexts = {}\n contextKey = 'waypoints-context-id'\n resizeEvent = 'resize.waypoints'\n scrollEvent",
"end": 2060,
"score": 0.8690831065177917,
"start": 2050,
"tag": "KEY",
"value": "context-id"
},
{
"context": "'\n waypointCounter = 1\n waypointKey = 'waypoints-waypoint-ids'\n wp = 'waypoint'\n wps = 'waypoints",
"end": 2180,
"score": 0.7516940832138062,
"start": 2180,
"tag": "KEY",
"value": ""
},
{
"context": "intCounter = 1\n waypointKey = 'waypoints-waypoint-ids'\n wp = 'waypoint'\n wps = 'waypoints'\n\n # Conte",
"end": 2193,
"score": 0.7655155658721924,
"start": 2190,
"tag": "KEY",
"value": "ids"
},
{
"context": "or more information on throttling, check out\n # Ben Alman’s throttle / debounce plugin.\n # http://",
"end": 26213,
"score": 0.8976558446884155,
"start": 26210,
"tag": "NAME",
"value": "Ben"
},
{
"context": "or more information\n # on throttling, check out Ben Alman’s throttle / debounce plugin.\n # http://",
"end": 26572,
"score": 0.9350391626358032,
"start": 26569,
"tag": "NAME",
"value": "Ben"
}
] | html/coffee/waypoints.coffee | sebastianmaier/sebastianmaier.net | 152 | ###!
jQuery Waypoints - v2.0.5
Copyright (c) 2011-2014 Caleb Troughton
Licensed under the MIT license.
https://github.com/imakewebthings/jquery-waypoints/blob/master/licenses.txt
###
((root, factory) ->
if typeof define is 'function' and define.amd
define 'waypoints', ['jquery'], ($) ->
factory $, root
else
factory root.jQuery, root
) window, ($, window) ->
$w = $ window
# Touch support feature test
isTouch = 'ontouchstart' in window
# Internal plugin-wide variables:
# - allWaypoints: A hash containing two hashes, one for vertical waypoints
# and one for horizontal waypoints. In each hash they value is a Waypoint
# instance and the key is that waypoint's unique ID.
# - contextCounter: A counter that is incremented with each instantiation
# of the Context class, used in its unique ID.
# - contexts: A hash of all contexts. The value of each entry is a Context
# instance and the key is that context's unique ID.
# - contextKey: The DOM element for each context keeps a reference to the
# context's unique ID in the jQuery .data() object. This is the key for
# that data entry.
# - resizeEvent: The namespaced resize event used by contexts.
# - scrollEvent: The namespaced scroll event used by contexts.
# - waypointCounter: A counter that is incremented with each instantiation
# of the Waypoint class, used in its unique ID.
# - waypointKey: The DOM element for each waypoint keeps a reference to an
# array of the unique IDs of all waypoints attached to that element. This
# array is kept in the jQuery .data() object, and this is the key for
# that entry.
# - wp: A variable shortcut for the waypoint method name on the $.fn object.
# Using this variable just helps with minification.
# - wps: A variable shortcut for the waypoints method name on the $ object.
# Using this variable just helps with minification.
allWaypoints =
horizontal: {}
vertical: {}
contextCounter = 1
contexts = {}
contextKey = 'waypoints-context-id'
resizeEvent = 'resize.waypoints'
scrollEvent = 'scroll.waypoints'
waypointCounter = 1
waypointKey = 'waypoints-waypoint-ids'
wp = 'waypoint'
wps = 'waypoints'
# Context: Represents a single scrolling element in which waypoints live.
# For most users there will only be one Context, the window, but users can
# use other scrollable elements as a context using the "context" option
# when creating waypoints.
# Properties:
# - $element: jQuery object containing the context element.
# - element: The raw HTMLNode of the context element.
# - didResize: A flag used in throttling the resize event.
# - didScroll: A flag used in throttling the scroll event.
# - id: A unique identifier for the context.
# - oldScroll: A hash containing...
# - x: The context's last known horizontal scroll value.
# - y: The context's last known vertical scroll value.
# - waypoints: A hash containing two hashes with all waypoints in the context.
# Entries are in the same style as the allWaypoints hashes:
# (key = waypoint.id, value = waypoint)
# - horizontal: A hash of all horizontal waypoints.
# - vertical: A hash of all vertical waypoints.
class Context
constructor: ($element) ->
@$element = $element
@element = $element[0]
@didResize = no
@didScroll = no
@id = 'context' + contextCounter++
@oldScroll =
x: $element.scrollLeft()
y: $element.scrollTop()
@waypoints =
horizontal: {}
vertical: {}
# We need to keep a reference to this Context instance on the DOM node
# so we can look it up later based on the node.
@element[contextKey] = @id
# To do that look up, we need to have this instance in the global hash.
contexts[@id] = this
# Run scroll checks on scroll, but throttle it for performance reasons.
$element.bind scrollEvent, =>
unless @didScroll or isTouch
@didScroll = yes
scrollHandler = =>
@doScroll()
@didScroll = no
window.setTimeout scrollHandler, $[wps].settings.scrollThrottle
# Run a refresh on resize, but throttle it for performance reasons.
$element.bind resizeEvent, =>
unless @didResize
@didResize = yes
resizeHandler = =>
$[wps] 'refresh'
@didResize = no
window.setTimeout resizeHandler, $[wps].settings.resizeThrottle
# doScroll()
# Looks at the new scroll values for the context, compares them to the old
# scroll values, and checks to see if any waypoints should be triggered
# by that change.
doScroll: ->
# We use some hashes with common values for each axis so that we can
# just iterate over it rather than write the whole thing twice for
# each axis.
axes =
horizontal:
newScroll: @$element.scrollLeft()
oldScroll: @oldScroll.x
forward: 'right'
backward: 'left'
vertical:
newScroll: @$element.scrollTop()
oldScroll: @oldScroll.y
forward: 'down'
backward: 'up'
# This is a small "hack" for iOS, needed because scrolls in mobile
# Safari that start or end with the URL bar showing will cause window
# height changes without firing a resize event.
if isTouch and (!axes.vertical.oldScroll or !axes.vertical.newScroll)
$[wps] 'refresh'
# For each axis, check to see if any waypoints have been crossed.
# Also determine the direction it's being crossed and sort/reverse all
# crossed waypoints accordingly. And, of course, trigger the waypoints.
$.each axes, (aKey, axis) =>
triggered = []
isForward = axis.newScroll > axis.oldScroll
direction = if isForward then axis.forward else axis.backward
$.each @waypoints[aKey], (wKey, waypoint) ->
if axis.oldScroll < waypoint.offset <= axis.newScroll
triggered.push waypoint
else if axis.newScroll < waypoint.offset <= axis.oldScroll
triggered.push waypoint
triggered.sort (a, b) -> a.offset - b.offset
triggered.reverse() unless isForward
$.each triggered, (i, waypoint) ->
if waypoint.options.continuous or i is triggered.length - 1
waypoint.trigger [direction]
# Now that we're done with the check, the new scroll values become
# the old scroll values for the next check.
@oldScroll =
x: axes.horizontal.newScroll
y: axes.vertical.newScroll
# refresh()
# Runs through all of the waypoints in the context and recalculates
# their offsets (the scroll value at which the waypoint is triggered.)
# If a change in offset also happens to cross the context's current
# scroll value, the waypoint will be triggered in the appropriate direction
# unless prevented by the "onlyOnScroll" waypoint option.
refresh: () ->
isWin = $.isWindow @element
cOffset = @$element.offset()
# Make sure we have the most up-to-date scroll values for our context.
@doScroll()
# Each axis recalculation needs to know some things:
# - contextOffset: The distance between the edge of the document and
# the context element.
# - contextScroll: The scroll value of the context. However, if the
# context is the window this needs to be 0 because this value only
# comes into play when used in adjustment calculations for non-window
# context waypoints.
# - contextDimension: Width or height of the context.
# - oldScroll: The scroll value of the context. Unlike "contextScroll",
# this is the same no matter the type of context, and is used when
# determining whether a newly added waypoint should immediately fire
# on its first offset calculation.
# - forward: Direction string passed to forward waypoint triggers.
# - backward: Direction string passed to backward waypoint triggers.
# - offsetProp: Key of the .offset() object for this axis.
axes =
horizontal:
contextOffset: if isWin then 0 else cOffset.left
contextScroll: if isWin then 0 else @oldScroll.x
contextDimension: @$element.width()
oldScroll: @oldScroll.x
forward: 'right'
backward: 'left'
offsetProp: 'left'
vertical:
contextOffset: if isWin then 0 else cOffset.top
contextScroll: if isWin then 0 else @oldScroll.y
contextDimension: if isWin then $[wps]('viewportHeight') else \
@$element.height()
oldScroll: @oldScroll.y
forward: 'down'
backward: 'up'
offsetProp: 'top'
# For each axis, run through the waypoints. Store the old offset.
# Recalculate the new offset. Check the difference against the context's
# current scroll value and trigger any crossed waypoints accordingly.
$.each axes, (aKey, axis) =>
$.each @waypoints[aKey], (i, waypoint) ->
adjustment = waypoint.options.offset
oldOffset = waypoint.offset
elementOffset = if $.isWindow waypoint.element then 0 else \
waypoint.$element.offset()[axis.offsetProp]
# The "offset" waypoint option (which we call "adjustment" here) can
# be a number, percentage string, keyword string (bottom-in-view),
# or a function. So we deal with all of these types here.
if $.isFunction adjustment
adjustment = adjustment.apply waypoint.element
else if typeof adjustment is 'string'
adjustment = parseFloat adjustment
if waypoint.options.offset.indexOf('%') > -1
adjustment = Math.ceil(axis.contextDimension * adjustment / 100)
# We've finally calculated all the crazy little adjustments that
# can come from using non-window contexts and the "offset" option.
# Store the damn thing.
waypoint.offset = elementOffset \
- axis.contextOffset \
+ axis.contextScroll \
- adjustment
# "onlyOnScroll" tells us to not even consider triggering waypoints
# during refresh, so we can eject early.
return if (waypoint.options.onlyOnScroll and oldOffset?) or \
!waypoint.enabled
# Case where the refresh causes a backward trigger.
if oldOffset isnt null and \
oldOffset < axis.oldScroll <= waypoint.offset
waypoint.trigger [axis.backward]
# Now the forward case.
else if oldOffset isnt null and \
oldOffset > axis.oldScroll >= waypoint.offset
waypoint.trigger [axis.forward]
# "oldOffset" values of null mean this is the first calculation of
# the waypoint's offset. It's a special time in a waypoint's life.
else if oldOffset is null and axis.oldScroll >= waypoint.offset
waypoint.trigger [axis.forward]
# checkEmpty()
# Looks at the waypoints hashes. If they are empty, the context removes
# itself from the global contexts hash.
checkEmpty: ->
if $.isEmptyObject(@waypoints.horizontal) and \
$.isEmptyObject(@waypoints.vertical)
@$element.unbind [resizeEvent, scrollEvent].join(' ')
delete contexts[@id]
# Waypoint: Represents a single callback function tied to an element. An
# element can have multiple waypoints with multiple offsets.
# Properties:
# - $element: jQuery object containing the waypoint element.
# - element: The raw HTMLNode of the waypoint element.
# - axis: 'horizontal' || 'vertical' - The axis on which this waypoint lives.
# - callback: The function that is fired when the waypoint is triggered.
# - context: A reference to the context this waypoint belongs to.
# - enabled: Boolean indicating whether this waypoint is enabled or not.
# Disabled waypoints are still returned in functions that aggregate
# waypoints, but do not fire their callbacks.
# - id: A unique identifier for the waypoint.
# - offset: The scroll offset at which the waypoint should trigger.
# - options: A hash containing the various waypoint options.
# See $.fn.waypoint.defaults for more information on those options.
class Waypoint
  constructor: ($element, context, options) ->
    # The 'bottom-in-view' keyword becomes an offset function that places
    # the trigger point where the element's bottom meets the context's
    # bottom edge.
    if options.offset is 'bottom-in-view'
      options.offset = ->
        viewHeight = if $.isWindow context.element
          $[wps] 'viewportHeight'
        else
          context.$element.height()
        viewHeight - $(this).outerHeight()
    @$element = $element
    @element = $element[0]
    @axis = if options.horizontal then 'horizontal' else 'vertical'
    @callback = options.handler
    @context = context
    @enabled = options.enabled
    @id = 'waypoints' + waypointCounter++
    @offset = null # computed during the first refresh
    @options = options
    # Register with the owning context and with the global registry.
    context.waypoints[@axis][@id] = this
    allWaypoints[@axis][@id] = this
    # Append this waypoint's id to the element's own id list.
    attached = @element[waypointKey] ? []
    attached.push @id
    @element[waypointKey] = attached
  # trigger(args)
  # Fires the callback with the "args" array unless the waypoint is
  # disabled. Destroys the waypoint afterwards when triggerOnce is set.
  trigger: (args) ->
    return unless @enabled
    @callback.apply @element, args if @callback?
    @destroy() if @options.triggerOnce
  # disable()
  # Stops the callback from firing until enable() is called.
  disable: ->
    @enabled = no
  # enable()
  # Re-arms the waypoint, refreshing offsets first so it fires from an
  # up-to-date position.
  enable: ->
    @context.refresh()
    @enabled = yes
  # destroy()
  # Removes the waypoint from every registry and lets the context tear
  # itself down if it has become empty.
  destroy: ->
    delete allWaypoints[@axis][@id]
    delete @context.waypoints[@axis][@id]
    @context.checkEmpty()
  # Waypoint.getWaypointsByElement(HTMLNode)
  # Returns an array of all Waypoint instances attached to "element", or
  # an empty array when there are none.
  @getWaypointsByElement: (element) ->
    ids = element[waypointKey]
    return [] unless ids
    lookup = $.extend {}, allWaypoints.horizontal, allWaypoints.vertical
    $.map ids, (id) ->
      lookup[id]
# These methods are available on the $.fn object by using the method
# name as the first argument to .waypoint. Ex: $('div').waypoint('destroy')
methods =
  # init(function, object)
  # Creates a new waypoint (and if needed, a new context) using the supplied
  # callback function and options.
  # The "f" function and the "options" object are both optional, but at least
  # one must be supplied. So acceptable signatures are:
  # - .waypoint(f)
  # - .waypoint(options)
  # - .waypoint(f, options)
  # This "init" method should never need to be called explicity by the user.
  # It is the default method that is delegated to when .waypoint is called
  # with one of the above signatures.
  # Ex: $('div').waypoint(function(direction) {
  #   // Do things
  # }, { offset: '100%' });
  init: (f, options) ->
    options = $.extend {}, $.fn[wp].defaults, options
    # A handler passed through options wins; the positional "f" is only a
    # fallback.
    options.handler ?= f
    @each ->
      $this = $ this
      contextElement = options.context ? $.fn[wp].defaults.context
      # A non-window context is treated as a selector/node and resolved to
      # the closest matching ancestor of the waypoint element.
      unless $.isWindow contextElement
        contextElement = $this.closest contextElement
      contextElement = $ contextElement
      # Reuse an existing Context for this element, or lazily create one.
      context = contexts[contextElement[0][contextKey]]
      context = new Context contextElement unless context
      new Waypoint $this, context, options
    # Newly created waypoints need their offsets calculated before they
    # can fire.
    $[wps] 'refresh'
    this
  # Disable, enable, and destroy all just delegate to the instance methods
  # of the waypoints attached to the subject elements.
  disable: -> methods._invoke.call this, 'disable'
  enable: -> methods._invoke.call this, 'enable'
  destroy: -> methods._invoke.call this, 'destroy'
  # .waypoint('prev', string, string|HTMLNode|jQuery)
  # Returns a jQuery object containing previous waypoint elements. This
  # creates a new entry in the jQuery object stack just like jQuery's prev
  # function. "axis" indicates the axis on which to traverse
  # ('horizontal' | 'vertical') and "selector" indicates which context
  # element to use. The defaults are 'vertical' and window respectively.
  prev: (axis, selector) ->
    methods._traverse.call this, axis, selector, (stack, index, waypoints) ->
      stack.push waypoints[index-1] if index > 0
  # .waypoint('next', string, string|HTMLNode|jQuery)
  # Returns a jQuery object containing next waypoint elements. This
  # creates a new entry in the jQuery object stack just like jQuery's next
  # function. "axis" indicates the axis on which to traverse
  # ('horizontal' | 'vertical') and "selector" indicates which context
  # element to use. The defaults are 'vertical' and window respectively.
  next: (axis, selector) ->
    methods._traverse.call this, axis, selector, (stack, index, waypoints) ->
      stack.push waypoints[index+1] if index < waypoints.length-1
  # Internal: Aggregates waypoints on a given axis of a context, and applies
  # a "push" callback for each element in the subject jQuery object. This
  # callback builds the element array to push to the jQuery stack.
  _traverse: (axis = 'vertical', selector = window, push) ->
    waypoints = jQMethods.aggregate selector
    stack = []
    @each ->
      # Position of this element in trigger order on the requested axis;
      # -1 when the element carries no waypoint there.
      index = $.inArray this, waypoints[axis]
      push stack, index, waypoints[axis]
    @pushStack stack
  # Internal: Finds all waypoints on a given set of "$elements" and invokes
  # "method" on each instance.
  _invoke: (method) ->
    this.each ->
      waypoints = Waypoint.getWaypointsByElement this
      $.each waypoints, (i, waypoint) ->
        waypoint[method]()
        # Returning true keeps $.each iterating even if the invoked
        # method returned false.
        true
    this
# $.fn.waypoint. Let's just hook this guy up to our methods hash and
# add some trivial error reporting for bogus calls.
# $.fn.waypoint: entry point for all element-level plugin calls. A known
# method-name string is routed to the methods hash; a function or plain
# object argument is treated as an implicit "init" call; anything else is
# reported as an error.
$.fn[wp] = (method, args...) ->
  return methods[method].apply(this, args) if methods[method]
  return methods.init.apply(this, arguments) if $.isFunction(method)
  return methods.init.apply(this, [null, method]) if $.isPlainObject(method)
  if method
    $.error "The #{method} method does not exist in jQuery Waypoints."
  else
    $.error "jQuery Waypoints needs a callback function or handler option."
# The default options object for a waypoint.
# - context: string|HTMLNode|jQuery - The scrollable element that the
# waypoint acts within. The waypoint will look for the closest ancestor
# element that matches this selector or node.
# - continuous: Multiple waypoints may be triggered by a single scroll check.
# If you would like a waypoint to only trigger if it is the last waypoint
# in a scroll check, set this to false.
# - enabled: Should this waypoint start enabled (true) or disabled (false)?
# - handler: This option is not defined by default, but can be used as an
# alternate way to pass the waypoint callback function, rather than as
# the first argument to .waypoint.
# Ex: $('div').waypoint({
# handler: function(direction) { ... }
# });
# - horizontal: Set this to true if the waypoint is, well, horizontal.
# - offset: number|string|function - Determines how far from the top (or left
# if the waypoint is horizontal) of the context's viewport to trigger the
# waypoint. The default of 0 means that the waypoint is triggered when the
# top of the waypoint element hits the top of the window/context-element.
# An offset of 50 would mean the waypoint triggers when the top of the
# element is 50 pixels from the top of the window.
# A % string is translated into a percentage of the width/height of
# the context.
# If a function is passed, that function should return a number. The "this"
# keyword within this function will be set to the raw HTMLNode of the
# waypoint element.
# - triggerOnce: If true, the waypoint will destroy itself after
# first trigger.
$.fn[wp].defaults =
  # Scrollable ancestor the waypoint measures against.
  context: window
  # Fire even when several waypoints are crossed by one scroll check.
  continuous: true
  # Waypoints start life armed.
  enabled: true
  # Vertical axis unless told otherwise.
  horizontal: false
  # Trigger when the element's edge meets the context's edge.
  offset: 0
  # Keep firing on every crossing, not just the first.
  triggerOnce: false
# These methods are available on the $ object by using the method name as
# the first argument to .waypoint. Ex: $.waypoints('refresh')
jQMethods =
  # $.waypoints('refresh')
  # Forces a refresh on all contexts, recalculating all waypoint offsets.
  # This is done automatically on waypoint addition and during resize events,
  # but if a user does something to change the DOM, CSS, or in some way
  # change the layout of a page and its elements, they might need to call
  # this method manually.
  refresh: ->
    $.each contexts, (i, context) -> context.refresh()
  # $.waypoints('viewportHeight')
  # A utility method that returns the window height, but takes into account
  # inconsistencies that come with just using jQuery's .height() on iOS.
  viewportHeight: ->
    # Prefer the native value when present; fall back to jQuery's measure.
    window.innerHeight ? $w.height()
  # $.waypoints(['aggregate'], [contextSelector])
  # Returns an object containing two HTMLNode arrays, one for each axis:
  # {
  #   horizontal: [ HTMLNode... ]
  #   vertical: [ HTMLNode... ]
  # }
  # This is the default method used when calling $.waypoints(). If
  # "contextSelector" is not supplied, it returns all waypoints. If
  # "contextSelector" is supplied it only returns waypoints for that context.
  # The array of waypoint elements is returned sorted by calculated offset,
  # the order in which they would be triggered on the page.
  aggregate: (contextSelector) ->
    collection = allWaypoints
    if contextSelector
      # Narrow to the waypoints hash of the selected context, if one exists.
      collection = contexts[$(contextSelector)[0][contextKey]]?.waypoints
    return [] unless collection
    waypoints =
      horizontal: []
      vertical: []
    $.each waypoints, (axis, arr) ->
      $.each collection[axis], (key, waypoint) ->
        arr.push waypoint
      # Sort into trigger order, then reduce to unique raw elements.
      arr.sort (a, b) -> a.offset - b.offset
      waypoints[axis] = $.map arr, (waypoint) -> waypoint.element
      waypoints[axis] = $.unique waypoints[axis]
    waypoints
  # $.waypoints('above', [string|HTMLNode|jQuery])
  # Returns all vertical waypoints that lie above the current scroll position
  # of the context specified by "contextSelector". If no "contextSelector"
  # is supplied, it defaults to the window.
  above: (contextSelector = window) ->
    jQMethods._filter contextSelector, 'vertical', (context, waypoint) ->
      waypoint.offset <= context.oldScroll.y
  # $.waypoints('below', [string|HTMLNode|jQuery])
  # Returns all vertical waypoints that lie below the current scroll position
  # of the context specified by "contextSelector". If no "contextSelector"
  # is supplied, it defaults to the window.
  below: (contextSelector = window) ->
    jQMethods._filter contextSelector, 'vertical', (context, waypoint) ->
      waypoint.offset > context.oldScroll.y
  # $.waypoints('left', [string|HTMLNode|jQuery])
  # Returns all horizontal waypoints left of the current scroll position
  # of the context specified by "contextSelector". If no "contextSelector"
  # is supplied, it defaults to the window.
  left: (contextSelector = window) ->
    jQMethods._filter contextSelector, 'horizontal', (context, waypoint) ->
      waypoint.offset <= context.oldScroll.x
  # $.waypoints('right', [string|HTMLNode|jQuery])
  # Returns all horizontal waypoints right of the current scroll position
  # of the context specified by "contextSelector". If no "contextSelector"
  # is supplied, it defaults to the window.
  right: (contextSelector = window) ->
    jQMethods._filter contextSelector, 'horizontal', (context, waypoint) ->
      waypoint.offset > context.oldScroll.x
  # $.waypoints('enable/disable/destroy')
  # These methods delegate to the enable/disable/destroy instance methods
  # for all waypoints.
  enable: -> jQMethods._invoke 'enable'
  disable: -> jQMethods._invoke 'disable'
  destroy: -> jQMethods._invoke 'destroy'
  # $.waypoints('extendFn', string, function)
  # Extends the $.fn.waypoint method object with a new method, "f". This
  # just lets other modules piggyback on the .waypoint namespace.
  extendFn: (methodName, f) ->
    methods[methodName] = f
  # Internal: Invokes "method" on all waypoints.
  _invoke: (method) ->
    waypoints = $.extend {}, allWaypoints.vertical, allWaypoints.horizontal
    $.each waypoints, (key, waypoint) ->
      waypoint[method]()
      # Returning true keeps $.each iterating regardless of the method's
      # own return value.
      true
  # Internal: Returns an array of all HTMLNodes for each waypoint that passes
  # the "test" function. Only waypoints within the "selector" context on the
  # "axis" axis are tested. As with .aggregate, the array is sorted by
  # calculated offset (trigger order).
  _filter: (selector, axis, test) ->
    context = contexts[$(selector)[0][contextKey]]
    return [] unless context
    waypoints = []
    $.each context.waypoints[axis], (i, waypoint) ->
      waypoints.push waypoint if test context, waypoint
    waypoints.sort (a, b) -> a.offset - b.offset
    $.map waypoints, (waypoint) -> waypoint.element
# Hook up jQMethods to the $.waypoints namespace.
# $.waypoints: entry point for plugin-wide calls. A recognized method name
# is dispatched to jQMethods; any other argument is handed to "aggregate"
# as a context selector.
$[wps] = (method, args...) ->
  return jQMethods[method].apply(null, args) if jQMethods[method]
  jQMethods.aggregate.call null, method
# Plugin-wide settings:
# - resizeThrottle: For performance reasons, the refresh performed during
# resizes is throttled. This value is the rate-limit in milliseconds
# between resize refreshes. For more information on throttling, check out
# Ben Alman’s throttle / debounce plugin.
# http://benalman.com/projects/jquery-throttle-debounce-plugin/
# - scrollThrottle: For performance reasons, checking for any crossed
# waypoints during a scroll event is throttled. This value is the
# rate-limit in milliseconds between scroll checks. For more information
# on throttling, check out Ben Alman’s throttle / debounce plugin.
# http://benalman.com/projects/jquery-throttle-debounce-plugin/
$[wps].settings =
  # Minimum milliseconds between refresh passes driven by resize events.
  resizeThrottle: 100
  # Minimum milliseconds between waypoint checks driven by scroll events.
  scrollThrottle: 30
# Ensure a refresh on page load. Newly loaded images often shift layout.
$w.on 'load.waypoints', -> $[wps] 'refresh'
| 206992 | ###!
jQuery Waypoints - v2.0.5
Copyright (c) 2011-2014 <NAME>
Licensed under the MIT license.
https://github.com/imakewebthings/jquery-waypoints/blob/master/licenses.txt
###
((root, factory) ->
if typeof define is 'function' and define.amd
define 'waypoints', ['jquery'], ($) ->
factory $, root
else
factory root.jQuery, root
) window, ($, window) ->
# Cache the jQuery-wrapped window; used for height lookups and load events.
$w = $ window
# Touch support feature test
isTouch = 'ontouchstart' in window
# Internal plugin-wide variables:
# - allWaypoints: A hash containing two hashes, one for vertical waypoints
# and one for horizontal waypoints. In each hash they value is a Waypoint
# instance and the key is that waypoint's unique ID.
# - contextCounter: A counter that is incremented with each instantiation
# of the Context class, used in its unique ID.
# - contexts: A hash of all contexts. The value of each entry is a Context
# instance and the key is that context's unique ID.
# - contextKey: The DOM element for each context keeps a reference to the
# context's unique ID in the jQuery .data() object. This is the key for
# that data entry.
# - resizeEvent: The namespaced resize event used by contexts.
# - scrollEvent: The namespaced scroll event used by contexts.
# - waypointCounter: A counter that is incremented with each instantiation
# of the Waypoint class, used in its unique ID.
# - waypointKey: The DOM element for each waypoint keeps a reference to an
# array of the unique IDs of all waypoints attached to that element. This
# array is kept in the jQuery .data() object, and this is the key for
# that entry.
# - wp: A variable shortcut for the waypoint method name on the $.fn object.
# Using this variable just helps with minification.
# - wps: A variable shortcut for the waypoints method name on the $ object.
# Using this variable just helps with minification.
allWaypoints =
  horizontal: {}
  vertical: {}
contextCounter = 1
contexts = {}
# NOTE(review): the '<KEY>' fragments in the two key strings below look
# like placeholders left by a generation/redaction step rather than real
# key names — confirm against the upstream v2.0.5 source before relying
# on these values.
contextKey = 'waypoints-<KEY>'
resizeEvent = 'resize.waypoints'
scrollEvent = 'scroll.waypoints'
waypointCounter = 1
waypointKey = 'waypoints<KEY>-waypoint-<KEY>'
wp = 'waypoint'
wps = 'waypoints'
# Context: Represents a single scrolling element in which waypoints live.
# For most users there will only be one Context, the window, but users can
# use other scrollable elements as a context using the "context" option
# when creating waypoints.
# Properties:
# - $element: jQuery object containing the context element.
# - element: The raw HTMLNode of the context element.
# - didResize: A flag used in throttling the resize event.
# - didScroll: A flag used in throttling the scroll event.
# - id: A unique identifier for the context.
# - oldScroll: A hash containing...
# - x: The context's last known horizontal scroll value.
# - y: The context's last known vertical scroll value.
# - waypoints: A hash containing two hashes with all waypoints in the context.
# Entries are in the same style as the allWaypoints hashes:
# (key = waypoint.id, value = waypoint)
# - horizontal: A hash of all horizontal waypoints.
# - vertical: A hash of all vertical waypoints.
class Context
  constructor: ($element) ->
    @$element = $element
    @element = $element[0]
    # Throttle flags: set when an event fires, cleared when its deferred
    # handler runs, so at most one handler is queued per event type.
    @didResize = no
    @didScroll = no
    @id = 'context' + contextCounter++
    @oldScroll =
      x: $element.scrollLeft()
      y: $element.scrollTop()
    @waypoints =
      horizontal: {}
      vertical: {}
    # We need to keep a reference to this Context instance on the DOM node
    # so we can look it up later based on the node.
    @element[contextKey] = @id
    # To do that look up, we need to have this instance in the global hash.
    contexts[@id] = this
    # Run scroll checks on scroll, but throttle it for performance reasons.
    # NOTE(review): when isTouch is true this handler schedules nothing —
    # presumably touch scrolls are handled by the iOS refresh hack in
    # doScroll; confirm against upstream behavior.
    $element.bind scrollEvent, =>
      unless @didScroll or isTouch
        @didScroll = yes
        scrollHandler = =>
          @doScroll()
          @didScroll = no
        window.setTimeout scrollHandler, $[wps].settings.scrollThrottle
    # Run a refresh on resize, but throttle it for performance reasons.
    $element.bind resizeEvent, =>
      unless @didResize
        @didResize = yes
        resizeHandler = =>
          $[wps] 'refresh'
          @didResize = no
        window.setTimeout resizeHandler, $[wps].settings.resizeThrottle
  # doScroll()
  # Looks at the new scroll values for the context, compares them to the old
  # scroll values, and checks to see if any waypoints should be triggered
  # by that change.
  doScroll: ->
    # We use some hashes with common values for each axis so that we can
    # just iterate over it rather than write the whole thing twice for
    # each axis.
    axes =
      horizontal:
        newScroll: @$element.scrollLeft()
        oldScroll: @oldScroll.x
        forward: 'right'
        backward: 'left'
      vertical:
        newScroll: @$element.scrollTop()
        oldScroll: @oldScroll.y
        forward: 'down'
        backward: 'up'
    # This is a small "hack" for iOS, needed because scrolls in mobile
    # Safari that start or end with the URL bar showing will cause window
    # height changes without firing a resize event.
    if isTouch and (!axes.vertical.oldScroll or !axes.vertical.newScroll)
      $[wps] 'refresh'
    # For each axis, check to see if any waypoints have been crossed.
    # Also determine the direction it's being crossed and sort/reverse all
    # crossed waypoints accordingly. And, of course, trigger the waypoints.
    $.each axes, (aKey, axis) =>
      triggered = []
      isForward = axis.newScroll > axis.oldScroll
      direction = if isForward then axis.forward else axis.backward
      # A waypoint is crossed when its offset lies between the old and new
      # scroll values, in either direction.
      $.each @waypoints[aKey], (wKey, waypoint) ->
        if axis.oldScroll < waypoint.offset <= axis.newScroll
          triggered.push waypoint
        else if axis.newScroll < waypoint.offset <= axis.oldScroll
          triggered.push waypoint
      triggered.sort (a, b) -> a.offset - b.offset
      triggered.reverse() unless isForward
      $.each triggered, (i, waypoint) ->
        # Non-continuous waypoints only fire when they are the last one
        # crossed in this scroll check.
        if waypoint.options.continuous or i is triggered.length - 1
          waypoint.trigger [direction]
    # Now that we're done with the check, the new scroll values become
    # the old scroll values for the next check.
    @oldScroll =
      x: axes.horizontal.newScroll
      y: axes.vertical.newScroll
  # refresh()
  # Runs through all of the waypoints in the context and recalculates
  # their offsets (the scroll value at which the waypoint is triggered.)
  # If a change in offset also happens to cross the context's current
  # scroll value, the waypoint will be triggered in the appropriate direction
  # unless prevented by the "onlyOnScroll" waypoint option.
  refresh: () ->
    isWin = $.isWindow @element
    cOffset = @$element.offset()
    # Make sure we have the most up-to-date scroll values for our context.
    @doScroll()
    # Each axis recalculation needs to know some things:
    # - contextOffset: The distance between the edge of the document and
    #   the context element.
    # - contextScroll: The scroll value of the context. However, if the
    #   context is the window this needs to be 0 because this value only
    #   comes into play when used in adjustment calculations for non-window
    #   context waypoints.
    # - contextDimension: Width or height of the context.
    # - oldScroll: The scroll value of the context. Unlike "contextScroll",
    #   this is the same no matter the type of context, and is used when
    #   determining whether a newly added waypoint should immediately fire
    #   on its first offset calculation.
    # - forward: Direction string passed to forward waypoint triggers.
    # - backward: Direction string passed to backward waypoint triggers.
    # - offsetProp: Key of the .offset() object for this axis.
    axes =
      horizontal:
        contextOffset: if isWin then 0 else cOffset.left
        contextScroll: if isWin then 0 else @oldScroll.x
        contextDimension: @$element.width()
        oldScroll: @oldScroll.x
        forward: 'right'
        backward: 'left'
        offsetProp: 'left'
      vertical:
        contextOffset: if isWin then 0 else cOffset.top
        contextScroll: if isWin then 0 else @oldScroll.y
        contextDimension: if isWin then $[wps]('viewportHeight') else \
          @$element.height()
        oldScroll: @oldScroll.y
        forward: 'down'
        backward: 'up'
        offsetProp: 'top'
    # For each axis, run through the waypoints. Store the old offset.
    # Recalculate the new offset. Check the difference against the context's
    # current scroll value and trigger any crossed waypoints accordingly.
    $.each axes, (aKey, axis) =>
      $.each @waypoints[aKey], (i, waypoint) ->
        adjustment = waypoint.options.offset
        oldOffset = waypoint.offset
        elementOffset = if $.isWindow waypoint.element then 0 else \
          waypoint.$element.offset()[axis.offsetProp]
        # The "offset" waypoint option (which we call "adjustment" here) can
        # be a number, percentage string, keyword string (bottom-in-view),
        # or a function. So we deal with all of these types here.
        if $.isFunction adjustment
          adjustment = adjustment.apply waypoint.element
        else if typeof adjustment is 'string'
          adjustment = parseFloat adjustment
          if waypoint.options.offset.indexOf('%') > -1
            adjustment = Math.ceil(axis.contextDimension * adjustment / 100)
        # We've finally calculated all the crazy little adjustments that
        # can come from using non-window contexts and the "offset" option.
        # Store the damn thing.
        waypoint.offset = elementOffset \
          - axis.contextOffset \
          + axis.contextScroll \
          - adjustment
        # "onlyOnScroll" tells us to not even consider triggering waypoints
        # during refresh, so we can eject early.
        return if (waypoint.options.onlyOnScroll and oldOffset?) or \
          !waypoint.enabled
        # Case where the refresh causes a backward trigger.
        if oldOffset isnt null and \
          oldOffset < axis.oldScroll <= waypoint.offset
          waypoint.trigger [axis.backward]
        # Now the forward case.
        else if oldOffset isnt null and \
          oldOffset > axis.oldScroll >= waypoint.offset
          waypoint.trigger [axis.forward]
        # "oldOffset" values of null mean this is the first calculation of
        # the waypoint's offset. It's a special time in a waypoint's life.
        else if oldOffset is null and axis.oldScroll >= waypoint.offset
          waypoint.trigger [axis.forward]
  # checkEmpty()
  # Looks at the waypoints hashes. If they are empty, the context removes
  # itself from the global contexts hash and unbinds its event handlers.
  checkEmpty: ->
    if $.isEmptyObject(@waypoints.horizontal) and \
      $.isEmptyObject(@waypoints.vertical)
      @$element.unbind [resizeEvent, scrollEvent].join(' ')
      delete contexts[@id]
# Waypoint: Represents a single callback function tied to an element. An
# element can have multiple waypoints with multiple offsets.
# Properties:
# - $element: jQuery object containing the waypoint element.
# - element: The raw HTMLNode of the waypoint element.
# - axis: 'horizontal' || 'vertical' - The axis on which this waypoint lives.
# - callback: The function that is fired when the waypoint is triggered.
# - context: A reference to the context this waypoint belongs to.
# - enabled: Boolean indicating whether this waypoint is enabled or not.
# Disabled waypoints are still returned in functions that aggregate
# waypoints, but do not fire their callbacks.
# - id: A unique identifier for the waypoint.
# - offset: The scroll offset at which the waypoint should trigger.
# - options: A hash containing the various waypoint options.
# See $.fn.waypoint.defaults for more information on those options.
class Waypoint
  constructor: ($element, context, options) ->
    # The 'bottom-in-view' keyword is translated into an offset function
    # that triggers when the element's bottom meets the context's bottom.
    if options.offset is 'bottom-in-view'
      options.offset = ->
        contextHeight = $[wps] 'viewportHeight'
        unless $.isWindow context.element
          contextHeight = context.$element.height()
        contextHeight - $(this).outerHeight()
    @$element = $element
    @element = $element[0]
    @axis = if options.horizontal then 'horizontal' else 'vertical'
    @callback = options.handler
    @context = context
    @enabled = options.enabled
    @id = 'waypoints' + waypointCounter++
    # Offset is calculated lazily, during the first refresh.
    @offset = null
    @options = options
    # Add our new waypoint to its context.
    context.waypoints[@axis][@id] = this
    # Add it to the global hash.
    allWaypoints[@axis][@id] = this
    # Add the waypoint's id to the element's waypoint id list.
    idList = @element[waypointKey] ? []
    idList.push @id
    @element[waypointKey] = idList
  # trigger(array)
  # Calls the waypoint's callback function, passing to it the arguments
  # supplied in the "args" array. Does nothing while disabled; destroys
  # the waypoint afterwards when the triggerOnce option is set.
  trigger: (args) ->
    return unless @enabled
    if @callback?
      @callback.apply @element, args
    if @options.triggerOnce
      @destroy()
  # disable()
  # Temporarily disables a waypoint from firing its callback.
  disable: ->
    @enabled = false
  # enable()
  # Breathe life back into the waypoint. Refreshes first so the offset is
  # current before the waypoint can fire again.
  enable: ->
    @context.refresh()
    @enabled = true
  # destroy()
  # Kills the waypoint for good.
  destroy: ->
    delete allWaypoints[@axis][@id]
    delete @context.waypoints[@axis][@id]
    # Let the context unbind its handlers if it no longer owns waypoints.
    @context.checkEmpty()
  # Waypoint.getWaypointsByElement(HTMLNode)
  # Returns an array of all Waypoint instances attached to the "element"
  # HTMLNode. Returns an empty array if there are no attached waypoints.
  @getWaypointsByElement: (element) ->
    ids = element[waypointKey]
    return [] unless ids
    all = $.extend {}, allWaypoints.horizontal, allWaypoints.vertical
    $.map ids, (id) ->
      all[id]
# These methods are available on the $.fn object by using the method
# name as the first argument to .waypoint. Ex: $('div').waypoint('destroy')
methods =
  # init(function, object)
  # Creates a new waypoint (and if needed, a new context) using the supplied
  # callback function and options.
  # The "f" function and the "options" object are both optional, but at least
  # one must be supplied. So acceptable signatures are:
  # - .waypoint(f)
  # - .waypoint(options)
  # - .waypoint(f, options)
  # This "init" method should never need to be called explicity by the user.
  # It is the default method that is delegated to when .waypoint is called
  # with one of the above signatures.
  # Ex: $('div').waypoint(function(direction) {
  #   // Do things
  # }, { offset: '100%' });
  init: (f, options) ->
    options = $.extend {}, $.fn[wp].defaults, options
    # A handler passed through options wins; positional "f" is a fallback.
    options.handler ?= f
    @each ->
      $this = $ this
      contextElement = options.context ? $.fn[wp].defaults.context
      # A non-window context is treated as a selector/node and resolved to
      # the closest matching ancestor of the waypoint element.
      unless $.isWindow contextElement
        contextElement = $this.closest contextElement
      contextElement = $ contextElement
      # Reuse an existing Context for this element, or lazily create one.
      context = contexts[contextElement[0][contextKey]]
      context = new Context contextElement unless context
      new Waypoint $this, context, options
    # Newly created waypoints need their offsets calculated.
    $[wps] 'refresh'
    this
  # Disable, enable, and destroy all just delegate to the instance methods
  # of the waypoints attached to the subject elements.
  disable: -> methods._invoke.call this, 'disable'
  enable: -> methods._invoke.call this, 'enable'
  destroy: -> methods._invoke.call this, 'destroy'
  # .waypoint('prev', string, string|HTMLNode|jQuery)
  # Returns a jQuery object containing previous waypoint elements. This
  # creates a new entry in the jQuery object stack just like jQuery's prev
  # function. "axis" indicates the axis on which to traverse
  # ('horizontal' | 'vertical') and "selector" indicates which context
  # element to use. The defaults are 'vertical' and window respectively.
  prev: (axis, selector) ->
    methods._traverse.call this, axis, selector, (stack, index, waypoints) ->
      stack.push waypoints[index-1] if index > 0
  # .waypoint('next', string, string|HTMLNode|jQuery)
  # Returns a jQuery object containing next waypoint elements. This
  # creates a new entry in the jQuery object stack just like jQuery's next
  # function. "axis" indicates the axis on which to traverse
  # ('horizontal' | 'vertical') and "selector" indicates which context
  # element to use. The defaults are 'vertical' and window respectively.
  next: (axis, selector) ->
    methods._traverse.call this, axis, selector, (stack, index, waypoints) ->
      stack.push waypoints[index+1] if index < waypoints.length-1
  # Internal: Aggregates waypoints on a given axis of a context, and applies
  # a "push" callback for each element in the subject jQuery object. This
  # callback builds the element array to push to the jQuery stack.
  _traverse: (axis = 'vertical', selector = window, push) ->
    waypoints = jQMethods.aggregate selector
    stack = []
    @each ->
      # Position of this element in trigger order on the requested axis;
      # -1 when the element carries no waypoint there.
      index = $.inArray this, waypoints[axis]
      push stack, index, waypoints[axis]
    @pushStack stack
  # Internal: Finds all waypoints on a given set of "$elements" and invokes
  # "method" on each instance.
  _invoke: (method) ->
    this.each ->
      waypoints = Waypoint.getWaypointsByElement this
      $.each waypoints, (i, waypoint) ->
        waypoint[method]()
        # Returning true keeps $.each iterating even if the invoked
        # method returned false.
        true
    this
# $.fn.waypoint. Let's just hook this guy up to our methods hash and
# add some trivial error reporting for bogus calls.
$.fn[wp] = (method, args...) ->
if methods[method]
methods[method].apply this, args
else if $.isFunction(method)
methods.init.apply this, arguments
else if $.isPlainObject(method)
methods.init.apply this, [null, method]
else if !method
$.error "jQuery Waypoints needs a callback function or handler option."
else
$.error "The #{method} method does not exist in jQuery Waypoints."
# The default options object for a waypoint.
# - context: string|HTMLNode|jQuery - The scrollable element that the
# waypoint acts within. The waypoint will look for the closest ancestor
# element that matches this selector or node.
# - continuous: Multiple waypoints may be triggered by a single scroll check.
# If you would like a waypoint to only trigger if it is the last waypoint
# in a scroll check, set this to false.
# - enabled: Should this waypoint start enabled (true) or disabled (false)?
# - handler: This option is not defined by default, but can be used as an
# alternate way to pass the waypoint callback function, rather than as
# the first argument to .waypoint.
# Ex: $('div').waypoint({
# handler: function(direction) { ... }
# });
# - horizontal: Set this to true if the waypoint is, well, horizontal.
# - offset: number|string|function - Determines how far from the top (or left
# if the waypoint is horizontal) of the context's viewport to trigger the
# waypoint. The default of 0 means that the waypoint is triggered when the
# top of the waypoint element hits the top of the window/context-element.
# An offset of 50 would mean the waypoint triggers when the top of the
# element is 50 pixels from the top of the window.
# A % string is translated into a percentage of the width/height of
# the context.
# If a function is passed, that function should return a number. The "this"
# keyword within this function will be set to the raw HTMLNode of the
# waypoint element.
# - triggerOnce: If true, the waypoint will destroy itself after
# first trigger.
$.fn[wp].defaults =
context: window
continuous: true
enabled: true
horizontal: false
offset: 0
triggerOnce: false
# These methods are available on the $ object by using the method name as
# the first argument to .waypoint. Ex: $.waypoints('refresh')
jQMethods =
# $.waypoints('refresh')
# Forces a refresh on all contexts, recalculating all waypoint offsets.
# This is done automatically on waypoint addition and during resize events,
# but if a user does something to change the DOM, CSS, or in some way
# change the layout of a page and its elements, they might need to call
# this method manually.
refresh: ->
$.each contexts, (i, context) -> context.refresh()
# $.waypoints('viewportHeight')
# A utility method that returns the window height, but takes into account
# inconsistencies that come with just using jQuery's .height() on iOS.
viewportHeight: ->
window.innerHeight ? $w.height()
# $.waypoints(['aggregate'], [contextSelector])
# Returns an object containing two HTMLNode arrays, one for each axis:
# {
# horizontal: [ HTMLNode... ]
# vertical: [ HTMLNode... ]
# }
# This is the default method used when calling $.waypoints(). If
# "contextSelector" is not supplied, it returns all waypoints. If
# "contextSelector" is supplied it only returns waypoints for that context.
# The array of waypoint elements is returned sorted by calculated offset,
# the order in which they would be triggered on the page.
aggregate: (contextSelector) ->
collection = allWaypoints
if contextSelector
collection = contexts[$(contextSelector)[0][contextKey]]?.waypoints
return [] unless collection
waypoints =
horizontal: []
vertical: []
$.each waypoints, (axis, arr) ->
$.each collection[axis], (key, waypoint) ->
arr.push waypoint
arr.sort (a, b) -> a.offset - b.offset
waypoints[axis] = $.map arr, (waypoint) -> waypoint.element
waypoints[axis] = $.unique waypoints[axis]
waypoints
# $.waypoints('above', [string|HTMLNode|jQuery])
# Returns all vertical waypoints that lie above the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
above: (contextSelector = window) ->
jQMethods._filter contextSelector, 'vertical', (context, waypoint) ->
waypoint.offset <= context.oldScroll.y
# $.waypoints('below', [string|HTMLNode|jQuery])
# Returns all vertical waypoints that lie below the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
below: (contextSelector = window) ->
jQMethods._filter contextSelector, 'vertical', (context, waypoint) ->
waypoint.offset > context.oldScroll.y
# $.waypoints('left', [string|HTMLNode|jQuery])
# Returns all horizontal waypoints left of the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
left: (contextSelector = window) ->
jQMethods._filter contextSelector, 'horizontal', (context, waypoint) ->
waypoint.offset <= context.oldScroll.x
# $.waypoints('right', [string|HTMLNode|jQuery])
# Returns all horizontal waypoints right of the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
right: (contextSelector = window) ->
jQMethods._filter contextSelector, 'horizontal', (context, waypoint) ->
waypoint.offset > context.oldScroll.x
# $.waypoints('enable/disable/destroy')
# These methods delegate to the enable/disable/destroy instance methods
# for all waypoints.
enable: -> jQMethods._invoke 'enable'
disable: -> jQMethods._invoke 'disable'
destroy: -> jQMethods._invoke 'destroy'
# $.waypoints('extendFn', string, function)
# Extends the $.fn.waypoint method object with a new method, "f". This
# just lets other modules piggyback on the .waypoint namespace.
extendFn: (methodName, f) ->
methods[methodName] = f
# Internal: Invokes "method" on all waypoints.
_invoke: (method) ->
waypoints = $.extend {}, allWaypoints.vertical, allWaypoints.horizontal
$.each waypoints, (key, waypoint) ->
waypoint[method]()
true
# Internal: Returns an array of all HTMLNodes for each waypoint that passes
# the "test" function. Only waypoints within the "selector" context on the
# "axis" axis are tested. As with .aggregate, the array is sorted by
# calculated offset (trigger order).
_filter: (selector, axis, test) ->
context = contexts[$(selector)[0][contextKey]]
return [] unless context
waypoints = []
$.each context.waypoints[axis], (i, waypoint) ->
waypoints.push waypoint if test context, waypoint
waypoints.sort (a, b) -> a.offset - b.offset
$.map waypoints, (waypoint) -> waypoint.element
# Hook up jQMethods to the $.waypoints namespace.
$[wps] = (method, args...) ->
if jQMethods[method]
jQMethods[method].apply null, args
else
jQMethods.aggregate.call null, method
# Plugin-wide settings:
# - resizeThrottle: For performance reasons, the refresh performed during
# resizes is throttled. This value is the rate-limit in milliseconds
# between resize refreshes. For more information on throttling, check out
# <NAME> Alman’s throttle / debounce plugin.
# http://benalman.com/projects/jquery-throttle-debounce-plugin/
# - scrollThrottle: For performance reasons, checking for any crossed
# waypoints during a scroll event is throttled. This value is the
# rate-limit in milliseconds between scroll checks. For more information
# on throttling, check out <NAME> Alman’s throttle / debounce plugin.
# http://benalman.com/projects/jquery-throttle-debounce-plugin/
$[wps].settings =
resizeThrottle: 100
scrollThrottle: 30
# Ensure a refresh on page load. Newly loaded images often shift layout.
$w.on 'load.waypoints', -> $[wps] 'refresh'
| true | ###!
jQuery Waypoints - v2.0.5
Copyright (c) 2011-2014 PI:NAME:<NAME>END_PI
Licensed under the MIT license.
https://github.com/imakewebthings/jquery-waypoints/blob/master/licenses.txt
###
((root, factory) ->
if typeof define is 'function' and define.amd
define 'waypoints', ['jquery'], ($) ->
factory $, root
else
factory root.jQuery, root
) window, ($, window) ->
$w = $ window
# Touch support feature test
isTouch = 'ontouchstart' in window
# Internal plugin-wide variables:
# - allWaypoints: A hash containing two hashes, one for vertical waypoints
# and one for horizontal waypoints. In each hash they value is a Waypoint
# instance and the key is that waypoint's unique ID.
# - contextCounter: A counter that is incremented with each instantiation
# of the Context class, used in its unique ID.
# - contexts: A hash of all contexts. The value of each entry is a Context
# instance and the key is that context's unique ID.
# - contextKey: The DOM element for each context keeps a reference to the
# context's unique ID in the jQuery .data() object. This is the key for
# that data entry.
# - resizeEvent: The namespaced resize event used by contexts.
# - scrollEvent: The namespaced scroll event used by contexts.
# - waypointCounter: A counter that is incremented with each instantiation
# of the Waypoint class, used in its unique ID.
# - waypointKey: The DOM element for each waypoint keeps a reference to an
# array of the unique IDs of all waypoints attached to that element. This
# array is kept in the jQuery .data() object, and this is the key for
# that entry.
# - wp: A variable shortcut for the waypoint method name on the $.fn object.
# Using this variable just helps with minification.
# - wps: A variable shortcut for the waypoints method name on the $ object.
# Using this variable just helps with minification.
allWaypoints =
horizontal: {}
vertical: {}
contextCounter = 1
contexts = {}
contextKey = 'waypoints-PI:KEY:<KEY>END_PI'
resizeEvent = 'resize.waypoints'
scrollEvent = 'scroll.waypoints'
waypointCounter = 1
waypointKey = 'waypointsPI:KEY:<KEY>END_PI-waypoint-PI:KEY:<KEY>END_PI'
wp = 'waypoint'
wps = 'waypoints'
# Context: Represents a single scrolling element in which waypoints live.
# For most users there will only be one Context, the window, but users can
# use other scrollable elements as a context using the "context" option
# when creating waypoints.
# Properties:
# - $element: jQuery object containing the context element.
# - element: The raw HTMLNode of the context element.
# - didResize: A flag used in throttling the resize event.
# - didScroll: A flag used in throttling the scroll event.
# - id: A unique identifier for the context.
# - oldScroll: A hash containing...
# - x: The context's last known horizontal scroll value.
# - y: The context's last known vertical scroll value.
# - waypoints: A hash containing two hashes with all waypoints in the context.
# Entries are in the same style as the allWaypoints hashes:
# (key = waypoint.id, value = waypoint)
# - horizontal: A hash of all horizontal waypoints.
# - vertical: A hash of all vertical waypoints.
class Context
constructor: ($element) ->
@$element = $element
@element = $element[0]
@didResize = no
@didScroll = no
@id = 'context' + contextCounter++
@oldScroll =
x: $element.scrollLeft()
y: $element.scrollTop()
@waypoints =
horizontal: {}
vertical: {}
# We need to keep a reference to this Context instance on the DOM node
# so we can look it up later based on the node.
@element[contextKey] = @id
# To do that look up, we need to have this instance in the global hash.
contexts[@id] = this
# Run scroll checks on scroll, but throttle it for performance reasons.
$element.bind scrollEvent, =>
unless @didScroll or isTouch
@didScroll = yes
scrollHandler = =>
@doScroll()
@didScroll = no
window.setTimeout scrollHandler, $[wps].settings.scrollThrottle
# Run a refresh on resize, but throttle it for performance reasons.
$element.bind resizeEvent, =>
unless @didResize
@didResize = yes
resizeHandler = =>
$[wps] 'refresh'
@didResize = no
window.setTimeout resizeHandler, $[wps].settings.resizeThrottle
# doScroll()
# Looks at the new scroll values for the context, compares them to the old
# scroll values, and checks to see if any waypoints should be triggered
# by that change.
doScroll: ->
# We use some hashes with common values for each axis so that we can
# just iterate over it rather than write the whole thing twice for
# each axis.
axes =
horizontal:
newScroll: @$element.scrollLeft()
oldScroll: @oldScroll.x
forward: 'right'
backward: 'left'
vertical:
newScroll: @$element.scrollTop()
oldScroll: @oldScroll.y
forward: 'down'
backward: 'up'
# This is a small "hack" for iOS, needed because scrolls in mobile
# Safari that start or end with the URL bar showing will cause window
# height changes without firing a resize event.
if isTouch and (!axes.vertical.oldScroll or !axes.vertical.newScroll)
$[wps] 'refresh'
# For each axis, check to see if any waypoints have been crossed.
# Also determine the direction it's being crossed and sort/reverse all
# crossed waypoints accordingly. And, of course, trigger the waypoints.
$.each axes, (aKey, axis) =>
triggered = []
isForward = axis.newScroll > axis.oldScroll
direction = if isForward then axis.forward else axis.backward
$.each @waypoints[aKey], (wKey, waypoint) ->
if axis.oldScroll < waypoint.offset <= axis.newScroll
triggered.push waypoint
else if axis.newScroll < waypoint.offset <= axis.oldScroll
triggered.push waypoint
triggered.sort (a, b) -> a.offset - b.offset
triggered.reverse() unless isForward
$.each triggered, (i, waypoint) ->
if waypoint.options.continuous or i is triggered.length - 1
waypoint.trigger [direction]
# Now that we're done with the check, the new scroll values become
# the old scroll values for the next check.
@oldScroll =
x: axes.horizontal.newScroll
y: axes.vertical.newScroll
# refresh()
# Runs through all of the waypoints in the context and recalculates
# their offsets (the scroll value at which the waypoint is triggered.)
# If a change in offset also happens to cross the context's current
# scroll value, the waypoint will be triggered in the appropriate direction
# unless prevented by the "onlyOnScroll" waypoint option.
refresh: () ->
isWin = $.isWindow @element
cOffset = @$element.offset()
# Make sure we have the most up-to-date scroll values for our context.
@doScroll()
# Each axis recalculation needs to know some things:
# - contextOffset: The distance between the edge of the document and
# the context element.
# - contextScroll: The scroll value of the context. However, if the
# context is the window this needs to be 0 because this value only
# comes into play when used in adjustment calculations for non-window
# context waypoints.
# - contextDimension: Width or height of the context.
# - oldScroll: The scroll value of the context. Unlike "contextScroll",
# this is the same no matter the type of context, and is used when
# determining whether a newly added waypoint should immediately fire
# on its first offset calculation.
# - forward: Direction string passed to forward waypoint triggers.
# - backward: Direction string passed to backward waypoint triggers.
# - offsetProp: Key of the .offset() object for this axis.
axes =
horizontal:
contextOffset: if isWin then 0 else cOffset.left
contextScroll: if isWin then 0 else @oldScroll.x
contextDimension: @$element.width()
oldScroll: @oldScroll.x
forward: 'right'
backward: 'left'
offsetProp: 'left'
vertical:
contextOffset: if isWin then 0 else cOffset.top
contextScroll: if isWin then 0 else @oldScroll.y
contextDimension: if isWin then $[wps]('viewportHeight') else \
@$element.height()
oldScroll: @oldScroll.y
forward: 'down'
backward: 'up'
offsetProp: 'top'
# For each axis, run through the waypoints. Store the old offset.
# Recalculate the new offset. Check the difference against the context's
# current scroll value and trigger any crossed waypoints accordingly.
$.each axes, (aKey, axis) =>
$.each @waypoints[aKey], (i, waypoint) ->
adjustment = waypoint.options.offset
oldOffset = waypoint.offset
elementOffset = if $.isWindow waypoint.element then 0 else \
waypoint.$element.offset()[axis.offsetProp]
# The "offset" waypoint option (which we call "adjustment" here) can
# be a number, percentage string, keyword string (bottom-in-view),
# or a function. So we deal with all of these types here.
if $.isFunction adjustment
adjustment = adjustment.apply waypoint.element
else if typeof adjustment is 'string'
adjustment = parseFloat adjustment
if waypoint.options.offset.indexOf('%') > -1
adjustment = Math.ceil(axis.contextDimension * adjustment / 100)
# We've finally calculated all the crazy little adjustments that
# can come from using non-window contexts and the "offset" option.
# Store the damn thing.
waypoint.offset = elementOffset \
- axis.contextOffset \
+ axis.contextScroll \
- adjustment
# "onlyOnScroll" tells us to not even consider triggering waypoints
# during refresh, so we can eject early.
return if (waypoint.options.onlyOnScroll and oldOffset?) or \
!waypoint.enabled
# Case where the refresh causes a backward trigger.
if oldOffset isnt null and \
oldOffset < axis.oldScroll <= waypoint.offset
waypoint.trigger [axis.backward]
# Now the forward case.
else if oldOffset isnt null and \
oldOffset > axis.oldScroll >= waypoint.offset
waypoint.trigger [axis.forward]
# "oldOffset" values of null mean this is the first calculation of
# the waypoint's offset. It's a special time in a waypoint's life.
else if oldOffset is null and axis.oldScroll >= waypoint.offset
waypoint.trigger [axis.forward]
# checkEmpty()
# Looks at the waypoints hashes. If they are empty, the context removes
# itself from the global contexts hash.
checkEmpty: ->
if $.isEmptyObject(@waypoints.horizontal) and \
$.isEmptyObject(@waypoints.vertical)
@$element.unbind [resizeEvent, scrollEvent].join(' ')
delete contexts[@id]
# Waypoint: Represents a single callback function tied to an element. An
# element can have multiple waypoints with multiple offsets.
# Properties:
# - $element: jQuery object containing the waypoint element.
# - element: The raw HTMLNode of the waypoint element.
# - axis: 'horizontal' || 'vertical' - The axis on which this waypoint lives.
# - callback: The function that is fired when the waypoint is triggered.
# - context: A reference to the context this waypoint belongs to.
# - enabled: Boolean indicating whether this waypoint is enabled or not.
# Disabled waypoints are still returned in functions that aggregate
# waypoints, but do not fire their callbacks.
# - id: A unique identifier for the waypoint.
# - offset: The scroll offset at which the waypoint should trigger.
# - options: A hash containing the various waypoint options.
# See $.fn.waypoint.defaults for more information on those options.
class Waypoint
constructor: ($element, context, options) ->
if options.offset is 'bottom-in-view'
options.offset = ->
contextHeight = $[wps] 'viewportHeight'
unless $.isWindow context.element
contextHeight = context.$element.height()
contextHeight - $(this).outerHeight()
@$element = $element
@element = $element[0]
@axis = if options.horizontal then 'horizontal' else 'vertical'
@callback = options.handler
@context = context
@enabled = options.enabled
@id = 'waypoints' + waypointCounter++
@offset = null
@options = options
# Add our new waypoint to its context.
context.waypoints[@axis][@id] = this
# Add it to the global hash.
allWaypoints[@axis][@id] = this
# Add the waypoint's id to the element's waypoint id list.
idList = @element[waypointKey] ? []
idList.push @id
@element[waypointKey] = idList
# trigger(array)
# Calls the waypoint's callback function, passing to it the arguments
# supplied in the "args" array.
trigger: (args) ->
return unless @enabled
if @callback?
@callback.apply @element, args
if @options.triggerOnce
@destroy()
# disable()
# Temporarily disables a waypoint from firing its callback.
disable: ->
@enabled = false
# enable()
# Breathe life back into the waypoint.
enable: ->
@context.refresh()
@enabled = true
# destroy()
# Kills the waypoint for good.
destroy: ->
delete allWaypoints[@axis][@id]
delete @context.waypoints[@axis][@id]
@context.checkEmpty()
# Waypoint.getWaypointsByElement(HTMLNode)
# Returns an array of all Waypoint instances attached to the "element"
# HTMLNode. Returns an empty array if there are no attached waypoints.
@getWaypointsByElement: (element) ->
ids = element[waypointKey]
return [] unless ids
all = $.extend {}, allWaypoints.horizontal, allWaypoints.vertical
$.map ids, (id) ->
all[id]
# These methods are available on the $.fn object by using the method
# name as the first argument to .waypoint. Ex: $('div').waypoint('destroy')
methods =
# init(function, object)
# Creates a new waypoint (and if needed, a new context) using the supplied
# callback function and options.
# The "f" function and the "options" object are both optional, but at least
# one must be supplied. So acceptable signatures are:
# - .waypoint(f)
# - .waypoint(options)
# - .waypoint(f, options)
# This "init" method should never need to be called explicity by the user.
# It is the default method that is delegated to when .waypoint is called
# with one of the above signatures.
# Ex: $('div').waypoint(function(direction) {
# // Do things
# }, { offset: '100%' });
init: (f, options) ->
options = $.extend {}, $.fn[wp].defaults, options
options.handler ?= f
@each ->
$this = $ this
contextElement = options.context ? $.fn[wp].defaults.context
unless $.isWindow contextElement
contextElement = $this.closest contextElement
contextElement = $ contextElement
context = contexts[contextElement[0][contextKey]]
context = new Context contextElement unless context
new Waypoint $this, context, options
$[wps] 'refresh'
this
# Disable, enable, and destroy all just delegate to the instance methods
# of the waypoints attached to the subject elements.
disable: -> methods._invoke.call this, 'disable'
enable: -> methods._invoke.call this, 'enable'
destroy: -> methods._invoke.call this, 'destroy'
# .waypoint('prev', string, string|HTMLNode|jQuery)
# Returns a jQuery object containing previous waypoint elements. This
# creates a new entry in the jQuery object stack just like jQuery's prev
# function. "axis" indicates the axis on which to traverse
# ('horizontal' | 'vertical') and "selector" indicates which context
# element to use. The defaults are 'vertical' and window respectively.
prev: (axis, selector) ->
methods._traverse.call this, axis, selector, (stack, index, waypoints) ->
stack.push waypoints[index-1] if index > 0
# .waypoint('next', string, string|HTMLNode|jQuery)
# Returns a jQuery object containing next waypoint elements. This
# creates a new entry in the jQuery object stack just like jQuery's next
# function. "axis" indicates the axis on which to traverse
# ('horizontal' | 'vertical') and "selector" indicates which context
# element to use. The defaults are 'vertical' and window respectively.
next: (axis, selector) ->
methods._traverse.call this, axis, selector, (stack, index, waypoints) ->
stack.push waypoints[index+1] if index < waypoints.length-1
# Internal: Aggregates waypoints on a given axis of a context, and applies
# a "push" callback for each element in the subject jQuery object. This
# callback builds the element array to push to the jQuery stack.
_traverse: (axis = 'vertical', selector = window, push) ->
waypoints = jQMethods.aggregate selector
stack = []
@each ->
index = $.inArray this, waypoints[axis]
push stack, index, waypoints[axis]
@pushStack stack
# Internal: Finds all waypoints on a given set of "$elements" and invokes
# "method" on each instance.
_invoke: (method) ->
this.each ->
waypoints = Waypoint.getWaypointsByElement this
$.each waypoints, (i, waypoint) ->
waypoint[method]()
true
this
# $.fn.waypoint. Let's just hook this guy up to our methods hash and
# add some trivial error reporting for bogus calls.
$.fn[wp] = (method, args...) ->
if methods[method]
methods[method].apply this, args
else if $.isFunction(method)
methods.init.apply this, arguments
else if $.isPlainObject(method)
methods.init.apply this, [null, method]
else if !method
$.error "jQuery Waypoints needs a callback function or handler option."
else
$.error "The #{method} method does not exist in jQuery Waypoints."
# The default options object for a waypoint.
# - context: string|HTMLNode|jQuery - The scrollable element that the
# waypoint acts within. The waypoint will look for the closest ancestor
# element that matches this selector or node.
# - continuous: Multiple waypoints may be triggered by a single scroll check.
# If you would like a waypoint to only trigger if it is the last waypoint
# in a scroll check, set this to false.
# - enabled: Should this waypoint start enabled (true) or disabled (false)?
# - handler: This option is not defined by default, but can be used as an
# alternate way to pass the waypoint callback function, rather than as
# the first argument to .waypoint.
# Ex: $('div').waypoint({
# handler: function(direction) { ... }
# });
# - horizontal: Set this to true if the waypoint is, well, horizontal.
# - offset: number|string|function - Determines how far from the top (or left
# if the waypoint is horizontal) of the context's viewport to trigger the
# waypoint. The default of 0 means that the waypoint is triggered when the
# top of the waypoint element hits the top of the window/context-element.
# An offset of 50 would mean the waypoint triggers when the top of the
# element is 50 pixels from the top of the window.
# A % string is translated into a percentage of the width/height of
# the context.
# If a function is passed, that function should return a number. The "this"
# keyword within this function will be set to the raw HTMLNode of the
# waypoint element.
# - triggerOnce: If true, the waypoint will destroy itself after
# first trigger.
$.fn[wp].defaults =
context: window
continuous: true
enabled: true
horizontal: false
offset: 0
triggerOnce: false
# These methods are available on the $ object by using the method name as
# the first argument to .waypoint. Ex: $.waypoints('refresh')
jQMethods =
# $.waypoints('refresh')
# Forces a refresh on all contexts, recalculating all waypoint offsets.
# This is done automatically on waypoint addition and during resize events,
# but if a user does something to change the DOM, CSS, or in some way
# change the layout of a page and its elements, they might need to call
# this method manually.
refresh: ->
$.each contexts, (i, context) -> context.refresh()
# $.waypoints('viewportHeight')
# A utility method that returns the window height, but takes into account
# inconsistencies that come with just using jQuery's .height() on iOS.
viewportHeight: ->
window.innerHeight ? $w.height()
# $.waypoints(['aggregate'], [contextSelector])
# Returns an object containing two HTMLNode arrays, one for each axis:
# {
# horizontal: [ HTMLNode... ]
# vertical: [ HTMLNode... ]
# }
# This is the default method used when calling $.waypoints(). If
# "contextSelector" is not supplied, it returns all waypoints. If
# "contextSelector" is supplied it only returns waypoints for that context.
# The array of waypoint elements is returned sorted by calculated offset,
# the order in which they would be triggered on the page.
aggregate: (contextSelector) ->
collection = allWaypoints
if contextSelector
collection = contexts[$(contextSelector)[0][contextKey]]?.waypoints
return [] unless collection
waypoints =
horizontal: []
vertical: []
$.each waypoints, (axis, arr) ->
$.each collection[axis], (key, waypoint) ->
arr.push waypoint
arr.sort (a, b) -> a.offset - b.offset
waypoints[axis] = $.map arr, (waypoint) -> waypoint.element
waypoints[axis] = $.unique waypoints[axis]
waypoints
# $.waypoints('above', [string|HTMLNode|jQuery])
# Returns all vertical waypoints that lie above the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
above: (contextSelector = window) ->
jQMethods._filter contextSelector, 'vertical', (context, waypoint) ->
waypoint.offset <= context.oldScroll.y
# $.waypoints('below', [string|HTMLNode|jQuery])
# Returns all vertical waypoints that lie below the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
below: (contextSelector = window) ->
jQMethods._filter contextSelector, 'vertical', (context, waypoint) ->
waypoint.offset > context.oldScroll.y
# $.waypoints('left', [string|HTMLNode|jQuery])
# Returns all horizontal waypoints left of the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
left: (contextSelector = window) ->
jQMethods._filter contextSelector, 'horizontal', (context, waypoint) ->
waypoint.offset <= context.oldScroll.x
# $.waypoints('right', [string|HTMLNode|jQuery])
# Returns all horizontal waypoints right of the current scroll position
# of the context specified by "contextSelector". If no "contextSelector"
# is supplied, it defaults to the window.
right: (contextSelector = window) ->
jQMethods._filter contextSelector, 'horizontal', (context, waypoint) ->
waypoint.offset > context.oldScroll.x
# $.waypoints('enable/disable/destroy')
# These methods delegate to the enable/disable/destroy instance methods
# for all waypoints.
enable: -> jQMethods._invoke 'enable'
disable: -> jQMethods._invoke 'disable'
destroy: -> jQMethods._invoke 'destroy'
# $.waypoints('extendFn', string, function)
# Extends the $.fn.waypoint method object with a new method, "f". This
# just lets other modules piggyback on the .waypoint namespace.
extendFn: (methodName, f) ->
methods[methodName] = f
# Internal: Invokes "method" on all waypoints.
_invoke: (method) ->
waypoints = $.extend {}, allWaypoints.vertical, allWaypoints.horizontal
$.each waypoints, (key, waypoint) ->
waypoint[method]()
true
# Internal: Returns an array of all HTMLNodes for each waypoint that passes
# the "test" function. Only waypoints within the "selector" context on the
# "axis" axis are tested. As with .aggregate, the array is sorted by
# calculated offset (trigger order).
_filter: (selector, axis, test) ->
context = contexts[$(selector)[0][contextKey]]
return [] unless context
waypoints = []
$.each context.waypoints[axis], (i, waypoint) ->
waypoints.push waypoint if test context, waypoint
waypoints.sort (a, b) -> a.offset - b.offset
$.map waypoints, (waypoint) -> waypoint.element
# Hook up jQMethods to the $.waypoints namespace.
$[wps] = (method, args...) ->
if jQMethods[method]
jQMethods[method].apply null, args
else
jQMethods.aggregate.call null, method
# Plugin-wide settings:
# - resizeThrottle: For performance reasons, the refresh performed during
# resizes is throttled. This value is the rate-limit in milliseconds
# between resize refreshes. For more information on throttling, check out
# PI:NAME:<NAME>END_PI Alman’s throttle / debounce plugin.
# http://benalman.com/projects/jquery-throttle-debounce-plugin/
# - scrollThrottle: For performance reasons, checking for any crossed
# waypoints during a scroll event is throttled. This value is the
# rate-limit in milliseconds between scroll checks. For more information
# on throttling, check out PI:NAME:<NAME>END_PI Alman’s throttle / debounce plugin.
# http://benalman.com/projects/jquery-throttle-debounce-plugin/
$[wps].settings =
resizeThrottle: 100
scrollThrottle: 30
# Ensure a refresh on page load. Newly loaded images often shift layout.
$w.on 'load.waypoints', -> $[wps] 'refresh'
|
[
{
"context": "pOption.click()\n modal.inputs.username.setValue username\n modal.inputs.password.setValue '123'\n moda",
"end": 383,
"score": 0.8638009428977966,
"start": 375,
"tag": "USERNAME",
"value": "username"
},
{
"context": "alue username\n modal.inputs.password.setValue '123'\n modal.inputs.verifyPassword.setValue '123'\n ",
"end": 423,
"score": 0.9994542002677917,
"start": 420,
"tag": "PASSWORD",
"value": "123"
},
{
"context": "e '123'\n modal.inputs.verifyPassword.setValue '123'\n modal.buttons.submit.click()\n @When /^I cli",
"end": 470,
"score": 0.9994430541992188,
"start": 467,
"tag": "PASSWORD",
"value": "123"
}
] | features/step_definitions/inbox.coffee | eribeiro9/CoLabs | 0 | require.call this, '../lib/util.coffee'
app = require '../lib/app.coffee'
modal = app.modals.loginOrRegister
nav = app.views.nav
module.exports = ->
@When /^I create user "(.*)"$/, (username) ->
if !nav.links.signIn.isDisplayed()
nav.buttons.collapse.click()
nav.links.signIn.click()
modal.inputs.signupOption.click()
modal.inputs.username.setValue username
modal.inputs.password.setValue '123'
modal.inputs.verifyPassword.setValue '123'
modal.buttons.submit.click()
@When /^I click message user$/, ->
app.pages.user.buttons.messageUser.click()
@When /^I click the first message$/, ->
client.pause 500
messageId = client.element('#message').value.ELEMENT
client.elementIdClick(messageId)
@When /^I send "(.*)"$/, (text) ->
app.pages.inboxChat.inputs.chat.setValue text
app.pages.inboxChat.buttons.submit.click()
@Then /^I see the "(.*)" label$/, (text) ->
labelId = client.element('#contactName').value.ELEMENT
label = client.elementIdText(labelId).value
expect(label).toBe text
@Then /^I see a new message$/, ->
client.pause 500
messages = client.elements('.msg').value
expect(messages.length).toBe 1
| 31221 | require.call this, '../lib/util.coffee'
app = require '../lib/app.coffee'
modal = app.modals.loginOrRegister
nav = app.views.nav
module.exports = ->
@When /^I create user "(.*)"$/, (username) ->
if !nav.links.signIn.isDisplayed()
nav.buttons.collapse.click()
nav.links.signIn.click()
modal.inputs.signupOption.click()
modal.inputs.username.setValue username
modal.inputs.password.setValue '<PASSWORD>'
modal.inputs.verifyPassword.setValue '<PASSWORD>'
modal.buttons.submit.click()
@When /^I click message user$/, ->
app.pages.user.buttons.messageUser.click()
@When /^I click the first message$/, ->
client.pause 500
messageId = client.element('#message').value.ELEMENT
client.elementIdClick(messageId)
@When /^I send "(.*)"$/, (text) ->
app.pages.inboxChat.inputs.chat.setValue text
app.pages.inboxChat.buttons.submit.click()
@Then /^I see the "(.*)" label$/, (text) ->
labelId = client.element('#contactName').value.ELEMENT
label = client.elementIdText(labelId).value
expect(label).toBe text
@Then /^I see a new message$/, ->
client.pause 500
messages = client.elements('.msg').value
expect(messages.length).toBe 1
| true | require.call this, '../lib/util.coffee'
app = require '../lib/app.coffee'
modal = app.modals.loginOrRegister
nav = app.views.nav
module.exports = ->
@When /^I create user "(.*)"$/, (username) ->
if !nav.links.signIn.isDisplayed()
nav.buttons.collapse.click()
nav.links.signIn.click()
modal.inputs.signupOption.click()
modal.inputs.username.setValue username
modal.inputs.password.setValue 'PI:PASSWORD:<PASSWORD>END_PI'
modal.inputs.verifyPassword.setValue 'PI:PASSWORD:<PASSWORD>END_PI'
modal.buttons.submit.click()
@When /^I click message user$/, ->
app.pages.user.buttons.messageUser.click()
@When /^I click the first message$/, ->
client.pause 500
messageId = client.element('#message').value.ELEMENT
client.elementIdClick(messageId)
@When /^I send "(.*)"$/, (text) ->
app.pages.inboxChat.inputs.chat.setValue text
app.pages.inboxChat.buttons.submit.click()
@Then /^I see the "(.*)" label$/, (text) ->
labelId = client.element('#contactName').value.ELEMENT
label = client.elementIdText(labelId).value
expect(label).toBe text
@Then /^I see a new message$/, ->
client.pause 500
messages = client.elements('.msg').value
expect(messages.length).toBe 1
|
[
{
"context": "e'\n publicKey: 'node'\n privateKey: 'node'\n ))\n\n http.get '/not_found', (err, res",
"end": 1023,
"score": 0.6240950226783752,
"start": 1019,
"tag": "KEY",
"value": "node"
},
{
"context": "on\n merchantId: 'node'\n publicKey: 'node'\n privateKey: 'node'\n ))\n\n http.",
"end": 1367,
"score": 0.6306199431419373,
"start": 1363,
"tag": "KEY",
"value": "node"
},
{
"context": "e'\n publicKey: 'node'\n privateKey: 'node'\n ))\n\n http.get '/not_found', (err, res",
"end": 1394,
"score": 0.7425879240036011,
"start": 1390,
"tag": "KEY",
"value": "node"
},
{
"context": "e'\n publicKey: 'node'\n privateKey: 'node'\n ))\n\n http.timeout = 1\n http.get ",
"end": 1915,
"score": 0.917027473449707,
"start": 1911,
"tag": "KEY",
"value": "node"
}
] | spec/integration/braintree/http_spec.coffee | StreamCo/braintree_node | 0 | require('../../spec_helper')
braintree = specHelper.braintree
{Config} = require('../../../lib/braintree/config')
{Http} = require('../../../lib/braintree/http')
{Environment} = require('../../../lib/braintree/environment')
describe "Http", ->
describe "request", ->
it "returns a ServerError for 500s", (done) ->
http = new Http(new Config(specHelper.defaultConfig))
http.post '/test/error', '', (err, response) ->
assert.equal(err.type, braintree.errorTypes.serverError)
done()
it "returns a down for maintenance error for 503s", (done) ->
http = new Http(new Config(specHelper.defaultConfig))
http.post '/test/maintenance', '', (err, response) ->
assert.equal(err.type, braintree.errorTypes.downForMaintenanceError)
done()
it "can hit the sandbox", (done) ->
@timeout 10000
http = new Http(new Config(
environment: braintree.Environment.Sandbox
merchantId: 'node'
publicKey: 'node'
privateKey: 'node'
))
http.get '/not_found', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "can hit production", (done) ->
@timeout 10000
http = new Http(new Config(
environment: braintree.Environment.Production
merchantId: 'node'
publicKey: 'node'
privateKey: 'node'
))
http.get '/not_found', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
xit "returns errors to the callback", (done) ->
# This test only fails due to the 'done' callback being called twice.
# Invalid test, which randomly fails.
http = new Http(new Config(
environment: new Environment('not_a_subdomain.braintreegateway.com', '12345', false)
merchantId: 'node'
publicKey: 'node'
privateKey: 'node'
))
http.timeout = 1
http.get '/not_a_real_url', (err, response) ->
assert.equal(err.type, braintree.errorTypes.unexpectedError)
| 32555 | require('../../spec_helper')
braintree = specHelper.braintree
{Config} = require('../../../lib/braintree/config')
{Http} = require('../../../lib/braintree/http')
{Environment} = require('../../../lib/braintree/environment')
describe "Http", ->
describe "request", ->
it "returns a ServerError for 500s", (done) ->
http = new Http(new Config(specHelper.defaultConfig))
http.post '/test/error', '', (err, response) ->
assert.equal(err.type, braintree.errorTypes.serverError)
done()
it "returns a down for maintenance error for 503s", (done) ->
http = new Http(new Config(specHelper.defaultConfig))
http.post '/test/maintenance', '', (err, response) ->
assert.equal(err.type, braintree.errorTypes.downForMaintenanceError)
done()
it "can hit the sandbox", (done) ->
@timeout 10000
http = new Http(new Config(
environment: braintree.Environment.Sandbox
merchantId: 'node'
publicKey: 'node'
privateKey: '<KEY>'
))
http.get '/not_found', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "can hit production", (done) ->
@timeout 10000
http = new Http(new Config(
environment: braintree.Environment.Production
merchantId: 'node'
publicKey: '<KEY>'
privateKey: '<KEY>'
))
http.get '/not_found', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
xit "returns errors to the callback", (done) ->
# This test only fails due to the 'done' callback being called twice.
# Invalid test, which randomly fails.
http = new Http(new Config(
environment: new Environment('not_a_subdomain.braintreegateway.com', '12345', false)
merchantId: 'node'
publicKey: 'node'
privateKey: '<KEY>'
))
http.timeout = 1
http.get '/not_a_real_url', (err, response) ->
assert.equal(err.type, braintree.errorTypes.unexpectedError)
| true | require('../../spec_helper')
braintree = specHelper.braintree
{Config} = require('../../../lib/braintree/config')
{Http} = require('../../../lib/braintree/http')
{Environment} = require('../../../lib/braintree/environment')
describe "Http", ->
describe "request", ->
it "returns a ServerError for 500s", (done) ->
http = new Http(new Config(specHelper.defaultConfig))
http.post '/test/error', '', (err, response) ->
assert.equal(err.type, braintree.errorTypes.serverError)
done()
it "returns a down for maintenance error for 503s", (done) ->
http = new Http(new Config(specHelper.defaultConfig))
http.post '/test/maintenance', '', (err, response) ->
assert.equal(err.type, braintree.errorTypes.downForMaintenanceError)
done()
it "can hit the sandbox", (done) ->
@timeout 10000
http = new Http(new Config(
environment: braintree.Environment.Sandbox
merchantId: 'node'
publicKey: 'node'
privateKey: 'PI:KEY:<KEY>END_PI'
))
http.get '/not_found', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
it "can hit production", (done) ->
@timeout 10000
http = new Http(new Config(
environment: braintree.Environment.Production
merchantId: 'node'
publicKey: 'PI:KEY:<KEY>END_PI'
privateKey: 'PI:KEY:<KEY>END_PI'
))
http.get '/not_found', (err, response) ->
assert.equal(err.type, braintree.errorTypes.notFoundError)
done()
xit "returns errors to the callback", (done) ->
# This test only fails due to the 'done' callback being called twice.
# Invalid test, which randomly fails.
http = new Http(new Config(
environment: new Environment('not_a_subdomain.braintreegateway.com', '12345', false)
merchantId: 'node'
publicKey: 'node'
privateKey: 'PI:KEY:<KEY>END_PI'
))
http.timeout = 1
http.get '/not_a_real_url', (err, response) ->
assert.equal(err.type, braintree.errorTypes.unexpectedError)
|
[
{
"context": "---------------------------------\n# Copyright 2013 I.B.M.\n# \n# Licensed under the Apache License, Version ",
"end": 3182,
"score": 0.999767005443573,
"start": 3177,
"tag": "NAME",
"value": "I.B.M"
}
] | lib-src/coffee/node/middleware.coffee | pmuellr/nodprof | 6 | # Licensed under the Apache License. See footer for details.
path = require "path"
express = require "express"
Services = require("./services").Services
logger = require "../common/logger"
utils = require "../common/utils"
config = require "./config"
WWWDIR = path.join __dirname, "../../www"
VENDOR = path.join __dirname, "../../vendor"
defaultConfig = config.getConfiguration([]).config
#-------------------------------------------------------------------------------
module.exports = (config = defaultConfig) ->
app = express()
app.on "error", (error) -> logger.log error
app.set "services", new Services config
app.use CORSify
# app.use log
app.get "/api/files.json", getFiles
app.get "/api/files/:file.json", getFile
app.post "/api/profileStart", profileStart
app.post "/api/profileStop", profileStop
app.post "/api/heapSnapshot", heapSnapshot
app.use express.static(WWWDIR)
app.use "/vendor", express.static(VENDOR)
return app
#-------------------------------------------------------------------------------
getFiles = (request, response) ->
services = request.app.get "services"
services.getFiles (err, data) ->
if err?
message = "error processing getFiles(): #{err}"
logger.log message
response.send 500, message
return
response.send data
return
#-------------------------------------------------------------------------------
getFile = (request, response) ->
services = request.app.get "services"
fileName = "#{request.params.file}.json"
services.getFile fileName, (err, data) ->
if err?
message = "error processing getFile(#{fileName}): #{err}"
logger.log message
response.send 500, message
return
response.send data
#-------------------------------------------------------------------------------
profileStart = (request, response) ->
services = request.app.get "services"
services.profileStart()
response.send "profile started"
return
#-------------------------------------------------------------------------------
profileStop = (request, response) ->
services = request.app.get "services"
services.profileStop()
response.send "profile stopped"
return
#-------------------------------------------------------------------------------
heapSnapshot = (request, response) ->
services = request.app.get "services"
services.heapSnapshot()
response.send "heap snapshot generated"
return
#-------------------------------------------------------------------------------
CORSify = (request, response, next) ->
response.header "Access-Control-Allow-Origin:", "*"
response.header "Access-Control-Allow-Methods", "OPTIONS, GET, POST"
next()
return
#-------------------------------------------------------------------------------
log = (request, response, next) ->
logger.log utils.JL request.url
# console.log request
next()
#-------------------------------------------------------------------------------
# Copyright 2013 I.B.M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
| 212120 | # Licensed under the Apache License. See footer for details.
path = require "path"
express = require "express"
Services = require("./services").Services
logger = require "../common/logger"
utils = require "../common/utils"
config = require "./config"
WWWDIR = path.join __dirname, "../../www"
VENDOR = path.join __dirname, "../../vendor"
defaultConfig = config.getConfiguration([]).config
#-------------------------------------------------------------------------------
module.exports = (config = defaultConfig) ->
app = express()
app.on "error", (error) -> logger.log error
app.set "services", new Services config
app.use CORSify
# app.use log
app.get "/api/files.json", getFiles
app.get "/api/files/:file.json", getFile
app.post "/api/profileStart", profileStart
app.post "/api/profileStop", profileStop
app.post "/api/heapSnapshot", heapSnapshot
app.use express.static(WWWDIR)
app.use "/vendor", express.static(VENDOR)
return app
#-------------------------------------------------------------------------------
getFiles = (request, response) ->
services = request.app.get "services"
services.getFiles (err, data) ->
if err?
message = "error processing getFiles(): #{err}"
logger.log message
response.send 500, message
return
response.send data
return
#-------------------------------------------------------------------------------
getFile = (request, response) ->
services = request.app.get "services"
fileName = "#{request.params.file}.json"
services.getFile fileName, (err, data) ->
if err?
message = "error processing getFile(#{fileName}): #{err}"
logger.log message
response.send 500, message
return
response.send data
#-------------------------------------------------------------------------------
profileStart = (request, response) ->
services = request.app.get "services"
services.profileStart()
response.send "profile started"
return
#-------------------------------------------------------------------------------
profileStop = (request, response) ->
services = request.app.get "services"
services.profileStop()
response.send "profile stopped"
return
#-------------------------------------------------------------------------------
heapSnapshot = (request, response) ->
services = request.app.get "services"
services.heapSnapshot()
response.send "heap snapshot generated"
return
#-------------------------------------------------------------------------------
CORSify = (request, response, next) ->
response.header "Access-Control-Allow-Origin:", "*"
response.header "Access-Control-Allow-Methods", "OPTIONS, GET, POST"
next()
return
#-------------------------------------------------------------------------------
log = (request, response, next) ->
logger.log utils.JL request.url
# console.log request
next()
#-------------------------------------------------------------------------------
# Copyright 2013 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
| true | # Licensed under the Apache License. See footer for details.
path = require "path"
express = require "express"
Services = require("./services").Services
logger = require "../common/logger"
utils = require "../common/utils"
config = require "./config"
WWWDIR = path.join __dirname, "../../www"
VENDOR = path.join __dirname, "../../vendor"
defaultConfig = config.getConfiguration([]).config
#-------------------------------------------------------------------------------
module.exports = (config = defaultConfig) ->
app = express()
app.on "error", (error) -> logger.log error
app.set "services", new Services config
app.use CORSify
# app.use log
app.get "/api/files.json", getFiles
app.get "/api/files/:file.json", getFile
app.post "/api/profileStart", profileStart
app.post "/api/profileStop", profileStop
app.post "/api/heapSnapshot", heapSnapshot
app.use express.static(WWWDIR)
app.use "/vendor", express.static(VENDOR)
return app
#-------------------------------------------------------------------------------
getFiles = (request, response) ->
services = request.app.get "services"
services.getFiles (err, data) ->
if err?
message = "error processing getFiles(): #{err}"
logger.log message
response.send 500, message
return
response.send data
return
#-------------------------------------------------------------------------------
getFile = (request, response) ->
services = request.app.get "services"
fileName = "#{request.params.file}.json"
services.getFile fileName, (err, data) ->
if err?
message = "error processing getFile(#{fileName}): #{err}"
logger.log message
response.send 500, message
return
response.send data
#-------------------------------------------------------------------------------
profileStart = (request, response) ->
services = request.app.get "services"
services.profileStart()
response.send "profile started"
return
#-------------------------------------------------------------------------------
profileStop = (request, response) ->
services = request.app.get "services"
services.profileStop()
response.send "profile stopped"
return
#-------------------------------------------------------------------------------
heapSnapshot = (request, response) ->
services = request.app.get "services"
services.heapSnapshot()
response.send "heap snapshot generated"
return
#-------------------------------------------------------------------------------
CORSify = (request, response, next) ->
response.header "Access-Control-Allow-Origin:", "*"
response.header "Access-Control-Allow-Methods", "OPTIONS, GET, POST"
next()
return
#-------------------------------------------------------------------------------
log = (request, response, next) ->
logger.log utils.JL request.url
# console.log request
next()
#-------------------------------------------------------------------------------
# Copyright 2013 PI:NAME:<NAME>END_PI.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
|
[
{
"context": "DRESS\"\n user: \"YOUR_MYSQL_USER\"\n password: \"YOUR_MYSQL_PASSWORD\"\n database: \"YOUR_MYSQL_DB\"\n\n \n ###*\n Mongo",
"end": 1676,
"score": 0.9994156956672668,
"start": 1657,
"tag": "PASSWORD",
"value": "YOUR_MYSQL_PASSWORD"
},
{
"context": " host: \"localhost\"\n port: 27017\n\n \n # user: 'username',\n # password: 'password',\n # database: 'your_m",
"end": 2068,
"score": 0.9987152814865112,
"start": 2060,
"tag": "USERNAME",
"value": "username"
},
{
"context": "t: 27017\n\n \n # user: 'username',\n # password: 'password',\n # database: 'your_mongo_db_name_here'\n \n ##",
"end": 2094,
"score": 0.9994925260543823,
"start": 2086,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "SS\"\n user: \"YOUR_POSTGRES_USER\"\n password: \"YOUR_POSTGRES_PASSWORD\"\n database: \"YOUR_POSTGRES_DB\"\n\n###*\nMore adap",
"end": 2567,
"score": 0.9993200302124023,
"start": 2545,
"tag": "PASSWORD",
"value": "YOUR_POSTGRES_PASSWORD"
},
{
"context": "TGRES_DB\"\n\n###*\nMore adapters: https://github.com/balderdashy/sails *\n###\n",
"end": 2653,
"score": 0.9985854625701904,
"start": 2642,
"tag": "USERNAME",
"value": "balderdashy"
}
] | config/connections.coffee | Negaihoshi/sails-starter-toolkit | 2 | ###*
Connections
(sails.config.connections)
`Connections` are like "saved settings" for your adapters. What's the difference between
a connection and an adapter, you might ask? An adapter (e.g. `sails-mysql`) is generic--
it needs some additional information to work (e.g. your database host, password, user, etc.)
A `connection` is that additional information.
Each model must have a `connection` property (a string) which is references the name of one
of these connections. If it doesn't, the default `connection` configured in `config/models.js`
will be applied. Of course, a connection can (and usually is) shared by multiple models.
.
Note: If you're using version control, you should put your passwords/api keys
in `config/local.js`, environment variables, or use another strategy.
(this is to prevent you inadvertently sensitive credentials up to your repository.)
For more information on configuration, check out:
http://sailsjs.org/#/documentation/reference/sails.config/sails.config.connections.html
###
module.exports.connections =
###*
Local disk storage for DEVELOPMENT ONLY *
Installed by default. *
###
localDiskDb:
adapter: "sails-disk"
###*
MySQL is the world's most popular relational database. *
http://en.wikipedia.org/wiki/MySQL *
Run: npm install sails-mysql *
###
someMysqlServer:
adapter: "sails-mysql"
host: "YOUR_MYSQL_SERVER_HOSTNAME_OR_IP_ADDRESS"
user: "YOUR_MYSQL_USER"
password: "YOUR_MYSQL_PASSWORD"
database: "YOUR_MYSQL_DB"
###*
MongoDB is the leading NoSQL database. *
http://en.wikipedia.org/wiki/MongoDB *
Run: npm install sails-mongo *
###
someMongodbServer:
adapter: "sails-mongo"
host: "localhost"
port: 27017
# user: 'username',
# password: 'password',
# database: 'your_mongo_db_name_here'
###*
PostgreSQL is another officially supported relational database. *
http://en.wikipedia.org/wiki/PostgreSQL *
Run: npm install sails-postgresql *
###
somePostgresqlServer:
adapter: "sails-postgresql"
host: "YOUR_POSTGRES_SERVER_HOSTNAME_OR_IP_ADDRESS"
user: "YOUR_POSTGRES_USER"
password: "YOUR_POSTGRES_PASSWORD"
database: "YOUR_POSTGRES_DB"
###*
More adapters: https://github.com/balderdashy/sails *
###
| 147669 | ###*
Connections
(sails.config.connections)
`Connections` are like "saved settings" for your adapters. What's the difference between
a connection and an adapter, you might ask? An adapter (e.g. `sails-mysql`) is generic--
it needs some additional information to work (e.g. your database host, password, user, etc.)
A `connection` is that additional information.
Each model must have a `connection` property (a string) which is references the name of one
of these connections. If it doesn't, the default `connection` configured in `config/models.js`
will be applied. Of course, a connection can (and usually is) shared by multiple models.
.
Note: If you're using version control, you should put your passwords/api keys
in `config/local.js`, environment variables, or use another strategy.
(this is to prevent you inadvertently sensitive credentials up to your repository.)
For more information on configuration, check out:
http://sailsjs.org/#/documentation/reference/sails.config/sails.config.connections.html
###
module.exports.connections =
###*
Local disk storage for DEVELOPMENT ONLY *
Installed by default. *
###
localDiskDb:
adapter: "sails-disk"
###*
MySQL is the world's most popular relational database. *
http://en.wikipedia.org/wiki/MySQL *
Run: npm install sails-mysql *
###
someMysqlServer:
adapter: "sails-mysql"
host: "YOUR_MYSQL_SERVER_HOSTNAME_OR_IP_ADDRESS"
user: "YOUR_MYSQL_USER"
password: "<PASSWORD>"
database: "YOUR_MYSQL_DB"
###*
MongoDB is the leading NoSQL database. *
http://en.wikipedia.org/wiki/MongoDB *
Run: npm install sails-mongo *
###
someMongodbServer:
adapter: "sails-mongo"
host: "localhost"
port: 27017
# user: 'username',
# password: '<PASSWORD>',
# database: 'your_mongo_db_name_here'
###*
PostgreSQL is another officially supported relational database. *
http://en.wikipedia.org/wiki/PostgreSQL *
Run: npm install sails-postgresql *
###
somePostgresqlServer:
adapter: "sails-postgresql"
host: "YOUR_POSTGRES_SERVER_HOSTNAME_OR_IP_ADDRESS"
user: "YOUR_POSTGRES_USER"
password: "<PASSWORD>"
database: "YOUR_POSTGRES_DB"
###*
More adapters: https://github.com/balderdashy/sails *
###
| true | ###*
Connections
(sails.config.connections)
`Connections` are like "saved settings" for your adapters. What's the difference between
a connection and an adapter, you might ask? An adapter (e.g. `sails-mysql`) is generic--
it needs some additional information to work (e.g. your database host, password, user, etc.)
A `connection` is that additional information.
Each model must have a `connection` property (a string) which is references the name of one
of these connections. If it doesn't, the default `connection` configured in `config/models.js`
will be applied. Of course, a connection can (and usually is) shared by multiple models.
.
Note: If you're using version control, you should put your passwords/api keys
in `config/local.js`, environment variables, or use another strategy.
(this is to prevent you inadvertently sensitive credentials up to your repository.)
For more information on configuration, check out:
http://sailsjs.org/#/documentation/reference/sails.config/sails.config.connections.html
###
module.exports.connections =
###*
Local disk storage for DEVELOPMENT ONLY *
Installed by default. *
###
localDiskDb:
adapter: "sails-disk"
###*
MySQL is the world's most popular relational database. *
http://en.wikipedia.org/wiki/MySQL *
Run: npm install sails-mysql *
###
someMysqlServer:
adapter: "sails-mysql"
host: "YOUR_MYSQL_SERVER_HOSTNAME_OR_IP_ADDRESS"
user: "YOUR_MYSQL_USER"
password: "PI:PASSWORD:<PASSWORD>END_PI"
database: "YOUR_MYSQL_DB"
###*
MongoDB is the leading NoSQL database. *
http://en.wikipedia.org/wiki/MongoDB *
Run: npm install sails-mongo *
###
someMongodbServer:
adapter: "sails-mongo"
host: "localhost"
port: 27017
# user: 'username',
# password: 'PI:PASSWORD:<PASSWORD>END_PI',
# database: 'your_mongo_db_name_here'
###*
PostgreSQL is another officially supported relational database. *
http://en.wikipedia.org/wiki/PostgreSQL *
Run: npm install sails-postgresql *
###
somePostgresqlServer:
adapter: "sails-postgresql"
host: "YOUR_POSTGRES_SERVER_HOSTNAME_OR_IP_ADDRESS"
user: "YOUR_POSTGRES_USER"
password: "PI:PASSWORD:<PASSWORD>END_PI"
database: "YOUR_POSTGRES_DB"
###*
More adapters: https://github.com/balderdashy/sails *
###
|
[
{
"context": "'use strict'\n#\n# Ethan Mick\n# Demonstate a long running task\n#\nprocess.on 'me",
"end": 27,
"score": 0.9997745156288147,
"start": 17,
"tag": "NAME",
"value": "Ethan Mick"
}
] | test/fixtures/worker3.coffee | ethanmick/coffee-mule | 0 | 'use strict'
#
# Ethan Mick
# Demonstate a long running task
#
process.on 'message', (message)->
setTimeout ->
process.send('done')
, 5000
process.send('READY')
| 111955 | 'use strict'
#
# <NAME>
# Demonstate a long running task
#
process.on 'message', (message)->
setTimeout ->
process.send('done')
, 5000
process.send('READY')
| true | 'use strict'
#
# PI:NAME:<NAME>END_PI
# Demonstate a long running task
#
process.on 'message', (message)->
setTimeout ->
process.send('done')
, 5000
process.send('READY')
|
[
{
"context": ">\n data.section.get('title').should.equal 'Vennice Biennalez'\n\n it 'fetches related articles for article in",
"end": 1354,
"score": 0.982366681098938,
"start": 1337,
"tag": "NAME",
"value": "Vennice Biennalez"
}
] | test/models/article.coffee | kanaabe/microgravity | 0 | _ = require 'underscore'
Q = require 'bluebird-q'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
Article = require '../../models/article.coffee'
sinon = require 'sinon'
fixtures = require '../helpers/fixtures.coffee'
describe "Article", ->
beforeEach ->
@article = new Article fixtures.article
afterEach ->
Backbone.sync.restore()
describe '#fetchRelated', ->
it 'works for sectionless articles', ->
article = _.extend {}, fixtures.article,
id: 'id-1'
sections: []
sinon.stub Backbone, 'sync'
.onCall 0
.yieldsTo 'success', article
.returns Q.resolve article
@article.set 'id', 'article-1'
@article.is_super_article = false
@article.sections = []
@article.fetchRelated success: (data) ->
data.article.get('id').should.equal 'article-1'
it 'only fetches section content', ->
sinon.stub Backbone, 'sync'
.onCall 0
.yieldsTo 'success', fixtures.section
.returns Q.resolve fixtures.section
.onCall 1
.yieldsTo 'success', []
.returns Q.resolve []
@article.is_super_article = false
@article.set
section_ids: ['foo']
id: 'article-1'
@article.fetchRelated success: (data) ->
data.section.get('title').should.equal 'Vennice Biennalez'
it 'fetches related articles for article in super article', ->
relatedArticle1 = _.extend {}, fixtures.article,
id: 'id-1'
title: 'RelatedArticle 1',
sections: []
relatedArticle2 = _.extend {}, fixtures.article,
id: 'id-2'
title: 'RelatedArticle 2',
sections: []
superArticle = _.extend {}, fixtures.article,
id: 'id-3'
title: 'SuperArticle',
is_super_article: true
sections: []
super_article:
related_articles: ['id-1', 'id-2']
@article.set
section_ids: []
id: 'article-1'
sinon.stub Backbone, 'sync'
.onCall 0
.returns Q.resolve []
.onCall 1
.yieldsTo 'success', {results: superArticle}
.returns Q.resolve {results: superArticle}
.onCall 2
.yieldsTo 'success', relatedArticle2
.returns Q.resolve relatedArticle2
.onCall 3
.yieldsTo 'success', relatedArticle1
.returns Q.resolve relatedArticle1
@article.fetchRelated success: (data) ->
data.superArticle.get('title').should.equal 'SuperArticle'
data.relatedArticles.models[0].get('title').should.equal 'RelatedArticle 1'
data.relatedArticles.models[1].get('title').should.equal 'RelatedArticle 2'
| 177242 | _ = require 'underscore'
Q = require 'bluebird-q'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
Article = require '../../models/article.coffee'
sinon = require 'sinon'
fixtures = require '../helpers/fixtures.coffee'
describe "Article", ->
beforeEach ->
@article = new Article fixtures.article
afterEach ->
Backbone.sync.restore()
describe '#fetchRelated', ->
it 'works for sectionless articles', ->
article = _.extend {}, fixtures.article,
id: 'id-1'
sections: []
sinon.stub Backbone, 'sync'
.onCall 0
.yieldsTo 'success', article
.returns Q.resolve article
@article.set 'id', 'article-1'
@article.is_super_article = false
@article.sections = []
@article.fetchRelated success: (data) ->
data.article.get('id').should.equal 'article-1'
it 'only fetches section content', ->
sinon.stub Backbone, 'sync'
.onCall 0
.yieldsTo 'success', fixtures.section
.returns Q.resolve fixtures.section
.onCall 1
.yieldsTo 'success', []
.returns Q.resolve []
@article.is_super_article = false
@article.set
section_ids: ['foo']
id: 'article-1'
@article.fetchRelated success: (data) ->
data.section.get('title').should.equal '<NAME>'
it 'fetches related articles for article in super article', ->
relatedArticle1 = _.extend {}, fixtures.article,
id: 'id-1'
title: 'RelatedArticle 1',
sections: []
relatedArticle2 = _.extend {}, fixtures.article,
id: 'id-2'
title: 'RelatedArticle 2',
sections: []
superArticle = _.extend {}, fixtures.article,
id: 'id-3'
title: 'SuperArticle',
is_super_article: true
sections: []
super_article:
related_articles: ['id-1', 'id-2']
@article.set
section_ids: []
id: 'article-1'
sinon.stub Backbone, 'sync'
.onCall 0
.returns Q.resolve []
.onCall 1
.yieldsTo 'success', {results: superArticle}
.returns Q.resolve {results: superArticle}
.onCall 2
.yieldsTo 'success', relatedArticle2
.returns Q.resolve relatedArticle2
.onCall 3
.yieldsTo 'success', relatedArticle1
.returns Q.resolve relatedArticle1
@article.fetchRelated success: (data) ->
data.superArticle.get('title').should.equal 'SuperArticle'
data.relatedArticles.models[0].get('title').should.equal 'RelatedArticle 1'
data.relatedArticles.models[1].get('title').should.equal 'RelatedArticle 2'
| true | _ = require 'underscore'
Q = require 'bluebird-q'
Backbone = require 'backbone'
{ fabricate } = require 'antigravity'
Article = require '../../models/article.coffee'
sinon = require 'sinon'
fixtures = require '../helpers/fixtures.coffee'
describe "Article", ->
beforeEach ->
@article = new Article fixtures.article
afterEach ->
Backbone.sync.restore()
describe '#fetchRelated', ->
it 'works for sectionless articles', ->
article = _.extend {}, fixtures.article,
id: 'id-1'
sections: []
sinon.stub Backbone, 'sync'
.onCall 0
.yieldsTo 'success', article
.returns Q.resolve article
@article.set 'id', 'article-1'
@article.is_super_article = false
@article.sections = []
@article.fetchRelated success: (data) ->
data.article.get('id').should.equal 'article-1'
it 'only fetches section content', ->
sinon.stub Backbone, 'sync'
.onCall 0
.yieldsTo 'success', fixtures.section
.returns Q.resolve fixtures.section
.onCall 1
.yieldsTo 'success', []
.returns Q.resolve []
@article.is_super_article = false
@article.set
section_ids: ['foo']
id: 'article-1'
@article.fetchRelated success: (data) ->
data.section.get('title').should.equal 'PI:NAME:<NAME>END_PI'
it 'fetches related articles for article in super article', ->
relatedArticle1 = _.extend {}, fixtures.article,
id: 'id-1'
title: 'RelatedArticle 1',
sections: []
relatedArticle2 = _.extend {}, fixtures.article,
id: 'id-2'
title: 'RelatedArticle 2',
sections: []
superArticle = _.extend {}, fixtures.article,
id: 'id-3'
title: 'SuperArticle',
is_super_article: true
sections: []
super_article:
related_articles: ['id-1', 'id-2']
@article.set
section_ids: []
id: 'article-1'
sinon.stub Backbone, 'sync'
.onCall 0
.returns Q.resolve []
.onCall 1
.yieldsTo 'success', {results: superArticle}
.returns Q.resolve {results: superArticle}
.onCall 2
.yieldsTo 'success', relatedArticle2
.returns Q.resolve relatedArticle2
.onCall 3
.yieldsTo 'success', relatedArticle1
.returns Q.resolve relatedArticle1
@article.fetchRelated success: (data) ->
data.superArticle.get('title').should.equal 'SuperArticle'
data.relatedArticles.models[0].get('title').should.equal 'RelatedArticle 1'
data.relatedArticles.models[1].get('title').should.equal 'RelatedArticle 2'
|
[
{
"context": " options =\n auth:\n username: 'irritable-captian'\n password: 'poop-deck'\n json",
"end": 2395,
"score": 0.9994602799415588,
"start": 2378,
"tag": "USERNAME",
"value": "irritable-captian"
},
{
"context": "rname: 'irritable-captian'\n password: 'poop-deck'\n json: type: 'dinosaur'\n\n head",
"end": 2429,
"score": 0.9993355870246887,
"start": 2420,
"tag": "PASSWORD",
"value": "poop-deck"
},
{
"context": " uuid: 'irritable-captian'\n token: 'poop-deck'\n\n it 'should send the search body as the da",
"end": 3075,
"score": 0.9975779056549072,
"start": 3066,
"tag": "PASSWORD",
"value": "poop-deck"
}
] | test/integration/search-token-spec.coffee | iotrentil/meshblu-core-protocol-adapter-http | 0 | _ = require 'lodash'
UUID = require 'uuid'
request = require 'request'
Server = require '../../src/server'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
{ JobManagerResponder } = require 'meshblu-core-job-manager'
describe 'POST /search/tokens', ->
beforeEach (done) ->
@responseQueueId = UUID.v4()
@requestQueueName = "request:queue:#{@responseQueueId}"
@responseQueueName = "response:queue:#{@responseQueueId}"
@namespace = 'test:meshblu-http'
@jobLogQueue = 'test:meshblu:job-log'
@redisUri = 'redis://localhost'
@port = 0xd00d
@sut = new Server {
@port
disableLogging: true
jobTimeoutSeconds: 1
@namespace
@jobLogQueue
jobLogRedisUri: @redisUri
jobLogSampleRate: 1
redisUri: @redisUri
cacheRedisUri: @redisUri
@requestQueueName
@responseQueueName
}
@sut.run done
afterEach ->
@sut.stop()
beforeEach (done) ->
@redis = new RedisNS @namespace, new Redis @redisUri, dropBufferSupport: true
@redis.on 'ready', done
afterEach (done) ->
@redis.del @requestQueueName, @responseQueueName, done
return # avoid returning redis
beforeEach (done) ->
@workerFunc = (@request, callback=_.noop) =>
@jobManagerDo @request, callback
@jobManager = new JobManagerResponder {
@redisUri
@namespace
@workerFunc
maxConnections: 1
queueTimeoutSeconds: 1
jobTimeoutSeconds: 1
jobLogSampleRate: 1
requestQueueName: @requestQueueName
responseQueueName: @responseQueueName
}
@jobManager.start done
beforeEach ->
@jobManager.do = (@jobManagerDo) =>
afterEach ->
@jobManager.stop()
describe '->search', ->
context 'when the request is successful', ->
beforeEach ->
@jobManager.do (@request, callback) =>
response =
metadata:
code: 200
responseId: @request.metadata.responseId
name: 'dinosaur-getter'
data: [
{uuid: 't-rex'}
{uuid: 'megalodon'}
{uuid: 'killasaurus'}
]
callback null, response
beforeEach (done) ->
options =
auth:
username: 'irritable-captian'
password: 'poop-deck'
json: type: 'dinosaur'
headers:
'x-meshblu-as': 'treasure-map'
'x-meshblu-erik-feature': 'custom-headers'
request.post "http://localhost:#{@port}/search/tokens", options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should dispatch the correct metadata', ->
expect(@request).to.containSubset
metadata:
fromUuid: 'treasure-map'
erikFeature: 'custom-headers'
auth:
uuid: 'irritable-captian'
token: 'poop-deck'
it 'should send the search body as the data of the job', ->
data = JSON.parse @request.rawData
expect(data).to.containSubset type: 'dinosaur'
it 'should have a tokens array in the response', ->
expect(@body).to.be.an 'array'
expect(@body.length).to.equal 3
| 64591 | _ = require 'lodash'
UUID = require 'uuid'
request = require 'request'
Server = require '../../src/server'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
{ JobManagerResponder } = require 'meshblu-core-job-manager'
describe 'POST /search/tokens', ->
beforeEach (done) ->
@responseQueueId = UUID.v4()
@requestQueueName = "request:queue:#{@responseQueueId}"
@responseQueueName = "response:queue:#{@responseQueueId}"
@namespace = 'test:meshblu-http'
@jobLogQueue = 'test:meshblu:job-log'
@redisUri = 'redis://localhost'
@port = 0xd00d
@sut = new Server {
@port
disableLogging: true
jobTimeoutSeconds: 1
@namespace
@jobLogQueue
jobLogRedisUri: @redisUri
jobLogSampleRate: 1
redisUri: @redisUri
cacheRedisUri: @redisUri
@requestQueueName
@responseQueueName
}
@sut.run done
afterEach ->
@sut.stop()
beforeEach (done) ->
@redis = new RedisNS @namespace, new Redis @redisUri, dropBufferSupport: true
@redis.on 'ready', done
afterEach (done) ->
@redis.del @requestQueueName, @responseQueueName, done
return # avoid returning redis
beforeEach (done) ->
@workerFunc = (@request, callback=_.noop) =>
@jobManagerDo @request, callback
@jobManager = new JobManagerResponder {
@redisUri
@namespace
@workerFunc
maxConnections: 1
queueTimeoutSeconds: 1
jobTimeoutSeconds: 1
jobLogSampleRate: 1
requestQueueName: @requestQueueName
responseQueueName: @responseQueueName
}
@jobManager.start done
beforeEach ->
@jobManager.do = (@jobManagerDo) =>
afterEach ->
@jobManager.stop()
describe '->search', ->
context 'when the request is successful', ->
beforeEach ->
@jobManager.do (@request, callback) =>
response =
metadata:
code: 200
responseId: @request.metadata.responseId
name: 'dinosaur-getter'
data: [
{uuid: 't-rex'}
{uuid: 'megalodon'}
{uuid: 'killasaurus'}
]
callback null, response
beforeEach (done) ->
options =
auth:
username: 'irritable-captian'
password: '<PASSWORD>'
json: type: 'dinosaur'
headers:
'x-meshblu-as': 'treasure-map'
'x-meshblu-erik-feature': 'custom-headers'
request.post "http://localhost:#{@port}/search/tokens", options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should dispatch the correct metadata', ->
expect(@request).to.containSubset
metadata:
fromUuid: 'treasure-map'
erikFeature: 'custom-headers'
auth:
uuid: 'irritable-captian'
token: '<PASSWORD>'
it 'should send the search body as the data of the job', ->
data = JSON.parse @request.rawData
expect(data).to.containSubset type: 'dinosaur'
it 'should have a tokens array in the response', ->
expect(@body).to.be.an 'array'
expect(@body.length).to.equal 3
| true | _ = require 'lodash'
UUID = require 'uuid'
request = require 'request'
Server = require '../../src/server'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
{ JobManagerResponder } = require 'meshblu-core-job-manager'
describe 'POST /search/tokens', ->
beforeEach (done) ->
@responseQueueId = UUID.v4()
@requestQueueName = "request:queue:#{@responseQueueId}"
@responseQueueName = "response:queue:#{@responseQueueId}"
@namespace = 'test:meshblu-http'
@jobLogQueue = 'test:meshblu:job-log'
@redisUri = 'redis://localhost'
@port = 0xd00d
@sut = new Server {
@port
disableLogging: true
jobTimeoutSeconds: 1
@namespace
@jobLogQueue
jobLogRedisUri: @redisUri
jobLogSampleRate: 1
redisUri: @redisUri
cacheRedisUri: @redisUri
@requestQueueName
@responseQueueName
}
@sut.run done
afterEach ->
@sut.stop()
beforeEach (done) ->
@redis = new RedisNS @namespace, new Redis @redisUri, dropBufferSupport: true
@redis.on 'ready', done
afterEach (done) ->
@redis.del @requestQueueName, @responseQueueName, done
return # avoid returning redis
beforeEach (done) ->
@workerFunc = (@request, callback=_.noop) =>
@jobManagerDo @request, callback
@jobManager = new JobManagerResponder {
@redisUri
@namespace
@workerFunc
maxConnections: 1
queueTimeoutSeconds: 1
jobTimeoutSeconds: 1
jobLogSampleRate: 1
requestQueueName: @requestQueueName
responseQueueName: @responseQueueName
}
@jobManager.start done
beforeEach ->
@jobManager.do = (@jobManagerDo) =>
afterEach ->
@jobManager.stop()
describe '->search', ->
context 'when the request is successful', ->
beforeEach ->
@jobManager.do (@request, callback) =>
response =
metadata:
code: 200
responseId: @request.metadata.responseId
name: 'dinosaur-getter'
data: [
{uuid: 't-rex'}
{uuid: 'megalodon'}
{uuid: 'killasaurus'}
]
callback null, response
beforeEach (done) ->
options =
auth:
username: 'irritable-captian'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json: type: 'dinosaur'
headers:
'x-meshblu-as': 'treasure-map'
'x-meshblu-erik-feature': 'custom-headers'
request.post "http://localhost:#{@port}/search/tokens", options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should dispatch the correct metadata', ->
expect(@request).to.containSubset
metadata:
fromUuid: 'treasure-map'
erikFeature: 'custom-headers'
auth:
uuid: 'irritable-captian'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
it 'should send the search body as the data of the job', ->
data = JSON.parse @request.rawData
expect(data).to.containSubset type: 'dinosaur'
it 'should have a tokens array in the response', ->
expect(@body).to.be.an 'array'
expect(@body.length).to.equal 3
|
[
{
"context": "###\nCopyright (c) 2014, Groupon\nAll rights reserved.\n\nRedistribution and use in s",
"end": 31,
"score": 0.9569953680038452,
"start": 24,
"tag": "NAME",
"value": "Groupon"
}
] | src/server/models/receiver.coffee | Mefiso/greenscreen | 729 | ###
Copyright (c) 2014, Groupon
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.###
_ = require "underscore"
Alert = require "./alert"
Channel = require "./channel"
Model = require "./model"
Takeover = require "./takeover"
db = require "../db"
module.exports = class Receiver extends Model
@type: "receiver"
@all: (cb) ->
db.allWithType @type, (err, docs) =>
return cb(err) if err
receivers = docs.map (doc) => new this(doc)
Channel.all (err, channels) ->
return cb(err) if err
receivers.forEach (r) ->
r.channelName = _(channels).detect((ch) -> ch.id == r.channelId)?.name
cb null, receivers
@findById: (id, cb) ->
db.get id, (err, doc) =>
return cb(err) if err
receiver = new Receiver(doc)
Takeover.singleton (err, takeover) ->
receiver.channelId = takeover.channelId if takeover
Alert.forReceiver receiver, (err, alert) ->
return cb(err) if err
receiver.alert = alert
cb null, receiver
constructor: (data={}) ->
@type = "receiver"
@id = data.id || data._id
@rev = data._rev
@name = data.name
@location = data.location
@groups = data.groups || []
@channelId = data.channelId
| 164699 | ###
Copyright (c) 2014, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.###
_ = require "underscore"
Alert = require "./alert"
Channel = require "./channel"
Model = require "./model"
Takeover = require "./takeover"
db = require "../db"
module.exports = class Receiver extends Model
@type: "receiver"
@all: (cb) ->
db.allWithType @type, (err, docs) =>
return cb(err) if err
receivers = docs.map (doc) => new this(doc)
Channel.all (err, channels) ->
return cb(err) if err
receivers.forEach (r) ->
r.channelName = _(channels).detect((ch) -> ch.id == r.channelId)?.name
cb null, receivers
@findById: (id, cb) ->
db.get id, (err, doc) =>
return cb(err) if err
receiver = new Receiver(doc)
Takeover.singleton (err, takeover) ->
receiver.channelId = takeover.channelId if takeover
Alert.forReceiver receiver, (err, alert) ->
return cb(err) if err
receiver.alert = alert
cb null, receiver
constructor: (data={}) ->
@type = "receiver"
@id = data.id || data._id
@rev = data._rev
@name = data.name
@location = data.location
@groups = data.groups || []
@channelId = data.channelId
| true | ###
Copyright (c) 2014, PI:NAME:<NAME>END_PI
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.###
_ = require "underscore"
Alert = require "./alert"
Channel = require "./channel"
Model = require "./model"
Takeover = require "./takeover"
db = require "../db"
module.exports = class Receiver extends Model
@type: "receiver"
@all: (cb) ->
db.allWithType @type, (err, docs) =>
return cb(err) if err
receivers = docs.map (doc) => new this(doc)
Channel.all (err, channels) ->
return cb(err) if err
receivers.forEach (r) ->
r.channelName = _(channels).detect((ch) -> ch.id == r.channelId)?.name
cb null, receivers
@findById: (id, cb) ->
db.get id, (err, doc) =>
return cb(err) if err
receiver = new Receiver(doc)
Takeover.singleton (err, takeover) ->
receiver.channelId = takeover.channelId if takeover
Alert.forReceiver receiver, (err, alert) ->
return cb(err) if err
receiver.alert = alert
cb null, receiver
constructor: (data={}) ->
@type = "receiver"
@id = data.id || data._id
@rev = data._rev
@name = data.name
@location = data.location
@groups = data.groups || []
@channelId = data.channelId
|
[
{
"context": "rname, \"fixtures\"\n\nconnString = \"mongodb://heroku:flk3ungh0x3anflx1bab@staff.mongohq.com:10092/app1321916260066\"\nexpectedCommand = \"mongor",
"end": 168,
"score": 0.9998781085014343,
"start": 130,
"tag": "EMAIL",
"value": "flk3ungh0x3anflx1bab@staff.mongohq.com"
},
{
"context": " '--host' 'staff.mongohq.com:10092' '--username' 'heroku' '--password' 'flk3ungh0x3anflx1bab' '--drop' '#{",
"end": 306,
"score": 0.7750442028045654,
"start": 300,
"tag": "USERNAME",
"value": "heroku"
},
{
"context": "hq.com:10092' '--username' 'heroku' '--password' 'flk3ungh0x3anflx1bab' '--drop' '#{fixturesDir}/fake-dump-dir/databasen",
"end": 342,
"score": 0.9993117451667786,
"start": 322,
"tag": "PASSWORD",
"value": "flk3ungh0x3anflx1bab"
}
] | test/makeRestoreCommand.coffee | Radiergummi/mongo-utils | 6 | assert = require "assert"
path = require "path"
fixturesDir = path.resolve __dirname, "fixtures"
connString = "mongodb://heroku:flk3ungh0x3anflx1bab@staff.mongohq.com:10092/app1321916260066"
expectedCommand = "mongorestore '--db' 'app1321916260066' '--host' 'staff.mongohq.com:10092' '--username' 'heroku' '--password' 'flk3ungh0x3anflx1bab' '--drop' '#{fixturesDir}/fake-dump-dir/databasename'"
utils = require "../"
describe "makeRestoreCommand", ->
it "converts query string and dirname to a mongorestore command", ->
dirName = "#{fixturesDir}/fake-dump-dir"
command = utils.makeRestoreCommand connString, dirName
assert.equal command, expectedCommand
it "throws an error if source directory does not exist", ->
dirName = "#{fixturesDir}/not-existing"
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
it "throws an error if source directory contains more than subdirectory", ->
dirName = "#{fixturesDir}/invalid-dump-dir"
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
it "throws an error if no dirName is given", ->
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
| 11646 | assert = require "assert"
path = require "path"
fixturesDir = path.resolve __dirname, "fixtures"
connString = "mongodb://heroku:<EMAIL>:10092/app1321916260066"
expectedCommand = "mongorestore '--db' 'app1321916260066' '--host' 'staff.mongohq.com:10092' '--username' 'heroku' '--password' '<PASSWORD>' '--drop' '#{fixturesDir}/fake-dump-dir/databasename'"
utils = require "../"
describe "makeRestoreCommand", ->
it "converts query string and dirname to a mongorestore command", ->
dirName = "#{fixturesDir}/fake-dump-dir"
command = utils.makeRestoreCommand connString, dirName
assert.equal command, expectedCommand
it "throws an error if source directory does not exist", ->
dirName = "#{fixturesDir}/not-existing"
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
it "throws an error if source directory contains more than subdirectory", ->
dirName = "#{fixturesDir}/invalid-dump-dir"
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
it "throws an error if no dirName is given", ->
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
| true | assert = require "assert"
path = require "path"
fixturesDir = path.resolve __dirname, "fixtures"
connString = "mongodb://heroku:PI:EMAIL:<EMAIL>END_PI:10092/app1321916260066"
expectedCommand = "mongorestore '--db' 'app1321916260066' '--host' 'staff.mongohq.com:10092' '--username' 'heroku' '--password' 'PI:PASSWORD:<PASSWORD>END_PI' '--drop' '#{fixturesDir}/fake-dump-dir/databasename'"
utils = require "../"
describe "makeRestoreCommand", ->
it "converts query string and dirname to a mongorestore command", ->
dirName = "#{fixturesDir}/fake-dump-dir"
command = utils.makeRestoreCommand connString, dirName
assert.equal command, expectedCommand
it "throws an error if source directory does not exist", ->
dirName = "#{fixturesDir}/not-existing"
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
it "throws an error if source directory contains more than subdirectory", ->
dirName = "#{fixturesDir}/invalid-dump-dir"
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
it "throws an error if no dirName is given", ->
try
utils.makeDumpCommand connString
catch error
return assert.ok true
assert.ok false, "it did not throw an error."
|
[
{
"context": " dataType: \"jsonp\"\n data:\n user: 'user'\n password: 'pass'\n msisdn: to\n ",
"end": 240,
"score": 0.9627045392990112,
"start": 236,
"tag": "USERNAME",
"value": "user"
},
{
"context": " data:\n user: 'user'\n password: 'pass'\n msisdn: to\n message: @get \"text\"\n",
"end": 265,
"score": 0.999291181564331,
"start": 261,
"tag": "PASSWORD",
"value": "pass"
}
] | _attachments/models/Message.coffee | chrisekelley/coconut-kiwi-demo | 2 | class Message extends Backbone.Model
url: "/message"
sendSMS: (options) ->
to = (@get "to").replace(/^07/,"2557")
$.ajax
url: 'https://CHANGEME/bulksms/dispatch.php'
dataType: "jsonp"
data:
user: 'user'
password: 'pass'
msisdn: to
message: @get "text"
success: ->
options.success()
error: (error) ->
console.log error
if error.statusText is "success"
options.success()
else
options.error(error)
| 135702 | class Message extends Backbone.Model
url: "/message"
sendSMS: (options) ->
to = (@get "to").replace(/^07/,"2557")
$.ajax
url: 'https://CHANGEME/bulksms/dispatch.php'
dataType: "jsonp"
data:
user: 'user'
password: '<PASSWORD>'
msisdn: to
message: @get "text"
success: ->
options.success()
error: (error) ->
console.log error
if error.statusText is "success"
options.success()
else
options.error(error)
| true | class Message extends Backbone.Model
url: "/message"
sendSMS: (options) ->
to = (@get "to").replace(/^07/,"2557")
$.ajax
url: 'https://CHANGEME/bulksms/dispatch.php'
dataType: "jsonp"
data:
user: 'user'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
msisdn: to
message: @get "text"
success: ->
options.success()
error: (error) ->
console.log error
if error.statusText is "success"
options.success()
else
options.error(error)
|
[
{
"context": "011<br />\n# Publication date: 06/17/2011<br />\n#\t\tPierre Corsini (pcorsini@polytech.unice.fr)<br />\n#\t\tNicolas Dup",
"end": 81,
"score": 0.9998836517333984,
"start": 67,
"tag": "NAME",
"value": "Pierre Corsini"
},
{
"context": "ication date: 06/17/2011<br />\n#\t\tPierre Corsini (pcorsini@polytech.unice.fr)<br />\n#\t\tNicolas Dupont (npg.dupont@gmail.com)<b",
"end": 109,
"score": 0.9999362826347351,
"start": 83,
"tag": "EMAIL",
"value": "pcorsini@polytech.unice.fr"
},
{
"context": "rre Corsini (pcorsini@polytech.unice.fr)<br />\n#\t\tNicolas Dupont (npg.dupont@gmail.com)<br />\n#\t\tNicolas Fernandez",
"end": 134,
"score": 0.9998852610588074,
"start": 120,
"tag": "NAME",
"value": "Nicolas Dupont"
},
{
"context": "rsini@polytech.unice.fr)<br />\n#\t\tNicolas Dupont (npg.dupont@gmail.com)<br />\n#\t\tNicolas Fernandez (fernande@polytech.un",
"end": 156,
"score": 0.9999340772628784,
"start": 136,
"tag": "EMAIL",
"value": "npg.dupont@gmail.com"
},
{
"context": "#\t\tNicolas Dupont (npg.dupont@gmail.com)<br />\n#\t\tNicolas Fernandez (fernande@polytech.unice.fr)<br />\n#\t\tNima Izadi ",
"end": 184,
"score": 0.9998841285705566,
"start": 167,
"tag": "NAME",
"value": "Nicolas Fernandez"
},
{
"context": "npg.dupont@gmail.com)<br />\n#\t\tNicolas Fernandez (fernande@polytech.unice.fr)<br />\n#\t\tNima Izadi (nim.izadi@gmail.com)<br />\n",
"end": 212,
"score": 0.9999339580535889,
"start": 186,
"tag": "EMAIL",
"value": "fernande@polytech.unice.fr"
},
{
"context": "s Fernandez (fernande@polytech.unice.fr)<br />\n#\t\tNima Izadi (nim.izadi@gmail.com)<br />\n#\t\tAnd supervised by ",
"end": 233,
"score": 0.9998937845230103,
"start": 223,
"tag": "NAME",
"value": "Nima Izadi"
},
{
"context": "(fernande@polytech.unice.fr)<br />\n#\t\tNima Izadi (nim.izadi@gmail.com)<br />\n#\t\tAnd supervised by Raphaël Bellec (r.bel",
"end": 254,
"score": 0.9999357461929321,
"start": 235,
"tag": "EMAIL",
"value": "nim.izadi@gmail.com"
},
{
"context": "i (nim.izadi@gmail.com)<br />\n#\t\tAnd supervised by Raphaël Bellec (r.bellec@structure-computation.com)<br />\n\n# Ana",
"end": 297,
"score": 0.9998998641967773,
"start": 283,
"tag": "NAME",
"value": "Raphaël Bellec"
},
{
"context": "l.com)<br />\n#\t\tAnd supervised by Raphaël Bellec (r.bellec@structure-computation.com)<br />\n\n# Analyse a gesture and \"notify\" the @tar",
"end": 333,
"score": 0.9999347925186157,
"start": 299,
"tag": "EMAIL",
"value": "r.bellec@structure-computation.com"
}
] | src/Analyzer.coffee | Ndpnt/CoffeeTouch.js | 1 | # Copyright (c) 2011<br />
# Publication date: 06/17/2011<br />
# Pierre Corsini (pcorsini@polytech.unice.fr)<br />
# Nicolas Dupont (npg.dupont@gmail.com)<br />
# Nicolas Fernandez (fernande@polytech.unice.fr)<br />
# Nima Izadi (nim.izadi@gmail.com)<br />
# And supervised by Raphaël Bellec (r.bellec@structure-computation.com)<br />
# Analyse a gesture and "notify" the @targetElement that "a given gesture" has been made.<br />
# Params:<br />
# totalNbFingers : Number of finger of the gesture beeing made.<br />
# targetElement : DOM Element which will be informed of the gesture
class Analyser
# Create an analyser object with total number of fingers and an array of all fingers as attribute
constructor: (@totalNbFingers, @targetElement) ->
@fingersArray = {} # Hash with fingerId: fingerGestureObject
@fingers = [] # Array with all fingers
@firstAnalysis = true # To know if we have to init the informations which will be returned
@informations = {} # All informations which will be send with the event gesture
@informations = {} # Informations corresponding to all fingers
@informations.fingers = []
@informations.firstTrigger = true
date = new Date()
@fingerArraySize = 0
@informations.timeStart = date.getTime()
# Notify the analyser of a gesture (gesture name, fingerId and parameters of new position etc)
notify: (fingerID, gestureName, @eventObj) ->
@informations.rotation = @eventObj.global.rotation
@informations.scale = @eventObj.global.scale
# Add all HTML targets of fingers
@informations.targets = []
for targetTouch in @eventObj.global.event.targetTouches
@informations.targets.push targetTouch.target
date = new Date()
@informations.timeElapsed = date.getTime() - @informations.timeStart
if @fingersArray[fingerID]?
@fingersArray[fingerID].update gestureName, @eventObj
else
@fingersArray[fingerID] = new FingerGesture(fingerID, gestureName, @eventObj)
@fingers.push @fingersArray[fingerID]
@fingerArraySize++
# Analyse event only when it receives the information from each fingers of the gesture.
@analyse @totalNbFingers if @fingerArraySize is @totalNbFingers
# Analayse precisly the gesture.
# Is called only when the analyser has been informed that all fingers have done a basic gesture.
analyse: (nbFingers) ->
@init() if @firstAnalysis
@gestureName = []
@gestureName.push finger.gestureName for finger in @fingers
@targetElement.makeGesture @gestureName, @informations
@triggerDrag()
@triggerFixed()
@triggerFlick()
@informations.firstTrigger = false if @informations.firstTrigger
# Sort fingers and initialize some informations that will be triggered to the user
# Is called before analysis
init: ->
# Sort fingers. Left to Right and Top to Bottom
@fingers = @fingers.sort (a,b) ->
return a.params.startY - b.params.startY if Math.abs(a.params.startX - b.params.startX) < 15
return a.params.startX - b.params.startX
@informations.nbFingers = @fingers.length
# For each finger, assigns to the information's event the information corresponding to this one.
for i in [0..@fingers.length - 1]
@informations.fingers[i] = @fingers[i].params
@firstAnalysis = false
# Trigger all names related to the drag event
triggerDrag: ->
if CoffeeTouch.Helper.arrayContains @gestureName, "drag"
@triggerDragDirections()
if @gestureName.length > 1
@triggerPinchOrSpread()
@triggerRotation()
# Trigger all names related to the drag direction
triggerDragDirections: ->
gestureName = []
gestureName.push finger.params.dragDirection for finger in @fingers
@targetElement.makeGesture gestureName, @informations if !CoffeeTouch.Helper.stringContains(gestureName, "unknown")
# Test if the drag is a pinch or a spread
triggerPinchOrSpread: ->
# Spread and Pinch detection
sameDirection = false
if @informations.scale < 1.1 and !sameDirection
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:pinch", @informations
@targetElement.makeGesture "pinch", @informations
else if @informations.scale > 1.1 and !sameDirection
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:spread", @informations
@targetElement.makeGesture "spread", @informations
# Trigger all names related to the fixed event
triggerFixed: ->
if @gestureName.length > 1 and CoffeeTouch.Helper.arrayContains(@gestureName, "fixed")
dontTrigger = false
gestureName = []
for finger in @fingers
if finger.gestureName == "drag" and finger.params.dragDirection == "triggerDrag"
dontTrigger = true
break
if finger.gestureName == "drag" then gestureName.push finger.params.dragDirection else gestureName.push "fixed"
if !dontTrigger
@targetElement.makeGesture gestureName, @informations
# Trigger all names related to the flick event
triggerFlick: ->
if CoffeeTouch.Helper.arrayContains(@gestureName, "dragend")
gestureName1 = []
gestureName2 = []
dontTrigger = false
for finger in @fingers
if finger.params.dragDirection == "unknown" then dontTrigger = true
if finger.isFlick
gestureName1.push "flick:#{finger.params.dragDirection}"
gestureName2.push "flick"
else
gestureName1.push finger.params.dragDirection
gestureName2.push finger.params.dragDirection
if !dontTrigger
@targetElement.makeGesture gestureName1, @informations
@targetElement.makeGesture gestureName2, @informations
# Trigger if it is a rotation, and specify if it is clockwise or counterclockwise
triggerRotation: ->
if !@lastRotation?
@lastRotation = @informations.rotation
rotationDirection = ""
if @informations.rotation > @lastRotation then rotationDirection = "rotate:cw" else rotationDirection = "rotate:ccw"
@targetElement.makeGesture rotationDirection, @informations
@targetElement.makeGesture "rotate", @informations
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:#{rotationDirection}", @informations
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:rotate", @informations
| 60816 | # Copyright (c) 2011<br />
# Publication date: 06/17/2011<br />
# <NAME> (<EMAIL>)<br />
# <NAME> (<EMAIL>)<br />
# <NAME> (<EMAIL>)<br />
# <NAME> (<EMAIL>)<br />
# And supervised by <NAME> (<EMAIL>)<br />
# Analyse a gesture and "notify" the @targetElement that "a given gesture" has been made.<br />
# Params:<br />
# totalNbFingers : Number of finger of the gesture beeing made.<br />
# targetElement : DOM Element which will be informed of the gesture
class Analyser
# Create an analyser object with total number of fingers and an array of all fingers as attribute
constructor: (@totalNbFingers, @targetElement) ->
@fingersArray = {} # Hash with fingerId: fingerGestureObject
@fingers = [] # Array with all fingers
@firstAnalysis = true # To know if we have to init the informations which will be returned
@informations = {} # All informations which will be send with the event gesture
@informations = {} # Informations corresponding to all fingers
@informations.fingers = []
@informations.firstTrigger = true
date = new Date()
@fingerArraySize = 0
@informations.timeStart = date.getTime()
# Notify the analyser of a gesture (gesture name, fingerId and parameters of new position etc)
notify: (fingerID, gestureName, @eventObj) ->
@informations.rotation = @eventObj.global.rotation
@informations.scale = @eventObj.global.scale
# Add all HTML targets of fingers
@informations.targets = []
for targetTouch in @eventObj.global.event.targetTouches
@informations.targets.push targetTouch.target
date = new Date()
@informations.timeElapsed = date.getTime() - @informations.timeStart
if @fingersArray[fingerID]?
@fingersArray[fingerID].update gestureName, @eventObj
else
@fingersArray[fingerID] = new FingerGesture(fingerID, gestureName, @eventObj)
@fingers.push @fingersArray[fingerID]
@fingerArraySize++
# Analyse event only when it receives the information from each fingers of the gesture.
@analyse @totalNbFingers if @fingerArraySize is @totalNbFingers
# Analayse precisly the gesture.
# Is called only when the analyser has been informed that all fingers have done a basic gesture.
analyse: (nbFingers) ->
@init() if @firstAnalysis
@gestureName = []
@gestureName.push finger.gestureName for finger in @fingers
@targetElement.makeGesture @gestureName, @informations
@triggerDrag()
@triggerFixed()
@triggerFlick()
@informations.firstTrigger = false if @informations.firstTrigger
# Sort fingers and initialize some informations that will be triggered to the user
# Is called before analysis
init: ->
# Sort fingers. Left to Right and Top to Bottom
@fingers = @fingers.sort (a,b) ->
return a.params.startY - b.params.startY if Math.abs(a.params.startX - b.params.startX) < 15
return a.params.startX - b.params.startX
@informations.nbFingers = @fingers.length
# For each finger, assigns to the information's event the information corresponding to this one.
for i in [0..@fingers.length - 1]
@informations.fingers[i] = @fingers[i].params
@firstAnalysis = false
# Trigger all names related to the drag event
triggerDrag: ->
if CoffeeTouch.Helper.arrayContains @gestureName, "drag"
@triggerDragDirections()
if @gestureName.length > 1
@triggerPinchOrSpread()
@triggerRotation()
# Trigger all names related to the drag direction
triggerDragDirections: ->
gestureName = []
gestureName.push finger.params.dragDirection for finger in @fingers
@targetElement.makeGesture gestureName, @informations if !CoffeeTouch.Helper.stringContains(gestureName, "unknown")
# Test if the drag is a pinch or a spread
triggerPinchOrSpread: ->
# Spread and Pinch detection
sameDirection = false
if @informations.scale < 1.1 and !sameDirection
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:pinch", @informations
@targetElement.makeGesture "pinch", @informations
else if @informations.scale > 1.1 and !sameDirection
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:spread", @informations
@targetElement.makeGesture "spread", @informations
# Trigger all names related to the fixed event
triggerFixed: ->
if @gestureName.length > 1 and CoffeeTouch.Helper.arrayContains(@gestureName, "fixed")
dontTrigger = false
gestureName = []
for finger in @fingers
if finger.gestureName == "drag" and finger.params.dragDirection == "triggerDrag"
dontTrigger = true
break
if finger.gestureName == "drag" then gestureName.push finger.params.dragDirection else gestureName.push "fixed"
if !dontTrigger
@targetElement.makeGesture gestureName, @informations
# Trigger all names related to the flick event
triggerFlick: ->
if CoffeeTouch.Helper.arrayContains(@gestureName, "dragend")
gestureName1 = []
gestureName2 = []
dontTrigger = false
for finger in @fingers
if finger.params.dragDirection == "unknown" then dontTrigger = true
if finger.isFlick
gestureName1.push "flick:#{finger.params.dragDirection}"
gestureName2.push "flick"
else
gestureName1.push finger.params.dragDirection
gestureName2.push finger.params.dragDirection
if !dontTrigger
@targetElement.makeGesture gestureName1, @informations
@targetElement.makeGesture gestureName2, @informations
# Trigger if it is a rotation, and specify if it is clockwise or counterclockwise
triggerRotation: ->
if !@lastRotation?
@lastRotation = @informations.rotation
rotationDirection = ""
if @informations.rotation > @lastRotation then rotationDirection = "rotate:cw" else rotationDirection = "rotate:ccw"
@targetElement.makeGesture rotationDirection, @informations
@targetElement.makeGesture "rotate", @informations
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:#{rotationDirection}", @informations
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:rotate", @informations
| true | # Copyright (c) 2011<br />
# Publication date: 06/17/2011<br />
# PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)<br />
# PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)<br />
# PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)<br />
# PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)<br />
# And supervised by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)<br />
# Analyse a gesture and "notify" the @targetElement that "a given gesture" has been made.<br />
# Params:<br />
# totalNbFingers : Number of finger of the gesture beeing made.<br />
# targetElement : DOM Element which will be informed of the gesture
class Analyser
# Create an analyser object with total number of fingers and an array of all fingers as attribute
constructor: (@totalNbFingers, @targetElement) ->
@fingersArray = {} # Hash with fingerId: fingerGestureObject
@fingers = [] # Array with all fingers
@firstAnalysis = true # To know if we have to init the informations which will be returned
@informations = {} # All informations which will be send with the event gesture
@informations = {} # Informations corresponding to all fingers
@informations.fingers = []
@informations.firstTrigger = true
date = new Date()
@fingerArraySize = 0
@informations.timeStart = date.getTime()
# Notify the analyser of a gesture (gesture name, fingerId and parameters of new position etc)
notify: (fingerID, gestureName, @eventObj) ->
@informations.rotation = @eventObj.global.rotation
@informations.scale = @eventObj.global.scale
# Add all HTML targets of fingers
@informations.targets = []
for targetTouch in @eventObj.global.event.targetTouches
@informations.targets.push targetTouch.target
date = new Date()
@informations.timeElapsed = date.getTime() - @informations.timeStart
if @fingersArray[fingerID]?
@fingersArray[fingerID].update gestureName, @eventObj
else
@fingersArray[fingerID] = new FingerGesture(fingerID, gestureName, @eventObj)
@fingers.push @fingersArray[fingerID]
@fingerArraySize++
# Analyse event only when it receives the information from each fingers of the gesture.
@analyse @totalNbFingers if @fingerArraySize is @totalNbFingers
# Analayse precisly the gesture.
# Is called only when the analyser has been informed that all fingers have done a basic gesture.
analyse: (nbFingers) ->
@init() if @firstAnalysis
@gestureName = []
@gestureName.push finger.gestureName for finger in @fingers
@targetElement.makeGesture @gestureName, @informations
@triggerDrag()
@triggerFixed()
@triggerFlick()
@informations.firstTrigger = false if @informations.firstTrigger
# Sort fingers and initialize some informations that will be triggered to the user
# Is called before analysis
init: ->
# Sort fingers. Left to Right and Top to Bottom
@fingers = @fingers.sort (a,b) ->
return a.params.startY - b.params.startY if Math.abs(a.params.startX - b.params.startX) < 15
return a.params.startX - b.params.startX
@informations.nbFingers = @fingers.length
# For each finger, assigns to the information's event the information corresponding to this one.
for i in [0..@fingers.length - 1]
@informations.fingers[i] = @fingers[i].params
@firstAnalysis = false
# Trigger all names related to the drag event
triggerDrag: ->
if CoffeeTouch.Helper.arrayContains @gestureName, "drag"
@triggerDragDirections()
if @gestureName.length > 1
@triggerPinchOrSpread()
@triggerRotation()
# Trigger all names related to the drag direction
triggerDragDirections: ->
gestureName = []
gestureName.push finger.params.dragDirection for finger in @fingers
@targetElement.makeGesture gestureName, @informations if !CoffeeTouch.Helper.stringContains(gestureName, "unknown")
# Test if the drag is a pinch or a spread
triggerPinchOrSpread: ->
# Spread and Pinch detection
sameDirection = false
if @informations.scale < 1.1 and !sameDirection
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:pinch", @informations
@targetElement.makeGesture "pinch", @informations
else if @informations.scale > 1.1 and !sameDirection
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:spread", @informations
@targetElement.makeGesture "spread", @informations
# Trigger all names related to the fixed event
triggerFixed: ->
if @gestureName.length > 1 and CoffeeTouch.Helper.arrayContains(@gestureName, "fixed")
dontTrigger = false
gestureName = []
for finger in @fingers
if finger.gestureName == "drag" and finger.params.dragDirection == "triggerDrag"
dontTrigger = true
break
if finger.gestureName == "drag" then gestureName.push finger.params.dragDirection else gestureName.push "fixed"
if !dontTrigger
@targetElement.makeGesture gestureName, @informations
# Trigger all names related to the flick event
triggerFlick: ->
if CoffeeTouch.Helper.arrayContains(@gestureName, "dragend")
gestureName1 = []
gestureName2 = []
dontTrigger = false
for finger in @fingers
if finger.params.dragDirection == "unknown" then dontTrigger = true
if finger.isFlick
gestureName1.push "flick:#{finger.params.dragDirection}"
gestureName2.push "flick"
else
gestureName1.push finger.params.dragDirection
gestureName2.push finger.params.dragDirection
if !dontTrigger
@targetElement.makeGesture gestureName1, @informations
@targetElement.makeGesture gestureName2, @informations
# Trigger if it is a rotation, and specify if it is clockwise or counterclockwise
triggerRotation: ->
if !@lastRotation?
@lastRotation = @informations.rotation
rotationDirection = ""
if @informations.rotation > @lastRotation then rotationDirection = "rotate:cw" else rotationDirection = "rotate:ccw"
@targetElement.makeGesture rotationDirection, @informations
@targetElement.makeGesture "rotate", @informations
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:#{rotationDirection}", @informations
@targetElement.makeGesture "#{CoffeeTouch.Helper.digit_name(@fingers.length)}:rotate", @informations
|
[
{
"context": "\nnikita = require '@nikitajs/core'\n{tags, ssh, scratch} = require '../test'\nth",
"end": 28,
"score": 0.5227403044700623,
"start": 26,
"tag": "USERNAME",
"value": "js"
},
{
"context": "tags.ipa\n\nipa =\n principal: 'admin'\n password: 'admin_pw'\n referer: 'https://ipa.nikita/ipa'\n url: 'http",
"end": 194,
"score": 0.9991097450256348,
"start": 186,
"tag": "PASSWORD",
"value": "admin_pw"
}
] | packages/ipa/test/user/show.coffee | chibanemourad/node-nikita | 0 |
nikita = require '@nikitajs/core'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.ipa
ipa =
principal: 'admin'
password: 'admin_pw'
referer: 'https://ipa.nikita/ipa'
url: 'https://ipa.nikita/ipa/session/json'
describe 'ipa.user.show', ->
they 'get single user', ({ssh}) ->
nikita
ssh: ssh
.ipa.user.show ipa,
uid: 'admin'
, (err, {result}) ->
throw err if err
result.dn.should.match /^uid=admin,cn=users,cn=accounts,/
.promise()
they 'get missing user', ({ssh}) ->
nikita
ssh: ssh
.ipa.user.show ipa,
uid: 'missing'
relax: true
, (err, {code, result}) ->
err.code.should.eql 4001
err.message.should.eql 'missing: user not found'
.promise()
| 36119 |
nikita = require '@nikitajs/core'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.ipa
ipa =
principal: 'admin'
password: '<PASSWORD>'
referer: 'https://ipa.nikita/ipa'
url: 'https://ipa.nikita/ipa/session/json'
describe 'ipa.user.show', ->
they 'get single user', ({ssh}) ->
nikita
ssh: ssh
.ipa.user.show ipa,
uid: 'admin'
, (err, {result}) ->
throw err if err
result.dn.should.match /^uid=admin,cn=users,cn=accounts,/
.promise()
they 'get missing user', ({ssh}) ->
nikita
ssh: ssh
.ipa.user.show ipa,
uid: 'missing'
relax: true
, (err, {code, result}) ->
err.code.should.eql 4001
err.message.should.eql 'missing: user not found'
.promise()
| true |
nikita = require '@nikitajs/core'
{tags, ssh, scratch} = require '../test'
they = require('ssh2-they').configure ssh...
return unless tags.ipa
ipa =
principal: 'admin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
referer: 'https://ipa.nikita/ipa'
url: 'https://ipa.nikita/ipa/session/json'
describe 'ipa.user.show', ->
they 'get single user', ({ssh}) ->
nikita
ssh: ssh
.ipa.user.show ipa,
uid: 'admin'
, (err, {result}) ->
throw err if err
result.dn.should.match /^uid=admin,cn=users,cn=accounts,/
.promise()
they 'get missing user', ({ssh}) ->
nikita
ssh: ssh
.ipa.user.show ipa,
uid: 'missing'
relax: true
, (err, {code, result}) ->
err.code.should.eql 4001
err.message.should.eql 'missing: user not found'
.promise()
|
[
{
"context": "pi.soundcloud.com/tracks/321/stream?secret_token=s-123\"), \"http://api.soundcloud.com/tracks/321/stream?s",
"end": 912,
"score": 0.5772311687469482,
"start": 909,
"tag": "KEY",
"value": "123"
},
{
"context": "pi.soundcloud.com/tracks/321/stream?secret_token=s-123&client_id=YOUR_CLIENT_ID\"\n",
"end": 978,
"score": 0.4870157241821289,
"start": 976,
"tag": "KEY",
"value": "12"
},
{
"context": "soundcloud.com/tracks/321/stream?secret_token=s-123&client_id=YOUR_CLIENT_ID\"\n",
"end": 979,
"score": 0.34619224071502686,
"start": 978,
"tag": "PASSWORD",
"value": "3"
}
] | components/soundcloud/test/sc/stream-test.coffee | mizukai/sample | 91 | module "SC.whenStreamingReady"
asyncTest "should be able to handle multiple calls in a row", 2, ->
SC.whenStreamingReady ->
ok 1, "first was called"
SC.whenStreamingReady ->
ok 1, "second was called"
start()
module "SC._prepareStreamUrl"
test "should resolve id to /tracks/id/stream", ->
equal SC._prepareStreamUrl(123), "http://api.soundcloud.com/tracks/123/stream?client_id=YOUR_CLIENT_ID"
test "should resolve string id to /tracks/id/stream", ->
equal SC._prepareStreamUrl("123"), "http://api.soundcloud.com/tracks/123/stream?client_id=YOUR_CLIENT_ID"
test "should append the access token if connected", ->
SC.accessToken("hi")
equal SC._prepareStreamUrl("/tracks/123"), "https://api.soundcloud.com/tracks/123/stream?oauth_token=hi"
test "should preserve the secret token if passed", ->
equal SC._prepareStreamUrl("http://api.soundcloud.com/tracks/321/stream?secret_token=s-123"), "http://api.soundcloud.com/tracks/321/stream?secret_token=s-123&client_id=YOUR_CLIENT_ID"
| 98579 | module "SC.whenStreamingReady"
asyncTest "should be able to handle multiple calls in a row", 2, ->
SC.whenStreamingReady ->
ok 1, "first was called"
SC.whenStreamingReady ->
ok 1, "second was called"
start()
module "SC._prepareStreamUrl"
test "should resolve id to /tracks/id/stream", ->
equal SC._prepareStreamUrl(123), "http://api.soundcloud.com/tracks/123/stream?client_id=YOUR_CLIENT_ID"
test "should resolve string id to /tracks/id/stream", ->
equal SC._prepareStreamUrl("123"), "http://api.soundcloud.com/tracks/123/stream?client_id=YOUR_CLIENT_ID"
test "should append the access token if connected", ->
SC.accessToken("hi")
equal SC._prepareStreamUrl("/tracks/123"), "https://api.soundcloud.com/tracks/123/stream?oauth_token=hi"
test "should preserve the secret token if passed", ->
equal SC._prepareStreamUrl("http://api.soundcloud.com/tracks/321/stream?secret_token=s-<KEY>"), "http://api.soundcloud.com/tracks/321/stream?secret_token=s-<KEY> <PASSWORD>&client_id=YOUR_CLIENT_ID"
| true | module "SC.whenStreamingReady"
asyncTest "should be able to handle multiple calls in a row", 2, ->
SC.whenStreamingReady ->
ok 1, "first was called"
SC.whenStreamingReady ->
ok 1, "second was called"
start()
module "SC._prepareStreamUrl"
test "should resolve id to /tracks/id/stream", ->
equal SC._prepareStreamUrl(123), "http://api.soundcloud.com/tracks/123/stream?client_id=YOUR_CLIENT_ID"
test "should resolve string id to /tracks/id/stream", ->
equal SC._prepareStreamUrl("123"), "http://api.soundcloud.com/tracks/123/stream?client_id=YOUR_CLIENT_ID"
test "should append the access token if connected", ->
SC.accessToken("hi")
equal SC._prepareStreamUrl("/tracks/123"), "https://api.soundcloud.com/tracks/123/stream?oauth_token=hi"
test "should preserve the secret token if passed", ->
equal SC._prepareStreamUrl("http://api.soundcloud.com/tracks/321/stream?secret_token=s-PI:KEY:<KEY>END_PI"), "http://api.soundcloud.com/tracks/321/stream?secret_token=s-PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI&client_id=YOUR_CLIENT_ID"
|
[
{
"context": "e = @attributes[i].value\n if key.startsWith('on-')\n if !value?\n @$.off(key.substr(",
"end": 5343,
"score": 0.7835335731506348,
"start": 5340,
"tag": "KEY",
"value": "on-"
}
] | base.coffee | kornalius/hazel | 1 | { $, toRedraw, redrawing, hazeling, ccss, css, vdom, contents, renderable, div } = require('./hazel.coffee')
{ fromHTML, create, diff, patch } = vdom
{ relative } = css
# Kaffa = require('../kaffa/dist/kaffa.js')
Kaffa = require('../kaffa/kaffa.coffee')
{ Class } = Kaffa
_isEvent = (name) ->
name.startsWith('@')
_getMixinKeys = (proto, name) ->
r = []
for k, v of proto
if k == name or k.startsWith(name + '.')
r.push(k)
return r
_getAttributes = (view, proto) ->
ps = proto._superclass
sv = if ps? then _getAttributes(view, ps) else []
v = []
for k in _getMixinKeys(proto, 'layout')
mv = if proto[k].attributes? then proto[k].attributes else []
v = _.union(mv, v)
_.union(sv, v)
_getStyle = (view, proto) ->
ps = proto._superclass
sv = if ps? then _getStyle(view, ps) else {}
v = {}
for k in _getMixinKeys(proto, 'layout')
mv = if proto[k].style? then proto[k].style.call(view) else {}
_.deepExtend(v, mv)
_.deepExtend({}, sv, v)
_getTemplate = (view, proto, args...) ->
# ps = proto._superclass
t = if proto.layout?.template? then proto.layout.template else null
# if ps?.layout?.template?
# ps.layout.template.call(view, t, args...)
# else if t?
if t?
t.call(view, args...)
else
(renderable -> span "").call(view, args...)
# string to type
_deserializeAttributeValue = (value) ->
if !value?
return false
try
i = parseFloat(value)
if !_.isNaN(i)
return i
catch e
return if value.toLowerCase() in ['true', 'false'] then value.toLowerCase() == 'true' else if value == '' then true else value
# type to string
_serializeAttributeValue = (value) ->
if !value?
return ""
if _.isBoolean(value)
return (if value then 'true' else 'false')
else if _.isString(value)
if value == ''
return 'false'
else
return value
else if value.toString?
return value.toString()
else
return ""
_setVProperties = (v) ->
if v.children?
_setVProperties(c) for c in v.children
if v.properties?
if !v.properties.attributes?
v.properties.attributes = {}
for key, value of v.properties
if !(key in ['dataset', 'id', 'class'])
v.properties.attributes[key] = value
delete v.properties[key]
BaseView = Class 'BaseView',
extends: HTMLElement
layout:
attributes: []
style: ->
':host':
position: relative
display: 'inline-block'
cursor: 'default'
createdCallback: ->
@_properties = []
for k, v of @
if k.startsWith('$') and !_.isFunction(v)
nk = k.substr(1)
@[nk] = @[k]
@_properties.push(nk)
root = @createShadowRoot()
@noTemplate = false
@isReady = false
@isAttached = false
@_vdom = null
@_vdom_style = null
@_observers = []
@$ = $(@)
@[0] = @
@length = 1
@cash = true
if @created?
@created()
@_prepare()
_bindInputs: ->
that = @
@$.find(':root /deep/ input').each((el) ->
el = $(el)
path = el.prop('bind')
if path? and _.valueForKeyPath(that, path)?
if !el.attr('type')?
el.attr('type', 'text')
switch el.attr('type').toLowerCase()
when 'checkbox'
el.on('change', (e) ->
_.setValueForKeyPath(that, path, el[0].checked)
)
el[0].checked = _.valueForKeyPath(that, path)
when 'radio'
el.on('change', (e) ->
_.setValueForKeyPath(that, path, el[0].checked)
)
el[0].checked = _.valueForKeyPath(that, path)
else
el.on('keyup', (e) ->
_.setValueForKeyPath(that, path, el[0].value)
)
el[0].value = _.valueForKeyPath(that, path)
)
_removeEvents: ->
@$.eachDeep((el) ->
$(el).off()
)
_createEvents: ->
for k, v of @__proto__
if _isEvent(k)
kk = k
k = k.substr(1)
p = k.split(' ')
if p.length > 1
eventType = _.first(p)
selector = _.rest(p).join(' ')
els = @$.find(':root /deep/ ' + selector.trim())
else
eventType = k.trim()
selector = null
els = @$
if v?
els.on(eventType, v)
else
els.off(eventType)
_observeProperty: (name) ->
o = Kaffa.observe(@, name, ((args) -> @refresh()))
o._el = @
@_observers.push(o)
_createIds: ->
that = @
@$.eachDeep((el) ->
if !_.isEmpty(el.id)
that[_.camelize(el.id) + '$'] = el
)
_propertiesToAttributes: ->
for key in _getAttributes(@, @__proto__)
if !key.startsWith('on-') and @[key]?
@setAttribute(key, _serializeAttributeValue(@[key]))
@_observeProperty(key)
_attributesToProperties: ->
for key in _getAttributes(@, @__proto__)
if !key.startsWith('on-') and @hasAttribute(key)
# if !@hasAttribute(key)
# @setAttribute(key, _serializeAttributeValue(@[key]))
if !@[key]?
# @[key] = null
@[key] = _deserializeAttributeValue(@getAttribute(key))
@_observeProperty(key)
_attributesToEvents: ->
for i in [0...@attributes.length]
key = @attributes[i].name
value = @attributes[i].value
if key.startsWith('on-')
if !value?
@$.off(key.substr(3))
else if _.isFunction(value)
@$.on(key.substr(3), value)
else if _.isString(value)
if @[value]? and _.isFunction(@[value])
@$.on(key.substr(3), @[value])
else
@$.on(key.substr(3), new Function(['event'], value))
_prepare: ->
@_dom()
if @_el_style?
@shadowRoot.appendChild(@_el_style)
if @_el?
@shadowRoot.appendChild(@_el)
@_propertiesToAttributes()
attachedCallback: ->
@_propertiesToAttributes()
@_attributesToProperties()
if @ready?
@ready()
@isReady = true
@_removeEvents()
@_bindInputs()
@_attributesToEvents()
@_createEvents()
@_createIds()
for k in @_properties
@_observeProperty(k)
@redraw()
if @attached?
@attached()
# @refresh()
@isAttached = true
detachedCallback: ->
@_removeEvents()
for e in @_observers
e.close()
Kaffa.observers.splice(Kaffa.observers.indexOf(e), 1)
@_observers = []
if @detached?
@detached()
@isAttached = false
attributeChangedCallback: (name, oldValue, newValue) ->
# console.log "attributeChanged:", "#{@tagName.toLowerCase()}#{if !_.isEmpty(@id) then '#' + @id else ''}#{if !_.isEmpty(@className) then '.' + @className else ''}", name, oldValue, '->', newValue
if @isAttached
@refresh()
_dom: ->
return if hazeling()
try
i = parseInt(@textContent)
catch
i = NaN
if !_.isNaN(i)
content = contents[i]
else
content = null
st = '<style>' + ccss.compile(_getStyle(@, @__proto__)) + '</style>'
vs = fromHTML(st)
if !@_vdom_style?
@_el_style = create(vs)
else
patches = diff(@_vdom_style, vs);
@_el_style = patch(@_el_style, patches);
@_vdom_style = vs;
if @noTemplate == false
s = _getTemplate(@, @__proto__, content)
if _.isEmpty(s)
s = '<div></div>'
v = fromHTML(s)
if !@_vdom?
@_el = create(v)
else
patches = diff(@_vdom, v);
@_el = patch(@_el, patches);
@_vdom = v;
if @updated?
@updated()
redraw: ->
@_dom()
if _.contains(toRedraw, @)
_.remove(toRedraw, @)
# console.log "redraw", @
needsRedraw: ->
return _.contains(toRedraw, @)
refresh: ->
# console.log "refresh", @, toRedraw
if !_.contains(toRedraw, @)
toRedraw.push(@)
created: ->
ready: ->
attached: ->
detached: ->
updated: ->
for k of $.fn
if !(k in ['length', 'cash', 'init', 'extend']) and !BaseView.prototype[k]?
BaseView.prototype[k] = ( (fn) -> (args...) -> fn.call(@$, args...))($.fn[k])
module.exports.BaseView = BaseView
| 197383 | { $, toRedraw, redrawing, hazeling, ccss, css, vdom, contents, renderable, div } = require('./hazel.coffee')
{ fromHTML, create, diff, patch } = vdom
{ relative } = css
# Kaffa = require('../kaffa/dist/kaffa.js')
Kaffa = require('../kaffa/kaffa.coffee')
{ Class } = Kaffa
_isEvent = (name) ->
name.startsWith('@')
_getMixinKeys = (proto, name) ->
r = []
for k, v of proto
if k == name or k.startsWith(name + '.')
r.push(k)
return r
_getAttributes = (view, proto) ->
ps = proto._superclass
sv = if ps? then _getAttributes(view, ps) else []
v = []
for k in _getMixinKeys(proto, 'layout')
mv = if proto[k].attributes? then proto[k].attributes else []
v = _.union(mv, v)
_.union(sv, v)
_getStyle = (view, proto) ->
ps = proto._superclass
sv = if ps? then _getStyle(view, ps) else {}
v = {}
for k in _getMixinKeys(proto, 'layout')
mv = if proto[k].style? then proto[k].style.call(view) else {}
_.deepExtend(v, mv)
_.deepExtend({}, sv, v)
_getTemplate = (view, proto, args...) ->
# ps = proto._superclass
t = if proto.layout?.template? then proto.layout.template else null
# if ps?.layout?.template?
# ps.layout.template.call(view, t, args...)
# else if t?
if t?
t.call(view, args...)
else
(renderable -> span "").call(view, args...)
# string to type
_deserializeAttributeValue = (value) ->
if !value?
return false
try
i = parseFloat(value)
if !_.isNaN(i)
return i
catch e
return if value.toLowerCase() in ['true', 'false'] then value.toLowerCase() == 'true' else if value == '' then true else value
# type to string
_serializeAttributeValue = (value) ->
if !value?
return ""
if _.isBoolean(value)
return (if value then 'true' else 'false')
else if _.isString(value)
if value == ''
return 'false'
else
return value
else if value.toString?
return value.toString()
else
return ""
_setVProperties = (v) ->
if v.children?
_setVProperties(c) for c in v.children
if v.properties?
if !v.properties.attributes?
v.properties.attributes = {}
for key, value of v.properties
if !(key in ['dataset', 'id', 'class'])
v.properties.attributes[key] = value
delete v.properties[key]
BaseView = Class 'BaseView',
extends: HTMLElement
layout:
attributes: []
style: ->
':host':
position: relative
display: 'inline-block'
cursor: 'default'
createdCallback: ->
@_properties = []
for k, v of @
if k.startsWith('$') and !_.isFunction(v)
nk = k.substr(1)
@[nk] = @[k]
@_properties.push(nk)
root = @createShadowRoot()
@noTemplate = false
@isReady = false
@isAttached = false
@_vdom = null
@_vdom_style = null
@_observers = []
@$ = $(@)
@[0] = @
@length = 1
@cash = true
if @created?
@created()
@_prepare()
_bindInputs: ->
that = @
@$.find(':root /deep/ input').each((el) ->
el = $(el)
path = el.prop('bind')
if path? and _.valueForKeyPath(that, path)?
if !el.attr('type')?
el.attr('type', 'text')
switch el.attr('type').toLowerCase()
when 'checkbox'
el.on('change', (e) ->
_.setValueForKeyPath(that, path, el[0].checked)
)
el[0].checked = _.valueForKeyPath(that, path)
when 'radio'
el.on('change', (e) ->
_.setValueForKeyPath(that, path, el[0].checked)
)
el[0].checked = _.valueForKeyPath(that, path)
else
el.on('keyup', (e) ->
_.setValueForKeyPath(that, path, el[0].value)
)
el[0].value = _.valueForKeyPath(that, path)
)
_removeEvents: ->
@$.eachDeep((el) ->
$(el).off()
)
_createEvents: ->
for k, v of @__proto__
if _isEvent(k)
kk = k
k = k.substr(1)
p = k.split(' ')
if p.length > 1
eventType = _.first(p)
selector = _.rest(p).join(' ')
els = @$.find(':root /deep/ ' + selector.trim())
else
eventType = k.trim()
selector = null
els = @$
if v?
els.on(eventType, v)
else
els.off(eventType)
_observeProperty: (name) ->
o = Kaffa.observe(@, name, ((args) -> @refresh()))
o._el = @
@_observers.push(o)
_createIds: ->
that = @
@$.eachDeep((el) ->
if !_.isEmpty(el.id)
that[_.camelize(el.id) + '$'] = el
)
_propertiesToAttributes: ->
for key in _getAttributes(@, @__proto__)
if !key.startsWith('on-') and @[key]?
@setAttribute(key, _serializeAttributeValue(@[key]))
@_observeProperty(key)
_attributesToProperties: ->
for key in _getAttributes(@, @__proto__)
if !key.startsWith('on-') and @hasAttribute(key)
# if !@hasAttribute(key)
# @setAttribute(key, _serializeAttributeValue(@[key]))
if !@[key]?
# @[key] = null
@[key] = _deserializeAttributeValue(@getAttribute(key))
@_observeProperty(key)
_attributesToEvents: ->
for i in [0...@attributes.length]
key = @attributes[i].name
value = @attributes[i].value
if key.startsWith('<KEY>')
if !value?
@$.off(key.substr(3))
else if _.isFunction(value)
@$.on(key.substr(3), value)
else if _.isString(value)
if @[value]? and _.isFunction(@[value])
@$.on(key.substr(3), @[value])
else
@$.on(key.substr(3), new Function(['event'], value))
_prepare: ->
@_dom()
if @_el_style?
@shadowRoot.appendChild(@_el_style)
if @_el?
@shadowRoot.appendChild(@_el)
@_propertiesToAttributes()
attachedCallback: ->
@_propertiesToAttributes()
@_attributesToProperties()
if @ready?
@ready()
@isReady = true
@_removeEvents()
@_bindInputs()
@_attributesToEvents()
@_createEvents()
@_createIds()
for k in @_properties
@_observeProperty(k)
@redraw()
if @attached?
@attached()
# @refresh()
@isAttached = true
detachedCallback: ->
@_removeEvents()
for e in @_observers
e.close()
Kaffa.observers.splice(Kaffa.observers.indexOf(e), 1)
@_observers = []
if @detached?
@detached()
@isAttached = false
attributeChangedCallback: (name, oldValue, newValue) ->
# console.log "attributeChanged:", "#{@tagName.toLowerCase()}#{if !_.isEmpty(@id) then '#' + @id else ''}#{if !_.isEmpty(@className) then '.' + @className else ''}", name, oldValue, '->', newValue
if @isAttached
@refresh()
_dom: ->
return if hazeling()
try
i = parseInt(@textContent)
catch
i = NaN
if !_.isNaN(i)
content = contents[i]
else
content = null
st = '<style>' + ccss.compile(_getStyle(@, @__proto__)) + '</style>'
vs = fromHTML(st)
if !@_vdom_style?
@_el_style = create(vs)
else
patches = diff(@_vdom_style, vs);
@_el_style = patch(@_el_style, patches);
@_vdom_style = vs;
if @noTemplate == false
s = _getTemplate(@, @__proto__, content)
if _.isEmpty(s)
s = '<div></div>'
v = fromHTML(s)
if !@_vdom?
@_el = create(v)
else
patches = diff(@_vdom, v);
@_el = patch(@_el, patches);
@_vdom = v;
if @updated?
@updated()
redraw: ->
@_dom()
if _.contains(toRedraw, @)
_.remove(toRedraw, @)
# console.log "redraw", @
needsRedraw: ->
return _.contains(toRedraw, @)
refresh: ->
# console.log "refresh", @, toRedraw
if !_.contains(toRedraw, @)
toRedraw.push(@)
created: ->
ready: ->
attached: ->
detached: ->
updated: ->
for k of $.fn
if !(k in ['length', 'cash', 'init', 'extend']) and !BaseView.prototype[k]?
BaseView.prototype[k] = ( (fn) -> (args...) -> fn.call(@$, args...))($.fn[k])
module.exports.BaseView = BaseView
| true | { $, toRedraw, redrawing, hazeling, ccss, css, vdom, contents, renderable, div } = require('./hazel.coffee')
{ fromHTML, create, diff, patch } = vdom
{ relative } = css
# Kaffa = require('../kaffa/dist/kaffa.js')
Kaffa = require('../kaffa/kaffa.coffee')
{ Class } = Kaffa
_isEvent = (name) ->
name.startsWith('@')
_getMixinKeys = (proto, name) ->
r = []
for k, v of proto
if k == name or k.startsWith(name + '.')
r.push(k)
return r
_getAttributes = (view, proto) ->
ps = proto._superclass
sv = if ps? then _getAttributes(view, ps) else []
v = []
for k in _getMixinKeys(proto, 'layout')
mv = if proto[k].attributes? then proto[k].attributes else []
v = _.union(mv, v)
_.union(sv, v)
_getStyle = (view, proto) ->
ps = proto._superclass
sv = if ps? then _getStyle(view, ps) else {}
v = {}
for k in _getMixinKeys(proto, 'layout')
mv = if proto[k].style? then proto[k].style.call(view) else {}
_.deepExtend(v, mv)
_.deepExtend({}, sv, v)
_getTemplate = (view, proto, args...) ->
# ps = proto._superclass
t = if proto.layout?.template? then proto.layout.template else null
# if ps?.layout?.template?
# ps.layout.template.call(view, t, args...)
# else if t?
if t?
t.call(view, args...)
else
(renderable -> span "").call(view, args...)
# string to type
_deserializeAttributeValue = (value) ->
if !value?
return false
try
i = parseFloat(value)
if !_.isNaN(i)
return i
catch e
return if value.toLowerCase() in ['true', 'false'] then value.toLowerCase() == 'true' else if value == '' then true else value
# type to string
_serializeAttributeValue = (value) ->
if !value?
return ""
if _.isBoolean(value)
return (if value then 'true' else 'false')
else if _.isString(value)
if value == ''
return 'false'
else
return value
else if value.toString?
return value.toString()
else
return ""
_setVProperties = (v) ->
if v.children?
_setVProperties(c) for c in v.children
if v.properties?
if !v.properties.attributes?
v.properties.attributes = {}
for key, value of v.properties
if !(key in ['dataset', 'id', 'class'])
v.properties.attributes[key] = value
delete v.properties[key]
BaseView = Class 'BaseView',
extends: HTMLElement
layout:
attributes: []
style: ->
':host':
position: relative
display: 'inline-block'
cursor: 'default'
createdCallback: ->
@_properties = []
for k, v of @
if k.startsWith('$') and !_.isFunction(v)
nk = k.substr(1)
@[nk] = @[k]
@_properties.push(nk)
root = @createShadowRoot()
@noTemplate = false
@isReady = false
@isAttached = false
@_vdom = null
@_vdom_style = null
@_observers = []
@$ = $(@)
@[0] = @
@length = 1
@cash = true
if @created?
@created()
@_prepare()
_bindInputs: ->
that = @
@$.find(':root /deep/ input').each((el) ->
el = $(el)
path = el.prop('bind')
if path? and _.valueForKeyPath(that, path)?
if !el.attr('type')?
el.attr('type', 'text')
switch el.attr('type').toLowerCase()
when 'checkbox'
el.on('change', (e) ->
_.setValueForKeyPath(that, path, el[0].checked)
)
el[0].checked = _.valueForKeyPath(that, path)
when 'radio'
el.on('change', (e) ->
_.setValueForKeyPath(that, path, el[0].checked)
)
el[0].checked = _.valueForKeyPath(that, path)
else
el.on('keyup', (e) ->
_.setValueForKeyPath(that, path, el[0].value)
)
el[0].value = _.valueForKeyPath(that, path)
)
_removeEvents: ->
@$.eachDeep((el) ->
$(el).off()
)
_createEvents: ->
for k, v of @__proto__
if _isEvent(k)
kk = k
k = k.substr(1)
p = k.split(' ')
if p.length > 1
eventType = _.first(p)
selector = _.rest(p).join(' ')
els = @$.find(':root /deep/ ' + selector.trim())
else
eventType = k.trim()
selector = null
els = @$
if v?
els.on(eventType, v)
else
els.off(eventType)
_observeProperty: (name) ->
o = Kaffa.observe(@, name, ((args) -> @refresh()))
o._el = @
@_observers.push(o)
_createIds: ->
that = @
@$.eachDeep((el) ->
if !_.isEmpty(el.id)
that[_.camelize(el.id) + '$'] = el
)
_propertiesToAttributes: ->
for key in _getAttributes(@, @__proto__)
if !key.startsWith('on-') and @[key]?
@setAttribute(key, _serializeAttributeValue(@[key]))
@_observeProperty(key)
_attributesToProperties: ->
for key in _getAttributes(@, @__proto__)
if !key.startsWith('on-') and @hasAttribute(key)
# if !@hasAttribute(key)
# @setAttribute(key, _serializeAttributeValue(@[key]))
if !@[key]?
# @[key] = null
@[key] = _deserializeAttributeValue(@getAttribute(key))
@_observeProperty(key)
_attributesToEvents: ->
for i in [0...@attributes.length]
key = @attributes[i].name
value = @attributes[i].value
if key.startsWith('PI:KEY:<KEY>END_PI')
if !value?
@$.off(key.substr(3))
else if _.isFunction(value)
@$.on(key.substr(3), value)
else if _.isString(value)
if @[value]? and _.isFunction(@[value])
@$.on(key.substr(3), @[value])
else
@$.on(key.substr(3), new Function(['event'], value))
_prepare: ->
@_dom()
if @_el_style?
@shadowRoot.appendChild(@_el_style)
if @_el?
@shadowRoot.appendChild(@_el)
@_propertiesToAttributes()
attachedCallback: ->
@_propertiesToAttributes()
@_attributesToProperties()
if @ready?
@ready()
@isReady = true
@_removeEvents()
@_bindInputs()
@_attributesToEvents()
@_createEvents()
@_createIds()
for k in @_properties
@_observeProperty(k)
@redraw()
if @attached?
@attached()
# @refresh()
@isAttached = true
detachedCallback: ->
@_removeEvents()
for e in @_observers
e.close()
Kaffa.observers.splice(Kaffa.observers.indexOf(e), 1)
@_observers = []
if @detached?
@detached()
@isAttached = false
attributeChangedCallback: (name, oldValue, newValue) ->
# console.log "attributeChanged:", "#{@tagName.toLowerCase()}#{if !_.isEmpty(@id) then '#' + @id else ''}#{if !_.isEmpty(@className) then '.' + @className else ''}", name, oldValue, '->', newValue
if @isAttached
@refresh()
_dom: ->
return if hazeling()
try
i = parseInt(@textContent)
catch
i = NaN
if !_.isNaN(i)
content = contents[i]
else
content = null
st = '<style>' + ccss.compile(_getStyle(@, @__proto__)) + '</style>'
vs = fromHTML(st)
if !@_vdom_style?
@_el_style = create(vs)
else
patches = diff(@_vdom_style, vs);
@_el_style = patch(@_el_style, patches);
@_vdom_style = vs;
if @noTemplate == false
s = _getTemplate(@, @__proto__, content)
if _.isEmpty(s)
s = '<div></div>'
v = fromHTML(s)
if !@_vdom?
@_el = create(v)
else
patches = diff(@_vdom, v);
@_el = patch(@_el, patches);
@_vdom = v;
if @updated?
@updated()
redraw: ->
@_dom()
if _.contains(toRedraw, @)
_.remove(toRedraw, @)
# console.log "redraw", @
needsRedraw: ->
return _.contains(toRedraw, @)
refresh: ->
# console.log "refresh", @, toRedraw
if !_.contains(toRedraw, @)
toRedraw.push(@)
created: ->
ready: ->
attached: ->
detached: ->
updated: ->
for k of $.fn
if !(k in ['length', 'cash', 'init', 'extend']) and !BaseView.prototype[k]?
BaseView.prototype[k] = ( (fn) -> (args...) -> fn.call(@$, args...))($.fn[k])
module.exports.BaseView = BaseView
|
[
{
"context": " continue if !year || !month || !val\n key = \"#{year}/#{\"0#{month}\".substr(-2)}\"\n @dataset[key] = val\n min = key if min ",
"end": 2160,
"score": 0.92247074842453,
"start": 2125,
"tag": "KEY",
"value": "\"#{year}/#{\"0#{month}\".substr(-2)}\""
}
] | src/app.coffee | sunny4381/economy-trend-index | 0 | class SearchForm
constructor: (el)->
@el = el
$(el).on 'submit', (e) =>
e.preventDefault()
@submit()
$("#{@el} .input-month").datepicker({
format: 'yyyy/mm'
viewMode: 'months'
minViewMode: 'months'
language: 'ja'
})
default_type = $("#{@el} a.economic-index-type:first")
@current_type = default_type.text()
@current_url = default_type.attr('href')
$("#{@el} span.economic-index-type__label").text(@current_type)
$("#{@el} a.economic-index-type").on "click", (e) =>
e.preventDefault()
@current_type = $(e.target).text()
@current_url = $(e.target).attr('href')
$("#{@el} span.economic-index-type__label").text(@current_type)
@submit()
startAt: ->
$("#{@el} .start-at").val()
endAt: ->
$("#{@el} .end-at").val()
update: (start_at, end_at) ->
$("#{@el} .start-at").datepicker('update', start_at)
$("#{@el} .end-at").datepicker('update', end_at)
type: ->
@current_type
url: ->
@current_url
onSubmit: (func) ->
@handlers = [] unless @handlers
@handlers.push(func)
submit: ->
return if !@handlers
for handler in @handlers
handler()
class EconomyIndexChart
constructor: (el, searchForm) ->
@el = el
@searchForm = searchForm
@searchForm.onSubmit(@search)
@ctx = $("#{@el} .economy-index-chart")[0].getContext("2d")
reloadData: ->
url = @searchForm.url()
return if url == @current_url
@current_url = url
$.ajax
url: @searchForm.url()
async: false
beforeSend: (xhr) =>
xhr.overrideMimeType('text/plain; charset=Shift_JIS')
success: (data) =>
csv = $.csv.toArrays(data)
@updateDataset(csv)
error: (xhr, status, error) =>
$(@el).html("data loading error: #{status}")
updateDataset: (csv) =>
@dataset = {}
min = '9999/99'
max = '0000/00'
for row, i in csv
continue if !row[3]
year = parseInt(row[1])
month = parseInt(row[2].replace(/月/, ''))
val = parseFloat(row[3])
continue if !year || !month || !val
key = "#{year}/#{"0#{month}".substr(-2)}"
@dataset[key] = val
min = key if min > key
max = key if max < key
@min = min unless @min
@max = max unless @max
@updateSearchForm()
updateSearchForm: ->
start_at = @searchForm.startAt() || @min
end_at = @searchForm.endAt() || @max
@searchForm.update(start_at, end_at)
drawChart: ->
labels = []
points = []
for key, val of @dataset
continue if @min && key < @min
continue if @max && key > @max
labels.push(key)
points.push(val)
options =
responsive: true
chartDef =
labels: labels
datasets: [
{
fillColor: "rgba(220,220,220,0.2)"
strokeColor: "rgba(220,220,220,1)"
pointColor: "rgba(220,220,220,1)"
pointStrokeColor: "#fff"
pointHighlightFill: "#fff"
pointHighlightStroke: "rgba(220,220,220,1)"
data: points
}
]
new Chart(@ctx).Line(chartDef, options)
search: =>
@min = @searchForm.startAt()
@max = @searchForm.endAt()
@reloadData()
@drawChart()
$ ->
searchForm = new SearchForm('#search')
economyIndexChart = new EconomyIndexChart('#content', searchForm)
searchForm.submit()
| 95533 | class SearchForm
constructor: (el)->
@el = el
$(el).on 'submit', (e) =>
e.preventDefault()
@submit()
$("#{@el} .input-month").datepicker({
format: 'yyyy/mm'
viewMode: 'months'
minViewMode: 'months'
language: 'ja'
})
default_type = $("#{@el} a.economic-index-type:first")
@current_type = default_type.text()
@current_url = default_type.attr('href')
$("#{@el} span.economic-index-type__label").text(@current_type)
$("#{@el} a.economic-index-type").on "click", (e) =>
e.preventDefault()
@current_type = $(e.target).text()
@current_url = $(e.target).attr('href')
$("#{@el} span.economic-index-type__label").text(@current_type)
@submit()
startAt: ->
$("#{@el} .start-at").val()
endAt: ->
$("#{@el} .end-at").val()
update: (start_at, end_at) ->
$("#{@el} .start-at").datepicker('update', start_at)
$("#{@el} .end-at").datepicker('update', end_at)
type: ->
@current_type
url: ->
@current_url
onSubmit: (func) ->
@handlers = [] unless @handlers
@handlers.push(func)
submit: ->
return if !@handlers
for handler in @handlers
handler()
class EconomyIndexChart
constructor: (el, searchForm) ->
@el = el
@searchForm = searchForm
@searchForm.onSubmit(@search)
@ctx = $("#{@el} .economy-index-chart")[0].getContext("2d")
reloadData: ->
url = @searchForm.url()
return if url == @current_url
@current_url = url
$.ajax
url: @searchForm.url()
async: false
beforeSend: (xhr) =>
xhr.overrideMimeType('text/plain; charset=Shift_JIS')
success: (data) =>
csv = $.csv.toArrays(data)
@updateDataset(csv)
error: (xhr, status, error) =>
$(@el).html("data loading error: #{status}")
updateDataset: (csv) =>
@dataset = {}
min = '9999/99'
max = '0000/00'
for row, i in csv
continue if !row[3]
year = parseInt(row[1])
month = parseInt(row[2].replace(/月/, ''))
val = parseFloat(row[3])
continue if !year || !month || !val
key = <KEY>
@dataset[key] = val
min = key if min > key
max = key if max < key
@min = min unless @min
@max = max unless @max
@updateSearchForm()
updateSearchForm: ->
start_at = @searchForm.startAt() || @min
end_at = @searchForm.endAt() || @max
@searchForm.update(start_at, end_at)
drawChart: ->
labels = []
points = []
for key, val of @dataset
continue if @min && key < @min
continue if @max && key > @max
labels.push(key)
points.push(val)
options =
responsive: true
chartDef =
labels: labels
datasets: [
{
fillColor: "rgba(220,220,220,0.2)"
strokeColor: "rgba(220,220,220,1)"
pointColor: "rgba(220,220,220,1)"
pointStrokeColor: "#fff"
pointHighlightFill: "#fff"
pointHighlightStroke: "rgba(220,220,220,1)"
data: points
}
]
new Chart(@ctx).Line(chartDef, options)
search: =>
@min = @searchForm.startAt()
@max = @searchForm.endAt()
@reloadData()
@drawChart()
$ ->
searchForm = new SearchForm('#search')
economyIndexChart = new EconomyIndexChart('#content', searchForm)
searchForm.submit()
| true | class SearchForm
constructor: (el)->
@el = el
$(el).on 'submit', (e) =>
e.preventDefault()
@submit()
$("#{@el} .input-month").datepicker({
format: 'yyyy/mm'
viewMode: 'months'
minViewMode: 'months'
language: 'ja'
})
default_type = $("#{@el} a.economic-index-type:first")
@current_type = default_type.text()
@current_url = default_type.attr('href')
$("#{@el} span.economic-index-type__label").text(@current_type)
$("#{@el} a.economic-index-type").on "click", (e) =>
e.preventDefault()
@current_type = $(e.target).text()
@current_url = $(e.target).attr('href')
$("#{@el} span.economic-index-type__label").text(@current_type)
@submit()
startAt: ->
$("#{@el} .start-at").val()
endAt: ->
$("#{@el} .end-at").val()
update: (start_at, end_at) ->
$("#{@el} .start-at").datepicker('update', start_at)
$("#{@el} .end-at").datepicker('update', end_at)
type: ->
@current_type
url: ->
@current_url
onSubmit: (func) ->
@handlers = [] unless @handlers
@handlers.push(func)
submit: ->
return if !@handlers
for handler in @handlers
handler()
class EconomyIndexChart
constructor: (el, searchForm) ->
@el = el
@searchForm = searchForm
@searchForm.onSubmit(@search)
@ctx = $("#{@el} .economy-index-chart")[0].getContext("2d")
reloadData: ->
url = @searchForm.url()
return if url == @current_url
@current_url = url
$.ajax
url: @searchForm.url()
async: false
beforeSend: (xhr) =>
xhr.overrideMimeType('text/plain; charset=Shift_JIS')
success: (data) =>
csv = $.csv.toArrays(data)
@updateDataset(csv)
error: (xhr, status, error) =>
$(@el).html("data loading error: #{status}")
updateDataset: (csv) =>
@dataset = {}
min = '9999/99'
max = '0000/00'
for row, i in csv
continue if !row[3]
year = parseInt(row[1])
month = parseInt(row[2].replace(/月/, ''))
val = parseFloat(row[3])
continue if !year || !month || !val
key = PI:KEY:<KEY>END_PI
@dataset[key] = val
min = key if min > key
max = key if max < key
@min = min unless @min
@max = max unless @max
@updateSearchForm()
updateSearchForm: ->
start_at = @searchForm.startAt() || @min
end_at = @searchForm.endAt() || @max
@searchForm.update(start_at, end_at)
drawChart: ->
labels = []
points = []
for key, val of @dataset
continue if @min && key < @min
continue if @max && key > @max
labels.push(key)
points.push(val)
options =
responsive: true
chartDef =
labels: labels
datasets: [
{
fillColor: "rgba(220,220,220,0.2)"
strokeColor: "rgba(220,220,220,1)"
pointColor: "rgba(220,220,220,1)"
pointStrokeColor: "#fff"
pointHighlightFill: "#fff"
pointHighlightStroke: "rgba(220,220,220,1)"
data: points
}
]
new Chart(@ctx).Line(chartDef, options)
search: =>
@min = @searchForm.startAt()
@max = @searchForm.endAt()
@reloadData()
@drawChart()
$ ->
searchForm = new SearchForm('#search')
economyIndexChart = new EconomyIndexChart('#content', searchForm)
searchForm.submit()
|
[
{
"context": "orgName: 'Arizona Geological Survey'\n orgEmail: 'metadata@usgin.org'\n defaultMetadataContact:\n OrganizationName: ",
"end": 118,
"score": 0.9999201893806458,
"start": 100,
"tag": "EMAIL",
"value": "metadata@usgin.org"
},
{
"context": "mation:\n Phone: '520-770-3500'\n email: 'metadata@azgs.az.gov'\n Address:\n Street: '416 W. Congress ",
"end": 282,
"score": 0.999923586845398,
"start": 262,
"tag": "EMAIL",
"value": "metadata@azgs.az.gov"
}
] | src/organization-config.coffee | usgin/metadata-server | 0 | module.exports =
orgUrl: 'http://azgs.az.gov'
orgName: 'Arizona Geological Survey'
orgEmail: 'metadata@usgin.org'
defaultMetadataContact:
OrganizationName: 'Arizona Geological Survey'
ContactInformation:
Phone: '520-770-3500'
email: 'metadata@azgs.az.gov'
Address:
Street: '416 W. Congress St. Ste. 100'
City: 'Tucson'
State: 'Arizona'
Zip: '85701'
| 18699 | module.exports =
orgUrl: 'http://azgs.az.gov'
orgName: 'Arizona Geological Survey'
orgEmail: '<EMAIL>'
defaultMetadataContact:
OrganizationName: 'Arizona Geological Survey'
ContactInformation:
Phone: '520-770-3500'
email: '<EMAIL>'
Address:
Street: '416 W. Congress St. Ste. 100'
City: 'Tucson'
State: 'Arizona'
Zip: '85701'
| true | module.exports =
orgUrl: 'http://azgs.az.gov'
orgName: 'Arizona Geological Survey'
orgEmail: 'PI:EMAIL:<EMAIL>END_PI'
defaultMetadataContact:
OrganizationName: 'Arizona Geological Survey'
ContactInformation:
Phone: '520-770-3500'
email: 'PI:EMAIL:<EMAIL>END_PI'
Address:
Street: '416 W. Congress St. Ste. 100'
City: 'Tucson'
State: 'Arizona'
Zip: '85701'
|
[
{
"context": "rses any AP data\n# that they spit out \n# Author: Robbie Saunders http://eibbors.com/[/p/reavetard]\n# =============",
"end": 180,
"score": 0.9998588562011719,
"start": 165,
"tag": "NAME",
"value": "Robbie Saunders"
}
] | src/wash.coffee | eibbors/reavetard | 1 | # Reavetard - Reaver WPS (+Wash) extension scripts
# wash.coffee :: Module that spawns wash child processes and parses any AP data
# that they spit out
# Author: Robbie Saunders http://eibbors.com/[/p/reavetard]
# ==============================================================================
# Module dependencies
# -------------------------
events = require 'events'
{spawn, exec} = require 'child_process'
# Arguments supported by the wash command in the format:
# key: [shortFlag, fullFlag, description, includesValue]
WASH_ARGS =
interface: ['-i', '--interface=<iface>', 'Interface to capture packets on', true]
file: ['-f', '--file [FILE1 FILE2 FILE3 ...]', 'Read packets from capture files', true]
channel: ['-c', '--channel=<num>', 'Channel to listen on [auto]', true]
outFile: ['-o', '--out-file=<file>', 'Write data to file', true]
probes: ['-n', '--probes=<num>', 'Maximum number of probes to send to each AP in scan mode [15]', true]
daemonize: ['-D', '--daemonize', 'Daemonize wash', false]
ignoreFCS: ['-C', '--ignore-fcs', 'Ignore frame checksum errors', false]
use5ghz: ['-5', '--5ghz', 'Use 5GHz 802.11 channels', false]
scan: ['-s', '--scan', 'Use scan mode', false]
survey: ['-u', '--survey', 'Use survey mode [default]', false]
help: ['-h', '--help', 'Show help', false]
### Child process wrapper for spawned wash processes ###
class Wash extends events.EventEmitter
constructor: (options) ->
for own key, value of options
@[key] = value
@interface ?= 'mon0'
@scan ?= true
@proc = null
# Setting a duration will switch to exec vs. real time parsing (spawn)
start: (args, duration=0) ->
# create an arguments array, if not provided by caller
if not args?
args = []
for key, value of WASH_ARGS when @[key]?
[flag, option, desc, inclVal] = value
if @[key] or inclVal then args.push flag
if inclVal then args.push @[key]
# kill the existing process, then spawn a new one and bind to the data event
@stop()
if duration > 0
@proc = undefined
exec 'wash', args, (output) =>
for line in output.split('\n')
@process line
else
@proc = spawn 'wash', args
@proc.stdout.on 'data', @process
@proc.stderr.on 'data', @process
stop: () ->
if @proc
@proc.kill()
@emit 'exit', true
# parse and emit any discovered stations
process: (data) =>
ap = ///
(\w\w(:\w\w)+)\s+ # bssid
(\d+)\s+ # channel
(-\d+)\s+ #rssi
(\d\.\d)\s+ # wps version
(Yes|No)\s+ # wps locked?
(.*) # essid
///.exec(data.toString())
if ap then @emit 'ap',
bssid: ap[1]
channel: ap[3]
rssi: ap[4]
version: ap[5]
locked: (ap[6] is 'Yes')
essid: ap[7]
module.exports = Wash
| 29529 | # Reavetard - Reaver WPS (+Wash) extension scripts
# wash.coffee :: Module that spawns wash child processes and parses any AP data
# that they spit out
# Author: <NAME> http://eibbors.com/[/p/reavetard]
# ==============================================================================
# Module dependencies
# -------------------------
events = require 'events'
{spawn, exec} = require 'child_process'
# Arguments supported by the wash command in the format:
# key: [shortFlag, fullFlag, description, includesValue]
WASH_ARGS =
interface: ['-i', '--interface=<iface>', 'Interface to capture packets on', true]
file: ['-f', '--file [FILE1 FILE2 FILE3 ...]', 'Read packets from capture files', true]
channel: ['-c', '--channel=<num>', 'Channel to listen on [auto]', true]
outFile: ['-o', '--out-file=<file>', 'Write data to file', true]
probes: ['-n', '--probes=<num>', 'Maximum number of probes to send to each AP in scan mode [15]', true]
daemonize: ['-D', '--daemonize', 'Daemonize wash', false]
ignoreFCS: ['-C', '--ignore-fcs', 'Ignore frame checksum errors', false]
use5ghz: ['-5', '--5ghz', 'Use 5GHz 802.11 channels', false]
scan: ['-s', '--scan', 'Use scan mode', false]
survey: ['-u', '--survey', 'Use survey mode [default]', false]
help: ['-h', '--help', 'Show help', false]
### Child process wrapper for spawned wash processes ###
class Wash extends events.EventEmitter
constructor: (options) ->
for own key, value of options
@[key] = value
@interface ?= 'mon0'
@scan ?= true
@proc = null
# Setting a duration will switch to exec vs. real time parsing (spawn)
start: (args, duration=0) ->
# create an arguments array, if not provided by caller
if not args?
args = []
for key, value of WASH_ARGS when @[key]?
[flag, option, desc, inclVal] = value
if @[key] or inclVal then args.push flag
if inclVal then args.push @[key]
# kill the existing process, then spawn a new one and bind to the data event
@stop()
if duration > 0
@proc = undefined
exec 'wash', args, (output) =>
for line in output.split('\n')
@process line
else
@proc = spawn 'wash', args
@proc.stdout.on 'data', @process
@proc.stderr.on 'data', @process
stop: () ->
if @proc
@proc.kill()
@emit 'exit', true
# parse and emit any discovered stations
process: (data) =>
ap = ///
(\w\w(:\w\w)+)\s+ # bssid
(\d+)\s+ # channel
(-\d+)\s+ #rssi
(\d\.\d)\s+ # wps version
(Yes|No)\s+ # wps locked?
(.*) # essid
///.exec(data.toString())
if ap then @emit 'ap',
bssid: ap[1]
channel: ap[3]
rssi: ap[4]
version: ap[5]
locked: (ap[6] is 'Yes')
essid: ap[7]
module.exports = Wash
| true | # Reavetard - Reaver WPS (+Wash) extension scripts
# wash.coffee :: Module that spawns wash child processes and parses any AP data
# that they spit out
# Author: PI:NAME:<NAME>END_PI http://eibbors.com/[/p/reavetard]
# ==============================================================================
# Module dependencies
# -------------------------
events = require 'events'
{spawn, exec} = require 'child_process'
# Arguments supported by the wash command in the format:
# key: [shortFlag, fullFlag, description, includesValue]
WASH_ARGS =
interface: ['-i', '--interface=<iface>', 'Interface to capture packets on', true]
file: ['-f', '--file [FILE1 FILE2 FILE3 ...]', 'Read packets from capture files', true]
channel: ['-c', '--channel=<num>', 'Channel to listen on [auto]', true]
outFile: ['-o', '--out-file=<file>', 'Write data to file', true]
probes: ['-n', '--probes=<num>', 'Maximum number of probes to send to each AP in scan mode [15]', true]
daemonize: ['-D', '--daemonize', 'Daemonize wash', false]
ignoreFCS: ['-C', '--ignore-fcs', 'Ignore frame checksum errors', false]
use5ghz: ['-5', '--5ghz', 'Use 5GHz 802.11 channels', false]
scan: ['-s', '--scan', 'Use scan mode', false]
survey: ['-u', '--survey', 'Use survey mode [default]', false]
help: ['-h', '--help', 'Show help', false]
### Child process wrapper for spawned wash processes ###
class Wash extends events.EventEmitter
constructor: (options) ->
for own key, value of options
@[key] = value
@interface ?= 'mon0'
@scan ?= true
@proc = null
# Setting a duration will switch to exec vs. real time parsing (spawn)
start: (args, duration=0) ->
# create an arguments array, if not provided by caller
if not args?
args = []
for key, value of WASH_ARGS when @[key]?
[flag, option, desc, inclVal] = value
if @[key] or inclVal then args.push flag
if inclVal then args.push @[key]
# kill the existing process, then spawn a new one and bind to the data event
@stop()
if duration > 0
@proc = undefined
exec 'wash', args, (output) =>
for line in output.split('\n')
@process line
else
@proc = spawn 'wash', args
@proc.stdout.on 'data', @process
@proc.stderr.on 'data', @process
stop: () ->
if @proc
@proc.kill()
@emit 'exit', true
# parse and emit any discovered stations
process: (data) =>
ap = ///
(\w\w(:\w\w)+)\s+ # bssid
(\d+)\s+ # channel
(-\d+)\s+ #rssi
(\d\.\d)\s+ # wps version
(Yes|No)\s+ # wps locked?
(.*) # essid
///.exec(data.toString())
if ap then @emit 'ap',
bssid: ap[1]
channel: ap[3]
rssi: ap[4]
version: ap[5]
locked: (ap[6] is 'Yes')
essid: ap[7]
module.exports = Wash
|
[
{
"context": " indicatorGutter)\n\n\nindicatorData = [\n\t{\n\t\tname: \"Ounces\",\n\t},\n\t{\n\t\tname: \"Cups\",\n\t},\n]\n\n\n\n# Design Compon",
"end": 400,
"score": 0.9994551539421082,
"start": 394,
"tag": "NAME",
"value": "Ounces"
},
{
"context": "atorData = [\n\t{\n\t\tname: \"Ounces\",\n\t},\n\t{\n\t\tname: \"Cups\",\n\t},\n]\n\n\n\n# Design Component - Positioning -----",
"end": 423,
"score": 0.9993981719017029,
"start": 419,
"tag": "NAME",
"value": "Cups"
}
] | src/public/framer.framer/app.coffee | jmanhart/personal-portfolio-17 | 0 |
# Style Variables ----------------------------------
spacer = 20
blue = "#383838"
lightBlue = "#BBB9B9"
otherBlue = "#DBDBDB"
# Data ---------------------------------------------
actionAmount = 0
gutter = 10
indicators = []
indicatorChevOffset = 50
indicatorGutter = 10
indicatorSize = 8
indicatorContWidth = (2 * indicatorSize) + ((2 - 1) * indicatorGutter)
indicatorData = [
{
name: "Ounces",
},
{
name: "Cups",
},
]
# Design Component - Positioning -------------------
indicatorCont.x = Align.center()
indicatorCont.y = Align.center()
# shape.x = Align.center()
# shape.y = Align.top(0)
minusBtn.y = Align.center()
minusBtn.x = Align.center(0)
addBtn.y = Align.center()
addBtn.x = Align.center(0)
actionBtnLabelCancel.y = Align.bottom(spacer*2)
fill.backgroundColor = otherBlue
fillTop.backgroundColor = otherBlue
# Code Components ----------------------------------
# Text
# Create PageComponent
pageScroller = new PageComponent
parent: indicatorCont
x: Align.center()
y: Align.center()
width: indicatorCont.width
height: indicatorCont.height / 2
scrollVertical: false
clip: true
backgroundColor: null
dotIndicatorCont = new Layer
width: indicatorContWidth
height: 20
x: Align.center(3)
y: Align.bottom(-spacer/1.35)
backgroundColor: null
parent: indicatorCont
labels = []
for i in [0...2]
page = new Layer
parent: pageScroller.content
width: pageScroller.width
height: pageScroller.height
x: (pageScroller.width + gutter) * i
backgroundColor: null
actionLabel = new TextLayer
parent: page
x: Align.center(3)
y: Align.top(-spacer)
fontSize: 72
text: actionAmount
color: blue
unitLabel = new TextLayer
parent: page
x: Align.center(3)
y: Align.bottom(-spacer/2)
fontSize: 14
text: indicatorData[i].name
color: blue
labels.push actionLabel
# creating the indicator
indicator = new Layer
parent: dotIndicatorCont
size: indicatorSize
borderRadius: indicatorCont.height
x: (indicatorSize + indicatorGutter) * i
y: Align.center()
name: i
backgroundColor: lightBlue
# creating states for the indicator
indicator.states =
active:
backgroundColor: blue
inactive:
backgroundColor: lightBlue
#pushing indicators into array
indicators.push(indicator)
# Component - States -------------------------------
# shape.states =
# active:
# x: 0
minusBtn.states =
active:
x: Align.center(-indicatorCont.width/1.25)
addBtn.states =
active:
x: Align.center(indicatorCont.width/1.25)
actionBtn.states =
active:
backgroundColor: 'white'
actionBtnLabel.states =
active:
y: Align.top(-spacer*2)
actionBtnLabelCancel.states =
active:
y: Align.center()
actionLabel.states =
goalNotMet:
color: blue
goalMet:
color: "white"
unitLabel.states =
goalNotMet:
color: blue
goalMet:
color: "white"
indicatorCont.states =
goalNotMet:
backgroundColor: "white"
goalMet:
backgroundColor: blue
indicatorFillCont.states =
goalNotMet:
opacity: 1
goalMet:
opacity: 0
fillTop.states =
shiftForward:
x: Align.center(10)
shiftBack:
x: Align.center(-10)
# Component - Animations -------------------------
addBtn.animationOptions =
curve: Spring(damping: .8)
time: 0.95
minusBtn.animationOptions = addBtn.animationOptions
actionBtnLabel.animationOptions = addBtn.animationOptions
actionBtnLabelCancel.animationOptions = addBtn.animationOptions
indicatorFillCont.animationOptions = addBtn.animationOptions
indicatorCont.animationOptions = addBtn.animationOptions
unitLabel.animationOptions = addBtn.animationOptions
actionLabel.animationOptions = addBtn.animationOptions
# Component - Interactions -----------------------
indicatorCont.onTap ->
minusBtn.stateCycle()
addBtn.stateCycle()
actionBtnLabel.stateCycle()
actionBtnLabelCancel.stateCycle()
actionBtn.stateCycle()
# Making the first indicator active
pageScroller.snapToPage(pageScroller.content.children[0])
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Changing indicator state on page change
pageScroller.on "change:currentPage", ->
indicator.states.switch("default") for indicator in indicators
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Ounces
if current == 0
addBtn.onTap ->
actionAmount += 1;
labels[0].text = actionAmount * 8
labels[0].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
minusBtn.onTap ->
actionAmount -= 1;
labels[0].text = actionAmount * 8
labels[0].x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
# Cups
if current == 1
addBtn.onTap ->
actionAmount += 1;
labels[1].text = actionAmount
labels[1].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
minusBtn.onTap ->
actionAmount -= 1;
labels[1].text = actionAmount
labels[1].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
print current
# addBtn.onTap ->
# actionAmount += 1;
# labels[1].text = actionAmount
# labels[0].text = actionAmount * 8
# labels[0].x = Align.center()
# actionLabel.x = Align.center()
# indicatorFillCont.height = actionAmount * 25
# fillTop.stateCycle()
#
# if indicatorFillCont.height > indicatorCont.height
# indicatorFillCont.stateCycle("goalMet")
# indicatorCont.stateCycle("goalMet")
# actionLabel.stateCycle("goalMet")
# unitLabel.stateCycle("goalMet")
#
# else
#
#
# minusBtn.onTap ->
# actionAmount -= 1;
# labels[1].text = actionAmount
# labels[0].text = actionAmount * 8
# actionLabel.x = Align.center()
# indicatorFillCont.height = actionAmount * 25
# fillTop.stateCycle()
#
# if indicatorFillCont.height < indicatorCont.height
# indicatorFillCont.stateCycle("goalNotMet")
# indicatorCont.stateCycle("goalNotMet")
# actionLabel.stateCycle("goalNotMet")
# unitLabel.stateCycle("goalNotMet")
# else
# Component - Z-Index ----------------------------
addBtn.sendToBack()
minusBtn.sendToBack() | 38552 |
# Style Variables ----------------------------------
spacer = 20
blue = "#383838"
lightBlue = "#BBB9B9"
otherBlue = "#DBDBDB"
# Data ---------------------------------------------
actionAmount = 0
gutter = 10
indicators = []
indicatorChevOffset = 50
indicatorGutter = 10
indicatorSize = 8
indicatorContWidth = (2 * indicatorSize) + ((2 - 1) * indicatorGutter)
indicatorData = [
{
name: "<NAME>",
},
{
name: "<NAME>",
},
]
# Design Component - Positioning -------------------
indicatorCont.x = Align.center()
indicatorCont.y = Align.center()
# shape.x = Align.center()
# shape.y = Align.top(0)
minusBtn.y = Align.center()
minusBtn.x = Align.center(0)
addBtn.y = Align.center()
addBtn.x = Align.center(0)
actionBtnLabelCancel.y = Align.bottom(spacer*2)
fill.backgroundColor = otherBlue
fillTop.backgroundColor = otherBlue
# Code Components ----------------------------------
# Text
# Create PageComponent
pageScroller = new PageComponent
parent: indicatorCont
x: Align.center()
y: Align.center()
width: indicatorCont.width
height: indicatorCont.height / 2
scrollVertical: false
clip: true
backgroundColor: null
dotIndicatorCont = new Layer
width: indicatorContWidth
height: 20
x: Align.center(3)
y: Align.bottom(-spacer/1.35)
backgroundColor: null
parent: indicatorCont
labels = []
for i in [0...2]
page = new Layer
parent: pageScroller.content
width: pageScroller.width
height: pageScroller.height
x: (pageScroller.width + gutter) * i
backgroundColor: null
actionLabel = new TextLayer
parent: page
x: Align.center(3)
y: Align.top(-spacer)
fontSize: 72
text: actionAmount
color: blue
unitLabel = new TextLayer
parent: page
x: Align.center(3)
y: Align.bottom(-spacer/2)
fontSize: 14
text: indicatorData[i].name
color: blue
labels.push actionLabel
# creating the indicator
indicator = new Layer
parent: dotIndicatorCont
size: indicatorSize
borderRadius: indicatorCont.height
x: (indicatorSize + indicatorGutter) * i
y: Align.center()
name: i
backgroundColor: lightBlue
# creating states for the indicator
indicator.states =
active:
backgroundColor: blue
inactive:
backgroundColor: lightBlue
#pushing indicators into array
indicators.push(indicator)
# Component - States -------------------------------
# shape.states =
# active:
# x: 0
minusBtn.states =
active:
x: Align.center(-indicatorCont.width/1.25)
addBtn.states =
active:
x: Align.center(indicatorCont.width/1.25)
actionBtn.states =
active:
backgroundColor: 'white'
actionBtnLabel.states =
active:
y: Align.top(-spacer*2)
actionBtnLabelCancel.states =
active:
y: Align.center()
actionLabel.states =
goalNotMet:
color: blue
goalMet:
color: "white"
unitLabel.states =
goalNotMet:
color: blue
goalMet:
color: "white"
indicatorCont.states =
goalNotMet:
backgroundColor: "white"
goalMet:
backgroundColor: blue
indicatorFillCont.states =
goalNotMet:
opacity: 1
goalMet:
opacity: 0
fillTop.states =
shiftForward:
x: Align.center(10)
shiftBack:
x: Align.center(-10)
# Component - Animations -------------------------
addBtn.animationOptions =
curve: Spring(damping: .8)
time: 0.95
minusBtn.animationOptions = addBtn.animationOptions
actionBtnLabel.animationOptions = addBtn.animationOptions
actionBtnLabelCancel.animationOptions = addBtn.animationOptions
indicatorFillCont.animationOptions = addBtn.animationOptions
indicatorCont.animationOptions = addBtn.animationOptions
unitLabel.animationOptions = addBtn.animationOptions
actionLabel.animationOptions = addBtn.animationOptions
# Component - Interactions -----------------------
indicatorCont.onTap ->
minusBtn.stateCycle()
addBtn.stateCycle()
actionBtnLabel.stateCycle()
actionBtnLabelCancel.stateCycle()
actionBtn.stateCycle()
# Making the first indicator active
pageScroller.snapToPage(pageScroller.content.children[0])
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Changing indicator state on page change
pageScroller.on "change:currentPage", ->
indicator.states.switch("default") for indicator in indicators
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Ounces
if current == 0
addBtn.onTap ->
actionAmount += 1;
labels[0].text = actionAmount * 8
labels[0].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
minusBtn.onTap ->
actionAmount -= 1;
labels[0].text = actionAmount * 8
labels[0].x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
# Cups
if current == 1
addBtn.onTap ->
actionAmount += 1;
labels[1].text = actionAmount
labels[1].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
minusBtn.onTap ->
actionAmount -= 1;
labels[1].text = actionAmount
labels[1].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
print current
# addBtn.onTap ->
# actionAmount += 1;
# labels[1].text = actionAmount
# labels[0].text = actionAmount * 8
# labels[0].x = Align.center()
# actionLabel.x = Align.center()
# indicatorFillCont.height = actionAmount * 25
# fillTop.stateCycle()
#
# if indicatorFillCont.height > indicatorCont.height
# indicatorFillCont.stateCycle("goalMet")
# indicatorCont.stateCycle("goalMet")
# actionLabel.stateCycle("goalMet")
# unitLabel.stateCycle("goalMet")
#
# else
#
#
# minusBtn.onTap ->
# actionAmount -= 1;
# labels[1].text = actionAmount
# labels[0].text = actionAmount * 8
# actionLabel.x = Align.center()
# indicatorFillCont.height = actionAmount * 25
# fillTop.stateCycle()
#
# if indicatorFillCont.height < indicatorCont.height
# indicatorFillCont.stateCycle("goalNotMet")
# indicatorCont.stateCycle("goalNotMet")
# actionLabel.stateCycle("goalNotMet")
# unitLabel.stateCycle("goalNotMet")
# else
# Component - Z-Index ----------------------------
addBtn.sendToBack()
minusBtn.sendToBack() | true |
# Style Variables ----------------------------------
spacer = 20
blue = "#383838"
lightBlue = "#BBB9B9"
otherBlue = "#DBDBDB"
# Data ---------------------------------------------
actionAmount = 0
gutter = 10
indicators = []
indicatorChevOffset = 50
indicatorGutter = 10
indicatorSize = 8
indicatorContWidth = (2 * indicatorSize) + ((2 - 1) * indicatorGutter)
indicatorData = [
{
name: "PI:NAME:<NAME>END_PI",
},
{
name: "PI:NAME:<NAME>END_PI",
},
]
# Design Component - Positioning -------------------
indicatorCont.x = Align.center()
indicatorCont.y = Align.center()
# shape.x = Align.center()
# shape.y = Align.top(0)
minusBtn.y = Align.center()
minusBtn.x = Align.center(0)
addBtn.y = Align.center()
addBtn.x = Align.center(0)
actionBtnLabelCancel.y = Align.bottom(spacer*2)
fill.backgroundColor = otherBlue
fillTop.backgroundColor = otherBlue
# Code Components ----------------------------------
# Text
# Create PageComponent
pageScroller = new PageComponent
parent: indicatorCont
x: Align.center()
y: Align.center()
width: indicatorCont.width
height: indicatorCont.height / 2
scrollVertical: false
clip: true
backgroundColor: null
dotIndicatorCont = new Layer
width: indicatorContWidth
height: 20
x: Align.center(3)
y: Align.bottom(-spacer/1.35)
backgroundColor: null
parent: indicatorCont
labels = []
for i in [0...2]
page = new Layer
parent: pageScroller.content
width: pageScroller.width
height: pageScroller.height
x: (pageScroller.width + gutter) * i
backgroundColor: null
actionLabel = new TextLayer
parent: page
x: Align.center(3)
y: Align.top(-spacer)
fontSize: 72
text: actionAmount
color: blue
unitLabel = new TextLayer
parent: page
x: Align.center(3)
y: Align.bottom(-spacer/2)
fontSize: 14
text: indicatorData[i].name
color: blue
labels.push actionLabel
# creating the indicator
indicator = new Layer
parent: dotIndicatorCont
size: indicatorSize
borderRadius: indicatorCont.height
x: (indicatorSize + indicatorGutter) * i
y: Align.center()
name: i
backgroundColor: lightBlue
# creating states for the indicator
indicator.states =
active:
backgroundColor: blue
inactive:
backgroundColor: lightBlue
#pushing indicators into array
indicators.push(indicator)
# Component - States -------------------------------
# shape.states =
# active:
# x: 0
minusBtn.states =
active:
x: Align.center(-indicatorCont.width/1.25)
addBtn.states =
active:
x: Align.center(indicatorCont.width/1.25)
actionBtn.states =
active:
backgroundColor: 'white'
actionBtnLabel.states =
active:
y: Align.top(-spacer*2)
actionBtnLabelCancel.states =
active:
y: Align.center()
actionLabel.states =
goalNotMet:
color: blue
goalMet:
color: "white"
unitLabel.states =
goalNotMet:
color: blue
goalMet:
color: "white"
indicatorCont.states =
goalNotMet:
backgroundColor: "white"
goalMet:
backgroundColor: blue
indicatorFillCont.states =
goalNotMet:
opacity: 1
goalMet:
opacity: 0
fillTop.states =
shiftForward:
x: Align.center(10)
shiftBack:
x: Align.center(-10)
# Component - Animations -------------------------
addBtn.animationOptions =
curve: Spring(damping: .8)
time: 0.95
minusBtn.animationOptions = addBtn.animationOptions
actionBtnLabel.animationOptions = addBtn.animationOptions
actionBtnLabelCancel.animationOptions = addBtn.animationOptions
indicatorFillCont.animationOptions = addBtn.animationOptions
indicatorCont.animationOptions = addBtn.animationOptions
unitLabel.animationOptions = addBtn.animationOptions
actionLabel.animationOptions = addBtn.animationOptions
# Component - Interactions -----------------------
indicatorCont.onTap ->
minusBtn.stateCycle()
addBtn.stateCycle()
actionBtnLabel.stateCycle()
actionBtnLabelCancel.stateCycle()
actionBtn.stateCycle()
# Making the first indicator active
pageScroller.snapToPage(pageScroller.content.children[0])
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Changing indicator state on page change
pageScroller.on "change:currentPage", ->
indicator.states.switch("default") for indicator in indicators
current = pageScroller.horizontalPageIndex(pageScroller.currentPage)
indicators[current].states.switch("active")
# Ounces
if current == 0
addBtn.onTap ->
actionAmount += 1;
labels[0].text = actionAmount * 8
labels[0].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
minusBtn.onTap ->
actionAmount -= 1;
labels[0].text = actionAmount * 8
labels[0].x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
# Cups
if current == 1
addBtn.onTap ->
actionAmount += 1;
labels[1].text = actionAmount
labels[1].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
minusBtn.onTap ->
actionAmount -= 1;
labels[1].text = actionAmount
labels[1].x = Align.center()
actionLabel.x = Align.center()
indicatorFillCont.height = actionAmount * 25
fillTop.stateCycle()
print current
# addBtn.onTap ->
# actionAmount += 1;
# labels[1].text = actionAmount
# labels[0].text = actionAmount * 8
# labels[0].x = Align.center()
# actionLabel.x = Align.center()
# indicatorFillCont.height = actionAmount * 25
# fillTop.stateCycle()
#
# if indicatorFillCont.height > indicatorCont.height
# indicatorFillCont.stateCycle("goalMet")
# indicatorCont.stateCycle("goalMet")
# actionLabel.stateCycle("goalMet")
# unitLabel.stateCycle("goalMet")
#
# else
#
#
# minusBtn.onTap ->
# actionAmount -= 1;
# labels[1].text = actionAmount
# labels[0].text = actionAmount * 8
# actionLabel.x = Align.center()
# indicatorFillCont.height = actionAmount * 25
# fillTop.stateCycle()
#
# if indicatorFillCont.height < indicatorCont.height
# indicatorFillCont.stateCycle("goalNotMet")
# indicatorCont.stateCycle("goalNotMet")
# actionLabel.stateCycle("goalNotMet")
# unitLabel.stateCycle("goalNotMet")
# else
# Component - Z-Index ----------------------------
addBtn.sendToBack()
minusBtn.sendToBack() |
[
{
"context": "# 'distance' module example project V1.0\n# by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn\n",
"end": 56,
"score": 0.9998852014541626,
"start": 46,
"tag": "NAME",
"value": "Marc Krenn"
},
{
"context": "example project V1.0\n# by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn\n\n# Include module\ndistance = requir",
"end": 91,
"score": 0.99992835521698,
"start": 71,
"tag": "EMAIL",
"value": "marc.krenn@gmail.com"
},
{
"context": "by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn\n\n# Include module\ndistance = require \"distance\"\n\n",
"end": 105,
"score": 0.9994835257530212,
"start": 94,
"tag": "USERNAME",
"value": "@marc_krenn"
}
] | distanceExample.framer/app.coffee | marckrenn/framer-distance | 7 | # 'distance' module example project V1.0
# by Marc Krenn, Sept. 2015 | marc.krenn@gmail.com | @marc_krenn
# Include module
distance = require "distance"
# Set background
bg = new BackgroundLayer backgroundColor: "#28affa"
# Create layer
layerA = new Layer
width: 100
height: 100
backgroundColor: "#fff"
borderRadius: "100%"
# Demo reference point
reference = new Layer
width: 20
height: 20
backgroundColor: "#fff"
borderRadius: "100%"
opacity: 0
index: 0
layerA.center()
# Enable dragging
layerA.draggable.enabled = true
# Show demo reference point
layerA.on Events.DragStart, ->
reference.x = layerA.midX - reference.width/2
reference.y = layerA.midY - reference.height/2
reference.opacity = 0.5
layerA.on Events.DragMove, ->
# Returns distance between current and start position of a draggable layer ('this' = 'layerA')
print distance.toDragStart(this)
# Returns distance between two points (point1x, point1y, point2x, point2y)
# print distance.twoPoints(0,0,layerA.midX,layerA.midY)
# Hide demo reference point
layerA.on Events.DragEnd, ->
reference.opacity = 0 | 57618 | # 'distance' module example project V1.0
# by <NAME>, Sept. 2015 | <EMAIL> | @marc_krenn
# Include module
distance = require "distance"
# Set background
bg = new BackgroundLayer backgroundColor: "#28affa"
# Create layer
layerA = new Layer
width: 100
height: 100
backgroundColor: "#fff"
borderRadius: "100%"
# Demo reference point
reference = new Layer
width: 20
height: 20
backgroundColor: "#fff"
borderRadius: "100%"
opacity: 0
index: 0
layerA.center()
# Enable dragging
layerA.draggable.enabled = true
# Show demo reference point
layerA.on Events.DragStart, ->
reference.x = layerA.midX - reference.width/2
reference.y = layerA.midY - reference.height/2
reference.opacity = 0.5
layerA.on Events.DragMove, ->
# Returns distance between current and start position of a draggable layer ('this' = 'layerA')
print distance.toDragStart(this)
# Returns distance between two points (point1x, point1y, point2x, point2y)
# print distance.twoPoints(0,0,layerA.midX,layerA.midY)
# Hide demo reference point
layerA.on Events.DragEnd, ->
reference.opacity = 0 | true | # 'distance' module example project V1.0
# by PI:NAME:<NAME>END_PI, Sept. 2015 | PI:EMAIL:<EMAIL>END_PI | @marc_krenn
# Include module
distance = require "distance"
# Set background
bg = new BackgroundLayer backgroundColor: "#28affa"
# Create layer
layerA = new Layer
width: 100
height: 100
backgroundColor: "#fff"
borderRadius: "100%"
# Demo reference point
reference = new Layer
width: 20
height: 20
backgroundColor: "#fff"
borderRadius: "100%"
opacity: 0
index: 0
layerA.center()
# Enable dragging
layerA.draggable.enabled = true
# Show demo reference point
layerA.on Events.DragStart, ->
reference.x = layerA.midX - reference.width/2
reference.y = layerA.midY - reference.height/2
reference.opacity = 0.5
layerA.on Events.DragMove, ->
# Returns distance between current and start position of a draggable layer ('this' = 'layerA')
print distance.toDragStart(this)
# Returns distance between two points (point1x, point1y, point2x, point2y)
# print distance.twoPoints(0,0,layerA.midX,layerA.midY)
# Hide demo reference point
layerA.on Events.DragEnd, ->
reference.opacity = 0 |
[
{
"context": ").then (s3Options) ->\n key = s3Options.folder + (new Date()).getTime() + '-' +\n S3Uploader",
"end": 1254,
"score": 0.561073899269104,
"start": 1253,
"tag": "KEY",
"value": "+"
},
{
"context": "en (s3Options) ->\n key = s3Options.folder + (new Date()).getTime() + '-' +\n S3Uploader.randomString(16) + \".mp3\"\n ",
"end": 1285,
"score": 0.9368858337402344,
"start": 1256,
"tag": "KEY",
"value": "new Date()).getTime() + '-' +"
},
{
"context": "me() + '-' +\n S3Uploader.randomString(16) + \".mp3\"\n opts = angular.extend({\n submitOnCh",
"end": 1329,
"score": 0.6813743114471436,
"start": 1324,
"tag": "KEY",
"value": "\".mp3"
},
{
"context": "ull\n acl: 'private'\n uploadingKey: 'uploading'\n folder: 'songs/'\n enableValidatio",
"end": 1515,
"score": 0.8115392923355103,
"start": 1506,
"tag": "KEY",
"value": "uploading"
}
] | app/js/features/transmit/transmit_service.coffee | wallpond/song-a-day-ionic | 0 | ###
A simple example service that returns some data.
###
angular.module("songaday")
.factory "TransmitService",($rootScope,$firebaseObject,
$firebaseArray,FBURL,S3Uploader, ngS3Config,SongService,AccountService) ->
# Might use a resource here that returns a JSON array
ref = new Firebase(FBURL+'songs').limit(4)
cloudFrontURI:() ->
'http://d1hmps6uc7xmb3.cloudfront.net/'
awsParamsURI: () ->
'/config/aws.json'
awsFolder: () ->
'songs/'
s3Bucket:()->
'songadays'
transmit:(song,callback) ->
songs = SongService.some()
songs.$loaded ()->
console.log(song)
songs.$add(song).then (new_ref) ->
console.log(new_ref)
callback(new_ref.key())
return
lastTransmission:(callback) ->
AccountService.refresh (myself) ->
console.log(myself)
ref = new Firebase (FBURL+'/songs/' + myself.last_transmission)
last_transmission=$firebaseObject(ref)
last_transmission.$loaded (err) ->
if callback
callback last_transmission
uploadBlob:(blob,callback)->
cloudFront = @cloudFrontURI()
s3Uri = 'https://' + @s3Bucket() + '.s3.amazonaws.com/'
S3Uploader.getUploadOptions(@awsParamsURI()).then (s3Options) ->
key = s3Options.folder + (new Date()).getTime() + '-' +
S3Uploader.randomString(16) + ".mp3"
opts = angular.extend({
submitOnChange: true
getOptionsUri: '/getS3Options'
getManualOptions: null
acl: 'private'
uploadingKey: 'uploading'
folder: 'songs/'
enableValidation: true
targetFilename: null
}, opts)
S3Uploader.upload($rootScope, s3Uri,
key, opts.acl, blob.type,
s3Options.key, s3Options.policy,
s3Options.signature, blob ).then (obj) ->
callback(cloudFront+key)
return
| 105445 | ###
A simple example service that returns some data.
###
angular.module("songaday")
.factory "TransmitService",($rootScope,$firebaseObject,
$firebaseArray,FBURL,S3Uploader, ngS3Config,SongService,AccountService) ->
# Might use a resource here that returns a JSON array
ref = new Firebase(FBURL+'songs').limit(4)
cloudFrontURI:() ->
'http://d1hmps6uc7xmb3.cloudfront.net/'
awsParamsURI: () ->
'/config/aws.json'
awsFolder: () ->
'songs/'
s3Bucket:()->
'songadays'
transmit:(song,callback) ->
songs = SongService.some()
songs.$loaded ()->
console.log(song)
songs.$add(song).then (new_ref) ->
console.log(new_ref)
callback(new_ref.key())
return
lastTransmission:(callback) ->
AccountService.refresh (myself) ->
console.log(myself)
ref = new Firebase (FBURL+'/songs/' + myself.last_transmission)
last_transmission=$firebaseObject(ref)
last_transmission.$loaded (err) ->
if callback
callback last_transmission
uploadBlob:(blob,callback)->
cloudFront = @cloudFrontURI()
s3Uri = 'https://' + @s3Bucket() + '.s3.amazonaws.com/'
S3Uploader.getUploadOptions(@awsParamsURI()).then (s3Options) ->
key = s3Options.folder <KEY> (<KEY>
S3Uploader.randomString(16) + <KEY>"
opts = angular.extend({
submitOnChange: true
getOptionsUri: '/getS3Options'
getManualOptions: null
acl: 'private'
uploadingKey: '<KEY>'
folder: 'songs/'
enableValidation: true
targetFilename: null
}, opts)
S3Uploader.upload($rootScope, s3Uri,
key, opts.acl, blob.type,
s3Options.key, s3Options.policy,
s3Options.signature, blob ).then (obj) ->
callback(cloudFront+key)
return
| true | ###
A simple example service that returns some data.
###
angular.module("songaday")
.factory "TransmitService",($rootScope,$firebaseObject,
$firebaseArray,FBURL,S3Uploader, ngS3Config,SongService,AccountService) ->
# Might use a resource here that returns a JSON array
ref = new Firebase(FBURL+'songs').limit(4)
cloudFrontURI:() ->
'http://d1hmps6uc7xmb3.cloudfront.net/'
awsParamsURI: () ->
'/config/aws.json'
awsFolder: () ->
'songs/'
s3Bucket:()->
'songadays'
transmit:(song,callback) ->
songs = SongService.some()
songs.$loaded ()->
console.log(song)
songs.$add(song).then (new_ref) ->
console.log(new_ref)
callback(new_ref.key())
return
lastTransmission:(callback) ->
AccountService.refresh (myself) ->
console.log(myself)
ref = new Firebase (FBURL+'/songs/' + myself.last_transmission)
last_transmission=$firebaseObject(ref)
last_transmission.$loaded (err) ->
if callback
callback last_transmission
uploadBlob:(blob,callback)->
cloudFront = @cloudFrontURI()
s3Uri = 'https://' + @s3Bucket() + '.s3.amazonaws.com/'
S3Uploader.getUploadOptions(@awsParamsURI()).then (s3Options) ->
key = s3Options.folder PI:KEY:<KEY>END_PI (PI:KEY:<KEY>END_PI
S3Uploader.randomString(16) + PI:KEY:<KEY>END_PI"
opts = angular.extend({
submitOnChange: true
getOptionsUri: '/getS3Options'
getManualOptions: null
acl: 'private'
uploadingKey: 'PI:KEY:<KEY>END_PI'
folder: 'songs/'
enableValidation: true
targetFilename: null
}, opts)
S3Uploader.upload($rootScope, s3Uri,
key, opts.acl, blob.type,
s3Options.key, s3Options.policy,
s3Options.signature, blob ).then (obj) ->
callback(cloudFront+key)
return
|
[
{
"context": "###\n# gettext for jQuery\n#\n# Copyright (c) 2008 Sabin Iacob (m0n5t3r) <iacobs@m0n5t3r.info>\n# This program is",
"end": 59,
"score": 0.9998281598091125,
"start": 48,
"tag": "NAME",
"value": "Sabin Iacob"
},
{
"context": "t for jQuery\n#\n# Copyright (c) 2008 Sabin Iacob (m0n5t3r) <iacobs@m0n5t3r.info>\n# This program is free sof",
"end": 68,
"score": 0.5715003609657288,
"start": 62,
"tag": "USERNAME",
"value": "0n5t3r"
},
{
"context": "ery\n#\n# Copyright (c) 2008 Sabin Iacob (m0n5t3r) <iacobs@m0n5t3r.info>\n# This program is free software: you can redistr",
"end": 90,
"score": 0.9999289512634277,
"start": 71,
"tag": "EMAIL",
"value": "iacobs@m0n5t3r.info"
}
] | index.coffee | printercu/jquery.gettext | 3 | ###
# gettext for jQuery
#
# Copyright (c) 2008 Sabin Iacob (m0n5t3r) <iacobs@m0n5t3r.info>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# @license http://www.gnu.org/licenses/gpl.html
# @project jquery.gettext
#
# Usage:
#
# This plugin expects its input data to be a JSON object like
# {"": header, "string": "translation", ...}
#
# After getting the server side set up (either as a static file - my choice - or
# as a web service), the client side is simple:
# add to the head section of the page something like
# <link href="path/to/translation.json" lang="ro" rel="gettext"/>
# in your script, use $.gt.gettext(string) or _(string); for plural forms, use
# $.gt.ngettext(sg, pl1[, pl2, ...], count) or n_(sg, pl1[, pl2, ...], count)
# to extract strings to a .po file, you can use standard gettext utilities like
# xgettext and msgfmt; to generate the JSON, one could use the following Python
# snippet, assuming a domain.mo file exists under path/lang/LC_MESSAGES:
#
# import simplejson as enc
#
# def gettext_json(domain, path, lang = [], indent = False):
# try:
# tr = gettext.translation(domain, path, lang)
# return enc.dumps(tr._catalog, ensure_ascii = False, indent = indent)
# except IOError:
# return None
#
# why go through the additional hassle of gettext? well, it's a matter of
# preference, the main advantags I see are:
# well known editing tools like KBabel, poEdit, gtranslator, Emacs PO mode,
# etc.
# translation memory, fuzzy matches and other features that get really
# helpful when your application is big and you have hundreds of strings
###
RE_PLURAL = ///
^
Plural-Forms: \s*
nplurals \s* = \s* (\d+); \s*
plural \s* = \s* ([^a-zA-Z0-9\$]* ([a-zA-Z0-9\$]+).+)
$
///m
isArray = (obj) ->
typeof obj is 'object' and obj.constructor is Array
class Gettext
@defaultDomain: 'default'
@interpolate: (str, options) ->
options = lang: options if typeof options is 'string'
{lang, domain} = options
domain ||= @defaultDomain
str.replace(/:lang/g, lang).replace(/:domain/g, domain)
@ajaxPath: '/locale/:lang/LC_MESSAGES/:domain.json'
@ajaxLoad: (file, success, error) ->
xhr = jQuery.getJSON(file, (data) ->
gt = new Gettext(data)
success?.call this, gt
)
xhr.error error if error
xhr
@ajaxLoadLang: (options, args...) ->
@ajaxLoad @interpolate(@ajaxPath, options), args...
@fsPath: ''
@loadFile: (file, callback) ->
require('fs').readFile file, (err, json) =>
return callback err if err
try
data = JSON.parse json
catch e
return callback e
callback null, new @(data)
@loadLangFile: (options, callback) ->
@loadFile @interpolate(@fsPath, options), callback
@loadFileSync: (file) ->
new @(JSON.parse require('fs').readFileSync file)
@loadLangFileSync: (options) ->
@loadFileSync @interpolate(@fsPath, options)
constructor: (@messages = {}) ->
pl = RE_PLURAL.exec @messages[""]
if pl
np = pl[1]
expr = pl[2]
v = pl[3]
try
@plural = eval "(function(#{v}) {return #{expr};})"
plural: (n) ->
n isnt 1
gettext: (msgstr) ->
trans = @messages[msgstr]
# console.log(trans)
return trans if typeof trans is 'string' # regular action
# the translation contains plural(s), yet gettext was called
return trans[0] if isArray(trans)
msgstr
ngettext: (pls..., cnt) ->
sg = pls[0]
trans = @messages[sg] ? pls
# called ngettext, but no plural forms available :-?
return trans if typeof trans is 'string'
if isArray trans
pl = @plural(cnt)
pl = (if pl then 1 else 0) if typeof pl is 'boolean' and pls.length is 2
return trans[pl] if typeof pl is 'number' and pl < trans.length
sg
if module?.exports
module.exports = Gettext
if jQuery?
do ($ = jQuery) ->
$.Gettext = Gettext
$.gt = new Gettext()
lang = $('html').attr('lang')
link = $("link[rel=\"gettext\"][lang=\"#{lang}\"]")
if link.length
Gettext.ajaxLoad link.attr('href'), (gt) -> $.gt = gt
| 103654 | ###
# gettext for jQuery
#
# Copyright (c) 2008 <NAME> (m0n5t3r) <<EMAIL>>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# @license http://www.gnu.org/licenses/gpl.html
# @project jquery.gettext
#
# Usage:
#
# This plugin expects its input data to be a JSON object like
# {"": header, "string": "translation", ...}
#
# After getting the server side set up (either as a static file - my choice - or
# as a web service), the client side is simple:
# add to the head section of the page something like
# <link href="path/to/translation.json" lang="ro" rel="gettext"/>
# in your script, use $.gt.gettext(string) or _(string); for plural forms, use
# $.gt.ngettext(sg, pl1[, pl2, ...], count) or n_(sg, pl1[, pl2, ...], count)
# to extract strings to a .po file, you can use standard gettext utilities like
# xgettext and msgfmt; to generate the JSON, one could use the following Python
# snippet, assuming a domain.mo file exists under path/lang/LC_MESSAGES:
#
# import simplejson as enc
#
# def gettext_json(domain, path, lang = [], indent = False):
# try:
# tr = gettext.translation(domain, path, lang)
# return enc.dumps(tr._catalog, ensure_ascii = False, indent = indent)
# except IOError:
# return None
#
# why go through the additional hassle of gettext? well, it's a matter of
# preference, the main advantags I see are:
# well known editing tools like KBabel, poEdit, gtranslator, Emacs PO mode,
# etc.
# translation memory, fuzzy matches and other features that get really
# helpful when your application is big and you have hundreds of strings
###
RE_PLURAL = ///
^
Plural-Forms: \s*
nplurals \s* = \s* (\d+); \s*
plural \s* = \s* ([^a-zA-Z0-9\$]* ([a-zA-Z0-9\$]+).+)
$
///m
isArray = (obj) ->
typeof obj is 'object' and obj.constructor is Array
class Gettext
@defaultDomain: 'default'
@interpolate: (str, options) ->
options = lang: options if typeof options is 'string'
{lang, domain} = options
domain ||= @defaultDomain
str.replace(/:lang/g, lang).replace(/:domain/g, domain)
@ajaxPath: '/locale/:lang/LC_MESSAGES/:domain.json'
@ajaxLoad: (file, success, error) ->
xhr = jQuery.getJSON(file, (data) ->
gt = new Gettext(data)
success?.call this, gt
)
xhr.error error if error
xhr
@ajaxLoadLang: (options, args...) ->
@ajaxLoad @interpolate(@ajaxPath, options), args...
@fsPath: ''
@loadFile: (file, callback) ->
require('fs').readFile file, (err, json) =>
return callback err if err
try
data = JSON.parse json
catch e
return callback e
callback null, new @(data)
@loadLangFile: (options, callback) ->
@loadFile @interpolate(@fsPath, options), callback
@loadFileSync: (file) ->
new @(JSON.parse require('fs').readFileSync file)
@loadLangFileSync: (options) ->
@loadFileSync @interpolate(@fsPath, options)
constructor: (@messages = {}) ->
pl = RE_PLURAL.exec @messages[""]
if pl
np = pl[1]
expr = pl[2]
v = pl[3]
try
@plural = eval "(function(#{v}) {return #{expr};})"
plural: (n) ->
n isnt 1
gettext: (msgstr) ->
trans = @messages[msgstr]
# console.log(trans)
return trans if typeof trans is 'string' # regular action
# the translation contains plural(s), yet gettext was called
return trans[0] if isArray(trans)
msgstr
ngettext: (pls..., cnt) ->
sg = pls[0]
trans = @messages[sg] ? pls
# called ngettext, but no plural forms available :-?
return trans if typeof trans is 'string'
if isArray trans
pl = @plural(cnt)
pl = (if pl then 1 else 0) if typeof pl is 'boolean' and pls.length is 2
return trans[pl] if typeof pl is 'number' and pl < trans.length
sg
if module?.exports
module.exports = Gettext
if jQuery?
do ($ = jQuery) ->
$.Gettext = Gettext
$.gt = new Gettext()
lang = $('html').attr('lang')
link = $("link[rel=\"gettext\"][lang=\"#{lang}\"]")
if link.length
Gettext.ajaxLoad link.attr('href'), (gt) -> $.gt = gt
| true | ###
# gettext for jQuery
#
# Copyright (c) 2008 PI:NAME:<NAME>END_PI (m0n5t3r) <PI:EMAIL:<EMAIL>END_PI>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# @license http://www.gnu.org/licenses/gpl.html
# @project jquery.gettext
#
# Usage:
#
# This plugin expects its input data to be a JSON object like
# {"": header, "string": "translation", ...}
#
# After getting the server side set up (either as a static file - my choice - or
# as a web service), the client side is simple:
# add to the head section of the page something like
# <link href="path/to/translation.json" lang="ro" rel="gettext"/>
# in your script, use $.gt.gettext(string) or _(string); for plural forms, use
# $.gt.ngettext(sg, pl1[, pl2, ...], count) or n_(sg, pl1[, pl2, ...], count)
# to extract strings to a .po file, you can use standard gettext utilities like
# xgettext and msgfmt; to generate the JSON, one could use the following Python
# snippet, assuming a domain.mo file exists under path/lang/LC_MESSAGES:
#
# import simplejson as enc
#
# def gettext_json(domain, path, lang = [], indent = False):
# try:
# tr = gettext.translation(domain, path, lang)
# return enc.dumps(tr._catalog, ensure_ascii = False, indent = indent)
# except IOError:
# return None
#
# why go through the additional hassle of gettext? well, it's a matter of
# preference, the main advantags I see are:
# well known editing tools like KBabel, poEdit, gtranslator, Emacs PO mode,
# etc.
# translation memory, fuzzy matches and other features that get really
# helpful when your application is big and you have hundreds of strings
###
RE_PLURAL = ///
^
Plural-Forms: \s*
nplurals \s* = \s* (\d+); \s*
plural \s* = \s* ([^a-zA-Z0-9\$]* ([a-zA-Z0-9\$]+).+)
$
///m
isArray = (obj) ->
typeof obj is 'object' and obj.constructor is Array
class Gettext
@defaultDomain: 'default'
@interpolate: (str, options) ->
options = lang: options if typeof options is 'string'
{lang, domain} = options
domain ||= @defaultDomain
str.replace(/:lang/g, lang).replace(/:domain/g, domain)
@ajaxPath: '/locale/:lang/LC_MESSAGES/:domain.json'
@ajaxLoad: (file, success, error) ->
xhr = jQuery.getJSON(file, (data) ->
gt = new Gettext(data)
success?.call this, gt
)
xhr.error error if error
xhr
@ajaxLoadLang: (options, args...) ->
@ajaxLoad @interpolate(@ajaxPath, options), args...
@fsPath: ''
@loadFile: (file, callback) ->
require('fs').readFile file, (err, json) =>
return callback err if err
try
data = JSON.parse json
catch e
return callback e
callback null, new @(data)
@loadLangFile: (options, callback) ->
@loadFile @interpolate(@fsPath, options), callback
@loadFileSync: (file) ->
new @(JSON.parse require('fs').readFileSync file)
@loadLangFileSync: (options) ->
@loadFileSync @interpolate(@fsPath, options)
constructor: (@messages = {}) ->
pl = RE_PLURAL.exec @messages[""]
if pl
np = pl[1]
expr = pl[2]
v = pl[3]
try
@plural = eval "(function(#{v}) {return #{expr};})"
plural: (n) ->
n isnt 1
gettext: (msgstr) ->
trans = @messages[msgstr]
# console.log(trans)
return trans if typeof trans is 'string' # regular action
# the translation contains plural(s), yet gettext was called
return trans[0] if isArray(trans)
msgstr
ngettext: (pls..., cnt) ->
sg = pls[0]
trans = @messages[sg] ? pls
# called ngettext, but no plural forms available :-?
return trans if typeof trans is 'string'
if isArray trans
pl = @plural(cnt)
pl = (if pl then 1 else 0) if typeof pl is 'boolean' and pls.length is 2
return trans[pl] if typeof pl is 'number' and pl < trans.length
sg
if module?.exports
module.exports = Gettext
if jQuery?
do ($ = jQuery) ->
$.Gettext = Gettext
$.gt = new Gettext()
lang = $('html').attr('lang')
link = $("link[rel=\"gettext\"][lang=\"#{lang}\"]")
if link.length
Gettext.ajaxLoad link.attr('href'), (gt) -> $.gt = gt
|
[
{
"context": "#\n# BelongsTo 'user', User, -> @where(username: 'foo')\n# leads to\n# user: -> relation = @bel",
"end": 222,
"score": 0.9989226460456848,
"start": 219,
"tag": "USERNAME",
"value": "foo"
},
{
"context": "related.forEach (obj) ->\n key = \"#{obj.tableName}:#{obj.id}\"\n unless options.destroyingCach",
"end": 3907,
"score": 0.9651778936386108,
"start": 3879,
"tag": "KEY",
"value": "\"#{obj.tableName}:#{obj.id}\""
},
{
"context": "\n if obj?\n key = \"#{obj.tableName}:#{obj.id}\"\n options.destroyingCache[key] ",
"end": 4188,
"score": 0.9795501232147217,
"start": 4160,
"tag": "KEY",
"value": "\"#{obj.tableName}:#{obj.id}\""
},
{
"context": "patchedWhereClauses: (knex, resp) ->\n key = \"__parent.#{@parentIdAttribute}\"\n knex[if resp then 'wh",
"end": 9525,
"score": 0.8116031885147095,
"start": 9513,
"tag": "KEY",
"value": "\"__parent.#{"
},
{
"context": "p) ->\n key = \"__parent.#{@parentIdAttribute}\"\n knex[if resp then 'whereIn' else 'where']",
"end": 9543,
"score": 0.8768579363822937,
"start": 9543,
"tag": "KEY",
"value": ""
},
{
"context": "ons) ->\n accessor = @accessor\n model[@name]().fetch(options).then (related) ->\n r",
"end": 10441,
"score": 0.6729112267494202,
"start": 10436,
"tag": "USERNAME",
"value": "@name"
},
{
"context": "ch (obj) ->\n key = \"#{obj.tableName}:#{obj.id}\"\n unless options.destroying",
"end": 10557,
"score": 0.6932268142700195,
"start": 10555,
"tag": "KEY",
"value": "#{"
},
{
"context": " ->\n key = \"#{obj.tableName}:#{obj.id}\"\n unless options.destroyingCache[ke",
"end": 10565,
"score": 0.7654884457588196,
"start": 10561,
"tag": "KEY",
"value": "id}\""
},
{
"context": " and model.get(polymorphicType)?\n model[@name]().fetch(options).then (obj) ->\n i",
"end": 13687,
"score": 0.9923291206359863,
"start": 13682,
"tag": "USERNAME",
"value": "@name"
}
] | src/relations.coffee | bgaeddert/bookshelf-schema | 44 | ###
#
# BelongsTo 'user', User
# leads to
# user: -> @belongsTo User
#
# BelongsTo User
# leads to
# <User.name.toLowerCase()>: -> @belongsTo User
#
# BelongsTo 'user', User, -> @where(username: 'foo')
# leads to
# user: -> relation = @belongsTo(User); f.call(relation)
#
# class User extends db.Model
# tableName: 'users'
# @schema [
# HasMany Photo
# ]
#
# class Photo extends db.Model
# tableName: 'photos'
# @schema [
# BelongsTo User
# ]
#
# Photo.forge(id: 1).fetch(withRelated: 'user').then (photo) ->
# photo.user # function
# photo.related('user') # Collection
# photo.$user # Collection
# photo.$user.assign(user) # set user_id to user.id and save
#
# User.forge(id: 1).fetch(withRelated: 'photos').then (user) ->
# user.photos # function
# user.related('photos') # Collection
# user.$photos # Collection
# user.$photos.assign(...) # detach all photos and attach listed
# user.$photos.attach(...) # attach listed photos and save them
# user.$photos.detach(...) # detach listed photos
#
# class User extends db.Model
# tableName: 'users'
# @schema [
# HasMany Photo, onDestroy: (cascade|cascade direct|detach|detach direct|reject|ignore)
# ]
#
###
{pluralize, singularize, camelize} = require 'inflection'
{Field, IntField, StringField} = require './fields'
{Fulfilled, Rejected, promiseFinally, values, upperFirst, lowerFirst} = require './utils'
pushField = (schema, name, field) ->
for f in schema when f instanceof Field
return if f.name is name
schema.push field
class Relation
@multiple: false
constructor: (model, options = {}) ->
return new Relation(arguments...) unless this instanceof Relation
@relatedModel = model
@options = options
@name = @_deduceName(@relatedModel)
pluginOption: (name, defaultVal) -> @model.__bookshelf_schema_options[name] or defaultVal
option: (name, pluginOptionName, defaultVal) ->
if arguments.length is 2
defaultVal = pluginOptionName
pluginOptionName = name
value = @options[name]
value = @pluginOption(pluginOptionName, defaultVal) unless value?
value
contributeToSchema: (schema) -> schema.push this
contributeToModel: (cls) ->
@model = cls
@accessor = @options.accessor or @_deduceAccessorName(@name)
cls::[@name] = @createRelation(cls) unless @name of cls.prototype
if @option('createProperty', 'createProperties', true)
@_createProperty(cls)
createRelation: (cls) ->
relation = @_createRelation()
relation = @_applyQuery(relation)
relation = @_applyThrough(relation)
self = this
-> self._augementRelated this, relation.apply(this, arguments)
createGetter: ->
self = this
->
related = @related(self.name)
unless related.__augemented
self._augementRelated this, related
related
createSetter: -> undefined
onDestroy: (model, options) ->
switch @option('onDestroy', 'ignore')
when 'ignore'
return
when 'cascade'
@_destroyCascade model, options
when 'reject'
@_destroyReject model, options
when 'detach'
@_destroyDetach model, options
_destroyCascade: (model, options) ->
if @constructor.multiple
model[@name]().fetch(options).then (related) ->
related.forEach (obj) ->
key = "#{obj.tableName}:#{obj.id}"
unless options.destroyingCache[key]?
options.destroyingCache[key] = obj.destroy(options)
else
model[@name]().fetch(options).then (obj) ->
if obj?
key = "#{obj.tableName}:#{obj.id}"
options.destroyingCache[key] = obj.destroy(options)
_destroyReject: (model, options) ->
if @constructor.multiple
model[@accessor].fetch(options).then (related) ->
for obj in related when "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
return Rejected new Error("destroy was reject")
else
model[@name]().fetch(options).then (obj) ->
if obj and "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
Rejected new Error('destroy rejected')
_destroyDetach: (model, options) ->
if @constructor.multiple
model[@accessor].assign [], options
else
model[@accessor].assign null, options
# TODO: apply withPivot
# TODO: auto-discover withPivot columns from through models schema
_applyThrough: (builder) ->
return builder unless @options.through
interim = @options.through
throughForeignKey = @options.throughForeignKey
otherKey = @options.otherKey
throughForeignKeyTarget = @options.throughForeignKeyTarget
otherKeyTarget = @options.otherKeyTarget
-> builder.call(this).through(interim, throughForeignKey, otherKey, throughForeignKeyTarget, otherKeyTarget)
_applyQuery: (builder) ->
return builder unless @options.query
query = @options.query
-> query.apply builder.call(this)
_augementRelated: (parent, related) ->
return related unless @constructor.injectedMethods
self = this
for name, method of @constructor.injectedMethods
do (method) ->
if name of related
related["_original#{upperFirst(name)}"] = related[name]
related[name] = (args...) ->
args = [parent, self].concat args
method.apply this, args
related.__augemented = true
related
_createProperty: (cls) ->
return if @name is 'id' or @accessor of cls.prototype
spec = {}
getter = @createGetter()
setter = @createSetter()
spec.get = getter if getter
spec.set = setter if setter
Object.defineProperty cls.prototype, @accessor, spec
_relatedModelName: ->
switch
when typeof @relatedModel is 'string'
@relatedModel
when @relatedModel.name
@relatedModel.name
when @relatedModel.displayName
@relatedModel.displayName
when @relatedModel::tableName
singularize camelize @relatedModel::tableName
else
throw new Error("Can't deduce related model name, try to pass \"name\" as an option")
_deduceName: ->
return @options.name if @options.name?
if @constructor.multiple
pluralize lowerFirst(@_relatedModelName())
else
lowerFirst(@_relatedModelName())
_deduceAccessorName: -> "#{@option('accessorPrefix', 'relationsAccessorPrefix', '$')}#{@name}"
class HasOne extends Relation
constructor: (model, options = {}) ->
return new HasOne(arguments...) unless this instanceof HasOne
super
@injectedMethods: require './relations/has_one'
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @hasOne related, foreignKey, foreignKeyTarget
class BelongsTo extends Relation
constructor: (model, options = {}) ->
return new BelongsTo(arguments...) unless this instanceof BelongsTo
super
contributeToSchema: (schema) ->
super
foreignKey = @options.foreignKey or "#{@_relatedModelName().toLowerCase()}_id"
pushField schema, foreignKey, IntField(foreignKey)
@injectedMethods: require './relations/belongs_to'
_destroyDetach: (model, options) ->
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @belongsTo related, foreignKey, foreignKeyTarget
# Patch returned relations joinClauses and whereClauses
# TODO: apply withPivot
# TODO: auto-discover withPivot columns from through models schema
_applyThrough: (builder) ->
return builder unless @options.through
interim = @options.through
throughForeignKey = @options.throughForeignKey
otherKey = @options.otherKey
->
relation = builder.call(this).through(interim, throughForeignKey, otherKey)
relation.relatedData.joinClauses = BelongsTo._patchedJoinClauses
relation.relatedData.whereClauses = BelongsTo._patchedWhereClauses
relation
@_patchedJoinClauses: (knex) ->
joinTable = @joinTable()
targetKey = @key('foreignKey')
knex.join \
joinTable,
joinTable + '.' + targetKey, '=',
@targetTableName + '.' + @targetIdAttribute
knex.join \
"#{@parentTableName} as __parent",
"#{joinTable}.#{@throughIdAttribute}", '=',
"__parent.#{@key('throughForeignKey')}"
@_patchedWhereClauses: (knex, resp) ->
key = "__parent.#{@parentIdAttribute}"
knex[if resp then 'whereIn' else 'where'](key, if resp then @eagerKeys(resp) else @parentFk)
class HasMany extends Relation
@multiple: true
constructor: (model, options = {}) ->
return new HasMany(arguments...) unless this instanceof HasMany
super
@injectedMethods: require './relations/has_many'
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @hasMany related, foreignKey, foreignKeyTarget
class BelongsToMany extends Relation
@multiple: true
constructor: (model, options = {}) ->
return new BelongsToMany(arguments...) unless this instanceof BelongsToMany
super
@injectedMethods: require './relations/belongs_to_many'
_destroyCascade: (model, options) ->
accessor = @accessor
model[@name]().fetch(options).then (related) ->
related.forEach (obj) ->
key = "#{obj.tableName}:#{obj.id}"
unless options.destroyingCache[key]?
pending = model[accessor]
.detach(obj, options)
.then -> obj.destroy(options)
options.destroyingCache[key] = pending
_createRelation: ->
related = @relatedModel
table = @options.table
foreignKey = @options.foreignKey
otherKey = @options.otherKey
foreignKeyTarget = @options.foreignKeyTarget
otherKeyTarget = @options.otherKeyTarget
-> @belongsToMany related, table, foreignKey, otherKey, foreignKeyTarget, otherKeyTarget
class MorphOne extends Relation
constructor: (model, polymorphicName, options = {}) ->
return new MorphOne(arguments...) unless this instanceof MorphOne
unless typeof polymorphicName is 'string'
throw new Error('polymorphicName should be string')
super model, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_one'
_destroyDetach: (model, options) ->
_createRelation: ->
related = @relatedModel
name = @polymorphicName
columnNames = @options.columnNames
morphValue = @options.morphValue
-> @morphOne related, name, columnNames, morphValue
class MorphMany extends Relation
@multiple: true
constructor: (model, polymorphicName, options = {}) ->
return new MorphMany(arguments...) unless this instanceof MorphMany
unless typeof polymorphicName is 'string'
throw new Error('polymorphicName should be string')
super model, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_many'
_createRelation: ->
related = @relatedModel
name = @polymorphicName
columnNames = @options.columnNames
morphValue = @options.morphValue
-> @morphMany related, name, columnNames, morphValue
class MorphTo extends Relation
constructor: (polymorphicName, targets, options = {}) ->
return new MorphTo(arguments...) unless this instanceof MorphTo
options.name = polymorphicName
super targets, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_to'
contributeToSchema: (schema) ->
super
if @options.columnNames
idName = @options.polymorphicName[0]
typeName = @options.polymorphicName[1]
else
idName = "#{@polymorphicName}_id"
typeName = "#{@polymorphicName}_type"
pushField schema, idName, IntField(idName)
pushField schema, typeName, StringField(typeName)
_destroyReject: (model, options) ->
polymorphicId = if @options.columnNames
@options.columnNames[0]
else
"#{@polymorphicName}_id"
polymorphicType = if @options.columnNames
@options.columnNames[1]
else
"#{@polymorphicName}_type"
if model.get(polymorphicId)? \
and model.get(polymorphicType)?
model[@name]().fetch(options).then (obj) ->
if obj and "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
Rejected new Error('destroy rejected')
_destroyDetach: ->
_createRelation: ->
args = [@polymorphicName]
args.push @options.columnNames if @options.columnNames
args = args.concat @relatedModel
-> @morphTo args...
module.exports =
HasOne: HasOne
BelongsTo: BelongsTo
HasMany: HasMany
BelongsToMany: BelongsToMany
MorphOne: MorphOne
MorphMany: MorphMany
MorphTo: MorphTo
| 177231 | ###
#
# BelongsTo 'user', User
# leads to
# user: -> @belongsTo User
#
# BelongsTo User
# leads to
# <User.name.toLowerCase()>: -> @belongsTo User
#
# BelongsTo 'user', User, -> @where(username: 'foo')
# leads to
# user: -> relation = @belongsTo(User); f.call(relation)
#
# class User extends db.Model
# tableName: 'users'
# @schema [
# HasMany Photo
# ]
#
# class Photo extends db.Model
# tableName: 'photos'
# @schema [
# BelongsTo User
# ]
#
# Photo.forge(id: 1).fetch(withRelated: 'user').then (photo) ->
# photo.user # function
# photo.related('user') # Collection
# photo.$user # Collection
# photo.$user.assign(user) # set user_id to user.id and save
#
# User.forge(id: 1).fetch(withRelated: 'photos').then (user) ->
# user.photos # function
# user.related('photos') # Collection
# user.$photos # Collection
# user.$photos.assign(...) # detach all photos and attach listed
# user.$photos.attach(...) # attach listed photos and save them
# user.$photos.detach(...) # detach listed photos
#
# class User extends db.Model
# tableName: 'users'
# @schema [
# HasMany Photo, onDestroy: (cascade|cascade direct|detach|detach direct|reject|ignore)
# ]
#
###
{pluralize, singularize, camelize} = require 'inflection'
{Field, IntField, StringField} = require './fields'
{Fulfilled, Rejected, promiseFinally, values, upperFirst, lowerFirst} = require './utils'
pushField = (schema, name, field) ->
for f in schema when f instanceof Field
return if f.name is name
schema.push field
class Relation
@multiple: false
constructor: (model, options = {}) ->
return new Relation(arguments...) unless this instanceof Relation
@relatedModel = model
@options = options
@name = @_deduceName(@relatedModel)
pluginOption: (name, defaultVal) -> @model.__bookshelf_schema_options[name] or defaultVal
option: (name, pluginOptionName, defaultVal) ->
if arguments.length is 2
defaultVal = pluginOptionName
pluginOptionName = name
value = @options[name]
value = @pluginOption(pluginOptionName, defaultVal) unless value?
value
contributeToSchema: (schema) -> schema.push this
contributeToModel: (cls) ->
@model = cls
@accessor = @options.accessor or @_deduceAccessorName(@name)
cls::[@name] = @createRelation(cls) unless @name of cls.prototype
if @option('createProperty', 'createProperties', true)
@_createProperty(cls)
createRelation: (cls) ->
relation = @_createRelation()
relation = @_applyQuery(relation)
relation = @_applyThrough(relation)
self = this
-> self._augementRelated this, relation.apply(this, arguments)
createGetter: ->
self = this
->
related = @related(self.name)
unless related.__augemented
self._augementRelated this, related
related
createSetter: -> undefined
onDestroy: (model, options) ->
switch @option('onDestroy', 'ignore')
when 'ignore'
return
when 'cascade'
@_destroyCascade model, options
when 'reject'
@_destroyReject model, options
when 'detach'
@_destroyDetach model, options
_destroyCascade: (model, options) ->
if @constructor.multiple
model[@name]().fetch(options).then (related) ->
related.forEach (obj) ->
key = <KEY>
unless options.destroyingCache[key]?
options.destroyingCache[key] = obj.destroy(options)
else
model[@name]().fetch(options).then (obj) ->
if obj?
key = <KEY>
options.destroyingCache[key] = obj.destroy(options)
_destroyReject: (model, options) ->
if @constructor.multiple
model[@accessor].fetch(options).then (related) ->
for obj in related when "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
return Rejected new Error("destroy was reject")
else
model[@name]().fetch(options).then (obj) ->
if obj and "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
Rejected new Error('destroy rejected')
_destroyDetach: (model, options) ->
if @constructor.multiple
model[@accessor].assign [], options
else
model[@accessor].assign null, options
# TODO: apply withPivot
# TODO: auto-discover withPivot columns from through models schema
_applyThrough: (builder) ->
return builder unless @options.through
interim = @options.through
throughForeignKey = @options.throughForeignKey
otherKey = @options.otherKey
throughForeignKeyTarget = @options.throughForeignKeyTarget
otherKeyTarget = @options.otherKeyTarget
-> builder.call(this).through(interim, throughForeignKey, otherKey, throughForeignKeyTarget, otherKeyTarget)
_applyQuery: (builder) ->
return builder unless @options.query
query = @options.query
-> query.apply builder.call(this)
_augementRelated: (parent, related) ->
return related unless @constructor.injectedMethods
self = this
for name, method of @constructor.injectedMethods
do (method) ->
if name of related
related["_original#{upperFirst(name)}"] = related[name]
related[name] = (args...) ->
args = [parent, self].concat args
method.apply this, args
related.__augemented = true
related
_createProperty: (cls) ->
return if @name is 'id' or @accessor of cls.prototype
spec = {}
getter = @createGetter()
setter = @createSetter()
spec.get = getter if getter
spec.set = setter if setter
Object.defineProperty cls.prototype, @accessor, spec
_relatedModelName: ->
switch
when typeof @relatedModel is 'string'
@relatedModel
when @relatedModel.name
@relatedModel.name
when @relatedModel.displayName
@relatedModel.displayName
when @relatedModel::tableName
singularize camelize @relatedModel::tableName
else
throw new Error("Can't deduce related model name, try to pass \"name\" as an option")
_deduceName: ->
return @options.name if @options.name?
if @constructor.multiple
pluralize lowerFirst(@_relatedModelName())
else
lowerFirst(@_relatedModelName())
_deduceAccessorName: -> "#{@option('accessorPrefix', 'relationsAccessorPrefix', '$')}#{@name}"
class HasOne extends Relation
constructor: (model, options = {}) ->
return new HasOne(arguments...) unless this instanceof HasOne
super
@injectedMethods: require './relations/has_one'
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @hasOne related, foreignKey, foreignKeyTarget
class BelongsTo extends Relation
constructor: (model, options = {}) ->
return new BelongsTo(arguments...) unless this instanceof BelongsTo
super
contributeToSchema: (schema) ->
super
foreignKey = @options.foreignKey or "#{@_relatedModelName().toLowerCase()}_id"
pushField schema, foreignKey, IntField(foreignKey)
@injectedMethods: require './relations/belongs_to'
_destroyDetach: (model, options) ->
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @belongsTo related, foreignKey, foreignKeyTarget
# Patch returned relations joinClauses and whereClauses
# TODO: apply withPivot
# TODO: auto-discover withPivot columns from through models schema
_applyThrough: (builder) ->
return builder unless @options.through
interim = @options.through
throughForeignKey = @options.throughForeignKey
otherKey = @options.otherKey
->
relation = builder.call(this).through(interim, throughForeignKey, otherKey)
relation.relatedData.joinClauses = BelongsTo._patchedJoinClauses
relation.relatedData.whereClauses = BelongsTo._patchedWhereClauses
relation
@_patchedJoinClauses: (knex) ->
joinTable = @joinTable()
targetKey = @key('foreignKey')
knex.join \
joinTable,
joinTable + '.' + targetKey, '=',
@targetTableName + '.' + @targetIdAttribute
knex.join \
"#{@parentTableName} as __parent",
"#{joinTable}.#{@throughIdAttribute}", '=',
"__parent.#{@key('throughForeignKey')}"
@_patchedWhereClauses: (knex, resp) ->
key = <KEY>@parentIdAttribute<KEY>}"
knex[if resp then 'whereIn' else 'where'](key, if resp then @eagerKeys(resp) else @parentFk)
class HasMany extends Relation
@multiple: true
constructor: (model, options = {}) ->
return new HasMany(arguments...) unless this instanceof HasMany
super
@injectedMethods: require './relations/has_many'
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @hasMany related, foreignKey, foreignKeyTarget
class BelongsToMany extends Relation
@multiple: true
constructor: (model, options = {}) ->
return new BelongsToMany(arguments...) unless this instanceof BelongsToMany
super
@injectedMethods: require './relations/belongs_to_many'
_destroyCascade: (model, options) ->
accessor = @accessor
model[@name]().fetch(options).then (related) ->
related.forEach (obj) ->
key = "#{obj.tableName}:<KEY>obj.<KEY>
unless options.destroyingCache[key]?
pending = model[accessor]
.detach(obj, options)
.then -> obj.destroy(options)
options.destroyingCache[key] = pending
_createRelation: ->
related = @relatedModel
table = @options.table
foreignKey = @options.foreignKey
otherKey = @options.otherKey
foreignKeyTarget = @options.foreignKeyTarget
otherKeyTarget = @options.otherKeyTarget
-> @belongsToMany related, table, foreignKey, otherKey, foreignKeyTarget, otherKeyTarget
class MorphOne extends Relation
constructor: (model, polymorphicName, options = {}) ->
return new MorphOne(arguments...) unless this instanceof MorphOne
unless typeof polymorphicName is 'string'
throw new Error('polymorphicName should be string')
super model, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_one'
_destroyDetach: (model, options) ->
_createRelation: ->
related = @relatedModel
name = @polymorphicName
columnNames = @options.columnNames
morphValue = @options.morphValue
-> @morphOne related, name, columnNames, morphValue
class MorphMany extends Relation
@multiple: true
constructor: (model, polymorphicName, options = {}) ->
return new MorphMany(arguments...) unless this instanceof MorphMany
unless typeof polymorphicName is 'string'
throw new Error('polymorphicName should be string')
super model, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_many'
_createRelation: ->
related = @relatedModel
name = @polymorphicName
columnNames = @options.columnNames
morphValue = @options.morphValue
-> @morphMany related, name, columnNames, morphValue
class MorphTo extends Relation
constructor: (polymorphicName, targets, options = {}) ->
return new MorphTo(arguments...) unless this instanceof MorphTo
options.name = polymorphicName
super targets, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_to'
contributeToSchema: (schema) ->
super
if @options.columnNames
idName = @options.polymorphicName[0]
typeName = @options.polymorphicName[1]
else
idName = "#{@polymorphicName}_id"
typeName = "#{@polymorphicName}_type"
pushField schema, idName, IntField(idName)
pushField schema, typeName, StringField(typeName)
_destroyReject: (model, options) ->
polymorphicId = if @options.columnNames
@options.columnNames[0]
else
"#{@polymorphicName}_id"
polymorphicType = if @options.columnNames
@options.columnNames[1]
else
"#{@polymorphicName}_type"
if model.get(polymorphicId)? \
and model.get(polymorphicType)?
model[@name]().fetch(options).then (obj) ->
if obj and "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
Rejected new Error('destroy rejected')
_destroyDetach: ->
_createRelation: ->
args = [@polymorphicName]
args.push @options.columnNames if @options.columnNames
args = args.concat @relatedModel
-> @morphTo args...
module.exports =
HasOne: HasOne
BelongsTo: BelongsTo
HasMany: HasMany
BelongsToMany: BelongsToMany
MorphOne: MorphOne
MorphMany: MorphMany
MorphTo: MorphTo
| true | ###
#
# BelongsTo 'user', User
# leads to
# user: -> @belongsTo User
#
# BelongsTo User
# leads to
# <User.name.toLowerCase()>: -> @belongsTo User
#
# BelongsTo 'user', User, -> @where(username: 'foo')
# leads to
# user: -> relation = @belongsTo(User); f.call(relation)
#
# class User extends db.Model
# tableName: 'users'
# @schema [
# HasMany Photo
# ]
#
# class Photo extends db.Model
# tableName: 'photos'
# @schema [
# BelongsTo User
# ]
#
# Photo.forge(id: 1).fetch(withRelated: 'user').then (photo) ->
# photo.user # function
# photo.related('user') # Collection
# photo.$user # Collection
# photo.$user.assign(user) # set user_id to user.id and save
#
# User.forge(id: 1).fetch(withRelated: 'photos').then (user) ->
# user.photos # function
# user.related('photos') # Collection
# user.$photos # Collection
# user.$photos.assign(...) # detach all photos and attach listed
# user.$photos.attach(...) # attach listed photos and save them
# user.$photos.detach(...) # detach listed photos
#
# class User extends db.Model
# tableName: 'users'
# @schema [
# HasMany Photo, onDestroy: (cascade|cascade direct|detach|detach direct|reject|ignore)
# ]
#
###
{pluralize, singularize, camelize} = require 'inflection'
{Field, IntField, StringField} = require './fields'
{Fulfilled, Rejected, promiseFinally, values, upperFirst, lowerFirst} = require './utils'
pushField = (schema, name, field) ->
for f in schema when f instanceof Field
return if f.name is name
schema.push field
class Relation
@multiple: false
constructor: (model, options = {}) ->
return new Relation(arguments...) unless this instanceof Relation
@relatedModel = model
@options = options
@name = @_deduceName(@relatedModel)
pluginOption: (name, defaultVal) -> @model.__bookshelf_schema_options[name] or defaultVal
option: (name, pluginOptionName, defaultVal) ->
if arguments.length is 2
defaultVal = pluginOptionName
pluginOptionName = name
value = @options[name]
value = @pluginOption(pluginOptionName, defaultVal) unless value?
value
contributeToSchema: (schema) -> schema.push this
contributeToModel: (cls) ->
@model = cls
@accessor = @options.accessor or @_deduceAccessorName(@name)
cls::[@name] = @createRelation(cls) unless @name of cls.prototype
if @option('createProperty', 'createProperties', true)
@_createProperty(cls)
createRelation: (cls) ->
relation = @_createRelation()
relation = @_applyQuery(relation)
relation = @_applyThrough(relation)
self = this
-> self._augementRelated this, relation.apply(this, arguments)
createGetter: ->
self = this
->
related = @related(self.name)
unless related.__augemented
self._augementRelated this, related
related
createSetter: -> undefined
onDestroy: (model, options) ->
switch @option('onDestroy', 'ignore')
when 'ignore'
return
when 'cascade'
@_destroyCascade model, options
when 'reject'
@_destroyReject model, options
when 'detach'
@_destroyDetach model, options
_destroyCascade: (model, options) ->
if @constructor.multiple
model[@name]().fetch(options).then (related) ->
related.forEach (obj) ->
key = PI:KEY:<KEY>END_PI
unless options.destroyingCache[key]?
options.destroyingCache[key] = obj.destroy(options)
else
model[@name]().fetch(options).then (obj) ->
if obj?
key = PI:KEY:<KEY>END_PI
options.destroyingCache[key] = obj.destroy(options)
_destroyReject: (model, options) ->
if @constructor.multiple
model[@accessor].fetch(options).then (related) ->
for obj in related when "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
return Rejected new Error("destroy was reject")
else
model[@name]().fetch(options).then (obj) ->
if obj and "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
Rejected new Error('destroy rejected')
_destroyDetach: (model, options) ->
if @constructor.multiple
model[@accessor].assign [], options
else
model[@accessor].assign null, options
# TODO: apply withPivot
# TODO: auto-discover withPivot columns from through models schema
_applyThrough: (builder) ->
return builder unless @options.through
interim = @options.through
throughForeignKey = @options.throughForeignKey
otherKey = @options.otherKey
throughForeignKeyTarget = @options.throughForeignKeyTarget
otherKeyTarget = @options.otherKeyTarget
-> builder.call(this).through(interim, throughForeignKey, otherKey, throughForeignKeyTarget, otherKeyTarget)
_applyQuery: (builder) ->
return builder unless @options.query
query = @options.query
-> query.apply builder.call(this)
_augementRelated: (parent, related) ->
return related unless @constructor.injectedMethods
self = this
for name, method of @constructor.injectedMethods
do (method) ->
if name of related
related["_original#{upperFirst(name)}"] = related[name]
related[name] = (args...) ->
args = [parent, self].concat args
method.apply this, args
related.__augemented = true
related
_createProperty: (cls) ->
return if @name is 'id' or @accessor of cls.prototype
spec = {}
getter = @createGetter()
setter = @createSetter()
spec.get = getter if getter
spec.set = setter if setter
Object.defineProperty cls.prototype, @accessor, spec
_relatedModelName: ->
switch
when typeof @relatedModel is 'string'
@relatedModel
when @relatedModel.name
@relatedModel.name
when @relatedModel.displayName
@relatedModel.displayName
when @relatedModel::tableName
singularize camelize @relatedModel::tableName
else
throw new Error("Can't deduce related model name, try to pass \"name\" as an option")
_deduceName: ->
return @options.name if @options.name?
if @constructor.multiple
pluralize lowerFirst(@_relatedModelName())
else
lowerFirst(@_relatedModelName())
_deduceAccessorName: -> "#{@option('accessorPrefix', 'relationsAccessorPrefix', '$')}#{@name}"
class HasOne extends Relation
constructor: (model, options = {}) ->
return new HasOne(arguments...) unless this instanceof HasOne
super
@injectedMethods: require './relations/has_one'
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @hasOne related, foreignKey, foreignKeyTarget
class BelongsTo extends Relation
constructor: (model, options = {}) ->
return new BelongsTo(arguments...) unless this instanceof BelongsTo
super
contributeToSchema: (schema) ->
super
foreignKey = @options.foreignKey or "#{@_relatedModelName().toLowerCase()}_id"
pushField schema, foreignKey, IntField(foreignKey)
@injectedMethods: require './relations/belongs_to'
_destroyDetach: (model, options) ->
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @belongsTo related, foreignKey, foreignKeyTarget
# Patch returned relations joinClauses and whereClauses
# TODO: apply withPivot
# TODO: auto-discover withPivot columns from through models schema
_applyThrough: (builder) ->
return builder unless @options.through
interim = @options.through
throughForeignKey = @options.throughForeignKey
otherKey = @options.otherKey
->
relation = builder.call(this).through(interim, throughForeignKey, otherKey)
relation.relatedData.joinClauses = BelongsTo._patchedJoinClauses
relation.relatedData.whereClauses = BelongsTo._patchedWhereClauses
relation
@_patchedJoinClauses: (knex) ->
joinTable = @joinTable()
targetKey = @key('foreignKey')
knex.join \
joinTable,
joinTable + '.' + targetKey, '=',
@targetTableName + '.' + @targetIdAttribute
knex.join \
"#{@parentTableName} as __parent",
"#{joinTable}.#{@throughIdAttribute}", '=',
"__parent.#{@key('throughForeignKey')}"
@_patchedWhereClauses: (knex, resp) ->
key = PI:KEY:<KEY>END_PI@parentIdAttributePI:KEY:<KEY>END_PI}"
knex[if resp then 'whereIn' else 'where'](key, if resp then @eagerKeys(resp) else @parentFk)
class HasMany extends Relation
@multiple: true
constructor: (model, options = {}) ->
return new HasMany(arguments...) unless this instanceof HasMany
super
@injectedMethods: require './relations/has_many'
_createRelation: ->
related = @relatedModel
foreignKey = @options.foreignKey
foreignKeyTarget = @options.foreignKeyTarget
-> @hasMany related, foreignKey, foreignKeyTarget
class BelongsToMany extends Relation
@multiple: true
constructor: (model, options = {}) ->
return new BelongsToMany(arguments...) unless this instanceof BelongsToMany
super
@injectedMethods: require './relations/belongs_to_many'
_destroyCascade: (model, options) ->
accessor = @accessor
model[@name]().fetch(options).then (related) ->
related.forEach (obj) ->
key = "#{obj.tableName}:PI:KEY:<KEY>END_PIobj.PI:KEY:<KEY>END_PI
unless options.destroyingCache[key]?
pending = model[accessor]
.detach(obj, options)
.then -> obj.destroy(options)
options.destroyingCache[key] = pending
_createRelation: ->
related = @relatedModel
table = @options.table
foreignKey = @options.foreignKey
otherKey = @options.otherKey
foreignKeyTarget = @options.foreignKeyTarget
otherKeyTarget = @options.otherKeyTarget
-> @belongsToMany related, table, foreignKey, otherKey, foreignKeyTarget, otherKeyTarget
class MorphOne extends Relation
constructor: (model, polymorphicName, options = {}) ->
return new MorphOne(arguments...) unless this instanceof MorphOne
unless typeof polymorphicName is 'string'
throw new Error('polymorphicName should be string')
super model, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_one'
_destroyDetach: (model, options) ->
_createRelation: ->
related = @relatedModel
name = @polymorphicName
columnNames = @options.columnNames
morphValue = @options.morphValue
-> @morphOne related, name, columnNames, morphValue
class MorphMany extends Relation
@multiple: true
constructor: (model, polymorphicName, options = {}) ->
return new MorphMany(arguments...) unless this instanceof MorphMany
unless typeof polymorphicName is 'string'
throw new Error('polymorphicName should be string')
super model, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_many'
_createRelation: ->
related = @relatedModel
name = @polymorphicName
columnNames = @options.columnNames
morphValue = @options.morphValue
-> @morphMany related, name, columnNames, morphValue
class MorphTo extends Relation
constructor: (polymorphicName, targets, options = {}) ->
return new MorphTo(arguments...) unless this instanceof MorphTo
options.name = polymorphicName
super targets, options
@polymorphicName = polymorphicName
@injectedMethods: require './relations/morph_to'
contributeToSchema: (schema) ->
super
if @options.columnNames
idName = @options.polymorphicName[0]
typeName = @options.polymorphicName[1]
else
idName = "#{@polymorphicName}_id"
typeName = "#{@polymorphicName}_type"
pushField schema, idName, IntField(idName)
pushField schema, typeName, StringField(typeName)
_destroyReject: (model, options) ->
polymorphicId = if @options.columnNames
@options.columnNames[0]
else
"#{@polymorphicName}_id"
polymorphicType = if @options.columnNames
@options.columnNames[1]
else
"#{@polymorphicName}_type"
if model.get(polymorphicId)? \
and model.get(polymorphicType)?
model[@name]().fetch(options).then (obj) ->
if obj and "#{obj.tableName}:#{obj.id}" not of options.destroyingCache
Rejected new Error('destroy rejected')
_destroyDetach: ->
_createRelation: ->
args = [@polymorphicName]
args.push @options.columnNames if @options.columnNames
args = args.concat @relatedModel
-> @morphTo args...
module.exports =
HasOne: HasOne
BelongsTo: BelongsTo
HasMany: HasMany
BelongsToMany: BelongsToMany
MorphOne: MorphOne
MorphMany: MorphMany
MorphTo: MorphTo
|
[
{
"context": "te <username> - Deactivate account\n#\n# Author:\n# Pablo M.\n\nxml2js = require 'xml2js'\nutil = require 'util'",
"end": 501,
"score": 0.9996581673622131,
"start": 494,
"tag": "NAME",
"value": "Pablo M"
},
{
"context": ">\n bdusername = msg.match[1]\n bdpassword = \"?\"\n bdstatus = \"2\"\n accountmngmnt msg,bduserna",
"end": 5712,
"score": 0.9369129538536072,
"start": 5712,
"tag": "PASSWORD",
"value": ""
}
] | src/bydesign.coffee | Ev1l/bydesign.coffee | 0 | # Description:
# Manage ByDesign Accounts using Hubot
#
# Dependencies:
# "xml2js": "0.1.14"
#
# Configuration:
# HUBOT_BYDESIGN_API_HOST
# HUBOT_BYDESIGN_API_PATH
# HUBOT_BYDESIGN_API_ACTION
# HUBOT_BYDESIGN_API_USERNAME
# HUBOT_BYDESIGN_API_PASSWORD
#
# Commands:
# hubot bydesign activate <username> - Activate account
# hubot bydesign reset <username> <password> - Reset password of an account
# hubot bydesign deactivate <username> - Deactivate account
#
# Author:
# Pablo M.
xml2js = require 'xml2js'
util = require 'util'
# The domain for the API host like "api.URL.com"
host = process.env.HUBOT_BYDESIGN_API_HOST
# The path for the api "/CUSTOMERGATEWAY/personal/webservice/mdbapi.asmx"
path = process.env.HUBOT_BYDESIGN_API_PATH
# The SOAPAction call "http://www.URL.com/UserAccountManagement"
action = process.env.HUBOT_BYDESIGN_API_ACTION
apiuser = process.env.HUBOT_BYDESIGN_API_USERNAME
apipass = process.env.HUBOT_BYDESIGN_API_PASSWORD
wrapInEnvelope = (body) ->
"""
<x:Envelope xmlns:x="http://schemas.xmlsoap.org/soap/envelope/" xmlns:www="http://www.securefreedom.com">
<x:Body>#{body}</x:Body>
</x:Envelope>
"""
getURL = (path) ->
"https://#{host}#{path}"
makeRequest = (msg, path, action, body, response, cb) ->
wrappedBody = wrapInEnvelope body
msg.http(getURL path).header('SOAPAction', action).header('Content-type', 'text/xml; charset=utf-8')
.post(wrappedBody) (err, resp, body) ->
parser = new xml2js.Parser({ explicitArray : false, ignoreAttrs : true })
parser.parseString body, (err, json) ->
jstring = JSON.stringify(json)
js = JSON.parse(jstring)
#example of grabbing objects that match some key and value in JSON
#return an array of objects according to key, value, or key and value matching
getObjects = (obj, key, val) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getObjects(obj[i], key, val))
else if i == key and obj[i] == val or i == key and val == ''
objects.push obj
else if obj[i] == val and key == ''
#only add if the object is not already in the array
if objects.lastIndexOf(obj) == -1
objects.push obj
objects
#return an array of values that match on a certain key
getValues = (obj, key) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getValues(obj[i], key))
else if i == key
objects.push obj[i]
objects
#return an array of keys that match on a certain value
getKeys = (obj, val) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getKeys(obj[i], val))
else if obj[i] == val
objects.push i
objects
#Examples
#console.log getObjects(js, 'Success', '1')
#returns 1 object where a key names Success has the value 1
#example of grabbing objects that match some key in JSON
#console.log getObjects(js, 'Message', '')
#example of grabbing objects that match some value in JSON
#console.log getObjects(js, '', '1')
#example of grabbing objects that match some key in JSON
#console.log getObjects(js, 'Sucess', '')
#example of grabbing values from any key passed in JSON
#console.log getValues(js, 'Message')
#example of grabbing keys by searching via values in JSON
#console.log getKeys(js, '1')
if (err)
msg.send "An error occurred"
console.log "An error occurred: #{err}"
else
msg.send "#{getValues(js, 'Message')}"
console.log "Action completed succesfully with message #{getValues(js, 'Message')} Request initated by #{msg.envelope.user.name}"
accountmngmnt = (msg,bdusername,bdpassword,bdstatus) ->
if bdusername?
body = """
<www:UserAccountManagement>
<www:Credentials>
<www:Username>#{apiuser}</www:Username>
<www:Password>#{apipass}</www:Password>
</www:Credentials>
<www:UserNames>
<www:UserNames>
<www:UserName>#{bdusername}</www:UserName>
</www:UserNames>
</www:UserNames>
<www:Password>#{bdpassword}</www:Password>
<www:AccountStatus>#{bdstatus}</www:AccountStatus>
</www:UserAccountManagement>
"""
makeRequest msg, path, action, body, 'Success', (obj) ->
module.exports = (robot) ->
robot.respond /bydesign activate (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = "?"
bdstatus = "1"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD activation request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
robot.respond /bydesign reset (.*) (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = msg.match[2]
bdstatus = "0"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD PW reset request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
robot.respond /bydesign deactivate (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = "?"
bdstatus = "2"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD deactivation request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
| 30833 | # Description:
# Manage ByDesign Accounts using Hubot
#
# Dependencies:
# "xml2js": "0.1.14"
#
# Configuration:
# HUBOT_BYDESIGN_API_HOST
# HUBOT_BYDESIGN_API_PATH
# HUBOT_BYDESIGN_API_ACTION
# HUBOT_BYDESIGN_API_USERNAME
# HUBOT_BYDESIGN_API_PASSWORD
#
# Commands:
# hubot bydesign activate <username> - Activate account
# hubot bydesign reset <username> <password> - Reset password of an account
# hubot bydesign deactivate <username> - Deactivate account
#
# Author:
# <NAME>.
xml2js = require 'xml2js'
util = require 'util'
# The domain for the API host like "api.URL.com"
host = process.env.HUBOT_BYDESIGN_API_HOST
# The path for the api "/CUSTOMERGATEWAY/personal/webservice/mdbapi.asmx"
path = process.env.HUBOT_BYDESIGN_API_PATH
# The SOAPAction call "http://www.URL.com/UserAccountManagement"
action = process.env.HUBOT_BYDESIGN_API_ACTION
apiuser = process.env.HUBOT_BYDESIGN_API_USERNAME
apipass = process.env.HUBOT_BYDESIGN_API_PASSWORD
wrapInEnvelope = (body) ->
"""
<x:Envelope xmlns:x="http://schemas.xmlsoap.org/soap/envelope/" xmlns:www="http://www.securefreedom.com">
<x:Body>#{body}</x:Body>
</x:Envelope>
"""
getURL = (path) ->
"https://#{host}#{path}"
makeRequest = (msg, path, action, body, response, cb) ->
wrappedBody = wrapInEnvelope body
msg.http(getURL path).header('SOAPAction', action).header('Content-type', 'text/xml; charset=utf-8')
.post(wrappedBody) (err, resp, body) ->
parser = new xml2js.Parser({ explicitArray : false, ignoreAttrs : true })
parser.parseString body, (err, json) ->
jstring = JSON.stringify(json)
js = JSON.parse(jstring)
#example of grabbing objects that match some key and value in JSON
#return an array of objects according to key, value, or key and value matching
getObjects = (obj, key, val) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getObjects(obj[i], key, val))
else if i == key and obj[i] == val or i == key and val == ''
objects.push obj
else if obj[i] == val and key == ''
#only add if the object is not already in the array
if objects.lastIndexOf(obj) == -1
objects.push obj
objects
#return an array of values that match on a certain key
getValues = (obj, key) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getValues(obj[i], key))
else if i == key
objects.push obj[i]
objects
#return an array of keys that match on a certain value
getKeys = (obj, val) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getKeys(obj[i], val))
else if obj[i] == val
objects.push i
objects
#Examples
#console.log getObjects(js, 'Success', '1')
#returns 1 object where a key names Success has the value 1
#example of grabbing objects that match some key in JSON
#console.log getObjects(js, 'Message', '')
#example of grabbing objects that match some value in JSON
#console.log getObjects(js, '', '1')
#example of grabbing objects that match some key in JSON
#console.log getObjects(js, 'Sucess', '')
#example of grabbing values from any key passed in JSON
#console.log getValues(js, 'Message')
#example of grabbing keys by searching via values in JSON
#console.log getKeys(js, '1')
if (err)
msg.send "An error occurred"
console.log "An error occurred: #{err}"
else
msg.send "#{getValues(js, 'Message')}"
console.log "Action completed succesfully with message #{getValues(js, 'Message')} Request initated by #{msg.envelope.user.name}"
accountmngmnt = (msg,bdusername,bdpassword,bdstatus) ->
if bdusername?
body = """
<www:UserAccountManagement>
<www:Credentials>
<www:Username>#{apiuser}</www:Username>
<www:Password>#{apipass}</www:Password>
</www:Credentials>
<www:UserNames>
<www:UserNames>
<www:UserName>#{bdusername}</www:UserName>
</www:UserNames>
</www:UserNames>
<www:Password>#{bdpassword}</www:Password>
<www:AccountStatus>#{bdstatus}</www:AccountStatus>
</www:UserAccountManagement>
"""
makeRequest msg, path, action, body, 'Success', (obj) ->
module.exports = (robot) ->
robot.respond /bydesign activate (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = "?"
bdstatus = "1"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD activation request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
robot.respond /bydesign reset (.*) (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = msg.match[2]
bdstatus = "0"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD PW reset request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
robot.respond /bydesign deactivate (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = "<PASSWORD>?"
bdstatus = "2"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD deactivation request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
| true | # Description:
# Manage ByDesign Accounts using Hubot
#
# Dependencies:
# "xml2js": "0.1.14"
#
# Configuration:
# HUBOT_BYDESIGN_API_HOST
# HUBOT_BYDESIGN_API_PATH
# HUBOT_BYDESIGN_API_ACTION
# HUBOT_BYDESIGN_API_USERNAME
# HUBOT_BYDESIGN_API_PASSWORD
#
# Commands:
# hubot bydesign activate <username> - Activate account
# hubot bydesign reset <username> <password> - Reset password of an account
# hubot bydesign deactivate <username> - Deactivate account
#
# Author:
# PI:NAME:<NAME>END_PI.
xml2js = require 'xml2js'
util = require 'util'
# The domain for the API host like "api.URL.com"
host = process.env.HUBOT_BYDESIGN_API_HOST
# The path for the api "/CUSTOMERGATEWAY/personal/webservice/mdbapi.asmx"
path = process.env.HUBOT_BYDESIGN_API_PATH
# The SOAPAction call "http://www.URL.com/UserAccountManagement"
action = process.env.HUBOT_BYDESIGN_API_ACTION
apiuser = process.env.HUBOT_BYDESIGN_API_USERNAME
apipass = process.env.HUBOT_BYDESIGN_API_PASSWORD
wrapInEnvelope = (body) ->
"""
<x:Envelope xmlns:x="http://schemas.xmlsoap.org/soap/envelope/" xmlns:www="http://www.securefreedom.com">
<x:Body>#{body}</x:Body>
</x:Envelope>
"""
getURL = (path) ->
"https://#{host}#{path}"
makeRequest = (msg, path, action, body, response, cb) ->
wrappedBody = wrapInEnvelope body
msg.http(getURL path).header('SOAPAction', action).header('Content-type', 'text/xml; charset=utf-8')
.post(wrappedBody) (err, resp, body) ->
parser = new xml2js.Parser({ explicitArray : false, ignoreAttrs : true })
parser.parseString body, (err, json) ->
jstring = JSON.stringify(json)
js = JSON.parse(jstring)
#example of grabbing objects that match some key and value in JSON
#return an array of objects according to key, value, or key and value matching
getObjects = (obj, key, val) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getObjects(obj[i], key, val))
else if i == key and obj[i] == val or i == key and val == ''
objects.push obj
else if obj[i] == val and key == ''
#only add if the object is not already in the array
if objects.lastIndexOf(obj) == -1
objects.push obj
objects
#return an array of values that match on a certain key
getValues = (obj, key) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getValues(obj[i], key))
else if i == key
objects.push obj[i]
objects
#return an array of keys that match on a certain value
getKeys = (obj, val) ->
objects = []
for i of obj
if !obj.hasOwnProperty(i)
continue
if typeof obj[i] == 'object'
objects = objects.concat(getKeys(obj[i], val))
else if obj[i] == val
objects.push i
objects
#Examples
#console.log getObjects(js, 'Success', '1')
#returns 1 object where a key names Success has the value 1
#example of grabbing objects that match some key in JSON
#console.log getObjects(js, 'Message', '')
#example of grabbing objects that match some value in JSON
#console.log getObjects(js, '', '1')
#example of grabbing objects that match some key in JSON
#console.log getObjects(js, 'Sucess', '')
#example of grabbing values from any key passed in JSON
#console.log getValues(js, 'Message')
#example of grabbing keys by searching via values in JSON
#console.log getKeys(js, '1')
if (err)
msg.send "An error occurred"
console.log "An error occurred: #{err}"
else
msg.send "#{getValues(js, 'Message')}"
console.log "Action completed succesfully with message #{getValues(js, 'Message')} Request initated by #{msg.envelope.user.name}"
accountmngmnt = (msg,bdusername,bdpassword,bdstatus) ->
if bdusername?
body = """
<www:UserAccountManagement>
<www:Credentials>
<www:Username>#{apiuser}</www:Username>
<www:Password>#{apipass}</www:Password>
</www:Credentials>
<www:UserNames>
<www:UserNames>
<www:UserName>#{bdusername}</www:UserName>
</www:UserNames>
</www:UserNames>
<www:Password>#{bdpassword}</www:Password>
<www:AccountStatus>#{bdstatus}</www:AccountStatus>
</www:UserAccountManagement>
"""
makeRequest msg, path, action, body, 'Success', (obj) ->
module.exports = (robot) ->
robot.respond /bydesign activate (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = "?"
bdstatus = "1"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD activation request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
robot.respond /bydesign reset (.*) (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = msg.match[2]
bdstatus = "0"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD PW reset request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
robot.respond /bydesign deactivate (.*)/i, (msg) ->
bdusername = msg.match[1]
bdpassword = "PI:PASSWORD:<PASSWORD>END_PI?"
bdstatus = "2"
accountmngmnt msg,bdusername,bdpassword,bdstatus
robot.logger.info "Processing BD deactivation request from #{msg.envelope.user.name}, Data: #{msg} #{bdusername} #{bdpassword} #{bdstatus}"
|
[
{
"context": "ext Completions converted from https://github.com/Southclaw/pawn-sublime-language\n# Converter created by Rena",
"end": 100,
"score": 0.999620258808136,
"start": 91,
"tag": "USERNAME",
"value": "Southclaw"
},
{
"context": "hclaw/pawn-sublime-language\n# Converter created by Renato \"Hii\" Garcia.\n# Repo: https://github.com/Renato-Gar",
"end": 152,
"score": 0.8957919478416443,
"start": 146,
"tag": "NAME",
"value": "Renato"
},
{
"context": "n-sublime-language\n# Converter created by Renato \"Hii\" Garcia.\n# Repo: https://github.com/Renato-Garcia/",
"end": 157,
"score": 0.9266020655632019,
"start": 154,
"tag": "NAME",
"value": "Hii"
},
{
"context": "blime-language\n# Converter created by Renato \"Hii\" Garcia.\n# Repo: https://github.com/Renato-Garcia/sublime",
"end": 165,
"score": 0.9712100028991699,
"start": 159,
"tag": "NAME",
"value": "Garcia"
},
{
"context": "y Renato \"Hii\" Garcia.\n# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets\n'.source.pwn",
"end": 207,
"score": 0.9992713332176208,
"start": 194,
"tag": "USERNAME",
"value": "Renato-Garcia"
}
] | snippets/irc.cson | Wuzi/language-pawn | 4 | # IRC Plugin Atom Snippets from Sublime Text Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by Renato "Hii" Garcia.
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'IRC_Connect':
'prefix': 'IRC_Connect'
'body': 'IRC_Connect(${1:const server[]}, ${2:port}, ${3:const nickname[]}, ${4:const realname[]}, ${5:const username[]}, ${6:bool:ssl = false}, ${7:const localip[] = \"\"}, ${8:const serverpassword[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Quit':
'prefix': 'IRC_Quit'
'body': 'IRC_Quit(${1:botid}, ${2:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_JoinChannel':
'prefix': 'IRC_JoinChannel'
'body': 'IRC_JoinChannel(${1:botid}, ${2:const channel[]}, ${3:const key[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_PartChannel':
'prefix': 'IRC_PartChannel'
'body': 'IRC_PartChannel(${1:botid}, ${2:const channel[]}, ${3:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_ChangeNick':
'prefix': 'IRC_ChangeNick'
'body': 'IRC_ChangeNick(${1:botid}, ${2:const nick[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetMode':
'prefix': 'IRC_SetMode'
'body': 'IRC_SetMode(${1:botid}, ${2:const target[]}, ${3:const mode[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Say':
'prefix': 'IRC_Say'
'body': 'IRC_Say(${1:botid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Notice':
'prefix': 'IRC_Notice'
'body': 'IRC_Notice(${1:botid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsUserOnChannel':
'prefix': 'IRC_IsUserOnChannel'
'body': 'IRC_IsUserOnChannel(${1:botid}, ${2:const channel[]}, ${3:const user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_InviteUser':
'prefix': 'IRC_InviteUser'
'body': 'IRC_InviteUser(${1:botid}, ${2:const channel[]}, ${3:const user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_KickUser':
'prefix': 'IRC_KickUser'
'body': 'IRC_KickUser(${1:botid}, ${2:const channel[]}, ${3:const user[]}, ${4:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GetUserChannelMode':
'prefix': 'IRC_GetUserChannelMode'
'body': 'IRC_GetUserChannelMode(${1:botid}, ${2:const channel[]}, ${3:const user[]}, ${4:dest[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GetChannelUserList':
'prefix': 'IRC_GetChannelUserList'
'body': 'IRC_GetChannelUserList(${1:botid}, ${2:const channel[]}, ${3:dest[]}, ${4:maxlength = sizeof dest})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetChannelTopic':
'prefix': 'IRC_SetChannelTopic'
'body': 'IRC_SetChannelTopic(${1:botid}, ${2:const channel[]}, ${3:const topic[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_RequestCTCP':
'prefix': 'IRC_RequestCTCP'
'body': 'IRC_RequestCTCP(${1:botid}, ${2:const user[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_ReplyCTCP':
'prefix': 'IRC_ReplyCTCP'
'body': 'IRC_ReplyCTCP(${1:botid}, ${2:const user[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SendRaw':
'prefix': 'IRC_SendRaw'
'body': 'IRC_SendRaw(${1:botid}, ${2:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_CreateGroup':
'prefix': 'IRC_CreateGroup'
'body': 'IRC_CreateGroup()'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_DestroyGroup':
'prefix': 'IRC_DestroyGroup'
'body': 'IRC_DestroyGroup(${1:groupid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_AddToGroup':
'prefix': 'IRC_AddToGroup'
'body': 'IRC_AddToGroup(${1:groupid}, ${2:botid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_RemoveFromGroup':
'prefix': 'IRC_RemoveFromGroup'
'body': 'IRC_RemoveFromGroup(${1:groupid}, ${2:botid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GroupSay':
'prefix': 'IRC_GroupSay'
'body': 'IRC_GroupSay(${1:groupid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GroupNotice':
'prefix': 'IRC_GroupNotice'
'body': 'IRC_GroupNotice(${1:groupid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetIntData':
'prefix': 'IRC_SetIntData'
'body': 'IRC_SetIntData(${1:botid}, ${2:data}, ${3:value})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnect':
'prefix': 'IRC_OnConnect'
'body': 'IRC_OnConnect(${1:botid}, ${2:ip[]}, ${3:port})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnDisconnect':
'prefix': 'IRC_OnDisconnect'
'body': 'IRC_OnDisconnect(${1:botid}, ${2:ip[]}, ${3:port}, ${4:reason[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnectAttempt':
'prefix': 'IRC_OnConnectAttempt'
'body': 'IRC_OnConnectAttempt(${1:botid}, ${2:ip[]}, ${3:port})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnectAttemptFail':
'prefix': 'IRC_OnConnectAttemptFail'
'body': 'IRC_OnConnectAttemptFail(${1:botid}, ${2:ip[]}, ${3:port}, ${4:reason[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnJoinChannel':
'prefix': 'IRC_OnJoinChannel'
'body': 'IRC_OnJoinChannel(${1:botid}, ${2:channel[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnLeaveChannel':
'prefix': 'IRC_OnLeaveChannel'
'body': 'IRC_OnLeaveChannel(${1:botid}, ${2:channel[]}, ${3:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnInvitedToChannel':
'prefix': 'IRC_OnInvitedToChannel'
'body': 'IRC_OnInvitedToChannel(${1:botid}, ${2:channel[]}, ${3:invitinguser[]}, ${4:invitinghost[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnKickedFromChannel':
'prefix': 'IRC_OnKickedFromChannel'
'body': 'IRC_OnKickedFromChannel(${1:botid}, ${2:channel[]}, ${3:oppeduser[]}, ${4:oppedhost[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserDisconnect':
'prefix': 'IRC_OnUserDisconnect'
'body': 'IRC_OnUserDisconnect(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserJoinChannel':
'prefix': 'IRC_OnUserJoinChannel'
'body': 'IRC_OnUserJoinChannel(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserLeaveChannel':
'prefix': 'IRC_OnUserLeaveChannel'
'body': 'IRC_OnUserLeaveChannel(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserKickedFromChannel':
'prefix': 'IRC_OnUserKickedFromChannel'
'body': 'IRC_OnUserKickedFromChannel(${1:botid}, ${2:channel[]}, ${3:kickeduser[]}, ${4:oppeduser[]}, ${5:oppedhost[]}, ${6:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserNickChange':
'prefix': 'IRC_OnUserNickChange'
'body': 'IRC_OnUserNickChange(${1:botid}, ${2:oldnick[]}, ${3:newnick[]}, ${4:host[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSetChannelMode':
'prefix': 'IRC_OnUserSetChannelMode'
'body': 'IRC_OnUserSetChannelMode(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:mode[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSetChannelTopic':
'prefix': 'IRC_OnUserSetChannelTopic'
'body': 'IRC_OnUserSetChannelTopic(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:topic[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSay':
'prefix': 'IRC_OnUserSay'
'body': 'IRC_OnUserSay(${1:botid}, ${2:recipient[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserNotice':
'prefix': 'IRC_OnUserNotice'
'body': 'IRC_OnUserNotice(${1:botid}, ${2:recipient[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserRequestCTCP':
'prefix': 'IRC_OnUserRequestCTCP'
'body': 'IRC_OnUserRequestCTCP(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserReplyCTCP':
'prefix': 'IRC_OnUserReplyCTCP'
'body': 'IRC_OnUserReplyCTCP(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnReceiveNumeric':
'prefix': 'IRC_OnReceiveNumeric'
'body': 'IRC_OnReceiveNumeric(${1:botid}, ${2:numeric}, ${3:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnReceiveRaw':
'prefix': 'IRC_OnReceiveRaw'
'body': 'IRC_OnReceiveRaw(${1:botid}, ${2:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsVoice':
'prefix': 'IRC_IsVoice'
'body': 'IRC_IsVoice(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsHalfop':
'prefix': 'IRC_IsHalfop'
'body': 'IRC_IsHalfop(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsOp':
'prefix': 'IRC_IsOp'
'body': 'IRC_IsOp(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsAdmin':
'prefix': 'IRC_IsAdmin'
'body': 'IRC_IsAdmin(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsOwner':
'prefix': 'IRC_IsOwner'
'body': 'IRC_IsOwner(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
| 156445 | # IRC Plugin Atom Snippets from Sublime Text Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by <NAME> "<NAME>" <NAME>.
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'IRC_Connect':
'prefix': 'IRC_Connect'
'body': 'IRC_Connect(${1:const server[]}, ${2:port}, ${3:const nickname[]}, ${4:const realname[]}, ${5:const username[]}, ${6:bool:ssl = false}, ${7:const localip[] = \"\"}, ${8:const serverpassword[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Quit':
'prefix': 'IRC_Quit'
'body': 'IRC_Quit(${1:botid}, ${2:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_JoinChannel':
'prefix': 'IRC_JoinChannel'
'body': 'IRC_JoinChannel(${1:botid}, ${2:const channel[]}, ${3:const key[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_PartChannel':
'prefix': 'IRC_PartChannel'
'body': 'IRC_PartChannel(${1:botid}, ${2:const channel[]}, ${3:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_ChangeNick':
'prefix': 'IRC_ChangeNick'
'body': 'IRC_ChangeNick(${1:botid}, ${2:const nick[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetMode':
'prefix': 'IRC_SetMode'
'body': 'IRC_SetMode(${1:botid}, ${2:const target[]}, ${3:const mode[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Say':
'prefix': 'IRC_Say'
'body': 'IRC_Say(${1:botid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Notice':
'prefix': 'IRC_Notice'
'body': 'IRC_Notice(${1:botid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsUserOnChannel':
'prefix': 'IRC_IsUserOnChannel'
'body': 'IRC_IsUserOnChannel(${1:botid}, ${2:const channel[]}, ${3:const user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_InviteUser':
'prefix': 'IRC_InviteUser'
'body': 'IRC_InviteUser(${1:botid}, ${2:const channel[]}, ${3:const user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_KickUser':
'prefix': 'IRC_KickUser'
'body': 'IRC_KickUser(${1:botid}, ${2:const channel[]}, ${3:const user[]}, ${4:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GetUserChannelMode':
'prefix': 'IRC_GetUserChannelMode'
'body': 'IRC_GetUserChannelMode(${1:botid}, ${2:const channel[]}, ${3:const user[]}, ${4:dest[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GetChannelUserList':
'prefix': 'IRC_GetChannelUserList'
'body': 'IRC_GetChannelUserList(${1:botid}, ${2:const channel[]}, ${3:dest[]}, ${4:maxlength = sizeof dest})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetChannelTopic':
'prefix': 'IRC_SetChannelTopic'
'body': 'IRC_SetChannelTopic(${1:botid}, ${2:const channel[]}, ${3:const topic[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_RequestCTCP':
'prefix': 'IRC_RequestCTCP'
'body': 'IRC_RequestCTCP(${1:botid}, ${2:const user[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_ReplyCTCP':
'prefix': 'IRC_ReplyCTCP'
'body': 'IRC_ReplyCTCP(${1:botid}, ${2:const user[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SendRaw':
'prefix': 'IRC_SendRaw'
'body': 'IRC_SendRaw(${1:botid}, ${2:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_CreateGroup':
'prefix': 'IRC_CreateGroup'
'body': 'IRC_CreateGroup()'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_DestroyGroup':
'prefix': 'IRC_DestroyGroup'
'body': 'IRC_DestroyGroup(${1:groupid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_AddToGroup':
'prefix': 'IRC_AddToGroup'
'body': 'IRC_AddToGroup(${1:groupid}, ${2:botid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_RemoveFromGroup':
'prefix': 'IRC_RemoveFromGroup'
'body': 'IRC_RemoveFromGroup(${1:groupid}, ${2:botid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GroupSay':
'prefix': 'IRC_GroupSay'
'body': 'IRC_GroupSay(${1:groupid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GroupNotice':
'prefix': 'IRC_GroupNotice'
'body': 'IRC_GroupNotice(${1:groupid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetIntData':
'prefix': 'IRC_SetIntData'
'body': 'IRC_SetIntData(${1:botid}, ${2:data}, ${3:value})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnect':
'prefix': 'IRC_OnConnect'
'body': 'IRC_OnConnect(${1:botid}, ${2:ip[]}, ${3:port})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnDisconnect':
'prefix': 'IRC_OnDisconnect'
'body': 'IRC_OnDisconnect(${1:botid}, ${2:ip[]}, ${3:port}, ${4:reason[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnectAttempt':
'prefix': 'IRC_OnConnectAttempt'
'body': 'IRC_OnConnectAttempt(${1:botid}, ${2:ip[]}, ${3:port})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnectAttemptFail':
'prefix': 'IRC_OnConnectAttemptFail'
'body': 'IRC_OnConnectAttemptFail(${1:botid}, ${2:ip[]}, ${3:port}, ${4:reason[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnJoinChannel':
'prefix': 'IRC_OnJoinChannel'
'body': 'IRC_OnJoinChannel(${1:botid}, ${2:channel[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnLeaveChannel':
'prefix': 'IRC_OnLeaveChannel'
'body': 'IRC_OnLeaveChannel(${1:botid}, ${2:channel[]}, ${3:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnInvitedToChannel':
'prefix': 'IRC_OnInvitedToChannel'
'body': 'IRC_OnInvitedToChannel(${1:botid}, ${2:channel[]}, ${3:invitinguser[]}, ${4:invitinghost[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnKickedFromChannel':
'prefix': 'IRC_OnKickedFromChannel'
'body': 'IRC_OnKickedFromChannel(${1:botid}, ${2:channel[]}, ${3:oppeduser[]}, ${4:oppedhost[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserDisconnect':
'prefix': 'IRC_OnUserDisconnect'
'body': 'IRC_OnUserDisconnect(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserJoinChannel':
'prefix': 'IRC_OnUserJoinChannel'
'body': 'IRC_OnUserJoinChannel(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserLeaveChannel':
'prefix': 'IRC_OnUserLeaveChannel'
'body': 'IRC_OnUserLeaveChannel(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserKickedFromChannel':
'prefix': 'IRC_OnUserKickedFromChannel'
'body': 'IRC_OnUserKickedFromChannel(${1:botid}, ${2:channel[]}, ${3:kickeduser[]}, ${4:oppeduser[]}, ${5:oppedhost[]}, ${6:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserNickChange':
'prefix': 'IRC_OnUserNickChange'
'body': 'IRC_OnUserNickChange(${1:botid}, ${2:oldnick[]}, ${3:newnick[]}, ${4:host[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSetChannelMode':
'prefix': 'IRC_OnUserSetChannelMode'
'body': 'IRC_OnUserSetChannelMode(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:mode[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSetChannelTopic':
'prefix': 'IRC_OnUserSetChannelTopic'
'body': 'IRC_OnUserSetChannelTopic(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:topic[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSay':
'prefix': 'IRC_OnUserSay'
'body': 'IRC_OnUserSay(${1:botid}, ${2:recipient[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserNotice':
'prefix': 'IRC_OnUserNotice'
'body': 'IRC_OnUserNotice(${1:botid}, ${2:recipient[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserRequestCTCP':
'prefix': 'IRC_OnUserRequestCTCP'
'body': 'IRC_OnUserRequestCTCP(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserReplyCTCP':
'prefix': 'IRC_OnUserReplyCTCP'
'body': 'IRC_OnUserReplyCTCP(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnReceiveNumeric':
'prefix': 'IRC_OnReceiveNumeric'
'body': 'IRC_OnReceiveNumeric(${1:botid}, ${2:numeric}, ${3:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnReceiveRaw':
'prefix': 'IRC_OnReceiveRaw'
'body': 'IRC_OnReceiveRaw(${1:botid}, ${2:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsVoice':
'prefix': 'IRC_IsVoice'
'body': 'IRC_IsVoice(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsHalfop':
'prefix': 'IRC_IsHalfop'
'body': 'IRC_IsHalfop(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsOp':
'prefix': 'IRC_IsOp'
'body': 'IRC_IsOp(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsAdmin':
'prefix': 'IRC_IsAdmin'
'body': 'IRC_IsAdmin(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsOwner':
'prefix': 'IRC_IsOwner'
'body': 'IRC_IsOwner(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
| true | # IRC Plugin Atom Snippets from Sublime Text Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by Renato "Hii" Garcia.
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'IRC_Connect':
'prefix': 'IRC_Connect'
'body': 'IRC_Connect(${1:const server[]}, ${2:port}, ${3:const nickname[]}, ${4:const realname[]}, ${5:const username[]}, ${6:bool:ssl = false}, ${7:const localip[] = \"\"}, ${8:const serverpassword[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Quit':
'prefix': 'IRC_Quit'
'body': 'IRC_Quit(${1:botid}, ${2:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_JoinChannel':
'prefix': 'IRC_JoinChannel'
'body': 'IRC_JoinChannel(${1:botid}, ${2:const channel[]}, ${3:const key[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_PartChannel':
'prefix': 'IRC_PartChannel'
'body': 'IRC_PartChannel(${1:botid}, ${2:const channel[]}, ${3:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_ChangeNick':
'prefix': 'IRC_ChangeNick'
'body': 'IRC_ChangeNick(${1:botid}, ${2:const nick[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetMode':
'prefix': 'IRC_SetMode'
'body': 'IRC_SetMode(${1:botid}, ${2:const target[]}, ${3:const mode[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Say':
'prefix': 'IRC_Say'
'body': 'IRC_Say(${1:botid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_Notice':
'prefix': 'IRC_Notice'
'body': 'IRC_Notice(${1:botid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsUserOnChannel':
'prefix': 'IRC_IsUserOnChannel'
'body': 'IRC_IsUserOnChannel(${1:botid}, ${2:const channel[]}, ${3:const user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_InviteUser':
'prefix': 'IRC_InviteUser'
'body': 'IRC_InviteUser(${1:botid}, ${2:const channel[]}, ${3:const user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_KickUser':
'prefix': 'IRC_KickUser'
'body': 'IRC_KickUser(${1:botid}, ${2:const channel[]}, ${3:const user[]}, ${4:const message[] = \"\"})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GetUserChannelMode':
'prefix': 'IRC_GetUserChannelMode'
'body': 'IRC_GetUserChannelMode(${1:botid}, ${2:const channel[]}, ${3:const user[]}, ${4:dest[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GetChannelUserList':
'prefix': 'IRC_GetChannelUserList'
'body': 'IRC_GetChannelUserList(${1:botid}, ${2:const channel[]}, ${3:dest[]}, ${4:maxlength = sizeof dest})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetChannelTopic':
'prefix': 'IRC_SetChannelTopic'
'body': 'IRC_SetChannelTopic(${1:botid}, ${2:const channel[]}, ${3:const topic[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_RequestCTCP':
'prefix': 'IRC_RequestCTCP'
'body': 'IRC_RequestCTCP(${1:botid}, ${2:const user[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_ReplyCTCP':
'prefix': 'IRC_ReplyCTCP'
'body': 'IRC_ReplyCTCP(${1:botid}, ${2:const user[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SendRaw':
'prefix': 'IRC_SendRaw'
'body': 'IRC_SendRaw(${1:botid}, ${2:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_CreateGroup':
'prefix': 'IRC_CreateGroup'
'body': 'IRC_CreateGroup()'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_DestroyGroup':
'prefix': 'IRC_DestroyGroup'
'body': 'IRC_DestroyGroup(${1:groupid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_AddToGroup':
'prefix': 'IRC_AddToGroup'
'body': 'IRC_AddToGroup(${1:groupid}, ${2:botid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_RemoveFromGroup':
'prefix': 'IRC_RemoveFromGroup'
'body': 'IRC_RemoveFromGroup(${1:groupid}, ${2:botid})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GroupSay':
'prefix': 'IRC_GroupSay'
'body': 'IRC_GroupSay(${1:groupid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_GroupNotice':
'prefix': 'IRC_GroupNotice'
'body': 'IRC_GroupNotice(${1:groupid}, ${2:const target[]}, ${3:const message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_SetIntData':
'prefix': 'IRC_SetIntData'
'body': 'IRC_SetIntData(${1:botid}, ${2:data}, ${3:value})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnect':
'prefix': 'IRC_OnConnect'
'body': 'IRC_OnConnect(${1:botid}, ${2:ip[]}, ${3:port})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnDisconnect':
'prefix': 'IRC_OnDisconnect'
'body': 'IRC_OnDisconnect(${1:botid}, ${2:ip[]}, ${3:port}, ${4:reason[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnectAttempt':
'prefix': 'IRC_OnConnectAttempt'
'body': 'IRC_OnConnectAttempt(${1:botid}, ${2:ip[]}, ${3:port})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnConnectAttemptFail':
'prefix': 'IRC_OnConnectAttemptFail'
'body': 'IRC_OnConnectAttemptFail(${1:botid}, ${2:ip[]}, ${3:port}, ${4:reason[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnJoinChannel':
'prefix': 'IRC_OnJoinChannel'
'body': 'IRC_OnJoinChannel(${1:botid}, ${2:channel[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnLeaveChannel':
'prefix': 'IRC_OnLeaveChannel'
'body': 'IRC_OnLeaveChannel(${1:botid}, ${2:channel[]}, ${3:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnInvitedToChannel':
'prefix': 'IRC_OnInvitedToChannel'
'body': 'IRC_OnInvitedToChannel(${1:botid}, ${2:channel[]}, ${3:invitinguser[]}, ${4:invitinghost[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnKickedFromChannel':
'prefix': 'IRC_OnKickedFromChannel'
'body': 'IRC_OnKickedFromChannel(${1:botid}, ${2:channel[]}, ${3:oppeduser[]}, ${4:oppedhost[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserDisconnect':
'prefix': 'IRC_OnUserDisconnect'
'body': 'IRC_OnUserDisconnect(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserJoinChannel':
'prefix': 'IRC_OnUserJoinChannel'
'body': 'IRC_OnUserJoinChannel(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserLeaveChannel':
'prefix': 'IRC_OnUserLeaveChannel'
'body': 'IRC_OnUserLeaveChannel(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserKickedFromChannel':
'prefix': 'IRC_OnUserKickedFromChannel'
'body': 'IRC_OnUserKickedFromChannel(${1:botid}, ${2:channel[]}, ${3:kickeduser[]}, ${4:oppeduser[]}, ${5:oppedhost[]}, ${6:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserNickChange':
'prefix': 'IRC_OnUserNickChange'
'body': 'IRC_OnUserNickChange(${1:botid}, ${2:oldnick[]}, ${3:newnick[]}, ${4:host[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSetChannelMode':
'prefix': 'IRC_OnUserSetChannelMode'
'body': 'IRC_OnUserSetChannelMode(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:mode[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSetChannelTopic':
'prefix': 'IRC_OnUserSetChannelTopic'
'body': 'IRC_OnUserSetChannelTopic(${1:botid}, ${2:channel[]}, ${3:user[]}, ${4:host[]}, ${5:topic[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserSay':
'prefix': 'IRC_OnUserSay'
'body': 'IRC_OnUserSay(${1:botid}, ${2:recipient[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserNotice':
'prefix': 'IRC_OnUserNotice'
'body': 'IRC_OnUserNotice(${1:botid}, ${2:recipient[]}, ${3:user[]}, ${4:host[]}, ${5:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserRequestCTCP':
'prefix': 'IRC_OnUserRequestCTCP'
'body': 'IRC_OnUserRequestCTCP(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnUserReplyCTCP':
'prefix': 'IRC_OnUserReplyCTCP'
'body': 'IRC_OnUserReplyCTCP(${1:botid}, ${2:user[]}, ${3:host[]}, ${4:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnReceiveNumeric':
'prefix': 'IRC_OnReceiveNumeric'
'body': 'IRC_OnReceiveNumeric(${1:botid}, ${2:numeric}, ${3:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_OnReceiveRaw':
'prefix': 'IRC_OnReceiveRaw'
'body': 'IRC_OnReceiveRaw(${1:botid}, ${2:message[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsVoice':
'prefix': 'IRC_IsVoice'
'body': 'IRC_IsVoice(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsHalfop':
'prefix': 'IRC_IsHalfop'
'body': 'IRC_IsHalfop(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsOp':
'prefix': 'IRC_IsOp'
'body': 'IRC_IsOp(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsAdmin':
'prefix': 'IRC_IsAdmin'
'body': 'IRC_IsAdmin(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
'IRC_IsOwner':
'prefix': 'IRC_IsOwner'
'body': 'IRC_IsOwner(${1:botid}, ${2:channel[]}, ${3:user[]})'
'description': 'Function from: IRC Plugin'
'descriptionMoreURL': 'http://forum.sa-mp.com/showthread.php?t=98803'
|
[
{
"context": "[]\n\nscenarios.push new Scenario \n id: 0\n name: 'foo1'\n code: 'foo = \"fo222o\"'\n leadspace: ''\n\nscena",
"end": 199,
"score": 0.4111767113208771,
"start": 196,
"tag": "NAME",
"value": "foo"
},
{
"context": "\nscenarios.push new Scenario \n id: 0\n name: 'foo1'\n code: 'foo = \"fo222o\"'\n leadspace: ''\n\nscenar",
"end": 200,
"score": 0.553105890750885,
"start": 199,
"tag": "USERNAME",
"value": "1"
},
{
"context": "\nscenarios.push new Scenario \n id: 1\n name: 'foo2'\n code: 'foo2 = (foo) ->'\n leadspace: ''\n\nscena",
"end": 294,
"score": 0.4963388442993164,
"start": 293,
"tag": "USERNAME",
"value": "2"
},
{
"context": "''\n\nscenarios.push new Scenario \n id: 2\n name: 'foo3'\n code: 'foo3 = foo'\n leadspace: ' '\n\n#>> Whe",
"end": 388,
"score": 0.46920549869537354,
"start": 385,
"tag": "NAME",
"value": "foo"
},
{
"context": "\nscenarios.push new Scenario \n id: 2\n name: 'foo3'\n code: 'foo3 = foo'\n leadspace: ' '\n\n#>> When",
"end": 389,
"score": 0.5985579490661621,
"start": 388,
"tag": "USERNAME",
"value": "3"
}
] | domain/specifications/services/specs/buildTest.spec.coffee | wearefractal/spex | 0 | #>> Setup
require 'should'
buildTest = require '../buildTest'
Scenario = require '../../models/Scenario'
#>> Given some Scenarios
scenarios = []
scenarios.push new Scenario
id: 0
name: 'foo1'
code: 'foo = "fo222o"'
leadspace: ''
scenarios.push new Scenario
id: 1
name: 'foo2'
code: 'foo2 = (foo) ->'
leadspace: ''
scenarios.push new Scenario
id: 2
name: 'foo3'
code: 'foo3 = foo'
leadspace: ' '
#>> When I run buildTest
buildTest scenarios, (testCode) ->
#>> Then
testCode.should.equal
'''
var foo, foo2;
Error.prototype.spexScenarioId = 0;
foo = "fo222o";
spex.pass(0);
Error.prototype.spexScenarioId = 1;
foo2 = function(foo) {
var foo3;
spex.pass(1);
Error.prototype.spexScenarioId = 2;
foo3 = foo;
return spex.pass(2);
};
'''
| 120824 | #>> Setup
require 'should'
buildTest = require '../buildTest'
Scenario = require '../../models/Scenario'
#>> Given some Scenarios
scenarios = []
scenarios.push new Scenario
id: 0
name: '<NAME>1'
code: 'foo = "fo222o"'
leadspace: ''
scenarios.push new Scenario
id: 1
name: 'foo2'
code: 'foo2 = (foo) ->'
leadspace: ''
scenarios.push new Scenario
id: 2
name: '<NAME>3'
code: 'foo3 = foo'
leadspace: ' '
#>> When I run buildTest
buildTest scenarios, (testCode) ->
#>> Then
testCode.should.equal
'''
var foo, foo2;
Error.prototype.spexScenarioId = 0;
foo = "fo222o";
spex.pass(0);
Error.prototype.spexScenarioId = 1;
foo2 = function(foo) {
var foo3;
spex.pass(1);
Error.prototype.spexScenarioId = 2;
foo3 = foo;
return spex.pass(2);
};
'''
| true | #>> Setup
require 'should'
buildTest = require '../buildTest'
Scenario = require '../../models/Scenario'
#>> Given some Scenarios
scenarios = []
scenarios.push new Scenario
id: 0
name: 'PI:NAME:<NAME>END_PI1'
code: 'foo = "fo222o"'
leadspace: ''
scenarios.push new Scenario
id: 1
name: 'foo2'
code: 'foo2 = (foo) ->'
leadspace: ''
scenarios.push new Scenario
id: 2
name: 'PI:NAME:<NAME>END_PI3'
code: 'foo3 = foo'
leadspace: ' '
#>> When I run buildTest
buildTest scenarios, (testCode) ->
#>> Then
testCode.should.equal
'''
var foo, foo2;
Error.prototype.spexScenarioId = 0;
foo = "fo222o";
spex.pass(0);
Error.prototype.spexScenarioId = 1;
foo2 = function(foo) {
var foo3;
spex.pass(1);
Error.prototype.spexScenarioId = 2;
foo3 = foo;
return spex.pass(2);
};
'''
|
[
{
"context": "ship (belongsTo)', (done) ->\n related_key = 'reverses'\n related_id_accessor = 'reverse_ids'\n\n ",
"end": 6300,
"score": 0.9583751559257507,
"start": 6292,
"tag": "KEY",
"value": "reverses"
}
] | test/spec/node/migrations/directory.node.tests.coffee | dk-dev/backbone-orm | 54 | path = require 'path'
assert = assert or require?('chai').assert
NodeUtils = require '../../../lib/node_utils'
BackboneORM = window?.BackboneORM; try BackboneORM or= require?('backbone-orm') catch; try BackboneORM or= require?('../../../../backbone-orm')
{_, Backbone, Queue, Utils, JSONUtils, Fabricator} = BackboneORM
_.each BackboneORM.TestUtils.optionSets(), exports = (options) ->
options = _.extend({}, options, __test__parameters) if __test__parameters?
DATABASE_URL = options.database_url or ''
BASE_SCHEMA = options.schema or {}
SYNC = options.sync
BASE_COUNT = 5
describe "Node: Many to Many with resetSchemasByDirectory #{options.$parameter_tags or ''}#{options.$tags}", ->
Owner = Reverse = null
before ->
BackboneORM.configure {model_cache: {enabled: !!options.cache, max: 100}}
# manually clear the cache so the model can be rebootstrapped
delete require.cache[require.resolve('./directory/folder/reverse')]
delete require.cache[require.resolve('./directory/owner')]
Reverse = require './directory/folder/reverse'
Owner = require './directory/owner'
# pre-configure
Reverse::urlRoot = "#{DATABASE_URL}/reverses"
Reverse::schema = _.defaults({
owners: -> ['hasMany', Owner]
}, BASE_SCHEMA)
Reverse::sync = SYNC(Reverse)
Owner::urlRoot = "#{DATABASE_URL}/owners"
Owner::schema = _.defaults({
reverses: -> ['hasMany', Reverse]
}, BASE_SCHEMA)
Owner::sync = SYNC(Owner)
after (callback) ->
NodeUtils.resetSchemasByDirectory path.join(__dirname, 'directory'), callback
beforeEach (callback) ->
relation = Owner.relation('reverses')
delete relation.virtual
MODELS = {}
queue = new Queue(1)
queue.defer (callback) -> NodeUtils.resetSchemasByDirectory path.join(__dirname, 'directory'), callback
queue.defer (callback) ->
create_queue = new Queue()
create_queue.defer (callback) -> Fabricator.create Reverse, 2*BASE_COUNT, {
name: Fabricator.uniqueId('reverses_')
created_at: Fabricator.date
}, (err, models) -> MODELS.reverse = models; callback(err)
create_queue.defer (callback) -> Fabricator.create Owner, BASE_COUNT, {
name: Fabricator.uniqueId('owners_')
created_at: Fabricator.date
}, (err, models) -> MODELS.owner = models; callback(err)
create_queue.await callback
# link and save all
queue.defer (callback) ->
save_queue = new Queue()
for owner in MODELS.owner
do (owner) -> save_queue.defer (callback) ->
owner.save {reverses: [MODELS.reverse.pop(), MODELS.reverse.pop()]}, callback
save_queue.await callback
queue.await callback
it 'Can create a model and load a related model by id (hasMany)', (done) ->
Reverse.cursor({$values: 'id'}).limit(4).toJSON (err, reverse_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverse_ids.length, "found 4 reverses. Actual: #{reverse_ids.length}")
new_model = new Owner()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({reverses: reverse_ids})
new_model.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverses.length, "found 4 related model. Actual: #{reverses.length}")
assert.equal(_.difference(reverse_ids, (test.id for test in reverses)).length, 0, "expected owners: #{_.difference(reverse_ids, (test.id for test in reverses))}")
done()
it 'Can create a model and load a related model by id (hasMany)', (done) ->
Reverse.cursor({$values: 'id'}).limit(4).toJSON (err, reverse_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverse_ids.length, "found 4 reverses. Actual: #{reverse_ids.length}")
new_model = new Owner()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({reverse_ids: reverse_ids})
new_model.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverses.length, "found 4 related model. Actual: #{reverses.length}")
assert.equal(_.difference(reverse_ids, (test.id for test in reverses)).length, 0, "expected owners: #{_.difference(reverse_ids, (test.id for test in reverses))}")
done()
it 'Can create a model and load a related model by id (belongsTo)', (done) ->
Owner.cursor({$values: 'id'}).limit(4).toJSON (err, owner_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owner_ids.length, "found 4 owners. Actual: #{owner_ids.length}")
new_model = new Reverse()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({owners: owner_ids})
new_model.get 'owners', (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owners.length, "loaded correct model. Expected: #{4}. Actual: #{owners.length}")
assert.equal(_.difference(owner_ids, (test.id for test in owners)).length, 0, "expected owners: #{_.difference(owner_ids, (owner.id for owner in owners))}")
done()
it 'Can create a model and load a related model by id (belongsTo)', (done) ->
Owner.cursor({$values: 'id'}).limit(4).toJSON (err, owner_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owner_ids.length, "found 4 owners. Actual: #{owner_ids.length}")
new_model = new Reverse()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({owner_ids: owner_ids})
new_model.get 'owners', (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owners.length, "loaded correct model. Expected: #{4}. Actual: #{owners.length}")
assert.equal(_.difference(owner_ids, (test.id for test in owners)).length, 0, "expected owners: #{_.difference(owner_ids, (owner.id for owner in owners))}")
done()
it 'Can create a model and update the relationship (belongsTo)', (done) ->
related_key = 'reverses'
related_id_accessor = 'reverse_ids'
Owner.cursor().include(related_key).toModel (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
owner_id = owner.id
relateds = owner.get(related_key).models
related_ids = (related.id for related in relateds)
assert.ok(2, relateds.length, "Loaded relateds. Expected: #{2}. Actual: #{relateds.length}")
assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
(attributes = {})[related_key] = relateds
new_owner = new Owner(attributes)
owner1 = null; new_owner1 = null; new_owner_id = null
assert.ok(!_.difference(related_ids, (related.id for related in owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
assert.ok(!_.difference(related_ids, (related.id for related in new_owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in new_owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, new_owner.get(related_id_accessor)).length, "Got related_id from copied related. Expected: #{related_ids}. Actual: #{new_owner.get(related_id_accessor)}")
queue = new Queue(1)
queue.defer (callback) -> new_owner.save callback
queue.defer (callback) -> owner.save callback
# make sure nothing changed after save
queue.defer (callback) ->
new_owner_id = new_owner.id
assert.ok(new_owner_id, 'had an id after after')
assert.ok(!_.difference(related_ids, (related.id for related in owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
assert.ok(!_.difference(related_ids, (related.id for related in new_owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in new_owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, new_owner.get(related_id_accessor)).length, "Got related_id from copied related. Expected: #{related_ids}. Actual: #{new_owner.get(related_id_accessor)}")
callback()
# load
queue.defer (callback) -> Owner.find owner_id, (err, _owner) -> callback(err, owner1 = _owner)
queue.defer (callback) -> Owner.find new_owner_id, (err, _owner) -> callback(err, new_owner1 = _owner)
# check
queue.defer (callback) ->
owner1.get related_key, (err, relateds) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(!_.difference(related_ids, (related.id for related in relateds)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in relateds)}")
assert.ok(!_.difference(related_ids, owner1.get(related_id_accessor)).length, "Got related_id from reloaded previous related. Expected: #{related_ids}. Actual: #{owner1.get(related_id_accessor)}")
new_owner1.get related_key, (err, related) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(!_.difference(related_ids, (related.id for related in relateds)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in relateds)}")
assert.ok(!_.difference(related_ids, new_owner1.get(related_id_accessor)).length, "Got related_id from reloaded previous related. Expected: #{related_ids}. Actual: #{new_owner1.get(related_id_accessor)}")
callback()
queue.await done
it 'Handles a get query for a hasMany and hasMany two sided relation', (done) ->
Owner.findOne (err, test_model) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
test_model.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverses.length, 'found related reverses')
if test_model.relationIsEmbedded('reverses')
assert.deepEqual(test_model.toJSON().reverses[0], reverses[0].toJSON(), "Serialized embedded. Expected: #{test_model.toJSON().reverses}. Actual: #{reverses[0].toJSON()}")
else
assert.deepEqual(test_model.get('reverse_ids')[0], reverses[0].id, "Serialized id only. Expected: #{test_model.get('reverse_ids')[0]}. Actual: #{reverses[0].id}")
reverse = reverses[0]
reverse.get 'owners', (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owners.length, 'found related models')
owner = _.find(owners, (test) -> test_model.id is test.id)
owner_index = _.indexOf(owners, owner)
if reverse.relationIsEmbedded('owners')
assert.deepEqual(reverse.toJSON().owner_ids[owner_index], owner.id, "Serialized embedded. Expected: #{reverse.toJSON().owner_ids[owner_index]}. Actual: #{owner.id}")
else
assert.deepEqual(reverse.get('owner_ids')[owner_index], owner.id, "Serialized id only. Expected: #{reverse.get('owner_ids')[owner_index]}. Actual: #{owner.id}")
assert.ok(!!owner, 'found owner')
if Owner.cache
assert.deepEqual(test_model.toJSON(), owner.toJSON(), "\nExpected: #{JSONUtils.stringify(test_model.toJSON())}\nActual: #{JSONUtils.stringify(test_model.toJSON())}")
else
assert.equal(test_model.id, owner.id, "\nExpected: #{test_model.id}\nActual: #{owner.id}")
done()
it 'Can include related (two-way hasMany) models', (done) ->
Owner.cursor({$one: true}).include('reverses').toJSON (err, test_model) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
assert.ok(test_model.reverses, 'Has related reverses')
assert.equal(test_model.reverses.length, 2, "Has the correct number of related reverses \nExpected: #{2}\nActual: #{test_model.reverses.length}")
done()
it 'Can query on related (two-way hasMany) models', (done) ->
Reverse.findOne (err, reverse) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverse, 'found model')
Owner.cursor({'reverses.name': reverse.get('name')}).toJSON (err, json) ->
test_model = json[0]
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
assert.equal(json.length, 1, "Found the correct number of owners \nExpected: #{1}\nActual: #{json.length}")
done()
it 'Can query on related (two-way hasMany) models with included relations', (done) ->
Reverse.findOne (err, reverse) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverse, 'found model')
Owner.cursor({'reverses.name': reverse.get('name')}).include('reverses').toJSON (err, json) ->
test_model = json[0]
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
assert.ok(test_model.reverses, 'Has related reverses')
assert.equal(test_model.reverses.length, 2, "Has the correct number of related reverses \nExpected: #{2}\nActual: #{test_model.reverses.length}")
done()
it 'Clears its reverse relations on delete when the reverse relation is loaded', (done) ->
Owner.cursor().include('reverses').toModel (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
owner.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverses, 'found model')
owner.destroy (err, owner) ->
assert.ok(!err, "No errors: #{err}")
Owner.relation('reverses').join_table.find {owner_id: owner.id}, (err, null_reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(null_reverses.length, 0, 'No reverses found for this owner after save')
done()
it 'Clears its reverse relations on delete when the reverse relation isnt loaded (one-way hasMany)', (done) ->
Owner.cursor().toModel (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
owner.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverses, 'found model')
owner.destroy (err, owner) ->
assert.ok(!err, "No errors: #{err}")
Owner.relation('reverses').join_table.find {owner_id: owner.id}, (err, null_reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(null_reverses.length, 0, 'No reverses found for this owner after save')
done()
it 'Can query on a ManyToMany relation by related id', (done) ->
Owner.findOne (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
Reverse.cursor({owner_id: owner.id}).toModels (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverses, 'found models')
assert.equal(reverses.length, 2, "Found the correct number of reverses\n expected: #{2}, actual: #{reverses.length}")
done()
it 'Should be able to count relationships', (done) ->
Owner.findOne (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
Reverse.count {owner_id: owner.id}, (err, count) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(2, count, "Counted reverses. Expected: 2. Actual: #{count}")
done()
it 'Should be able to count relationships with paging', (done) ->
Owner.findOne (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
Reverse.cursor({owner_id: owner.id, $page: true}).toJSON (err, paging_info) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(0, paging_info.offset, "Has offset. Expected: 0. Actual: #{paging_info.offset}")
assert.equal(2, paging_info.total_rows, "Counted reverses. Expected: 2. Actual: #{paging_info.total_rows}")
done()
backlinkTests = (virtual) ->
it "Should update backlinks using set (#{if virtual then 'virtual' else 'no modifiers'})", (done) ->
checkReverseFn = (reverses, expected_owner) -> return (callback) ->
assert.ok(reverses, 'Reverses exists')
for reverse in reverses
assert.ok(_.contains(reverse.get('owners').models, expected_owner), "Reverse owner is in the list. Expected: #{expected_owner}. Actual: #{reverse.get('owners').models}")
callback()
Owner.cursor().limit(2).include('reverses').toModels (err, owners) ->
if virtual # set as virtual relationship after including reverse
relation = Owner.relation('reverses')
relation.virtual = true
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners. Expected: 2. Actual: #{owners.length}")
owner0 = owners[0]; owner0_id = owner0.id; reverses0 = _.clone(owner0.get('reverses').models); reverses0a = null; reverses0b = null
owner1 = owners[1]; owner1_id = owner1.id; reverses1 = _.clone(owner1.get('reverses').models); reverses1a = null; reverses1b = null
new_reverses0 = [reverses0[0], reverses1[0]]
queue = new Queue(1)
queue.defer checkReverseFn(reverses0, owner0)
queue.defer checkReverseFn(reverses1, owner1)
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
assert.equal(1, reverses1[0].get('owners').models.length, "Reverse1_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models.length)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
queue.defer (callback) ->
owner0.set({reverses: new_reverses0})
queue.defer checkReverseFn(new_reverses0, owner0) # confirm it moved
queue.defer checkReverseFn(reverses1, owner1)
reverses0a = _.clone(owners[0].get('reverses').models)
reverses1a = _.clone(owners[1].get('reverses').models)
assert.equal(2, owner0.get('reverses').models.length, "Owner0 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(0, reverses0[1].get('owners').models.length, "Reverse0_1 has no owners.\nExpected: #{0}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models)}")
assert.equal(2, reverses1[0].get('owners').models.length, "Reverse1_0 has 2 owners.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
callback()
# save and recheck
queue.defer (callback) -> owner0.save callback
queue.defer (callback) -> owner1.save callback
queue.defer (callback) ->
Owner.cursor({$ids: [owner0.id, owner1.id]}).limit(2).include('reverses').toModels (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners post-save. Expected: 2. Actual: #{owners.length}")
# lookup owners
owner0 = owner1 = null
for owner in owners
if owner.id is owner0_id
owner0 = owner
else if owner.id is owner1_id
owner1 = owner
assert(owner0, 'refound owner0')
assert(owner1, 'refound owner1')
reverses0b = _.clone(owner0.get('reverses').models)
reverses1b = _.clone(owner1.get('reverses').models)
assert.equal(2, owner0.get('reverses').models.length, "Owner0b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
getReverseCount = (reverse) ->
return 1 if virtual
in_0 = _.find(reverses0, (test) -> test.id is reverse.id)
in_new = _.find(new_reverses0, (test) -> test.id is reverse.id)
if in_0
return if in_new then 1 else 0
else
return if in_new then 2 else 1
queue.defer checkReverseFn(reverses0b, owner0) # confirm it moved
assert.equal(null, reverses0[1].get('owner'), "Reverse owner is cleared.\nExpected: #{null}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owner'))}")
queue.defer checkReverseFn(reverses1b, owner1) # confirm it moved
assert.equal(getReverseCount(reverses0b[0]), reverses0b[0].get('owners').models.length, "Reverse0_0b (#{reverses0b[0].id}) has expected owners.\nExpected: #{getReverseCount(reverses0b[0])}.\nActual: #{JSONUtils.stringify(reverses0b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses0b[1]), reverses0b[1].get('owners').models.length, "Reverse0_1b (#{reverses0b[1].id}) has expected owners.\nExpected: #{getReverseCount(reverses0b[1])}.\nActual: #{JSONUtils.stringify(reverses0b[1].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[0]), reverses1b[0].get('owners').models.length, "Reverse1_0b (#{reverses1b[0].id}) has expected owners.\nExpected: #{getReverseCount(reverses1b[0])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[1]), reverses1b[1].get('owners').models.length, "Reverse1_0b (#{reverses1b[1].id}) has expected owners.\nExpected: #{getReverseCount(reverses1b[1])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
callback()
queue.await (err) ->
assert.ok(!err, "No errors: #{err}")
done()
it "Should update backlinks using the collection directly (#{if virtual then 'virtual' else 'no modifiers'})", (done) ->
checkReverseFn = (reverses, expected_owner) -> return (callback) ->
assert.ok(reverses, 'Reverses exists')
for reverse in reverses
assert.ok(_.contains(reverse.get('owners').models, expected_owner), "Reverse owner is in the list. Expected: #{expected_owner}. Actual: #{reverse.get('owners').models}")
callback()
Owner.cursor().limit(2).include('reverses').toModels (err, owners) ->
if virtual # set as virtual relationship after including reverse
relation = Owner.relation('reverses')
relation.virtual = true
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners. Expected: 2. Actual: #{owners.length}")
owner0 = owners[0]; owner0_id = owner0.id; reverses0 = _.clone(owner0.get('reverses').models); reverses0a = null; reverses0b = null
owner1 = owners[1]; owner1_id = owner1.id; reverses1 = _.clone(owner1.get('reverses').models); reverses1a = null; reverses1b = null
shared_reverse0 = reverses1[0]
queue = new Queue(1)
queue.defer checkReverseFn(reverses0, owner0)
queue.defer checkReverseFn(reverses1, owner1)
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
assert.equal(1, reverses1[0].get('owners').models.length, "Reverse1_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models.length)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
queue.defer (callback) ->
reverses = owner0.get('reverses')
reverses.add(shared_reverse0)
queue.defer checkReverseFn([shared_reverse0], owner0) # confirm it moved
queue.defer checkReverseFn(reverses1, owner1)
reverses0a = _.clone(owners[0].get('reverses').models)
reverses1a = _.clone(owners[1].get('reverses').models)
assert.equal(3, owner0.get('reverses').models.length, "Owner0 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
assert.equal(2, reverses1[0].get('owners').models.length, "Reverse1_0 has 2 owners.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
callback()
# save and recheck
queue.defer (callback) -> owner0.save callback
queue.defer (callback) -> owner1.save callback
queue.defer (callback) ->
Owner.cursor({$ids: [owner0.id, owner1.id]}).limit(2).include('reverses').toModels (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners post-save. Expected: 2. Actual: #{owners.length}")
# lookup owners
owner0 = owner1 = null
for owner in owners
if owner.id is owner0_id
owner0 = owner
else if owner.id is owner1_id
owner1 = owner
assert(owner0, 'refound owner0')
assert(owner1, 'refound owner1')
reverses0b = _.clone(owner0.get('reverses').models)
reverses1b = _.clone(owner1.get('reverses').models)
if virtual # doesn't save
assert.equal(2, owner0.get('reverses').models.length, "Owner0b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
else
assert.equal(3, owner0.get('reverses').models.length, "Owner0b has 3 reverses.\nExpected: #{3}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
getReverseCount = (reverse) ->
return 1 if virtual
return if shared_reverse0.id is reverse.id then 2 else 1
queue.defer checkReverseFn(reverses0b, owner0) # confirm it moved
assert.equal(null, reverses0[1].get('owner'), "Reverse owner is cleared.\nExpected: #{null}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owner'))}")
queue.defer checkReverseFn(reverses1b, owner1) # confirm it moved
assert.equal(getReverseCount(reverses0b[0]), reverses0b[0].get('owners').models.length, "Reverse0_0b has expected owners.\nExpected: #{getReverseCount(reverses0b[0])}.\nActual: #{JSONUtils.stringify(reverses0b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses0b[1]), reverses0b[1].get('owners').models.length, "Reverse0_1b has expected owners.\nExpected: #{getReverseCount(reverses0b[1])}.\nActual: #{JSONUtils.stringify(reverses0b[1].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[0]), reverses1b[0].get('owners').models.length, "Reverse1_0b has expected owners.\nExpected: #{getReverseCount(reverses1b[0])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[1]), reverses1b[1].get('owners').models.length, "Reverse1_0b has expected owners.\nExpected: #{getReverseCount(reverses1b[1])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
callback()
queue.await (err) ->
assert.ok(!err, "No errors: #{err}")
done()
# TODO: get these working again
# backlinkTests(false)
# backlinkTests(true)
    it 'does not serialize virtual attributes', (done) ->
      # Marks the 'reverses' relation virtual at runtime, saves with reverses
      # attached, and verifies the virtual relation is not persisted.
      Owner.cursor().include('reverses').toModel (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        assert.ok(owner, 'Reverse found model')
        assert.equal(2, owner.get('reverses').length, "Virtual flat exists. Expected: #{2}. Actual: #{owner.get('reverses').length}")
        relation = owner.relation('reverses')
        relation.virtual = true
        reverses = owner.get('reverses')
        owner.set({reverses: []})
        # even when the reverses are passed explicitly to save, the virtual
        # relation must not be written
        owner.save {reverses: reverses}, (err) ->
          assert.ok(!err, "No errors: #{err}")
          # bust the model cache so find() reloads from the store
          Owner.cache.reset(owner.id) if Owner.cache
          Owner.find owner.id, (err, owner) ->
            assert.ok(!err, "No errors: #{err}")
            assert.equal(0, owner.get('reverses').length, "Virtual flat is not saved. Expected: #{0}. Actual: #{owner.get('reverses').length}")
            done()
| 70107 | path = require 'path'
assert = assert or require?('chai').assert
NodeUtils = require '../../../lib/node_utils'
BackboneORM = window?.BackboneORM; try BackboneORM or= require?('backbone-orm') catch; try BackboneORM or= require?('../../../../backbone-orm')
{_, Backbone, Queue, Utils, JSONUtils, Fabricator} = BackboneORM
_.each BackboneORM.TestUtils.optionSets(), exports = (options) ->
options = _.extend({}, options, __test__parameters) if __test__parameters?
DATABASE_URL = options.database_url or ''
BASE_SCHEMA = options.schema or {}
SYNC = options.sync
BASE_COUNT = 5
describe "Node: Many to Many with resetSchemasByDirectory #{options.$parameter_tags or ''}#{options.$tags}", ->
Owner = Reverse = null
    before ->
      # Configure the model cache per option set (enabled flag from options,
      # capped at 100 entries).
      BackboneORM.configure {model_cache: {enabled: !!options.cache, max: 100}}
      # manually clear the cache so the model can be rebootstrapped
      delete require.cache[require.resolve('./directory/folder/reverse')]
      delete require.cache[require.resolve('./directory/owner')]
      Reverse = require './directory/folder/reverse'
      Owner = require './directory/owner'
      # pre-configure
      # Point both models at this option set's database and install a two-way
      # hasMany schema (Owner.reverses <-> Reverse.owners) plus the sync adapter.
      Reverse::urlRoot = "#{DATABASE_URL}/reverses"
      Reverse::schema = _.defaults({
        owners: -> ['hasMany', Owner]
      }, BASE_SCHEMA)
      Reverse::sync = SYNC(Reverse)
      Owner::urlRoot = "#{DATABASE_URL}/owners"
      Owner::schema = _.defaults({
        reverses: -> ['hasMany', Reverse]
      }, BASE_SCHEMA)
      Owner::sync = SYNC(Owner)
    after (callback) ->
      # Undo the pre-configuration from `before` by resetting the schemas
      # declared in ./directory (async - the mocha callback is passed through).
      NodeUtils.resetSchemasByDirectory path.join(__dirname, 'directory'), callback
    beforeEach (callback) ->
      # Some tests flip relation.virtual at runtime; ensure the 'reverses'
      # relation starts non-virtual for every test.
      relation = Owner.relation('reverses')
      delete relation.virtual
      MODELS = {}
      queue = new Queue(1)
      # reset database state, then fabricate 2*BASE_COUNT reverses and BASE_COUNT owners
      queue.defer (callback) -> NodeUtils.resetSchemasByDirectory path.join(__dirname, 'directory'), callback
      queue.defer (callback) ->
        create_queue = new Queue()
        create_queue.defer (callback) -> Fabricator.create Reverse, 2*BASE_COUNT, {
          name: Fabricator.uniqueId('reverses_')
          created_at: Fabricator.date
        }, (err, models) -> MODELS.reverse = models; callback(err)
        create_queue.defer (callback) -> Fabricator.create Owner, BASE_COUNT, {
          name: Fabricator.uniqueId('owners_')
          created_at: Fabricator.date
        }, (err, models) -> MODELS.owner = models; callback(err)
        create_queue.await callback
      # link and save all
      # each owner takes two distinct reverses (pop removes them from the pool,
      # so no reverse is shared between owners at setup time)
      queue.defer (callback) ->
        save_queue = new Queue()
        for owner in MODELS.owner
          do (owner) -> save_queue.defer (callback) ->
            owner.save {reverses: [MODELS.reverse.pop(), MODELS.reverse.pop()]}, callback
        save_queue.await callback
      queue.await callback
it 'Can create a model and load a related model by id (hasMany)', (done) ->
Reverse.cursor({$values: 'id'}).limit(4).toJSON (err, reverse_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverse_ids.length, "found 4 reverses. Actual: #{reverse_ids.length}")
new_model = new Owner()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({reverses: reverse_ids})
new_model.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverses.length, "found 4 related model. Actual: #{reverses.length}")
assert.equal(_.difference(reverse_ids, (test.id for test in reverses)).length, 0, "expected owners: #{_.difference(reverse_ids, (test.id for test in reverses))}")
done()
it 'Can create a model and load a related model by id (hasMany)', (done) ->
Reverse.cursor({$values: 'id'}).limit(4).toJSON (err, reverse_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverse_ids.length, "found 4 reverses. Actual: #{reverse_ids.length}")
new_model = new Owner()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({reverse_ids: reverse_ids})
new_model.get 'reverses', (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, reverses.length, "found 4 related model. Actual: #{reverses.length}")
assert.equal(_.difference(reverse_ids, (test.id for test in reverses)).length, 0, "expected owners: #{_.difference(reverse_ids, (test.id for test in reverses))}")
done()
it 'Can create a model and load a related model by id (belongsTo)', (done) ->
Owner.cursor({$values: 'id'}).limit(4).toJSON (err, owner_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owner_ids.length, "found 4 owners. Actual: #{owner_ids.length}")
new_model = new Reverse()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({owners: owner_ids})
new_model.get 'owners', (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owners.length, "loaded correct model. Expected: #{4}. Actual: #{owners.length}")
assert.equal(_.difference(owner_ids, (test.id for test in owners)).length, 0, "expected owners: #{_.difference(owner_ids, (owner.id for owner in owners))}")
done()
it 'Can create a model and load a related model by id (belongsTo)', (done) ->
Owner.cursor({$values: 'id'}).limit(4).toJSON (err, owner_ids) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owner_ids.length, "found 4 owners. Actual: #{owner_ids.length}")
new_model = new Reverse()
new_model.save (err) ->
assert.ok(!err, "No errors: #{err}")
new_model.set({owner_ids: owner_ids})
new_model.get 'owners', (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(4, owners.length, "loaded correct model. Expected: #{4}. Actual: #{owners.length}")
assert.equal(_.difference(owner_ids, (test.id for test in owners)).length, 0, "expected owners: #{_.difference(owner_ids, (owner.id for owner in owners))}")
done()
it 'Can create a model and update the relationship (belongsTo)', (done) ->
related_key = '<KEY>'
related_id_accessor = 'reverse_ids'
Owner.cursor().include(related_key).toModel (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
owner_id = owner.id
relateds = owner.get(related_key).models
related_ids = (related.id for related in relateds)
assert.ok(2, relateds.length, "Loaded relateds. Expected: #{2}. Actual: #{relateds.length}")
assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
(attributes = {})[related_key] = relateds
new_owner = new Owner(attributes)
owner1 = null; new_owner1 = null; new_owner_id = null
assert.ok(!_.difference(related_ids, (related.id for related in owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
assert.ok(!_.difference(related_ids, (related.id for related in new_owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in new_owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, new_owner.get(related_id_accessor)).length, "Got related_id from copied related. Expected: #{related_ids}. Actual: #{new_owner.get(related_id_accessor)}")
queue = new Queue(1)
queue.defer (callback) -> new_owner.save callback
queue.defer (callback) -> owner.save callback
# make sure nothing changed after save
queue.defer (callback) ->
new_owner_id = new_owner.id
assert.ok(new_owner_id, 'had an id after after')
assert.ok(!_.difference(related_ids, (related.id for related in owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
assert.ok(!_.difference(related_ids, (related.id for related in new_owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in new_owner.get(related_key).models)}")
assert.ok(!_.difference(related_ids, new_owner.get(related_id_accessor)).length, "Got related_id from copied related. Expected: #{related_ids}. Actual: #{new_owner.get(related_id_accessor)}")
callback()
# load
queue.defer (callback) -> Owner.find owner_id, (err, _owner) -> callback(err, owner1 = _owner)
queue.defer (callback) -> Owner.find new_owner_id, (err, _owner) -> callback(err, new_owner1 = _owner)
# check
queue.defer (callback) ->
owner1.get related_key, (err, relateds) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(!_.difference(related_ids, (related.id for related in relateds)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in relateds)}")
assert.ok(!_.difference(related_ids, owner1.get(related_id_accessor)).length, "Got related_id from reloaded previous related. Expected: #{related_ids}. Actual: #{owner1.get(related_id_accessor)}")
new_owner1.get related_key, (err, related) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(!_.difference(related_ids, (related.id for related in relateds)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in relateds)}")
assert.ok(!_.difference(related_ids, new_owner1.get(related_id_accessor)).length, "Got related_id from reloaded previous related. Expected: #{related_ids}. Actual: #{new_owner1.get(related_id_accessor)}")
callback()
queue.await done
    it 'Handles a get query for a hasMany and hasMany two sided relation', (done) ->
      # Walks the relation in both directions (owner -> reverses -> owners) and
      # checks the serialized form on each side (embedded JSON vs. id arrays).
      Owner.findOne (err, test_model) ->
        assert.ok(!err, "No errors: #{err}")
        assert.ok(test_model, 'found model')
        test_model.get 'reverses', (err, reverses) ->
          assert.ok(!err, "No errors: #{err}")
          assert.ok(reverses.length, 'found related reverses')
          if test_model.relationIsEmbedded('reverses')
            assert.deepEqual(test_model.toJSON().reverses[0], reverses[0].toJSON(), "Serialized embedded. Expected: #{test_model.toJSON().reverses}. Actual: #{reverses[0].toJSON()}")
          else
            assert.deepEqual(test_model.get('reverse_ids')[0], reverses[0].id, "Serialized id only. Expected: #{test_model.get('reverse_ids')[0]}. Actual: #{reverses[0].id}")
          # walk back from the first reverse to its owners
          reverse = reverses[0]
          reverse.get 'owners', (err, owners) ->
            assert.ok(!err, "No errors: #{err}")
            assert.ok(owners.length, 'found related models')
            owner = _.find(owners, (test) -> test_model.id is test.id)
            owner_index = _.indexOf(owners, owner)
            if reverse.relationIsEmbedded('owners')
              assert.deepEqual(reverse.toJSON().owner_ids[owner_index], owner.id, "Serialized embedded. Expected: #{reverse.toJSON().owner_ids[owner_index]}. Actual: #{owner.id}")
            else
              assert.deepEqual(reverse.get('owner_ids')[owner_index], owner.id, "Serialized id only. Expected: #{reverse.get('owner_ids')[owner_index]}. Actual: #{owner.id}")
            assert.ok(!!owner, 'found owner')
            if Owner.cache
              # NOTE(review): the failure message stringifies test_model twice
              # (the Actual part should probably use owner.toJSON()) - message
              # only; the assertion itself compares the right values
              assert.deepEqual(test_model.toJSON(), owner.toJSON(), "\nExpected: #{JSONUtils.stringify(test_model.toJSON())}\nActual: #{JSONUtils.stringify(test_model.toJSON())}")
            else
              assert.equal(test_model.id, owner.id, "\nExpected: #{test_model.id}\nActual: #{owner.id}")
            done()
it 'Can include related (two-way hasMany) models', (done) ->
Owner.cursor({$one: true}).include('reverses').toJSON (err, test_model) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
assert.ok(test_model.reverses, 'Has related reverses')
assert.equal(test_model.reverses.length, 2, "Has the correct number of related reverses \nExpected: #{2}\nActual: #{test_model.reverses.length}")
done()
it 'Can query on related (two-way hasMany) models', (done) ->
Reverse.findOne (err, reverse) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverse, 'found model')
Owner.cursor({'reverses.name': reverse.get('name')}).toJSON (err, json) ->
test_model = json[0]
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
assert.equal(json.length, 1, "Found the correct number of owners \nExpected: #{1}\nActual: #{json.length}")
done()
it 'Can query on related (two-way hasMany) models with included relations', (done) ->
Reverse.findOne (err, reverse) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverse, 'found model')
Owner.cursor({'reverses.name': reverse.get('name')}).include('reverses').toJSON (err, json) ->
test_model = json[0]
assert.ok(!err, "No errors: #{err}")
assert.ok(test_model, 'found model')
assert.ok(test_model.reverses, 'Has related reverses')
assert.equal(test_model.reverses.length, 2, "Has the correct number of related reverses \nExpected: #{2}\nActual: #{test_model.reverses.length}")
done()
    it 'Clears its reverse relations on delete when the reverse relation is loaded', (done) ->
      # Loads the relation up front via include('reverses'), destroys the
      # owner, and checks the join table no longer references it.
      Owner.cursor().include('reverses').toModel (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        assert.ok(owner, 'found model')
        owner.get 'reverses', (err, reverses) ->
          assert.ok(!err, "No errors: #{err}")
          assert.ok(reverses, 'found model')
          owner.destroy (err, owner) ->
            assert.ok(!err, "No errors: #{err}")
            # the join table should hold no rows for the destroyed owner
            Owner.relation('reverses').join_table.find {owner_id: owner.id}, (err, null_reverses) ->
              assert.ok(!err, "No errors: #{err}")
              assert.equal(null_reverses.length, 0, 'No reverses found for this owner after save')
              done()
    it 'Clears its reverse relations on delete when the reverse relation isnt loaded (one-way hasMany)', (done) ->
      # Unlike the test above, the cursor does not include('reverses'), so the
      # relation is not pre-loaded at fetch time.
      # NOTE(review): owner.get 'reverses' below still loads the relation
      # before destroy - confirm this matches the "isnt loaded" intent of the title.
      Owner.cursor().toModel (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        assert.ok(owner, 'found model')
        owner.get 'reverses', (err, reverses) ->
          assert.ok(!err, "No errors: #{err}")
          assert.ok(reverses, 'found model')
          owner.destroy (err, owner) ->
            assert.ok(!err, "No errors: #{err}")
            # the join table should hold no rows for the destroyed owner
            Owner.relation('reverses').join_table.find {owner_id: owner.id}, (err, null_reverses) ->
              assert.ok(!err, "No errors: #{err}")
              assert.equal(null_reverses.length, 0, 'No reverses found for this owner after save')
              done()
it 'Can query on a ManyToMany relation by related id', (done) ->
Owner.findOne (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
Reverse.cursor({owner_id: owner.id}).toModels (err, reverses) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(reverses, 'found models')
assert.equal(reverses.length, 2, "Found the correct number of reverses\n expected: #{2}, actual: #{reverses.length}")
done()
it 'Should be able to count relationships', (done) ->
Owner.findOne (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
Reverse.count {owner_id: owner.id}, (err, count) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(2, count, "Counted reverses. Expected: 2. Actual: #{count}")
done()
it 'Should be able to count relationships with paging', (done) ->
Owner.findOne (err, owner) ->
assert.ok(!err, "No errors: #{err}")
assert.ok(owner, 'found model')
Reverse.cursor({owner_id: owner.id, $page: true}).toJSON (err, paging_info) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(0, paging_info.offset, "Has offset. Expected: 0. Actual: #{paging_info.offset}")
assert.equal(2, paging_info.total_rows, "Counted reverses. Expected: 2. Actual: #{paging_info.total_rows}")
done()
backlinkTests = (virtual) ->
it "Should update backlinks using set (#{if virtual then 'virtual' else 'no modifiers'})", (done) ->
checkReverseFn = (reverses, expected_owner) -> return (callback) ->
assert.ok(reverses, 'Reverses exists')
for reverse in reverses
assert.ok(_.contains(reverse.get('owners').models, expected_owner), "Reverse owner is in the list. Expected: #{expected_owner}. Actual: #{reverse.get('owners').models}")
callback()
Owner.cursor().limit(2).include('reverses').toModels (err, owners) ->
if virtual # set as virtual relationship after including reverse
relation = Owner.relation('reverses')
relation.virtual = true
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners. Expected: 2. Actual: #{owners.length}")
owner0 = owners[0]; owner0_id = owner0.id; reverses0 = _.clone(owner0.get('reverses').models); reverses0a = null; reverses0b = null
owner1 = owners[1]; owner1_id = owner1.id; reverses1 = _.clone(owner1.get('reverses').models); reverses1a = null; reverses1b = null
new_reverses0 = [reverses0[0], reverses1[0]]
queue = new Queue(1)
queue.defer checkReverseFn(reverses0, owner0)
queue.defer checkReverseFn(reverses1, owner1)
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
assert.equal(1, reverses1[0].get('owners').models.length, "Reverse1_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models.length)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
queue.defer (callback) ->
owner0.set({reverses: new_reverses0})
queue.defer checkReverseFn(new_reverses0, owner0) # confirm it moved
queue.defer checkReverseFn(reverses1, owner1)
reverses0a = _.clone(owners[0].get('reverses').models)
reverses1a = _.clone(owners[1].get('reverses').models)
assert.equal(2, owner0.get('reverses').models.length, "Owner0 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(0, reverses0[1].get('owners').models.length, "Reverse0_1 has no owners.\nExpected: #{0}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models)}")
assert.equal(2, reverses1[0].get('owners').models.length, "Reverse1_0 has 2 owners.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
callback()
# save and recheck
queue.defer (callback) -> owner0.save callback
queue.defer (callback) -> owner1.save callback
queue.defer (callback) ->
Owner.cursor({$ids: [owner0.id, owner1.id]}).limit(2).include('reverses').toModels (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners post-save. Expected: 2. Actual: #{owners.length}")
# lookup owners
owner0 = owner1 = null
for owner in owners
if owner.id is owner0_id
owner0 = owner
else if owner.id is owner1_id
owner1 = owner
assert(owner0, 'refound owner0')
assert(owner1, 'refound owner1')
reverses0b = _.clone(owner0.get('reverses').models)
reverses1b = _.clone(owner1.get('reverses').models)
assert.equal(2, owner0.get('reverses').models.length, "Owner0b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
getReverseCount = (reverse) ->
return 1 if virtual
in_0 = _.find(reverses0, (test) -> test.id is reverse.id)
in_new = _.find(new_reverses0, (test) -> test.id is reverse.id)
if in_0
return if in_new then 1 else 0
else
return if in_new then 2 else 1
queue.defer checkReverseFn(reverses0b, owner0) # confirm it moved
assert.equal(null, reverses0[1].get('owner'), "Reverse owner is cleared.\nExpected: #{null}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owner'))}")
queue.defer checkReverseFn(reverses1b, owner1) # confirm it moved
assert.equal(getReverseCount(reverses0b[0]), reverses0b[0].get('owners').models.length, "Reverse0_0b (#{reverses0b[0].id}) has expected owners.\nExpected: #{getReverseCount(reverses0b[0])}.\nActual: #{JSONUtils.stringify(reverses0b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses0b[1]), reverses0b[1].get('owners').models.length, "Reverse0_1b (#{reverses0b[1].id}) has expected owners.\nExpected: #{getReverseCount(reverses0b[1])}.\nActual: #{JSONUtils.stringify(reverses0b[1].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[0]), reverses1b[0].get('owners').models.length, "Reverse1_0b (#{reverses1b[0].id}) has expected owners.\nExpected: #{getReverseCount(reverses1b[0])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[1]), reverses1b[1].get('owners').models.length, "Reverse1_0b (#{reverses1b[1].id}) has expected owners.\nExpected: #{getReverseCount(reverses1b[1])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
callback()
queue.await (err) ->
assert.ok(!err, "No errors: #{err}")
done()
it "Should update backlinks using the collection directly (#{if virtual then 'virtual' else 'no modifiers'})", (done) ->
checkReverseFn = (reverses, expected_owner) -> return (callback) ->
assert.ok(reverses, 'Reverses exists')
for reverse in reverses
assert.ok(_.contains(reverse.get('owners').models, expected_owner), "Reverse owner is in the list. Expected: #{expected_owner}. Actual: #{reverse.get('owners').models}")
callback()
Owner.cursor().limit(2).include('reverses').toModels (err, owners) ->
if virtual # set as virtual relationship after including reverse
relation = Owner.relation('reverses')
relation.virtual = true
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners. Expected: 2. Actual: #{owners.length}")
owner0 = owners[0]; owner0_id = owner0.id; reverses0 = _.clone(owner0.get('reverses').models); reverses0a = null; reverses0b = null
owner1 = owners[1]; owner1_id = owner1.id; reverses1 = _.clone(owner1.get('reverses').models); reverses1a = null; reverses1b = null
shared_reverse0 = reverses1[0]
queue = new Queue(1)
queue.defer checkReverseFn(reverses0, owner0)
queue.defer checkReverseFn(reverses1, owner1)
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
assert.equal(1, reverses1[0].get('owners').models.length, "Reverse1_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models.length)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
queue.defer (callback) ->
reverses = owner0.get('reverses')
reverses.add(shared_reverse0)
queue.defer checkReverseFn([shared_reverse0], owner0) # confirm it moved
queue.defer checkReverseFn(reverses1, owner1)
reverses0a = _.clone(owners[0].get('reverses').models)
reverses1a = _.clone(owners[1].get('reverses').models)
assert.equal(3, owner0.get('reverses').models.length, "Owner0 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
assert.equal(2, reverses1[0].get('owners').models.length, "Reverse1_0 has 2 owners.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models)}")
assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
callback()
# save and recheck
queue.defer (callback) -> owner0.save callback
queue.defer (callback) -> owner1.save callback
queue.defer (callback) ->
Owner.cursor({$ids: [owner0.id, owner1.id]}).limit(2).include('reverses').toModels (err, owners) ->
assert.ok(!err, "No errors: #{err}")
assert.equal(2, owners.length, "Found owners post-save. Expected: 2. Actual: #{owners.length}")
# lookup owners
owner0 = owner1 = null
for owner in owners
if owner.id is owner0_id
owner0 = owner
else if owner.id is owner1_id
owner1 = owner
assert(owner0, 'refound owner0')
assert(owner1, 'refound owner1')
reverses0b = _.clone(owner0.get('reverses').models)
reverses1b = _.clone(owner1.get('reverses').models)
if virtual # doesn't save
assert.equal(2, owner0.get('reverses').models.length, "Owner0b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
else
assert.equal(3, owner0.get('reverses').models.length, "Owner0b has 3 reverses.\nExpected: #{3}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
getReverseCount = (reverse) ->
return 1 if virtual
return if shared_reverse0.id is reverse.id then 2 else 1
queue.defer checkReverseFn(reverses0b, owner0) # confirm it moved
assert.equal(null, reverses0[1].get('owner'), "Reverse owner is cleared.\nExpected: #{null}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owner'))}")
queue.defer checkReverseFn(reverses1b, owner1) # confirm it moved
assert.equal(getReverseCount(reverses0b[0]), reverses0b[0].get('owners').models.length, "Reverse0_0b has expected owners.\nExpected: #{getReverseCount(reverses0b[0])}.\nActual: #{JSONUtils.stringify(reverses0b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses0b[1]), reverses0b[1].get('owners').models.length, "Reverse0_1b has expected owners.\nExpected: #{getReverseCount(reverses0b[1])}.\nActual: #{JSONUtils.stringify(reverses0b[1].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[0]), reverses1b[0].get('owners').models.length, "Reverse1_0b has expected owners.\nExpected: #{getReverseCount(reverses1b[0])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
assert.equal(getReverseCount(reverses1b[1]), reverses1b[1].get('owners').models.length, "Reverse1_0b has expected owners.\nExpected: #{getReverseCount(reverses1b[1])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
callback()
queue.await (err) ->
assert.ok(!err, "No errors: #{err}")
done()
# TODO: get these working again
# backlinkTests(false)
# backlinkTests(true)
# Marking a relation virtual must keep it out of the persisted payload:
# after saving with the relation flagged virtual and reloading from the
# store, the 'reverses' set must be empty.
it 'does not serialize virtual attributes', (done) ->
  Owner.cursor().include('reverses').toModel (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'Reverse found model')
    assert.equal(2, owner.get('reverses').length, "Virtual flat exists. Expected: #{2}. Actual: #{owner.get('reverses').length}")
    # flip the relation to virtual only after the include, so the initial load is real
    relation = owner.relation('reverses')
    relation.virtual = true
    reverses = owner.get('reverses')
    owner.set({reverses: []})
    owner.save {reverses: reverses}, (err) ->
      assert.ok(!err, "No errors: #{err}")
      # bust the model cache so find() below reads from the store, not memory
      Owner.cache.reset(owner.id) if Owner.cache
      Owner.find owner.id, (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(0, owner.get('reverses').length, "Virtual flat is not saved. Expected: #{0}. Actual: #{owner.get('reverses').length}")
        done()
| true | path = require 'path'
assert = assert or require?('chai').assert
NodeUtils = require '../../../lib/node_utils'
BackboneORM = window?.BackboneORM; try BackboneORM or= require?('backbone-orm') catch; try BackboneORM or= require?('../../../../backbone-orm')
{_, Backbone, Queue, Utils, JSONUtils, Fabricator} = BackboneORM
_.each BackboneORM.TestUtils.optionSets(), exports = (options) ->
options = _.extend({}, options, __test__parameters) if __test__parameters?
DATABASE_URL = options.database_url or ''
BASE_SCHEMA = options.schema or {}
SYNC = options.sync
BASE_COUNT = 5
describe "Node: Many to Many with resetSchemasByDirectory #{options.$parameter_tags or ''}#{options.$tags}", ->
Owner = Reverse = null
# Rebuild the Owner/Reverse model classes for this option set: clear the
# require cache so the modules re-bootstrap, then wire urlRoot, a two-sided
# hasMany schema, and the sync backend from the current options.
before ->
  BackboneORM.configure {model_cache: {enabled: !!options.cache, max: 100}}
  # manually clear the cache so the model can be rebootstrapped
  delete require.cache[require.resolve('./directory/folder/reverse')]
  delete require.cache[require.resolve('./directory/owner')]
  Reverse = require './directory/folder/reverse'
  Owner = require './directory/owner'
  # pre-configure
  Reverse::urlRoot = "#{DATABASE_URL}/reverses"
  Reverse::schema = _.defaults({
    owners: -> ['hasMany', Owner]
  }, BASE_SCHEMA)
  Reverse::sync = SYNC(Reverse)
  Owner::urlRoot = "#{DATABASE_URL}/owners"
  Owner::schema = _.defaults({
    reverses: -> ['hasMany', Reverse]
  }, BASE_SCHEMA)
  Owner::sync = SYNC(Owner)
# Restore the on-disk schemas once the suite finishes.
after (callback) ->
  NodeUtils.resetSchemasByDirectory path.join(__dirname, 'directory'), callback

# Fresh fixtures for every test: reset schemas, fabricate 2*BASE_COUNT
# reverses and BASE_COUNT owners, then link two reverses to each owner
# and save. Queue(1) keeps the phases strictly sequential.
beforeEach (callback) ->
  # the backlink tests flip this flag; make sure each test starts non-virtual
  relation = Owner.relation('reverses')
  delete relation.virtual
  MODELS = {}
  queue = new Queue(1)
  queue.defer (callback) -> NodeUtils.resetSchemasByDirectory path.join(__dirname, 'directory'), callback
  queue.defer (callback) ->
    create_queue = new Queue()
    create_queue.defer (callback) -> Fabricator.create Reverse, 2*BASE_COUNT, {
      name: Fabricator.uniqueId('reverses_')
      created_at: Fabricator.date
    }, (err, models) -> MODELS.reverse = models; callback(err)
    create_queue.defer (callback) -> Fabricator.create Owner, BASE_COUNT, {
      name: Fabricator.uniqueId('owners_')
      created_at: Fabricator.date
    }, (err, models) -> MODELS.owner = models; callback(err)
    create_queue.await callback
  # link and save all
  queue.defer (callback) ->
    save_queue = new Queue()
    for owner in MODELS.owner
      do (owner) -> save_queue.defer (callback) ->
        # each owner consumes two fabricated reverses from the shared pool
        owner.save {reverses: [MODELS.reverse.pop(), MODELS.reverse.pop()]}, callback
    save_queue.await callback
  queue.await callback
# The four tests below set a relation from plain ids, via the relation key
# ('reverses'/'owners') and via the id accessor ('reverse_ids'/'owner_ids'),
# from both sides of the many-to-many; get() must resolve them to models.
it 'Can create a model and load a related model by id (hasMany)', (done) ->
  Reverse.cursor({$values: 'id'}).limit(4).toJSON (err, reverse_ids) ->
    assert.ok(!err, "No errors: #{err}")
    assert.equal(4, reverse_ids.length, "found 4 reverses. Actual: #{reverse_ids.length}")
    new_model = new Owner()
    new_model.save (err) ->
      assert.ok(!err, "No errors: #{err}")
      # ids (not models) assigned through the relation key
      new_model.set({reverses: reverse_ids})
      new_model.get 'reverses', (err, reverses) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(4, reverses.length, "found 4 related model. Actual: #{reverses.length}")
        assert.equal(_.difference(reverse_ids, (test.id for test in reverses)).length, 0, "expected owners: #{_.difference(reverse_ids, (test.id for test in reverses))}")
        done()

it 'Can create a model and load a related model by id (hasMany)', (done) ->
  Reverse.cursor({$values: 'id'}).limit(4).toJSON (err, reverse_ids) ->
    assert.ok(!err, "No errors: #{err}")
    assert.equal(4, reverse_ids.length, "found 4 reverses. Actual: #{reverse_ids.length}")
    new_model = new Owner()
    new_model.save (err) ->
      assert.ok(!err, "No errors: #{err}")
      # same as above, but through the generated id accessor
      new_model.set({reverse_ids: reverse_ids})
      new_model.get 'reverses', (err, reverses) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(4, reverses.length, "found 4 related model. Actual: #{reverses.length}")
        assert.equal(_.difference(reverse_ids, (test.id for test in reverses)).length, 0, "expected owners: #{_.difference(reverse_ids, (test.id for test in reverses))}")
        done()

it 'Can create a model and load a related model by id (belongsTo)', (done) ->
  Owner.cursor({$values: 'id'}).limit(4).toJSON (err, owner_ids) ->
    assert.ok(!err, "No errors: #{err}")
    assert.equal(4, owner_ids.length, "found 4 owners. Actual: #{owner_ids.length}")
    new_model = new Reverse()
    new_model.save (err) ->
      assert.ok(!err, "No errors: #{err}")
      # reverse side: assign owner ids through the relation key
      new_model.set({owners: owner_ids})
      new_model.get 'owners', (err, owners) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(4, owners.length, "loaded correct model. Expected: #{4}. Actual: #{owners.length}")
        assert.equal(_.difference(owner_ids, (test.id for test in owners)).length, 0, "expected owners: #{_.difference(owner_ids, (owner.id for owner in owners))}")
        done()

it 'Can create a model and load a related model by id (belongsTo)', (done) ->
  Owner.cursor({$values: 'id'}).limit(4).toJSON (err, owner_ids) ->
    assert.ok(!err, "No errors: #{err}")
    assert.equal(4, owner_ids.length, "found 4 owners. Actual: #{owner_ids.length}")
    new_model = new Reverse()
    new_model.save (err) ->
      assert.ok(!err, "No errors: #{err}")
      # reverse side via the generated id accessor
      new_model.set({owner_ids: owner_ids})
      new_model.get 'owners', (err, owners) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(4, owners.length, "loaded correct model. Expected: #{4}. Actual: #{owners.length}")
        assert.equal(_.difference(owner_ids, (test.id for test in owners)).length, 0, "expected owners: #{_.difference(owner_ids, (owner.id for owner in owners))}")
        done()
# Sharing one owner's related models with a brand-new owner must keep the
# related ids visible on both models: before save, after save, and after a
# reload by id. Queue(1) keeps the save/load/check phases sequential.
# FIX: related_key was a corrupted placeholder string ('PI:KEY:...'); the
# paired accessor 'reverse_ids' and every use (include/get) show it must be
# the Owner schema's 'reverses' relation.
# FIX: the relateds-count check used assert.ok(2, ...), which always passes;
# the message ("Expected: 2") shows an equality check was intended.
it 'Can create a model and update the relationship (belongsTo)', (done) ->
  related_key = 'reverses'
  related_id_accessor = 'reverse_ids'
  Owner.cursor().include(related_key).toModel (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'found model')
    owner_id = owner.id
    relateds = owner.get(related_key).models
    related_ids = (related.id for related in relateds)
    assert.equal(2, relateds.length, "Loaded relateds. Expected: #{2}. Actual: #{relateds.length}")
    assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
    # share the same related models with a second, not-yet-saved owner
    (attributes = {})[related_key] = relateds
    new_owner = new Owner(attributes)
    owner1 = null; new_owner1 = null; new_owner_id = null
    assert.ok(!_.difference(related_ids, (related.id for related in owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in owner.get(related_key).models)}")
    assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
    assert.ok(!_.difference(related_ids, (related.id for related in new_owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in new_owner.get(related_key).models)}")
    assert.ok(!_.difference(related_ids, new_owner.get(related_id_accessor)).length, "Got related_id from copied related. Expected: #{related_ids}. Actual: #{new_owner.get(related_id_accessor)}")

    queue = new Queue(1)
    queue.defer (callback) -> new_owner.save callback
    queue.defer (callback) -> owner.save callback

    # make sure nothing changed after save
    queue.defer (callback) ->
      new_owner_id = new_owner.id
      assert.ok(new_owner_id, 'had an id after after')
      assert.ok(!_.difference(related_ids, (related.id for related in owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in owner.get(related_key).models)}")
      assert.ok(!_.difference(related_ids, owner.get(related_id_accessor)).length, "Got related_id from previous related. Expected: #{related_ids}. Actual: #{owner.get(related_id_accessor)}")
      assert.ok(!_.difference(related_ids, (related.id for related in new_owner.get(related_key).models)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in new_owner.get(related_key).models)}")
      assert.ok(!_.difference(related_ids, new_owner.get(related_id_accessor)).length, "Got related_id from copied related. Expected: #{related_ids}. Actual: #{new_owner.get(related_id_accessor)}")
      callback()

    # load
    queue.defer (callback) -> Owner.find owner_id, (err, _owner) -> callback(err, owner1 = _owner)
    queue.defer (callback) -> Owner.find new_owner_id, (err, _owner) -> callback(err, new_owner1 = _owner)

    # check the reloaded models still expose the same related ids
    queue.defer (callback) ->
      owner1.get related_key, (err, relateds) ->
        assert.ok(!err, "No errors: #{err}")
        assert.ok(!_.difference(related_ids, (related.id for related in relateds)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in relateds)}")
        assert.ok(!_.difference(related_ids, owner1.get(related_id_accessor)).length, "Got related_id from reloaded previous related. Expected: #{related_ids}. Actual: #{owner1.get(related_id_accessor)}")
        new_owner1.get related_key, (err, related) ->
          assert.ok(!err, "No errors: #{err}")
          assert.ok(!_.difference(related_ids, (related.id for related in relateds)).length, "Loaded related from previous related. Expected: #{related_ids}. Actual: #{(related.id for related in relateds)}")
          assert.ok(!_.difference(related_ids, new_owner1.get(related_id_accessor)).length, "Got related_id from reloaded previous related. Expected: #{related_ids}. Actual: #{new_owner1.get(related_id_accessor)}")
          callback()
    queue.await done
# Round-trips the two-sided hasMany (owner -> reverses -> owners), checking
# the serialized form in both embedded and id-only relation modes.
it 'Handles a get query for a hasMany and hasMany two sided relation', (done) ->
  Owner.findOne (err, test_model) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(test_model, 'found model')
    test_model.get 'reverses', (err, reverses) ->
      assert.ok(!err, "No errors: #{err}")
      assert.ok(reverses.length, 'found related reverses')
      if test_model.relationIsEmbedded('reverses')
        assert.deepEqual(test_model.toJSON().reverses[0], reverses[0].toJSON(), "Serialized embedded. Expected: #{test_model.toJSON().reverses}. Actual: #{reverses[0].toJSON()}")
      else
        assert.deepEqual(test_model.get('reverse_ids')[0], reverses[0].id, "Serialized id only. Actual: #{reverses[0].id}")
# include('reverses') should inline the two related models into the JSON.
it 'Can include related (two-way hasMany) models', (done) ->
  Owner.cursor({$one: true}).include('reverses').toJSON (err, test_model) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(test_model, 'found model')
    assert.ok(test_model.reverses, 'Has related reverses')
    assert.equal(test_model.reverses.length, 2, "Has the correct number of related reverses \nExpected: #{2}\nActual: #{test_model.reverses.length}")
    done()

# Dot-notation query across the relation: exactly one owner has this reverse.
it 'Can query on related (two-way hasMany) models', (done) ->
  Reverse.findOne (err, reverse) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(reverse, 'found model')
    Owner.cursor({'reverses.name': reverse.get('name')}).toJSON (err, json) ->
      test_model = json[0]
      assert.ok(!err, "No errors: #{err}")
      assert.ok(test_model, 'found model')
      assert.equal(json.length, 1, "Found the correct number of owners \nExpected: #{1}\nActual: #{json.length}")
      done()

# Same relation query combined with include(): the matched owner still
# carries both of its related reverses.
it 'Can query on related (two-way hasMany) models with included relations', (done) ->
  Reverse.findOne (err, reverse) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(reverse, 'found model')
    Owner.cursor({'reverses.name': reverse.get('name')}).include('reverses').toJSON (err, json) ->
      test_model = json[0]
      assert.ok(!err, "No errors: #{err}")
      assert.ok(test_model, 'found model')
      assert.ok(test_model.reverses, 'Has related reverses')
      assert.equal(test_model.reverses.length, 2, "Has the correct number of related reverses \nExpected: #{2}\nActual: #{test_model.reverses.length}")
      done()
# Destroying an owner must remove its rows from the many-to-many join table,
# whether or not the reverse relation was loaded first.
it 'Clears its reverse relations on delete when the reverse relation is loaded', (done) ->
  Owner.cursor().include('reverses').toModel (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'found model')
    owner.get 'reverses', (err, reverses) ->
      assert.ok(!err, "No errors: #{err}")
      assert.ok(reverses, 'found model')
      owner.destroy (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        # check the join table directly rather than going through the relation
        Owner.relation('reverses').join_table.find {owner_id: owner.id}, (err, null_reverses) ->
          assert.ok(!err, "No errors: #{err}")
          assert.equal(null_reverses.length, 0, 'No reverses found for this owner after save')
          done()

# Same as above, but without include() — the relation is never materialized
# in memory before the destroy.
it 'Clears its reverse relations on delete when the reverse relation isnt loaded (one-way hasMany)', (done) ->
  Owner.cursor().toModel (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'found model')
    owner.get 'reverses', (err, reverses) ->
      assert.ok(!err, "No errors: #{err}")
      assert.ok(reverses, 'found model')
      owner.destroy (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        Owner.relation('reverses').join_table.find {owner_id: owner.id}, (err, null_reverses) ->
          assert.ok(!err, "No errors: #{err}")
          assert.equal(null_reverses.length, 0, 'No reverses found for this owner after save')
          done()
# Related-id queries: filtering Reverse by an owner id must see exactly the
# two reverses linked in beforeEach, and count/paging must agree with that.
it 'Can query on a ManyToMany relation by related id', (done) ->
  Owner.findOne (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'found model')
    by_owner = {owner_id: owner.id}
    Reverse.cursor(by_owner).toModels (err, reverses) ->
      assert.ok(!err, "No errors: #{err}")
      assert.ok(reverses, 'found models')
      assert.equal(reverses.length, 2, "Found the correct number of reverses\n expected: #{2}, actual: #{reverses.length}")
      done()

# count() with the same related-id filter reports the same total.
it 'Should be able to count relationships', (done) ->
  Owner.findOne (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'found model')
    by_owner = {owner_id: owner.id}
    Reverse.count by_owner, (err, count) ->
      assert.ok(!err, "No errors: #{err}")
      assert.equal(2, count, "Counted reverses. Expected: 2. Actual: #{count}")
      done()

# $page mode returns paging metadata instead of rows; total_rows must match.
it 'Should be able to count relationships with paging', (done) ->
  Owner.findOne (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'found model')
    paged_cursor = Reverse.cursor({owner_id: owner.id, $page: true})
    paged_cursor.toJSON (err, paging_info) ->
      assert.ok(!err, "No errors: #{err}")
      assert.equal(0, paging_info.offset, "Has offset. Expected: 0. Actual: #{paging_info.offset}")
      assert.equal(2, paging_info.total_rows, "Counted reverses. Expected: 2. Actual: #{paging_info.total_rows}")
      done()
backlinkTests = (virtual) ->
# Disabled (see TODO after backlinkTests): verifies that set() on a hasMany
# keeps the reverse 'owners' collections in sync, in memory and after
# save/reload. When `virtual` is true the relation must not persist changes.
# FIX: the last assertion's message labeled the wrong reverse ("Reverse1_0b")
# and interpolated reverses1b[0] while the assertion checks reverses1b[1].
it "Should update backlinks using set (#{if virtual then 'virtual' else 'no modifiers'})", (done) ->
  # returns a deferred check that every reverse lists expected_owner as an owner
  checkReverseFn = (reverses, expected_owner) -> return (callback) ->
    assert.ok(reverses, 'Reverses exists')
    for reverse in reverses
      assert.ok(_.contains(reverse.get('owners').models, expected_owner), "Reverse owner is in the list. Expected: #{expected_owner}. Actual: #{reverse.get('owners').models}")
    callback()

  Owner.cursor().limit(2).include('reverses').toModels (err, owners) ->
    if virtual # set as virtual relationship after including reverse
      relation = Owner.relation('reverses')
      relation.virtual = true
    assert.ok(!err, "No errors: #{err}")
    assert.equal(2, owners.length, "Found owners. Expected: 2. Actual: #{owners.length}")
    owner0 = owners[0]; owner0_id = owner0.id; reverses0 = _.clone(owner0.get('reverses').models); reverses0a = null; reverses0b = null
    owner1 = owners[1]; owner1_id = owner1.id; reverses1 = _.clone(owner1.get('reverses').models); reverses1a = null; reverses1b = null
    # owner0 keeps its first reverse and takes owner1's first one
    new_reverses0 = [reverses0[0], reverses1[0]]

    queue = new Queue(1)

    # initial state: each owner owns exactly its own two reverses
    queue.defer checkReverseFn(reverses0, owner0)
    queue.defer checkReverseFn(reverses1, owner1)
    assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
    assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
    assert.equal(1, reverses1[0].get('owners').models.length, "Reverse1_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models.length)}")
    assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")

    # in-memory move: reverses0[1] is dropped, reverses1[0] becomes shared
    queue.defer (callback) ->
      owner0.set({reverses: new_reverses0})
      queue.defer checkReverseFn(new_reverses0, owner0) # confirm it moved
      queue.defer checkReverseFn(reverses1, owner1)
      reverses0a = _.clone(owners[0].get('reverses').models)
      reverses1a = _.clone(owners[1].get('reverses').models)
      assert.equal(2, owner0.get('reverses').models.length, "Owner0 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
      assert.equal(2, owner1.get('reverses').models.length, "Owner1 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
      assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
      assert.equal(0, reverses0[1].get('owners').models.length, "Reverse0_1 has no owners.\nExpected: #{0}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models)}")
      assert.equal(2, reverses1[0].get('owners').models.length, "Reverse1_0 has 2 owners.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models)}")
      assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
      callback()

    # save and recheck
    queue.defer (callback) -> owner0.save callback
    queue.defer (callback) -> owner1.save callback
    queue.defer (callback) ->
      Owner.cursor({$ids: [owner0.id, owner1.id]}).limit(2).include('reverses').toModels (err, owners) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(2, owners.length, "Found owners post-save. Expected: 2. Actual: #{owners.length}")
        # lookup owners
        owner0 = owner1 = null
        for owner in owners
          if owner.id is owner0_id
            owner0 = owner
          else if owner.id is owner1_id
            owner1 = owner
        assert(owner0, 'refound owner0')
        assert(owner1, 'refound owner1')
        reverses0b = _.clone(owner0.get('reverses').models)
        reverses1b = _.clone(owner1.get('reverses').models)
        assert.equal(2, owner0.get('reverses').models.length, "Owner0b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
        assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")

        # expected owner count per reverse after the move (virtual never saves)
        getReverseCount = (reverse) ->
          return 1 if virtual
          in_0 = _.find(reverses0, (test) -> test.id is reverse.id)
          in_new = _.find(new_reverses0, (test) -> test.id is reverse.id)
          if in_0
            return if in_new then 1 else 0
          else
            return if in_new then 2 else 1

        queue.defer checkReverseFn(reverses0b, owner0) # confirm it moved
        # NOTE(review): 'reverses' is hasMany, so get('owner') is undefined here;
        # this passes only because assert.equal(null, undefined) is non-strict
        assert.equal(null, reverses0[1].get('owner'), "Reverse owner is cleared.\nExpected: #{null}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owner'))}")
        queue.defer checkReverseFn(reverses1b, owner1) # confirm it moved
        assert.equal(getReverseCount(reverses0b[0]), reverses0b[0].get('owners').models.length, "Reverse0_0b (#{reverses0b[0].id}) has expected owners.\nExpected: #{getReverseCount(reverses0b[0])}.\nActual: #{JSONUtils.stringify(reverses0b[0].get('owners').models)}")
        assert.equal(getReverseCount(reverses0b[1]), reverses0b[1].get('owners').models.length, "Reverse0_1b (#{reverses0b[1].id}) has expected owners.\nExpected: #{getReverseCount(reverses0b[1])}.\nActual: #{JSONUtils.stringify(reverses0b[1].get('owners').models)}")
        assert.equal(getReverseCount(reverses1b[0]), reverses1b[0].get('owners').models.length, "Reverse1_0b (#{reverses1b[0].id}) has expected owners.\nExpected: #{getReverseCount(reverses1b[0])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
        assert.equal(getReverseCount(reverses1b[1]), reverses1b[1].get('owners').models.length, "Reverse1_1b (#{reverses1b[1].id}) has expected owners.\nExpected: #{getReverseCount(reverses1b[1])}.\nActual: #{JSONUtils.stringify(reverses1b[1].get('owners').models)}")
        callback()

    queue.await (err) ->
      assert.ok(!err, "No errors: #{err}")
      done()
# Disabled (see TODO after backlinkTests): same backlink check as the set()
# variant, but mutating the relation's collection directly via add();
# shared_reverse0 ends up owned by both owners unless the relation is virtual.
# FIX: the owner0 post-add assertion checks 3 but its message claimed
# "has 2 reverses / Expected: #{2}".
# FIX: the last assertion's message labeled the wrong reverse ("Reverse1_0b")
# and interpolated reverses1b[0] while the assertion checks reverses1b[1].
it "Should update backlinks using the collection directly (#{if virtual then 'virtual' else 'no modifiers'})", (done) ->
  # returns a deferred check that every reverse lists expected_owner as an owner
  checkReverseFn = (reverses, expected_owner) -> return (callback) ->
    assert.ok(reverses, 'Reverses exists')
    for reverse in reverses
      assert.ok(_.contains(reverse.get('owners').models, expected_owner), "Reverse owner is in the list. Expected: #{expected_owner}. Actual: #{reverse.get('owners').models}")
    callback()

  Owner.cursor().limit(2).include('reverses').toModels (err, owners) ->
    if virtual # set as virtual relationship after including reverse
      relation = Owner.relation('reverses')
      relation.virtual = true
    assert.ok(!err, "No errors: #{err}")
    assert.equal(2, owners.length, "Found owners. Expected: 2. Actual: #{owners.length}")
    owner0 = owners[0]; owner0_id = owner0.id; reverses0 = _.clone(owner0.get('reverses').models); reverses0a = null; reverses0b = null
    owner1 = owners[1]; owner1_id = owner1.id; reverses1 = _.clone(owner1.get('reverses').models); reverses1a = null; reverses1b = null
    # this reverse is added to owner0 while remaining on owner1
    shared_reverse0 = reverses1[0]

    queue = new Queue(1)

    # initial state: each owner owns exactly its own two reverses
    queue.defer checkReverseFn(reverses0, owner0)
    queue.defer checkReverseFn(reverses1, owner1)
    assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
    assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
    assert.equal(1, reverses1[0].get('owners').models.length, "Reverse1_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models.length)}")
    assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")

    # in-memory add: owner0 gains shared_reverse0 without owner1 losing it
    queue.defer (callback) ->
      reverses = owner0.get('reverses')
      reverses.add(shared_reverse0)
      queue.defer checkReverseFn([shared_reverse0], owner0) # confirm it moved
      queue.defer checkReverseFn(reverses1, owner1)
      reverses0a = _.clone(owners[0].get('reverses').models)
      reverses1a = _.clone(owners[1].get('reverses').models)
      assert.equal(3, owner0.get('reverses').models.length, "Owner0 has 3 reverses.\nExpected: #{3}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
      assert.equal(2, owner1.get('reverses').models.length, "Owner1 has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
      assert.equal(1, reverses0[0].get('owners').models.length, "Reverse0_0 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[0].get('owners').models.length)}")
      assert.equal(1, reverses0[1].get('owners').models.length, "Reverse0_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owners').models.length)}")
      assert.equal(2, reverses1[0].get('owners').models.length, "Reverse1_0 has 2 owners.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(reverses1[0].get('owners').models)}")
      assert.equal(1, reverses1[1].get('owners').models.length, "Reverse1_1 has 1 owner.\nExpected: #{1}.\nActual: #{JSONUtils.stringify(reverses1[1].get('owners').models.length)}")
      callback()

    # save and recheck
    queue.defer (callback) -> owner0.save callback
    queue.defer (callback) -> owner1.save callback
    queue.defer (callback) ->
      Owner.cursor({$ids: [owner0.id, owner1.id]}).limit(2).include('reverses').toModels (err, owners) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(2, owners.length, "Found owners post-save. Expected: 2. Actual: #{owners.length}")
        # lookup owners
        owner0 = owner1 = null
        for owner in owners
          if owner.id is owner0_id
            owner0 = owner
          else if owner.id is owner1_id
            owner1 = owner
        assert(owner0, 'refound owner0')
        assert(owner1, 'refound owner1')
        reverses0b = _.clone(owner0.get('reverses').models)
        reverses1b = _.clone(owner1.get('reverses').models)
        if virtual # doesn't save
          assert.equal(2, owner0.get('reverses').models.length, "Owner0b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
          assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")
        else
          assert.equal(3, owner0.get('reverses').models.length, "Owner0b has 3 reverses.\nExpected: #{3}.\nActual: #{JSONUtils.stringify(owner0.get('reverses').models.length)}")
          assert.equal(2, owner1.get('reverses').models.length, "Owner1b has 2 reverses.\nExpected: #{2}.\nActual: #{JSONUtils.stringify(owner1.get('reverses').models.length)}")

        # expected owner count per reverse: only the shared one gains an owner
        getReverseCount = (reverse) ->
          return 1 if virtual
          return if shared_reverse0.id is reverse.id then 2 else 1

        queue.defer checkReverseFn(reverses0b, owner0) # confirm it moved
        # NOTE(review): 'reverses' is hasMany, so get('owner') is undefined here;
        # this passes only because assert.equal(null, undefined) is non-strict
        assert.equal(null, reverses0[1].get('owner'), "Reverse owner is cleared.\nExpected: #{null}.\nActual: #{JSONUtils.stringify(reverses0[1].get('owner'))}")
        queue.defer checkReverseFn(reverses1b, owner1) # confirm it moved
        assert.equal(getReverseCount(reverses0b[0]), reverses0b[0].get('owners').models.length, "Reverse0_0b has expected owners.\nExpected: #{getReverseCount(reverses0b[0])}.\nActual: #{JSONUtils.stringify(reverses0b[0].get('owners').models)}")
        assert.equal(getReverseCount(reverses0b[1]), reverses0b[1].get('owners').models.length, "Reverse0_1b has expected owners.\nExpected: #{getReverseCount(reverses0b[1])}.\nActual: #{JSONUtils.stringify(reverses0b[1].get('owners').models)}")
        assert.equal(getReverseCount(reverses1b[0]), reverses1b[0].get('owners').models.length, "Reverse1_0b has expected owners.\nExpected: #{getReverseCount(reverses1b[0])}.\nActual: #{JSONUtils.stringify(reverses1b[0].get('owners').models)}")
        assert.equal(getReverseCount(reverses1b[1]), reverses1b[1].get('owners').models.length, "Reverse1_1b has expected owners.\nExpected: #{getReverseCount(reverses1b[1])}.\nActual: #{JSONUtils.stringify(reverses1b[1].get('owners').models)}")
        callback()

    queue.await (err) ->
      assert.ok(!err, "No errors: #{err}")
      done()
# TODO: get these working again
# backlinkTests(false)
# backlinkTests(true)
# Marking a relation virtual must keep it out of the persisted payload:
# after saving with the relation flagged virtual and reloading from the
# store, the 'reverses' set must be empty.
it 'does not serialize virtual attributes', (done) ->
  Owner.cursor().include('reverses').toModel (err, owner) ->
    assert.ok(!err, "No errors: #{err}")
    assert.ok(owner, 'Reverse found model')
    assert.equal(2, owner.get('reverses').length, "Virtual flat exists. Expected: #{2}. Actual: #{owner.get('reverses').length}")
    # flip the relation to virtual only after the include, so the initial load is real
    relation = owner.relation('reverses')
    relation.virtual = true
    reverses = owner.get('reverses')
    owner.set({reverses: []})
    owner.save {reverses: reverses}, (err) ->
      assert.ok(!err, "No errors: #{err}")
      # bust the model cache so find() below reads from the store, not memory
      Owner.cache.reset(owner.id) if Owner.cache
      Owner.find owner.id, (err, owner) ->
        assert.ok(!err, "No errors: #{err}")
        assert.equal(0, owner.get('reverses').length, "Virtual flat is not saved. Expected: #{0}. Actual: #{owner.get('reverses').length}")
        done()
|
[
{
"context": "Start world\n Gotham.World.start()\n\n banList = [\"128.39.202.89\"]\n server = Gotham.SocketServer\n server.setBanl",
"end": 971,
"score": 0.9997466802597046,
"start": 958,
"tag": "IP_ADDRESS",
"value": "128.39.202.89"
}
] | GOTHAM/Backend/src/Main.coffee | perara/gotham | 0 | ##########################################################
##
## Require Stuff
##
##########################################################
## Third Party
performance = require 'performance-now'
log = require('log4js').getLogger("Main")
# Extensions
require './Tools/JSON.coffee'
# Gotham Party
SocketServer = require './Networking/SocketServer.coffee'
Database = require './Database/Database.coffee'
LocalDatabase = require './Database/LocalDatabase.coffee'
World = require './Objects/World/World.coffee'
#########################################################
##
## Global Scope
##
#########################################################
# Expose the core singletons as a process-wide namespace: database handles,
# the game world, the socket server (listening on 8081 once started), the
# micro-traffic helpers, and general utilities.
global.Gotham =
  Database: new Database()
  LocalDatabase: new LocalDatabase()
  World: new World()
  SocketServer: new SocketServer 8081
  Micro: require './Objects/Traffic/Micro/Micro.coffee'
  Util: require './Tools/Util.coffee'
startServer = () ->
# Start world
Gotham.World.start()
banList = ["128.39.202.89"]
server = Gotham.SocketServer
server.setBanlist banList
server.setDatabase Gotham.Database
server.registerRoom new (require './Networking/Rooms/HostRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/UserRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/WorldMapRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/GeneralRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/MissionRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/ShopRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/Applications/TracerouteRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/Applications/PingRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/AdministrationRoom.coffee')()
server.start()
server.onConnect = (_client) ->
log.info "[SERVER] Client Connected | #{_client.handshake.address} | #{_client.id}"
server.onDisconnect = (_client) ->
log.info "[SERVER] Client Disconnected | #{_client.handshake.address} | #{_client.id}"
preload = (_c) ->
start = performance()
Gotham.LocalDatabase.preload ->
log.info "Preload done in #{((performance() - start) / 1000).toFixed(2)} Seconds"
_c()
# Preload then start server
preload ->
startServer()
"""
db_host = Gotham.LocalDatabase.table("Host")
db_network = Gotham.LocalDatabase.table("Network")
source = db_host.findOne(id: 17)
target = db_network.findOne(id: 500)
session = new Gotham.Micro.Session(source, target, "ICMP")
session.addPacket(new Gotham.Micro.Packet("", true, 1, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 1, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 2, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 2, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 3, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 3, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 4, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 4, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 5, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 5, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 6, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 6, 0))
for key, val of session.nodeHeaders
console.log val
"""
| 84364 | ##########################################################
##
## Require Stuff
##
##########################################################
## Third Party
performance = require 'performance-now'
log = require('log4js').getLogger("Main")
# Extensions
require './Tools/JSON.coffee'
# Gotham Party
SocketServer = require './Networking/SocketServer.coffee'
Database = require './Database/Database.coffee'
LocalDatabase = require './Database/LocalDatabase.coffee'
World = require './Objects/World/World.coffee'
#########################################################
##
## Global Scope
##
#########################################################
global.Gotham =
Database: new Database()
LocalDatabase: new LocalDatabase()
World: new World()
SocketServer: new SocketServer 8081
Micro: require './Objects/Traffic/Micro/Micro.coffee'
Util: require './Tools/Util.coffee'
startServer = () ->
# Start world
Gotham.World.start()
banList = ["172.16.31.10"]
server = Gotham.SocketServer
server.setBanlist banList
server.setDatabase Gotham.Database
server.registerRoom new (require './Networking/Rooms/HostRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/UserRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/WorldMapRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/GeneralRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/MissionRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/ShopRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/Applications/TracerouteRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/Applications/PingRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/AdministrationRoom.coffee')()
server.start()
server.onConnect = (_client) ->
log.info "[SERVER] Client Connected | #{_client.handshake.address} | #{_client.id}"
server.onDisconnect = (_client) ->
log.info "[SERVER] Client Disconnected | #{_client.handshake.address} | #{_client.id}"
preload = (_c) ->
start = performance()
Gotham.LocalDatabase.preload ->
log.info "Preload done in #{((performance() - start) / 1000).toFixed(2)} Seconds"
_c()
# Preload then start server
preload ->
startServer()
"""
db_host = Gotham.LocalDatabase.table("Host")
db_network = Gotham.LocalDatabase.table("Network")
source = db_host.findOne(id: 17)
target = db_network.findOne(id: 500)
session = new Gotham.Micro.Session(source, target, "ICMP")
session.addPacket(new Gotham.Micro.Packet("", true, 1, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 1, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 2, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 2, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 3, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 3, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 4, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 4, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 5, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 5, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 6, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 6, 0))
for key, val of session.nodeHeaders
console.log val
"""
| true | ##########################################################
##
## Require Stuff
##
##########################################################
## Third Party
performance = require 'performance-now'
log = require('log4js').getLogger("Main")
# Extensions
require './Tools/JSON.coffee'
# Gotham Party
SocketServer = require './Networking/SocketServer.coffee'
Database = require './Database/Database.coffee'
LocalDatabase = require './Database/LocalDatabase.coffee'
World = require './Objects/World/World.coffee'
#########################################################
##
## Global Scope
##
#########################################################
global.Gotham =
Database: new Database()
LocalDatabase: new LocalDatabase()
World: new World()
SocketServer: new SocketServer 8081
Micro: require './Objects/Traffic/Micro/Micro.coffee'
Util: require './Tools/Util.coffee'
startServer = () ->
# Start world
Gotham.World.start()
banList = ["PI:IP_ADDRESS:172.16.31.10END_PI"]
server = Gotham.SocketServer
server.setBanlist banList
server.setDatabase Gotham.Database
server.registerRoom new (require './Networking/Rooms/HostRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/UserRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/WorldMapRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/GeneralRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/MissionRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/ShopRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/Applications/TracerouteRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/Applications/PingRoom.coffee')()
server.registerRoom new (require './Networking/Rooms/AdministrationRoom.coffee')()
server.start()
server.onConnect = (_client) ->
log.info "[SERVER] Client Connected | #{_client.handshake.address} | #{_client.id}"
server.onDisconnect = (_client) ->
log.info "[SERVER] Client Disconnected | #{_client.handshake.address} | #{_client.id}"
preload = (_c) ->
start = performance()
Gotham.LocalDatabase.preload ->
log.info "Preload done in #{((performance() - start) / 1000).toFixed(2)} Seconds"
_c()
# Preload then start server
preload ->
startServer()
"""
db_host = Gotham.LocalDatabase.table("Host")
db_network = Gotham.LocalDatabase.table("Network")
source = db_host.findOne(id: 17)
target = db_network.findOne(id: 500)
session = new Gotham.Micro.Session(source, target, "ICMP")
session.addPacket(new Gotham.Micro.Packet("", true, 1, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 1, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 2, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 2, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 3, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 3, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 4, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 4, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 5, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 5, 0))
session.addPacket(new Gotham.Micro.Packet("", true, 6, 1000))
session.addPacket(new Gotham.Micro.Packet("", false, 6, 0))
for key, val of session.nodeHeaders
console.log val
"""
|
[
{
"context": " to return, throws, break, and continue.\n# @author Joel Feenstra\n###\n'use strict'\n\n#------------------------------",
"end": 116,
"score": 0.9998413920402527,
"start": 103,
"tag": "NAME",
"value": "Joel Feenstra"
}
] | src/rules/no-unreachable.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Checks for unreachable code due to return, throws, break, and continue.
# @author Joel Feenstra
###
'use strict'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether or not a given variable declarator has the initializer.
# @param {ASTNode} node - A VariableDeclarator node to check.
# @returns {boolean} `true` if the node has the initializer.
###
isInitialized = (node) -> Boolean node.init
###*
# Checks whether or not a given code path segment is unreachable.
# @param {CodePathSegment} segment - A CodePathSegment to check.
# @returns {boolean} `true` if the segment is unreachable.
###
isUnreachable = (segment) -> not segment.reachable
###*
# The class to distinguish consecutive unreachable statements.
###
class ConsecutiveRange
constructor: (sourceCode) ->
@sourceCode = sourceCode
@startNode = null
@endNode = null
###*
# The location object of this range.
# @type {Object}
###
location: ->
start: @startNode.loc.start
end: @endNode.loc.end
###*
# `true` if this range is empty.
# @type {boolean}
###
isEmpty: -> not (@startNode and @endNode)
###*
# Checks whether the given node is inside of this range.
# @param {ASTNode|Token} node - The node to check.
# @returns {boolean} `true` if the node is inside of this range.
###
contains: (node) ->
node.range[0] >= @startNode.range[0] and node.range[1] <= @endNode.range[1]
###*
# Checks whether the given node is consecutive to this range.
# @param {ASTNode} node - The node to check.
# @returns {boolean} `true` if the node is consecutive to this range.
###
isConsecutive: (node) -> @contains @sourceCode.getTokenBefore node
###*
# Merges the given node to this range.
# @param {ASTNode} node - The node to merge.
# @returns {void}
###
merge: (node) -> @endNode = node
###*
# Resets this range by the given node or null.
# @param {ASTNode|null} node - The node to reset, or null.
# @returns {void}
###
reset: (node) -> @startNode = @endNode = node
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'disallow unreachable code after `return`, `throw`, `continue`, and `break` statements'
category: 'Possible Errors'
recommended: yes
url: 'https://eslint.org/docs/rules/no-unreachable'
schema: []
create: (context) ->
currentCodePath = null
range = new ConsecutiveRange context.getSourceCode()
###*
# Reports a given node if it's unreachable.
# @param {ASTNode} node - A statement node to report.
# @returns {void}
###
reportIfUnreachable = (node) ->
nextNode = null
if node and currentCodePath.currentSegments.every isUnreachable
# Store this statement to distinguish consecutive statements.
if range.isEmpty()
range.reset node
return
# Skip if this statement is inside of the current range.
return if range.contains node
# Merge if this statement is consecutive to the current range.
if range.isConsecutive node
range.merge node
return
nextNode = node
###
# Report the current range since this statement is reachable or is
# not consecutive to the current range.
###
unless range.isEmpty()
context.report
message: 'Unreachable code.'
loc: range.location()
node: range.startNode
# Update the current range.
range.reset nextNode
# Manages the current code path.
onCodePathStart: (codePath) -> currentCodePath = codePath
onCodePathEnd: -> currentCodePath ###:### = currentCodePath.upper
# Registers for all statement nodes (excludes FunctionDeclaration).
BlockStatement: reportIfUnreachable
BreakStatement: reportIfUnreachable
ClassDeclaration: reportIfUnreachable
ContinueStatement: reportIfUnreachable
DebuggerStatement: reportIfUnreachable
DoWhileStatement: reportIfUnreachable
EmptyStatement: reportIfUnreachable
ExpressionStatement: reportIfUnreachable
ForInStatement: reportIfUnreachable
ForOfStatement: reportIfUnreachable
ForStatement: reportIfUnreachable
For: reportIfUnreachable
IfStatement: reportIfUnreachable
ImportDeclaration: reportIfUnreachable
LabeledStatement: reportIfUnreachable
ReturnStatement: reportIfUnreachable
SwitchStatement: reportIfUnreachable
ThrowStatement: reportIfUnreachable
TryStatement: reportIfUnreachable
VariableDeclaration: (node) ->
if node.kind isnt 'var' or node.declarations.some isInitialized
reportIfUnreachable node
WhileStatement: reportIfUnreachable
WithStatement: reportIfUnreachable
ExportNamedDeclaration: reportIfUnreachable
ExportDefaultDeclaration: reportIfUnreachable
ExportAllDeclaration: reportIfUnreachable
'Program:exit': -> reportIfUnreachable()
| 90871 | ###*
# @fileoverview Checks for unreachable code due to return, throws, break, and continue.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether or not a given variable declarator has the initializer.
# @param {ASTNode} node - A VariableDeclarator node to check.
# @returns {boolean} `true` if the node has the initializer.
###
isInitialized = (node) -> Boolean node.init
###*
# Checks whether or not a given code path segment is unreachable.
# @param {CodePathSegment} segment - A CodePathSegment to check.
# @returns {boolean} `true` if the segment is unreachable.
###
isUnreachable = (segment) -> not segment.reachable
###*
# The class to distinguish consecutive unreachable statements.
###
class ConsecutiveRange
constructor: (sourceCode) ->
@sourceCode = sourceCode
@startNode = null
@endNode = null
###*
# The location object of this range.
# @type {Object}
###
location: ->
start: @startNode.loc.start
end: @endNode.loc.end
###*
# `true` if this range is empty.
# @type {boolean}
###
isEmpty: -> not (@startNode and @endNode)
###*
# Checks whether the given node is inside of this range.
# @param {ASTNode|Token} node - The node to check.
# @returns {boolean} `true` if the node is inside of this range.
###
contains: (node) ->
node.range[0] >= @startNode.range[0] and node.range[1] <= @endNode.range[1]
###*
# Checks whether the given node is consecutive to this range.
# @param {ASTNode} node - The node to check.
# @returns {boolean} `true` if the node is consecutive to this range.
###
isConsecutive: (node) -> @contains @sourceCode.getTokenBefore node
###*
# Merges the given node to this range.
# @param {ASTNode} node - The node to merge.
# @returns {void}
###
merge: (node) -> @endNode = node
###*
# Resets this range by the given node or null.
# @param {ASTNode|null} node - The node to reset, or null.
# @returns {void}
###
reset: (node) -> @startNode = @endNode = node
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'disallow unreachable code after `return`, `throw`, `continue`, and `break` statements'
category: 'Possible Errors'
recommended: yes
url: 'https://eslint.org/docs/rules/no-unreachable'
schema: []
create: (context) ->
currentCodePath = null
range = new ConsecutiveRange context.getSourceCode()
###*
# Reports a given node if it's unreachable.
# @param {ASTNode} node - A statement node to report.
# @returns {void}
###
reportIfUnreachable = (node) ->
nextNode = null
if node and currentCodePath.currentSegments.every isUnreachable
# Store this statement to distinguish consecutive statements.
if range.isEmpty()
range.reset node
return
# Skip if this statement is inside of the current range.
return if range.contains node
# Merge if this statement is consecutive to the current range.
if range.isConsecutive node
range.merge node
return
nextNode = node
###
# Report the current range since this statement is reachable or is
# not consecutive to the current range.
###
unless range.isEmpty()
context.report
message: 'Unreachable code.'
loc: range.location()
node: range.startNode
# Update the current range.
range.reset nextNode
# Manages the current code path.
onCodePathStart: (codePath) -> currentCodePath = codePath
onCodePathEnd: -> currentCodePath ###:### = currentCodePath.upper
# Registers for all statement nodes (excludes FunctionDeclaration).
BlockStatement: reportIfUnreachable
BreakStatement: reportIfUnreachable
ClassDeclaration: reportIfUnreachable
ContinueStatement: reportIfUnreachable
DebuggerStatement: reportIfUnreachable
DoWhileStatement: reportIfUnreachable
EmptyStatement: reportIfUnreachable
ExpressionStatement: reportIfUnreachable
ForInStatement: reportIfUnreachable
ForOfStatement: reportIfUnreachable
ForStatement: reportIfUnreachable
For: reportIfUnreachable
IfStatement: reportIfUnreachable
ImportDeclaration: reportIfUnreachable
LabeledStatement: reportIfUnreachable
ReturnStatement: reportIfUnreachable
SwitchStatement: reportIfUnreachable
ThrowStatement: reportIfUnreachable
TryStatement: reportIfUnreachable
VariableDeclaration: (node) ->
if node.kind isnt 'var' or node.declarations.some isInitialized
reportIfUnreachable node
WhileStatement: reportIfUnreachable
WithStatement: reportIfUnreachable
ExportNamedDeclaration: reportIfUnreachable
ExportDefaultDeclaration: reportIfUnreachable
ExportAllDeclaration: reportIfUnreachable
'Program:exit': -> reportIfUnreachable()
| true | ###*
# @fileoverview Checks for unreachable code due to return, throws, break, and continue.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
###*
# Checks whether or not a given variable declarator has the initializer.
# @param {ASTNode} node - A VariableDeclarator node to check.
# @returns {boolean} `true` if the node has the initializer.
###
isInitialized = (node) -> Boolean node.init
###*
# Checks whether or not a given code path segment is unreachable.
# @param {CodePathSegment} segment - A CodePathSegment to check.
# @returns {boolean} `true` if the segment is unreachable.
###
isUnreachable = (segment) -> not segment.reachable
###*
# The class to distinguish consecutive unreachable statements.
###
class ConsecutiveRange
constructor: (sourceCode) ->
@sourceCode = sourceCode
@startNode = null
@endNode = null
###*
# The location object of this range.
# @type {Object}
###
location: ->
start: @startNode.loc.start
end: @endNode.loc.end
###*
# `true` if this range is empty.
# @type {boolean}
###
isEmpty: -> not (@startNode and @endNode)
###*
# Checks whether the given node is inside of this range.
# @param {ASTNode|Token} node - The node to check.
# @returns {boolean} `true` if the node is inside of this range.
###
contains: (node) ->
node.range[0] >= @startNode.range[0] and node.range[1] <= @endNode.range[1]
###*
# Checks whether the given node is consecutive to this range.
# @param {ASTNode} node - The node to check.
# @returns {boolean} `true` if the node is consecutive to this range.
###
isConsecutive: (node) -> @contains @sourceCode.getTokenBefore node
###*
# Merges the given node to this range.
# @param {ASTNode} node - The node to merge.
# @returns {void}
###
merge: (node) -> @endNode = node
###*
# Resets this range by the given node or null.
# @param {ASTNode|null} node - The node to reset, or null.
# @returns {void}
###
reset: (node) -> @startNode = @endNode = node
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'disallow unreachable code after `return`, `throw`, `continue`, and `break` statements'
category: 'Possible Errors'
recommended: yes
url: 'https://eslint.org/docs/rules/no-unreachable'
schema: []
create: (context) ->
currentCodePath = null
range = new ConsecutiveRange context.getSourceCode()
###*
# Reports a given node if it's unreachable.
# @param {ASTNode} node - A statement node to report.
# @returns {void}
###
reportIfUnreachable = (node) ->
nextNode = null
if node and currentCodePath.currentSegments.every isUnreachable
# Store this statement to distinguish consecutive statements.
if range.isEmpty()
range.reset node
return
# Skip if this statement is inside of the current range.
return if range.contains node
# Merge if this statement is consecutive to the current range.
if range.isConsecutive node
range.merge node
return
nextNode = node
###
# Report the current range since this statement is reachable or is
# not consecutive to the current range.
###
unless range.isEmpty()
context.report
message: 'Unreachable code.'
loc: range.location()
node: range.startNode
# Update the current range.
range.reset nextNode
# Manages the current code path.
onCodePathStart: (codePath) -> currentCodePath = codePath
onCodePathEnd: -> currentCodePath ###:### = currentCodePath.upper
# Registers for all statement nodes (excludes FunctionDeclaration).
BlockStatement: reportIfUnreachable
BreakStatement: reportIfUnreachable
ClassDeclaration: reportIfUnreachable
ContinueStatement: reportIfUnreachable
DebuggerStatement: reportIfUnreachable
DoWhileStatement: reportIfUnreachable
EmptyStatement: reportIfUnreachable
ExpressionStatement: reportIfUnreachable
ForInStatement: reportIfUnreachable
ForOfStatement: reportIfUnreachable
ForStatement: reportIfUnreachable
For: reportIfUnreachable
IfStatement: reportIfUnreachable
ImportDeclaration: reportIfUnreachable
LabeledStatement: reportIfUnreachable
ReturnStatement: reportIfUnreachable
SwitchStatement: reportIfUnreachable
ThrowStatement: reportIfUnreachable
TryStatement: reportIfUnreachable
VariableDeclaration: (node) ->
if node.kind isnt 'var' or node.declarations.some isInitialized
reportIfUnreachable node
WhileStatement: reportIfUnreachable
WithStatement: reportIfUnreachable
ExportNamedDeclaration: reportIfUnreachable
ExportDefaultDeclaration: reportIfUnreachable
ExportAllDeclaration: reportIfUnreachable
'Program:exit': -> reportIfUnreachable()
|
[
{
"context": "###\nDocxgen.coffee\nCreated by Edgar HIPP\n###\n\nDocUtils=require('./docUtils')\nImgManager=re",
"end": 40,
"score": 0.9998876452445984,
"start": 30,
"tag": "NAME",
"value": "Edgar HIPP"
}
] | coffee/docxgen.coffee | ssured/docxtemplater | 0 | ###
Docxgen.coffee
Created by Edgar HIPP
###
DocUtils=require('./docUtils')
ImgManager=require('./imgManager')
DocXTemplater=require('./docxTemplater')
JSZip=require('jszip')
fs= require('fs')
module.exports=class DocxGen
templatedFiles=["word/document.xml","word/footer1.xml","word/footer2.xml","word/footer3.xml","word/header1.xml","word/header2.xml","word/header3.xml"]
constructor: (content, @Tags={},@options) ->
@setOptions(@options)
@finishedCallback=()->
@filesProcessed=0 # This is the number of files that were processed, When all files are processed and all qrcodes are decoded, the finished Callback is called
@qrCodeNumCallBack=0 #This is the order of the qrcode
@qrCodeWaitingFor= [] #The templater waits till all the qrcodes are decoded, This is the list of the remaining qrcodes to decode (only their order in the document is stored)
if content? then if content.length>0 then @load(content)
setOptions:(@options)->
if @options?
@intelligentTagging= if @options.intelligentTagging? then @options.intelligentTagging else on
@qrCode= if @options.qrCode? then @options.qrCode else off
if @qrCode==true then @qrCode=DocUtils.unsecureQrCode
if @options.parser? then @parser=options.parser
this
loadFromFile:(path,options={})->
@setOptions(options)
promise=
success:(fun)->
this.successFun=fun
successFun:()->
if !options.docx? then options.docx=false
if !options.async? then options.async=false
if !options.callback? then options.callback=(rawData) =>
@load rawData
promise.successFun(this)
DocUtils.loadDoc(path,options)
if options.async==false then return this else return promise
qrCodeCallBack:(num,add=true) ->
if add==true
@qrCodeWaitingFor.push num
else if add == false
index = @qrCodeWaitingFor.indexOf(num)
@qrCodeWaitingFor.splice(index, 1)
@testReady()
testReady:()->
if @qrCodeWaitingFor.length==0 and @filesProcessed== templatedFiles.length ## When all files are processed and all qrCodes are processed too, the finished callback can be called
@ready=true
@finishedCallback()
getImageList:()-> @imgManager.getImageList()
setImage: (path,data,options={}) ->
if !options.binary? then options.binary=true
@imgManager.setImage(path,data,options)
load: (content)->
@loadedContent=content
@zip = new JSZip content
@imgManager=(new ImgManager(@zip)).loadImageRels()
this
applyTags:(@Tags=@Tags,qrCodeCallback=null)->
#Loop inside all templatedFiles (basically xml files with content). Sometimes they dont't exist (footer.xml for example)
for fileName in templatedFiles when !@zip.files[fileName]?
@filesProcessed++ #count files that don't exist as processed
for fileName in templatedFiles when @zip.files[fileName]?
currentFile= new DocXTemplater(@zip.files[fileName].asText(),{
DocxGen:this
Tags:@Tags
intelligentTagging:@intelligentTagging
qrCodeCallback:qrCodeCallback
parser:@parser
})
@setData(fileName,currentFile.applyTags().content)
@filesProcessed++
#When all files have been processed, check if the document is ready
@testReady()
setData:(fileName,data,options={})->
@zip.remove(fileName)
@zip.file(fileName,data,options)
getTags:()->
usedTags=[]
for fileName in templatedFiles when @zip.files[fileName]?
currentFile= new DocXTemplater(@zip.files[fileName].asText(),{
DocxGen:this
Tags:@Tags
intelligentTagging:@intelligentTagging
parser:@parser
})
usedTemplateV= currentFile.applyTags().usedTags
if DocUtils.sizeOfObject(usedTemplateV)
usedTags.push {fileName,vars:usedTemplateV}
usedTags
setTags: (@Tags) ->
this
#output all files, if docx has been loaded via javascript, it will be available
output: (options={}) ->
if !options.download? then options.download=true
if !options.name? then options.name="output.docx"
if !options.type? then options.type="base64"
result= @zip.generate({type:options.type})
if options.download
if DocUtils.env=='node'
fs.writeFile process.cwd()+'/'+options.name, result, 'base64', (err) ->
if err then throw err
if options.callback? then options.callback()
else
#Be aware that data-uri doesn't work for too big files: More Info http://stackoverflow.com/questions/17082286/getting-max-data-uri-size-in-javascript
document.location.href= "data:application/vnd.openxmlformats-officedocument.wordprocessingml.document;base64,#{result}"
result
getFullText:(path="word/document.xml") ->
usedData=@zip.files[path].asText()
(new DocXTemplater(usedData,{DocxGen:this,Tags:@Tags,intelligentTagging:@intelligentTagging})).getFullText()
download: (swfpath, imgpath, filename="default.docx") ->
output=@zip.generate()
Downloadify.create 'downloadify',
filename: () ->return filename
data: () ->
return output
onCancel: () -> alert 'You have cancelled the saving of this file.'
onError: () -> alert 'You must put something in the File Contents or there will be nothing to save!'
swf: swfpath
downloadImage: imgpath
width: 100
height: 30
transparent: true
append: false
dataType:'base64'
| 67036 | ###
Docxgen.coffee
Created by <NAME>
###
DocUtils=require('./docUtils')
ImgManager=require('./imgManager')
DocXTemplater=require('./docxTemplater')
JSZip=require('jszip')
fs= require('fs')
module.exports=class DocxGen
templatedFiles=["word/document.xml","word/footer1.xml","word/footer2.xml","word/footer3.xml","word/header1.xml","word/header2.xml","word/header3.xml"]
constructor: (content, @Tags={},@options) ->
@setOptions(@options)
@finishedCallback=()->
@filesProcessed=0 # This is the number of files that were processed, When all files are processed and all qrcodes are decoded, the finished Callback is called
@qrCodeNumCallBack=0 #This is the order of the qrcode
@qrCodeWaitingFor= [] #The templater waits till all the qrcodes are decoded, This is the list of the remaining qrcodes to decode (only their order in the document is stored)
if content? then if content.length>0 then @load(content)
setOptions:(@options)->
if @options?
@intelligentTagging= if @options.intelligentTagging? then @options.intelligentTagging else on
@qrCode= if @options.qrCode? then @options.qrCode else off
if @qrCode==true then @qrCode=DocUtils.unsecureQrCode
if @options.parser? then @parser=options.parser
this
loadFromFile:(path,options={})->
@setOptions(options)
promise=
success:(fun)->
this.successFun=fun
successFun:()->
if !options.docx? then options.docx=false
if !options.async? then options.async=false
if !options.callback? then options.callback=(rawData) =>
@load rawData
promise.successFun(this)
DocUtils.loadDoc(path,options)
if options.async==false then return this else return promise
qrCodeCallBack:(num,add=true) ->
if add==true
@qrCodeWaitingFor.push num
else if add == false
index = @qrCodeWaitingFor.indexOf(num)
@qrCodeWaitingFor.splice(index, 1)
@testReady()
testReady:()->
if @qrCodeWaitingFor.length==0 and @filesProcessed== templatedFiles.length ## When all files are processed and all qrCodes are processed too, the finished callback can be called
@ready=true
@finishedCallback()
getImageList:()-> @imgManager.getImageList()
setImage: (path,data,options={}) ->
if !options.binary? then options.binary=true
@imgManager.setImage(path,data,options)
load: (content)->
@loadedContent=content
@zip = new JSZip content
@imgManager=(new ImgManager(@zip)).loadImageRels()
this
applyTags:(@Tags=@Tags,qrCodeCallback=null)->
#Loop inside all templatedFiles (basically xml files with content). Sometimes they dont't exist (footer.xml for example)
for fileName in templatedFiles when !@zip.files[fileName]?
@filesProcessed++ #count files that don't exist as processed
for fileName in templatedFiles when @zip.files[fileName]?
currentFile= new DocXTemplater(@zip.files[fileName].asText(),{
DocxGen:this
Tags:@Tags
intelligentTagging:@intelligentTagging
qrCodeCallback:qrCodeCallback
parser:@parser
})
@setData(fileName,currentFile.applyTags().content)
@filesProcessed++
#When all files have been processed, check if the document is ready
@testReady()
setData:(fileName,data,options={})->
@zip.remove(fileName)
@zip.file(fileName,data,options)
getTags:()->
usedTags=[]
for fileName in templatedFiles when @zip.files[fileName]?
currentFile= new DocXTemplater(@zip.files[fileName].asText(),{
DocxGen:this
Tags:@Tags
intelligentTagging:@intelligentTagging
parser:@parser
})
usedTemplateV= currentFile.applyTags().usedTags
if DocUtils.sizeOfObject(usedTemplateV)
usedTags.push {fileName,vars:usedTemplateV}
usedTags
setTags: (@Tags) ->
this
#output all files, if docx has been loaded via javascript, it will be available
output: (options={}) ->
if !options.download? then options.download=true
if !options.name? then options.name="output.docx"
if !options.type? then options.type="base64"
result= @zip.generate({type:options.type})
if options.download
if DocUtils.env=='node'
fs.writeFile process.cwd()+'/'+options.name, result, 'base64', (err) ->
if err then throw err
if options.callback? then options.callback()
else
#Be aware that data-uri doesn't work for too big files: More Info http://stackoverflow.com/questions/17082286/getting-max-data-uri-size-in-javascript
document.location.href= "data:application/vnd.openxmlformats-officedocument.wordprocessingml.document;base64,#{result}"
result
getFullText:(path="word/document.xml") ->
usedData=@zip.files[path].asText()
(new DocXTemplater(usedData,{DocxGen:this,Tags:@Tags,intelligentTagging:@intelligentTagging})).getFullText()
download: (swfpath, imgpath, filename="default.docx") ->
output=@zip.generate()
Downloadify.create 'downloadify',
filename: () ->return filename
data: () ->
return output
onCancel: () -> alert 'You have cancelled the saving of this file.'
onError: () -> alert 'You must put something in the File Contents or there will be nothing to save!'
swf: swfpath
downloadImage: imgpath
width: 100
height: 30
transparent: true
append: false
dataType:'base64'
| true | ###
Docxgen.coffee
Created by PI:NAME:<NAME>END_PI
###
DocUtils=require('./docUtils')
ImgManager=require('./imgManager')
DocXTemplater=require('./docxTemplater')
JSZip=require('jszip')
fs= require('fs')
module.exports=class DocxGen
templatedFiles=["word/document.xml","word/footer1.xml","word/footer2.xml","word/footer3.xml","word/header1.xml","word/header2.xml","word/header3.xml"]
constructor: (content, @Tags={},@options) ->
@setOptions(@options)
@finishedCallback=()->
@filesProcessed=0 # This is the number of files that were processed, When all files are processed and all qrcodes are decoded, the finished Callback is called
@qrCodeNumCallBack=0 #This is the order of the qrcode
@qrCodeWaitingFor= [] #The templater waits till all the qrcodes are decoded, This is the list of the remaining qrcodes to decode (only their order in the document is stored)
if content? then if content.length>0 then @load(content)
setOptions:(@options)->
if @options?
@intelligentTagging= if @options.intelligentTagging? then @options.intelligentTagging else on
@qrCode= if @options.qrCode? then @options.qrCode else off
if @qrCode==true then @qrCode=DocUtils.unsecureQrCode
if @options.parser? then @parser=options.parser
this
loadFromFile:(path,options={})->
@setOptions(options)
promise=
success:(fun)->
this.successFun=fun
successFun:()->
if !options.docx? then options.docx=false
if !options.async? then options.async=false
if !options.callback? then options.callback=(rawData) =>
@load rawData
promise.successFun(this)
DocUtils.loadDoc(path,options)
if options.async==false then return this else return promise
qrCodeCallBack:(num,add=true) ->
if add==true
@qrCodeWaitingFor.push num
else if add == false
index = @qrCodeWaitingFor.indexOf(num)
@qrCodeWaitingFor.splice(index, 1)
@testReady()
testReady:()->
if @qrCodeWaitingFor.length==0 and @filesProcessed== templatedFiles.length ## When all files are processed and all qrCodes are processed too, the finished callback can be called
@ready=true
@finishedCallback()
getImageList:()-> @imgManager.getImageList()
setImage: (path,data,options={}) ->
if !options.binary? then options.binary=true
@imgManager.setImage(path,data,options)
load: (content)->
@loadedContent=content
@zip = new JSZip content
@imgManager=(new ImgManager(@zip)).loadImageRels()
this
applyTags:(@Tags=@Tags,qrCodeCallback=null)->
#Loop inside all templatedFiles (basically xml files with content). Sometimes they dont't exist (footer.xml for example)
for fileName in templatedFiles when !@zip.files[fileName]?
@filesProcessed++ #count files that don't exist as processed
for fileName in templatedFiles when @zip.files[fileName]?
currentFile= new DocXTemplater(@zip.files[fileName].asText(),{
DocxGen:this
Tags:@Tags
intelligentTagging:@intelligentTagging
qrCodeCallback:qrCodeCallback
parser:@parser
})
@setData(fileName,currentFile.applyTags().content)
@filesProcessed++
#When all files have been processed, check if the document is ready
@testReady()
setData:(fileName,data,options={})->
@zip.remove(fileName)
@zip.file(fileName,data,options)
getTags:()->
usedTags=[]
for fileName in templatedFiles when @zip.files[fileName]?
currentFile= new DocXTemplater(@zip.files[fileName].asText(),{
DocxGen:this
Tags:@Tags
intelligentTagging:@intelligentTagging
parser:@parser
})
usedTemplateV= currentFile.applyTags().usedTags
if DocUtils.sizeOfObject(usedTemplateV)
usedTags.push {fileName,vars:usedTemplateV}
usedTags
setTags: (@Tags) ->
this
#output all files, if docx has been loaded via javascript, it will be available
output: (options={}) ->
if !options.download? then options.download=true
if !options.name? then options.name="output.docx"
if !options.type? then options.type="base64"
result= @zip.generate({type:options.type})
if options.download
if DocUtils.env=='node'
fs.writeFile process.cwd()+'/'+options.name, result, 'base64', (err) ->
if err then throw err
if options.callback? then options.callback()
else
#Be aware that data-uri doesn't work for too big files: More Info http://stackoverflow.com/questions/17082286/getting-max-data-uri-size-in-javascript
document.location.href= "data:application/vnd.openxmlformats-officedocument.wordprocessingml.document;base64,#{result}"
result
getFullText:(path="word/document.xml") ->
usedData=@zip.files[path].asText()
(new DocXTemplater(usedData,{DocxGen:this,Tags:@Tags,intelligentTagging:@intelligentTagging})).getFullText()
download: (swfpath, imgpath, filename="default.docx") ->
output=@zip.generate()
Downloadify.create 'downloadify',
filename: () ->return filename
data: () ->
return output
onCancel: () -> alert 'You have cancelled the saving of this file.'
onError: () -> alert 'You must put something in the File Contents or there will be nothing to save!'
swf: swfpath
downloadImage: imgpath
width: 100
height: 30
transparent: true
append: false
dataType:'base64'
|
[
{
"context": "@fileoverview Tests for max-params rule.\n# @author Ilya Volodin\n###\n\n'use strict'\n\n#-----------------------------",
"end": 70,
"score": 0.9997814297676086,
"start": 58,
"tag": "NAME",
"value": "Ilya Volodin"
}
] | src/tests/rules/max-params.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for max-params rule.
# @author Ilya Volodin
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/max-params'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'max-params', rule,
valid: [
'(d, e, f) ->'
,
code: 'test = (a, b, c) ->', options: [3]
,
code: 'test = (a, b, c) =>'
options: [3]
parserOptions: ecmaVersion: 6
,
code: 'test = (a, b, c) ->', options: [3]
,
# object property options
code: 'test = (a, b, c) ->', options: [max: 3]
]
invalid: [
code: 'test = (a, b, c, d) ->'
options: [3]
errors: [
message: 'Function has too many parameters (4). Maximum allowed is 3.'
type: 'FunctionExpression'
]
,
code: '((a, b, c, d) ->)'
options: [3]
errors: [
message: 'Function has too many parameters (4). Maximum allowed is 3.'
type: 'FunctionExpression'
]
,
code: 'test = (a, b, c) ->'
options: [1]
errors: [
message:
'Function has too many parameters (3). Maximum allowed is 1.'
type: 'FunctionExpression'
]
,
# object property options
code: '(a, b, c) ->'
options: [max: 2]
errors: [
message:
'Function has too many parameters (3). Maximum allowed is 2.'
type: 'FunctionExpression'
]
]
| 6207 | ###*
# @fileoverview Tests for max-params rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/max-params'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'max-params', rule,
valid: [
'(d, e, f) ->'
,
code: 'test = (a, b, c) ->', options: [3]
,
code: 'test = (a, b, c) =>'
options: [3]
parserOptions: ecmaVersion: 6
,
code: 'test = (a, b, c) ->', options: [3]
,
# object property options
code: 'test = (a, b, c) ->', options: [max: 3]
]
invalid: [
code: 'test = (a, b, c, d) ->'
options: [3]
errors: [
message: 'Function has too many parameters (4). Maximum allowed is 3.'
type: 'FunctionExpression'
]
,
code: '((a, b, c, d) ->)'
options: [3]
errors: [
message: 'Function has too many parameters (4). Maximum allowed is 3.'
type: 'FunctionExpression'
]
,
code: 'test = (a, b, c) ->'
options: [1]
errors: [
message:
'Function has too many parameters (3). Maximum allowed is 1.'
type: 'FunctionExpression'
]
,
# object property options
code: '(a, b, c) ->'
options: [max: 2]
errors: [
message:
'Function has too many parameters (3). Maximum allowed is 2.'
type: 'FunctionExpression'
]
]
| true | ###*
# @fileoverview Tests for max-params rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/max-params'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'max-params', rule,
valid: [
'(d, e, f) ->'
,
code: 'test = (a, b, c) ->', options: [3]
,
code: 'test = (a, b, c) =>'
options: [3]
parserOptions: ecmaVersion: 6
,
code: 'test = (a, b, c) ->', options: [3]
,
# object property options
code: 'test = (a, b, c) ->', options: [max: 3]
]
invalid: [
code: 'test = (a, b, c, d) ->'
options: [3]
errors: [
message: 'Function has too many parameters (4). Maximum allowed is 3.'
type: 'FunctionExpression'
]
,
code: '((a, b, c, d) ->)'
options: [3]
errors: [
message: 'Function has too many parameters (4). Maximum allowed is 3.'
type: 'FunctionExpression'
]
,
code: 'test = (a, b, c) ->'
options: [1]
errors: [
message:
'Function has too many parameters (3). Maximum allowed is 1.'
type: 'FunctionExpression'
]
,
# object property options
code: '(a, b, c) ->'
options: [max: 2]
errors: [
message:
'Function has too many parameters (3). Maximum allowed is 2.'
type: 'FunctionExpression'
]
]
|
[
{
"context": " Safari/537.17\"\n\n constructor: (@username=null, @password=null) ->\n\n setUsername: (username) ->",
"end": 489,
"score": 0.852112889289856,
"start": 485,
"tag": "USERNAME",
"value": "null"
},
{
"context": "37.17\"\n\n constructor: (@username=null, @password=null) ->\n\n setUsername: (username) ->\n @username =",
"end": 505,
"score": 0.9682425856590271,
"start": 501,
"tag": "PASSWORD",
"value": "null"
},
{
"context": ") ->\n\n setUsername: (username) ->\n @username = username\n\n setPassword: (password) ->\n @password = pas",
"end": 564,
"score": 0.9860467314720154,
"start": 556,
"tag": "USERNAME",
"value": "username"
},
{
"context": "name\n\n setPassword: (password) ->\n @password = password\n\n # Uses the username and password from initiali",
"end": 619,
"score": 0.9970709085464478,
"start": 611,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "PI_LOGIN_PATH,\n body : {\n 'username' : @username,\n 'password' : @password\n }\n }",
"end": 937,
"score": 0.9925912618637085,
"start": 928,
"tag": "USERNAME",
"value": "@username"
},
{
"context": " 'username' : @username,\n 'password' : @password\n }\n },\n (data) ->\n if dat",
"end": 969,
"score": 0.9968360662460327,
"start": 960,
"tag": "PASSWORD",
"value": "@password"
}
] | src/nest-api.coffee | Tommylans/node-nest-api | 8 | https = require 'https'
queryString = require 'querystring'
url = require 'url'
util = require 'util'
NEST_API_HOSTNAME = 'home.nest.com'
NEST_API_PORT = 443
NEST_API_LOGIN_PATH = '/user/login'
class NestApi
username = null
password = null
session = {}
@userAgent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2)
AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0
Safari/537.17"
constructor: (@username=null, @password=null) ->
setUsername: (username) ->
@username = username
setPassword: (password) ->
@password = password
# Uses the username and password from initialization to log
# the user and and get the settings required to access the
# Nest unit.
login: (callback) ->
@post {
hostname : NEST_API_HOSTNAME,
port : NEST_API_PORT,
path : NEST_API_LOGIN_PATH,
body : {
'username' : @username,
'password' : @password
}
},
(data) ->
if data.error?
console.log "Error authenticating: #{data.error}
(#{data.error_description})"
false
session = data
session.urls.transport_url = url.parse session.urls.transport_url
callback data if callback?
# Post to specified settings to the Nest API then pass the returned
# data to the specified callback.
post: (settings, callback) ->
allData = []
settings = settings() if typeof settings is 'function'
if settings? and typeof settings is 'object'
hostname = settings.hostname || session.urls.transport_url.hostname
port = settings.port || session.urls.transport_url.port
path = settings.path
body = settings.body || null
else
throw new Error 'Invalid settings'
if typeof body is 'string'
post_data = body
contentType = 'application/json'
else
post_data = queryString.stringify body
contentType = 'application/x-www-form-urlencoded; charset=utf-8'
options =
host: hostname
port: port
path: path
method: 'POST'
headers:
'Content-Type': contentType
'User-Agent': NestApi.userAgent
'Content-Length': post_data.length
if @session and @session.access_token
options.headers = merge options.headers,
'X-nl-user-id': session.userid,
'X-nl-protocol-version': '1',
'Accept-Language': 'en-us',
'Authorization': "Basic #{session.access_token}"
request = https.request options, (response) ->
response.setEncoding 'utf8'
response.on 'data', (data) ->
allData.push data
response.on 'error', () ->
if (callback)
callback null, response.headers || {}
response.on 'end', () ->
allData = allData.join('')
if allData? and typeof allData is 'string'
allData = JSON.parse allData
callback allData, response.headers || {} if callback?
request.write(post_data)
request.end()
# Get information about the Nest unit then pass the returned data
# to the specified callback.
get: (callback) ->
path = "/v2/mobile/#{session.user}"
allData = []
options =
host: session.urls.transport_url.hostname
port: session.urls.transport_url.port
path: path
method: 'GET'
headers:
'User-Agent': NestApi.userAgent
'X-nl-user-id': session.userid
'X-nl-protocol-version': '1'
'Accept-Language': 'en-us'
'Authorization': "Basic #{session.access_token}"
request = https.request options, (response) ->
response.setEncoding('utf8')
response.on 'data', (data) ->
allData.push data
response.on 'end', () ->
allData = allData.join ''
if allData and typeof allData is 'string' and allData.length > 0
allData = JSON.parse allData
else
allData = null
callback allData if callback?
request.end()
# Logs the user in the uses the settings returned from
# Nest to get information about the unit.
getInfo: (serialNumber, callback) ->
self = @
self.login () ->
self.get (data) ->
console.log data
if data.device[serialNumber]? || data.shared[serialNumber]?
callback merge(data.device[serialNumber], data.shared[serialNumber])
module.exports = NestApi
| 155805 | https = require 'https'
queryString = require 'querystring'
url = require 'url'
util = require 'util'
NEST_API_HOSTNAME = 'home.nest.com'
NEST_API_PORT = 443
NEST_API_LOGIN_PATH = '/user/login'
class NestApi
username = null
password = null
session = {}
@userAgent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2)
AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0
Safari/537.17"
constructor: (@username=null, @password=<PASSWORD>) ->
setUsername: (username) ->
@username = username
setPassword: (password) ->
@password = <PASSWORD>
# Uses the username and password from initialization to log
# the user and and get the settings required to access the
# Nest unit.
login: (callback) ->
@post {
hostname : NEST_API_HOSTNAME,
port : NEST_API_PORT,
path : NEST_API_LOGIN_PATH,
body : {
'username' : @username,
'password' : <PASSWORD>
}
},
(data) ->
if data.error?
console.log "Error authenticating: #{data.error}
(#{data.error_description})"
false
session = data
session.urls.transport_url = url.parse session.urls.transport_url
callback data if callback?
# Post to specified settings to the Nest API then pass the returned
# data to the specified callback.
post: (settings, callback) ->
allData = []
settings = settings() if typeof settings is 'function'
if settings? and typeof settings is 'object'
hostname = settings.hostname || session.urls.transport_url.hostname
port = settings.port || session.urls.transport_url.port
path = settings.path
body = settings.body || null
else
throw new Error 'Invalid settings'
if typeof body is 'string'
post_data = body
contentType = 'application/json'
else
post_data = queryString.stringify body
contentType = 'application/x-www-form-urlencoded; charset=utf-8'
options =
host: hostname
port: port
path: path
method: 'POST'
headers:
'Content-Type': contentType
'User-Agent': NestApi.userAgent
'Content-Length': post_data.length
if @session and @session.access_token
options.headers = merge options.headers,
'X-nl-user-id': session.userid,
'X-nl-protocol-version': '1',
'Accept-Language': 'en-us',
'Authorization': "Basic #{session.access_token}"
request = https.request options, (response) ->
response.setEncoding 'utf8'
response.on 'data', (data) ->
allData.push data
response.on 'error', () ->
if (callback)
callback null, response.headers || {}
response.on 'end', () ->
allData = allData.join('')
if allData? and typeof allData is 'string'
allData = JSON.parse allData
callback allData, response.headers || {} if callback?
request.write(post_data)
request.end()
# Get information about the Nest unit then pass the returned data
# to the specified callback.
get: (callback) ->
path = "/v2/mobile/#{session.user}"
allData = []
options =
host: session.urls.transport_url.hostname
port: session.urls.transport_url.port
path: path
method: 'GET'
headers:
'User-Agent': NestApi.userAgent
'X-nl-user-id': session.userid
'X-nl-protocol-version': '1'
'Accept-Language': 'en-us'
'Authorization': "Basic #{session.access_token}"
request = https.request options, (response) ->
response.setEncoding('utf8')
response.on 'data', (data) ->
allData.push data
response.on 'end', () ->
allData = allData.join ''
if allData and typeof allData is 'string' and allData.length > 0
allData = JSON.parse allData
else
allData = null
callback allData if callback?
request.end()
# Logs the user in the uses the settings returned from
# Nest to get information about the unit.
getInfo: (serialNumber, callback) ->
self = @
self.login () ->
self.get (data) ->
console.log data
if data.device[serialNumber]? || data.shared[serialNumber]?
callback merge(data.device[serialNumber], data.shared[serialNumber])
module.exports = NestApi
| true | https = require 'https'
queryString = require 'querystring'
url = require 'url'
util = require 'util'
NEST_API_HOSTNAME = 'home.nest.com'
NEST_API_PORT = 443
NEST_API_LOGIN_PATH = '/user/login'
class NestApi
username = null
password = null
session = {}
@userAgent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2)
AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0
Safari/537.17"
constructor: (@username=null, @password=PI:PASSWORD:<PASSWORD>END_PI) ->
setUsername: (username) ->
@username = username
setPassword: (password) ->
@password = PI:PASSWORD:<PASSWORD>END_PI
# Uses the username and password from initialization to log
# the user and and get the settings required to access the
# Nest unit.
login: (callback) ->
@post {
hostname : NEST_API_HOSTNAME,
port : NEST_API_PORT,
path : NEST_API_LOGIN_PATH,
body : {
'username' : @username,
'password' : PI:PASSWORD:<PASSWORD>END_PI
}
},
(data) ->
if data.error?
console.log "Error authenticating: #{data.error}
(#{data.error_description})"
false
session = data
session.urls.transport_url = url.parse session.urls.transport_url
callback data if callback?
# Post to specified settings to the Nest API then pass the returned
# data to the specified callback.
post: (settings, callback) ->
allData = []
settings = settings() if typeof settings is 'function'
if settings? and typeof settings is 'object'
hostname = settings.hostname || session.urls.transport_url.hostname
port = settings.port || session.urls.transport_url.port
path = settings.path
body = settings.body || null
else
throw new Error 'Invalid settings'
if typeof body is 'string'
post_data = body
contentType = 'application/json'
else
post_data = queryString.stringify body
contentType = 'application/x-www-form-urlencoded; charset=utf-8'
options =
host: hostname
port: port
path: path
method: 'POST'
headers:
'Content-Type': contentType
'User-Agent': NestApi.userAgent
'Content-Length': post_data.length
if @session and @session.access_token
options.headers = merge options.headers,
'X-nl-user-id': session.userid,
'X-nl-protocol-version': '1',
'Accept-Language': 'en-us',
'Authorization': "Basic #{session.access_token}"
request = https.request options, (response) ->
response.setEncoding 'utf8'
response.on 'data', (data) ->
allData.push data
response.on 'error', () ->
if (callback)
callback null, response.headers || {}
response.on 'end', () ->
allData = allData.join('')
if allData? and typeof allData is 'string'
allData = JSON.parse allData
callback allData, response.headers || {} if callback?
request.write(post_data)
request.end()
# Get information about the Nest unit then pass the returned data
# to the specified callback.
get: (callback) ->
path = "/v2/mobile/#{session.user}"
allData = []
options =
host: session.urls.transport_url.hostname
port: session.urls.transport_url.port
path: path
method: 'GET'
headers:
'User-Agent': NestApi.userAgent
'X-nl-user-id': session.userid
'X-nl-protocol-version': '1'
'Accept-Language': 'en-us'
'Authorization': "Basic #{session.access_token}"
request = https.request options, (response) ->
response.setEncoding('utf8')
response.on 'data', (data) ->
allData.push data
response.on 'end', () ->
allData = allData.join ''
if allData and typeof allData is 'string' and allData.length > 0
allData = JSON.parse allData
else
allData = null
callback allData if callback?
request.end()
# Logs the user in the uses the settings returned from
# Nest to get information about the unit.
getInfo: (serialNumber, callback) ->
self = @
self.login () ->
self.get (data) ->
console.log data
if data.device[serialNumber]? || data.shared[serialNumber]?
callback merge(data.device[serialNumber], data.shared[serialNumber])
module.exports = NestApi
|
[
{
"context": "###\n* @author Robert Kolatzek\n * The MIT License (MIT)\n *\n * Copyright (c)",
"end": 29,
"score": 0.9998644590377808,
"start": 14,
"tag": "NAME",
"value": "Robert Kolatzek"
},
{
"context": "he MIT License (MIT)\n *\n * Copyright (c) 2015 Dr. Robert Kolatzek\n *\n * Permission is hereby granted, free of char",
"end": 104,
"score": 0.9998591542243958,
"start": 89,
"tag": "NAME",
"value": "Robert Kolatzek"
}
] | src/de.coffee | SULB/GNDcoffeine | 0 | ###
* @author Robert Kolatzek
* The MIT License (MIT)
*
* Copyright (c) 2015 Dr. Robert Kolatzek
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
###
gndSearchTimeout = 5000
gndDivInsertAfterId = '#swd_div'
insertswdCollectionAfterId = '#partSearchButton'
swdId = '#subject_swd'
swdErsatzNachrichtId = '#swdBereich'
xgndSearchFormId = '#xgndSearchForm'
swdDivSammlungId = '#swd_div_sammlung'
partSearchButtonId = '#partSearchButton'
exactSearchId = '#exactSearch'
swdWortClass = "swd_wort"
gndsDiv = '#gnds'
suchwortId = '#suchwort'
documentHistoryItem = 'GND Suche nach'
timeoutWarning = "<span class='gndTimeoutWarning'>Der Server braucht lange, um eine Antwort zu liefern. Warten Sie bitte die Antwort ab und versuchen Sie mit genaueren Begriffen zu recherchieren.</span>"
dataErrorWarning = "<p class='searchGndTimeout'>Der Server hat keine gültige Antwort ausgeliefert. Warten Sie bitte die Antwort des Servers ab (bis zu einer Minute) und versuchen mit genaueren Begriffen zu recherhieren.</p>"
emptyResultWarning = "<div class='emptyResult'>Die Suche ergab keine Treffer.</div>"
schlagwortSpanselected = '<div class="#{swdWortClass}" title="zum Entfernen von \'#{schlagwort}\' aus der Liste hier klicken" data-wort="#{schlagwort}"><span class="wort">#{schlagwort}</span> , </div>'
bitteWarten = '<span class="waitGnd">bitte warten</span>'
zeigeListeDerGruppeTyp = 'Für eine Liste von Schlagwörtern der Gruppe #{typ}, hier klicken'
schlagwortInFundListe = 'Zum übernehmen hier klicken'
detailsFuerSchlagwort = 'Details für #{schlagwort}'
Typ = "Typ"
Inhalt = "Inhalt"
Details = "Details"
detailsHinweis = "Für weitere Informationen zu diesem Schlagwort bitte hier klicken"
schlagwortUebernehmen = "<i>Schlagwörter, die in SciDok übernommen werden:</i> "
gndFormHtml = '
<form id="xgndSearchForm" onsubmit="return false;" aria-role="application" aria-labelledby="GNDcoffeineUeberschrift" aria-describedby="GNDcoffeineAnleitung">
<h1 id="GNDcoffeineUeberschrift">Schlagwortsuche in der GND</h1>
<p id="GNDcoffeineAnleitung">Geben Sie ein (Teil-)Wort ein, starten eine Suchabfrage, klappen die gewünschten Schlagwort-Typen per Klick auf und wählen aus diesen die passenden Schlagwörter aus.
Durch die Auswahl werden sie in die Liste oben übernommen. Unter Details finden Sie weitere Angaben zum Schlagwort und verwandte Schlagwörter. Ein Klick auf die
gemerkten Schlagwörter entfernt sie wieder aus der Liste.
</p>
<input type="search" placeholder="Wort für die Suche in der GND" size="40" id="suchwort" />
<input type="submit" value="exakte Schlagwortsuche" id="exactSearch"/><input type="submit" value="Teilwortsuche" title="Diese ungenaue Suche kann sehr viele Ergebnisse liefern. Seien Sie daher sehr geduldig!" id="partSearchButton" />
<div id="gnds" aria-live="polite" aria-atomic="true" aria-relevant="all"></div>
</form>'
exactSearchResultMsg = "<p>Ergebnis der exakten Suche:</p>"
fuzzySearchResultMsg = "<p>Ergebnis der Teilwortsuche:</p>" | 163862 | ###
* @author <NAME>
* The MIT License (MIT)
*
* Copyright (c) 2015 Dr. <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
###
gndSearchTimeout = 5000
gndDivInsertAfterId = '#swd_div'
insertswdCollectionAfterId = '#partSearchButton'
swdId = '#subject_swd'
swdErsatzNachrichtId = '#swdBereich'
xgndSearchFormId = '#xgndSearchForm'
swdDivSammlungId = '#swd_div_sammlung'
partSearchButtonId = '#partSearchButton'
exactSearchId = '#exactSearch'
swdWortClass = "swd_wort"
gndsDiv = '#gnds'
suchwortId = '#suchwort'
documentHistoryItem = 'GND Suche nach'
timeoutWarning = "<span class='gndTimeoutWarning'>Der Server braucht lange, um eine Antwort zu liefern. Warten Sie bitte die Antwort ab und versuchen Sie mit genaueren Begriffen zu recherchieren.</span>"
dataErrorWarning = "<p class='searchGndTimeout'>Der Server hat keine gültige Antwort ausgeliefert. Warten Sie bitte die Antwort des Servers ab (bis zu einer Minute) und versuchen mit genaueren Begriffen zu recherhieren.</p>"
emptyResultWarning = "<div class='emptyResult'>Die Suche ergab keine Treffer.</div>"
schlagwortSpanselected = '<div class="#{swdWortClass}" title="zum Entfernen von \'#{schlagwort}\' aus der Liste hier klicken" data-wort="#{schlagwort}"><span class="wort">#{schlagwort}</span> , </div>'
bitteWarten = '<span class="waitGnd">bitte warten</span>'
zeigeListeDerGruppeTyp = 'Für eine Liste von Schlagwörtern der Gruppe #{typ}, hier klicken'
schlagwortInFundListe = 'Zum übernehmen hier klicken'
detailsFuerSchlagwort = 'Details für #{schlagwort}'
Typ = "Typ"
Inhalt = "Inhalt"
Details = "Details"
detailsHinweis = "Für weitere Informationen zu diesem Schlagwort bitte hier klicken"
schlagwortUebernehmen = "<i>Schlagwörter, die in SciDok übernommen werden:</i> "
gndFormHtml = '
<form id="xgndSearchForm" onsubmit="return false;" aria-role="application" aria-labelledby="GNDcoffeineUeberschrift" aria-describedby="GNDcoffeineAnleitung">
<h1 id="GNDcoffeineUeberschrift">Schlagwortsuche in der GND</h1>
<p id="GNDcoffeineAnleitung">Geben Sie ein (Teil-)Wort ein, starten eine Suchabfrage, klappen die gewünschten Schlagwort-Typen per Klick auf und wählen aus diesen die passenden Schlagwörter aus.
Durch die Auswahl werden sie in die Liste oben übernommen. Unter Details finden Sie weitere Angaben zum Schlagwort und verwandte Schlagwörter. Ein Klick auf die
gemerkten Schlagwörter entfernt sie wieder aus der Liste.
</p>
<input type="search" placeholder="Wort für die Suche in der GND" size="40" id="suchwort" />
<input type="submit" value="exakte Schlagwortsuche" id="exactSearch"/><input type="submit" value="Teilwortsuche" title="Diese ungenaue Suche kann sehr viele Ergebnisse liefern. Seien Sie daher sehr geduldig!" id="partSearchButton" />
<div id="gnds" aria-live="polite" aria-atomic="true" aria-relevant="all"></div>
</form>'
exactSearchResultMsg = "<p>Ergebnis der exakten Suche:</p>"
fuzzySearchResultMsg = "<p>Ergebnis der Teilwortsuche:</p>" | true | ###
* @author PI:NAME:<NAME>END_PI
* The MIT License (MIT)
*
* Copyright (c) 2015 Dr. PI:NAME:<NAME>END_PI
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
###
gndSearchTimeout = 5000
gndDivInsertAfterId = '#swd_div'
insertswdCollectionAfterId = '#partSearchButton'
swdId = '#subject_swd'
swdErsatzNachrichtId = '#swdBereich'
xgndSearchFormId = '#xgndSearchForm'
swdDivSammlungId = '#swd_div_sammlung'
partSearchButtonId = '#partSearchButton'
exactSearchId = '#exactSearch'
swdWortClass = "swd_wort"
gndsDiv = '#gnds'
suchwortId = '#suchwort'
documentHistoryItem = 'GND Suche nach'
timeoutWarning = "<span class='gndTimeoutWarning'>Der Server braucht lange, um eine Antwort zu liefern. Warten Sie bitte die Antwort ab und versuchen Sie mit genaueren Begriffen zu recherchieren.</span>"
dataErrorWarning = "<p class='searchGndTimeout'>Der Server hat keine gültige Antwort ausgeliefert. Warten Sie bitte die Antwort des Servers ab (bis zu einer Minute) und versuchen mit genaueren Begriffen zu recherhieren.</p>"
emptyResultWarning = "<div class='emptyResult'>Die Suche ergab keine Treffer.</div>"
schlagwortSpanselected = '<div class="#{swdWortClass}" title="zum Entfernen von \'#{schlagwort}\' aus der Liste hier klicken" data-wort="#{schlagwort}"><span class="wort">#{schlagwort}</span> , </div>'
bitteWarten = '<span class="waitGnd">bitte warten</span>'
zeigeListeDerGruppeTyp = 'Für eine Liste von Schlagwörtern der Gruppe #{typ}, hier klicken'
schlagwortInFundListe = 'Zum übernehmen hier klicken'
detailsFuerSchlagwort = 'Details für #{schlagwort}'
Typ = "Typ"
Inhalt = "Inhalt"
Details = "Details"
detailsHinweis = "Für weitere Informationen zu diesem Schlagwort bitte hier klicken"
schlagwortUebernehmen = "<i>Schlagwörter, die in SciDok übernommen werden:</i> "
gndFormHtml = '
<form id="xgndSearchForm" onsubmit="return false;" aria-role="application" aria-labelledby="GNDcoffeineUeberschrift" aria-describedby="GNDcoffeineAnleitung">
<h1 id="GNDcoffeineUeberschrift">Schlagwortsuche in der GND</h1>
<p id="GNDcoffeineAnleitung">Geben Sie ein (Teil-)Wort ein, starten eine Suchabfrage, klappen die gewünschten Schlagwort-Typen per Klick auf und wählen aus diesen die passenden Schlagwörter aus.
Durch die Auswahl werden sie in die Liste oben übernommen. Unter Details finden Sie weitere Angaben zum Schlagwort und verwandte Schlagwörter. Ein Klick auf die
gemerkten Schlagwörter entfernt sie wieder aus der Liste.
</p>
<input type="search" placeholder="Wort für die Suche in der GND" size="40" id="suchwort" />
<input type="submit" value="exakte Schlagwortsuche" id="exactSearch"/><input type="submit" value="Teilwortsuche" title="Diese ungenaue Suche kann sehr viele Ergebnisse liefern. Seien Sie daher sehr geduldig!" id="partSearchButton" />
<div id="gnds" aria-live="polite" aria-atomic="true" aria-relevant="all"></div>
</form>'
exactSearchResultMsg = "<p>Ergebnis der exakten Suche:</p>"
fuzzySearchResultMsg = "<p>Ergebnis der Teilwortsuche:</p>" |
[
{
"context": "ftBorder), rightBorder)\n\n\tcoordinateMatrix = {\n\t\t'Amphail': {x: 38.67, y: 71.96},\n\t\t'Ascore': {x: 88.44, y:",
"end": 1805,
"score": 0.9990251660346985,
"start": 1798,
"tag": "NAME",
"value": "Amphail"
},
{
"context": "eMatrix = {\n\t\t'Amphail': {x: 38.67, y: 71.96},\n\t\t'Ascore': {x: 88.44, y: 24.11},\n\t\t'Aurilssbarg': {x: 4.77",
"end": 1839,
"score": 0.9989385008811951,
"start": 1833,
"tag": "NAME",
"value": "Ascore"
},
{
"context": ", y: 71.96},\n\t\t'Ascore': {x: 88.44, y: 24.11},\n\t\t'Aurilssbarg': {x: 4.77, y: 15.87},\n\t\t'Bargewright Inn': {x: 4",
"end": 1878,
"score": 0.9995902180671692,
"start": 1867,
"tag": "NAME",
"value": "Aurilssbarg"
},
{
"context": " 24.11},\n\t\t'Aurilssbarg': {x: 4.77, y: 15.87},\n\t\t'Bargewright Inn': {x: 43.28, y: 67.9},\n\t\t'Beliard': {x: 46.33, y:",
"end": 1920,
"score": 0.9995293021202087,
"start": 1905,
"tag": "NAME",
"value": "Bargewright Inn"
},
{
"context": "87},\n\t\t'Bargewright Inn': {x: 43.28, y: 67.9},\n\t\t'Beliard': {x: 46.33, y: 57.2},\n\t\t#'Beorunna\\'s Well': {x:",
"end": 1954,
"score": 0.9995558857917786,
"start": 1947,
"tag": "NAME",
"value": "Beliard"
},
{
"context": ", y: 67.9},\n\t\t'Beliard': {x: 46.33, y: 57.2},\n\t\t#'Beorunna\\'s Well': {x: 67.89, y: 8.49},\n\t\t'Calling Horns': {x: 53.",
"end": 1998,
"score": 0.9960238337516785,
"start": 1982,
"tag": "NAME",
"value": "Beorunna\\'s Well"
},
{
"context": "},\n\t\t#'Beorunna\\'s Well': {x: 67.89, y: 8.49},\n\t\t'Calling Horns': {x: 53.91, y: 42.07},\n\t\t'Carnath Roadhouse': {x",
"end": 2038,
"score": 0.9986839294433594,
"start": 2025,
"tag": "NAME",
"value": "Calling Horns"
},
{
"context": ".49},\n\t\t'Calling Horns': {x: 53.91, y: 42.07},\n\t\t'Carnath Roadhouse': {x: 32.66, y: 61.25},\n\t\t'Citadel Adbar': {x: 83",
"end": 2083,
"score": 0.9993893504142761,
"start": 2066,
"tag": "NAME",
"value": "Carnath Roadhouse"
},
{
"context": ",\n\t\t'Carnath Roadhouse': {x: 32.66, y: 61.25},\n\t\t'Citadel Adbar': {x: 83.59, y: 16.73},\n\t\t'Citadel Felbarr': {x: ",
"end": 2124,
"score": 0.9995333552360535,
"start": 2111,
"tag": "NAME",
"value": "Citadel Adbar"
},
{
"context": ".25},\n\t\t'Citadel Adbar': {x: 83.59, y: 16.73},\n\t\t'Citadel Felbarr': {x: 71.02, y: 20.05},\n\t\t'Daggerford': {x: 45.63",
"end": 2167,
"score": 0.9995427131652832,
"start": 2152,
"tag": "NAME",
"value": "Citadel Felbarr"
},
{
"context": "3},\n\t\t'Citadel Felbarr': {x: 71.02, y: 20.05},\n\t\t'Daggerford': {x: 45.63, y: 87.95},\n\t\t'Deadsnows': {x: 79.38,",
"end": 2205,
"score": 0.9995686411857605,
"start": 2195,
"tag": "NAME",
"value": "Daggerford"
},
{
"context": " 20.05},\n\t\t'Daggerford': {x: 45.63, y: 87.95},\n\t\t'Deadsnows': {x: 79.38, y: 21.53},\n\t\t#'Deadstone Cleft': {x:",
"end": 2242,
"score": 0.9996355772018433,
"start": 2233,
"tag": "NAME",
"value": "Deadsnows"
},
{
"context": " 87.95},\n\t\t'Deadsnows': {x: 79.38, y: 21.53},\n\t\t#'Deadstone Cleft': {x: 78.83, y: 63.96},\n\t\t'Everlund': {x: 63.05, ",
"end": 2286,
"score": 0.9938416481018066,
"start": 2271,
"tag": "NAME",
"value": "Deadstone Cleft"
},
{
"context": "},\n\t\t#'Deadstone Cleft': {x: 78.83, y: 63.96},\n\t\t'Everlund': {x: 63.05, y: 32.96},\n\t\t'Auge des Allvaters': {",
"end": 2322,
"score": 0.9972836375236511,
"start": 2314,
"tag": "NAME",
"value": "Everlund"
},
{
"context": "y: 63.96},\n\t\t'Everlund': {x: 63.05, y: 32.96},\n\t\t'Auge des Allvaters': {x: 42.73, y: 6.27},\n\t\t'Fireshear",
"end": 2354,
"score": 0.8117418885231018,
"start": 2350,
"tag": "NAME",
"value": "Auge"
},
{
"context": "},\n\t\t'Everlund': {x: 63.05, y: 32.96},\n\t\t'Auge des Allvaters': {x: 42.73, y: 6.27},\n\t\t'Fireshear': {x: 14.22, ",
"end": 2368,
"score": 0.8030033111572266,
"start": 2359,
"tag": "NAME",
"value": "Allvaters"
},
{
"context": ",\n\t\t'Auge des Allvaters': {x: 42.73, y: 6.27},\n\t\t'Fireshear': {x: 14.22, y: 19.8},\n\t\t#'Flint Rock': {x: 49.14",
"end": 2404,
"score": 0.999409556388855,
"start": 2395,
"tag": "NAME",
"value": "Fireshear"
},
{
"context": "y: 6.27},\n\t\t'Fireshear': {x: 14.22, y: 19.8},\n\t\t#'Flint Rock': {x: 49.14, y: 38.38},\n\t\t'Gauntlgrym': {x: 29",
"end": 2439,
"score": 0.973214328289032,
"start": 2432,
"tag": "NAME",
"value": "Flint R"
},
{
"context": "},\n\t\t'Griffon\\'s Nest': {x: 43.52, y: 27.55},\n\t\t#'Grudd Haug': {x: 48.28, y: 64.7},\n\t\t'Hawk\\'s Nest': {x: 66.3",
"end": 2692,
"score": 0.9961121678352356,
"start": 2682,
"tag": "NAME",
"value": "Grudd Haug"
},
{
"context": " 13.16},\n\t\t'Ironmaster': {x: 13.83, y: 10.7},\n\t\t'Ironslag': {x: 76.88, y: 9.47},\n\t\t#'Iymrith\\'s Lair': ",
"end": 2879,
"score": 0.5535679459571838,
"start": 2876,
"tag": "NAME",
"value": "ron"
},
{
"context": "6},\n\t\t'Ironmaster': {x: 13.83, y: 10.7},\n\t\t'Ironslag': {x: 76.88, y: 9.47},\n\t\t#'Iymrith\\'s Lair': {x: ",
"end": 2883,
"score": 0.567863404750824,
"start": 2881,
"tag": "NAME",
"value": "ag"
},
{
"context": " y: 10.7},\n\t\t'Ironslag': {x: 76.88, y: 9.47},\n\t\t#'Iymrith\\'s Lair': {x: 90.86, y: 21.89},\n\t\t'Julkoun'",
"end": 2912,
"score": 0.5819246172904968,
"start": 2911,
"tag": "NAME",
"value": "I"
},
{
"context": "10.7},\n\t\t'Ironslag': {x: 76.88, y: 9.47},\n\t\t#'Iymrith\\'s Lair': {x: 90.86, y: 21.89},\n\t\t'Julkoun': {x: ",
"end": 2918,
"score": 0.5445968508720398,
"start": 2915,
"tag": "NAME",
"value": "ith"
},
{
"context": "},\n\t\t#'Iymrith\\'s Lair': {x: 90.86, y: 21.89},\n\t\t'Julkoun': {x: 51.33, y: 85.61},\n\t\t'Kheldell': {x: 36.72, ",
"end": 2961,
"score": 0.8498678207397461,
"start": 2954,
"tag": "NAME",
"value": "Julkoun"
},
{
"context": " y: 21.89},\n\t\t'Julkoun': {x: 51.33, y: 85.61},\n\t\t'Kheldell': {x: 36.72, y: 63.84},\n\t\t'Leilon': {x: 29.61, y:",
"end": 2997,
"score": 0.7748372554779053,
"start": 2989,
"tag": "NAME",
"value": "Kheldell"
},
{
"context": "y: 73.43},\n\t\t'Lurkwood': {x: 37.38, y: 20.62},\n\t\t'Luskan': {x: 23.13, y: 24.35},\n\t\t'Mines of Mirabar': {x:",
"end": 3210,
"score": 0.7371786236763,
"start": 3204,
"tag": "NAME",
"value": "Luskan"
},
{
"context": "\n\t\t'Mornbryn\\'s Shield': {x: 46.41, y: 35.18},\n\t\t'Nesmé': {x: 51.17, y: 27.43},\n\t\t'Neverwinter': {x: 26.0",
"end": 3451,
"score": 0.7850155830383301,
"start": 3446,
"tag": "NAME",
"value": "Nesmé"
},
{
"context": " y: 70.11},\n\t\t'Parnast': {x: 84.84, y: 78.23},\n\t\t'Phandalin': {x: 32.19, y: 52.15},\n\t\t'Port Llast': {x: 2",
"end": 3788,
"score": 0.5786408185958862,
"start": 3783,
"tag": "NAME",
"value": "Phand"
},
{
"context": " 56.46},\n\t\t#'Stone Stand': {x: 70, y: 34.44},\n\t\t'Sundabar': {x: 72.03, y: 27.06},\n\t\t#'Svardborg': {x: 6.8",
"end": 4333,
"score": 0.6620511412620544,
"start": 4328,
"tag": "NAME",
"value": "undab"
},
{
"context": ": 34.44},\n\t\t'Sundabar': {x: 72.03, y: 27.06},\n\t\t#'Svardborg': {x: 6.8, y: 8.61},\n\t\t'Thornhold': {x: 31.64, y:",
"end": 4373,
"score": 0.9163591861724854,
"start": 4364,
"tag": "NAME",
"value": "Svardborg"
},
{
"context": " y: 27.06},\n\t\t#'Svardborg': {x: 6.8, y: 8.61},\n\t\t'Thornhold': {x: 31.64, y: 65.56},\n\t\t'Triboar': {x: 41.8",
"end": 4403,
"score": 0.5531495809555054,
"start": 4398,
"tag": "NAME",
"value": "Thorn"
},
{
"context": ", y: 65.56},\n\t\t'Triboar': {x: 41.88, y: 44.9},\n\t\t'Uluvin': {x: 54.3, y: 73.43},\n\t\t'Waterdeep': {x: 38.98, ",
"end": 4475,
"score": 0.8006574511528015,
"start": 4469,
"tag": "NAME",
"value": "Uluvin"
},
{
"context": " y: 52.52},\n\t\t'Womford': {x: 43.91, y: 68.14},\n\t\t'Xantharl\\'s Keep': {x: 36.02, y: 21.89},\n\t\t'Yartar': {x: 46.25, y:",
"end": 4663,
"score": 0.9826091527938843,
"start": 4647,
"tag": "NAME",
"value": "Xantharl\\'s Keep"
},
{
"context": "},\n\t\t'Xantharl\\'s Keep': {x: 36.02, y: 21.89},\n\t\t'Yartar': {x: 46.25, y: 44.4},\n\t\t'Zelbross': {x: 61.64, y",
"end": 4697,
"score": 0.9995982646942139,
"start": 4691,
"tag": "NAME",
"value": "Yartar"
},
{
"context": "2, y: 21.89},\n\t\t'Yartar': {x: 46.25, y: 44.4},\n\t\t'Zelbross': {x: 61.64, y: 77.74},\n\t\t'Zymorven Hall': {x: 57",
"end": 4732,
"score": 0.9994841814041138,
"start": 4724,
"tag": "NAME",
"value": "Zelbross"
},
{
"context": " y: 44.4},\n\t\t'Zelbross': {x: 61.64, y: 77.74},\n\t\t'Zymorven Hall': {x: 57.66, y: 25.34},\n\t}\n\n\taddMarkerToCurrentLo",
"end": 4773,
"score": 0.9988671541213989,
"start": 4760,
"tag": "NAME",
"value": "Zymorven Hall"
}
] | assets/js/location_map.coffee | kraatob/dnd5e_story_skt | 1 | ---
---
up.compiler '.location-map', ($map, currentLocation) ->
ZOOMFACTOR = 2.5
isZoomed = false
initialize = ->
initializeDom()
initializeHandlers()
initializeDom = ->
@$wrapper = $("<div class='location-map--image-wrapper'></div>")
@$mapImage = $('<img src="/images/karte/faerun_north.jpg" class="location-map--image">')
.load ->
addMarkerToCurrentLocation()
@$marker = $("<div class='location-map--marker pin' style='display: none;'></div>")
$wrapper.append($mapImage)
$wrapper.append($marker)
$map.append($wrapper)
initializeHandlers = ->
$wrapper.on('dblclick dbltap', toggleZoom)
$(window).resize(addMarkerToCurrentLocation)
# Uncomment the following lines to get the coordinates of a new location
# by just clicking on it. Usefull for adding new locations.
# $mapImage.on('click', showLocationCoordinates)
toggleZoom = (e) ->
isZoomed = !isZoomed
if isZoomed
$wrapper.on('click tap', navigateOnMap)
navigateOnMap(e)
else
$wrapper[0].style.transform = "scale(1)"
$wrapper.off('click tap', navigateOnMap)
navigateOnMap = (e) ->
offset = zoomImageOffset(e)
$wrapper[0].style.transform = "scale(#{ZOOMFACTOR}) translateX(#{offset.x}px) translateY(#{offset.y}px)"
zoomImageOffset = (e) ->
# The top left ankered zoom rectangle should be aligned with the now
# ZOOMFACTOR smaller image.
xOffset = mapBoundryOffset(e.target.clientWidth, e.offsetX)
yOffset = mapBoundryOffset(e.target.clientHeight, e.offsetY)
{x: xOffset, y: yOffset}
mapBoundryOffset = (maxLength, offset) ->
zoomedBoxLength = maxLength / ZOOMFACTOR
leftBorder = 0
rightBorder = maxLength - zoomedBoxLength
zoomedOffset = offset - zoomedBoxLength / 2
- Math.min(Math.max(zoomedOffset, leftBorder), rightBorder)
coordinateMatrix = {
'Amphail': {x: 38.67, y: 71.96},
'Ascore': {x: 88.44, y: 24.11},
'Aurilssbarg': {x: 4.77, y: 15.87},
'Bargewright Inn': {x: 43.28, y: 67.9},
'Beliard': {x: 46.33, y: 57.2},
#'Beorunna\'s Well': {x: 67.89, y: 8.49},
'Calling Horns': {x: 53.91, y: 42.07},
'Carnath Roadhouse': {x: 32.66, y: 61.25},
'Citadel Adbar': {x: 83.59, y: 16.73},
'Citadel Felbarr': {x: 71.02, y: 20.05},
'Daggerford': {x: 45.63, y: 87.95},
'Deadsnows': {x: 79.38, y: 21.53},
#'Deadstone Cleft': {x: 78.83, y: 63.96},
'Everlund': {x: 63.05, y: 32.96},
'Auge des Allvaters': {x: 42.73, y: 6.27},
'Fireshear': {x: 14.22, y: 19.8},
#'Flint Rock': {x: 49.14, y: 38.38},
'Gauntlgrym': {x: 29.06, y: 30.63},
'Goldenfields': {x: 42.58, y: 71.96},
#'Grandfather Tree': {x: 65.39, y: 42.07},
#'Great Worm Cavern': {x: 46.64, y: 7.13},
'Griffon\'s Nest': {x: 43.52, y: 27.55},
#'Grudd Haug': {x: 48.28, y: 64.7},
'Hawk\'s Nest': {x: 66.33, y: 24.97},
'Helm\'s Hold': {x: 26.72, y: 40.84},
'Hundelstone': {x: 16.09, y: 13.16},
'Ironmaster': {x: 13.83, y: 10.7},
'Ironslag': {x: 76.88, y: 9.47},
#'Iymrith\'s Lair': {x: 90.86, y: 21.89},
'Julkoun': {x: 51.33, y: 85.61},
'Kheldell': {x: 36.72, y: 63.84},
'Leilon': {x: 29.61, y: 55.23},
'Llorkh': {x: 76.02, y: 74.54},
'Longsaddle': {x: 38.98, y: 31.73},
'Loudwater': {x: 66.25, y: 73.43},
'Lurkwood': {x: 37.38, y: 20.62},
'Luskan': {x: 23.13, y: 24.35},
'Mines of Mirabar': {x: 32.89, y: 12.67},
'Mirabar': {x: 34.38, y: 15.62},
'Mithral Hall': {x: 54.37, y: 20.54},
#'Morgur\'s Mound': {x: 32.5, y: 25.09},
'Mornbryn\'s Shield': {x: 46.41, y: 35.18},
'Nesmé': {x: 51.17, y: 27.43},
'Neverwinter': {x: 26.02, y: 40.22},
'Newfort': {x: 75.16, y: 24.72},
'Nightstone': {x: 41.64, y: 80.93},
'Noanar\'s Hold': {x: 55.63, y: 44.28},
'Olostin\'s Hold': {x: 61.41, y: 36.65},
#'One Stone': {x: 64.61, y: 14.51},
'Orlbar': {x: 71.41, y: 70.11},
'Parnast': {x: 84.84, y: 78.23},
'Phandalin': {x: 32.19, y: 52.15},
'Port Llast': {x: 24.45, y: 35.55},
'Rassalantar': {x: 37.89, y: 74.54},
#'Raven Rock': {x: 25.7, y: 10.95},
'Red Larch': {x: 40.47, y: 60.76},
'Rivermoot': {x: 53.28, y: 24.23},
'Secomber': {x: 56.02, y: 81.43},
'Senteq\'s Hut': {x: 72, y: 41.79},
'Shadowtop Cathedral': {x: 59.38, y: 43.79},
#'Shining White': {x: 45.63, y: 25.22},
'Silverymoon': {x: 62.19, y: 28.17},
'Starmetal Hills': {x: 37.5, y: 35},
'Stone Bridge': {x: 45.23, y: 56.46},
#'Stone Stand': {x: 70, y: 34.44},
'Sundabar': {x: 72.03, y: 27.06},
#'Svardborg': {x: 6.8, y: 8.61},
'Thornhold': {x: 31.64, y: 65.56},
'Triboar': {x: 41.88, y: 44.9},
'Uluvin': {x: 54.3, y: 73.43},
'Waterdeep': {x: 38.98, y: 79.46},
'Way Inn': {x: 48.75, y: 96.68},
'Westbridge': {x: 41.72, y: 52.52},
'Womford': {x: 43.91, y: 68.14},
'Xantharl\'s Keep': {x: 36.02, y: 21.89},
'Yartar': {x: 46.25, y: 44.4},
'Zelbross': {x: 61.64, y: 77.74},
'Zymorven Hall': {x: 57.66, y: 25.34},
}
addMarkerToCurrentLocation = () ->
coordinates = deNormalizeCoordinates(coordinateMatrix[currentLocation])
$marker.css('left', "#{coordinates.x}px")
$marker.css('top', "#{coordinates.y}px")
$marker.fadeIn('fast')
showLocationCoordinates = (e) ->
location = prompt("What's the name of this location?")
coordinates = relativeClickingCoordinates(e)
if location
alert("'#{location}': {x: #{coordinates.x}, y: #{coordinates.y}},")
relativeClickingCoordinates = (e) ->
# Normalized percentual coordinates [x,y] in [0..100]²
x = normalizeCoordinate(e.offsetX / e.target.clientWidth)
y = normalizeCoordinate(e.offsetY / e.target.clientHeight)
{x: x, y: y}
normalizeCoordinate = (coordinate) ->
_.round(coordinate * 100, 2)
deNormalizeCoordinates = (percentualCoordinates) ->
{
x: percentualCoordinates.x * $mapImage[0].clientWidth / 100,
y: percentualCoordinates.y * $mapImage[0].clientHeight / 100
}
initialize()
# Ideas:
# walking days
# riding days
# flight time
true
| 152041 | ---
---
up.compiler '.location-map', ($map, currentLocation) ->
ZOOMFACTOR = 2.5
isZoomed = false
initialize = ->
initializeDom()
initializeHandlers()
initializeDom = ->
@$wrapper = $("<div class='location-map--image-wrapper'></div>")
@$mapImage = $('<img src="/images/karte/faerun_north.jpg" class="location-map--image">')
.load ->
addMarkerToCurrentLocation()
@$marker = $("<div class='location-map--marker pin' style='display: none;'></div>")
$wrapper.append($mapImage)
$wrapper.append($marker)
$map.append($wrapper)
initializeHandlers = ->
$wrapper.on('dblclick dbltap', toggleZoom)
$(window).resize(addMarkerToCurrentLocation)
# Uncomment the following lines to get the coordinates of a new location
# by just clicking on it. Usefull for adding new locations.
# $mapImage.on('click', showLocationCoordinates)
toggleZoom = (e) ->
isZoomed = !isZoomed
if isZoomed
$wrapper.on('click tap', navigateOnMap)
navigateOnMap(e)
else
$wrapper[0].style.transform = "scale(1)"
$wrapper.off('click tap', navigateOnMap)
navigateOnMap = (e) ->
offset = zoomImageOffset(e)
$wrapper[0].style.transform = "scale(#{ZOOMFACTOR}) translateX(#{offset.x}px) translateY(#{offset.y}px)"
zoomImageOffset = (e) ->
# The top left ankered zoom rectangle should be aligned with the now
# ZOOMFACTOR smaller image.
xOffset = mapBoundryOffset(e.target.clientWidth, e.offsetX)
yOffset = mapBoundryOffset(e.target.clientHeight, e.offsetY)
{x: xOffset, y: yOffset}
mapBoundryOffset = (maxLength, offset) ->
zoomedBoxLength = maxLength / ZOOMFACTOR
leftBorder = 0
rightBorder = maxLength - zoomedBoxLength
zoomedOffset = offset - zoomedBoxLength / 2
- Math.min(Math.max(zoomedOffset, leftBorder), rightBorder)
coordinateMatrix = {
'<NAME>': {x: 38.67, y: 71.96},
'<NAME>': {x: 88.44, y: 24.11},
'<NAME>': {x: 4.77, y: 15.87},
'<NAME>': {x: 43.28, y: 67.9},
'<NAME>': {x: 46.33, y: 57.2},
#'<NAME>': {x: 67.89, y: 8.49},
'<NAME>': {x: 53.91, y: 42.07},
'<NAME>': {x: 32.66, y: 61.25},
'<NAME>': {x: 83.59, y: 16.73},
'<NAME>': {x: 71.02, y: 20.05},
'<NAME>': {x: 45.63, y: 87.95},
'<NAME>': {x: 79.38, y: 21.53},
#'<NAME>': {x: 78.83, y: 63.96},
'<NAME>': {x: 63.05, y: 32.96},
'<NAME> des <NAME>': {x: 42.73, y: 6.27},
'<NAME>': {x: 14.22, y: 19.8},
#'<NAME>ock': {x: 49.14, y: 38.38},
'Gauntlgrym': {x: 29.06, y: 30.63},
'Goldenfields': {x: 42.58, y: 71.96},
#'Grandfather Tree': {x: 65.39, y: 42.07},
#'Great Worm Cavern': {x: 46.64, y: 7.13},
'Griffon\'s Nest': {x: 43.52, y: 27.55},
#'<NAME>': {x: 48.28, y: 64.7},
'Hawk\'s Nest': {x: 66.33, y: 24.97},
'Helm\'s Hold': {x: 26.72, y: 40.84},
'Hundelstone': {x: 16.09, y: 13.16},
'Ironmaster': {x: 13.83, y: 10.7},
'I<NAME>sl<NAME>': {x: 76.88, y: 9.47},
#'<NAME>ymr<NAME>\'s Lair': {x: 90.86, y: 21.89},
'<NAME>': {x: 51.33, y: 85.61},
'<NAME>': {x: 36.72, y: 63.84},
'Leilon': {x: 29.61, y: 55.23},
'Llorkh': {x: 76.02, y: 74.54},
'Longsaddle': {x: 38.98, y: 31.73},
'Loudwater': {x: 66.25, y: 73.43},
'Lurkwood': {x: 37.38, y: 20.62},
'<NAME>': {x: 23.13, y: 24.35},
'Mines of Mirabar': {x: 32.89, y: 12.67},
'Mirabar': {x: 34.38, y: 15.62},
'Mithral Hall': {x: 54.37, y: 20.54},
#'Morgur\'s Mound': {x: 32.5, y: 25.09},
'Mornbryn\'s Shield': {x: 46.41, y: 35.18},
'<NAME>': {x: 51.17, y: 27.43},
'Neverwinter': {x: 26.02, y: 40.22},
'Newfort': {x: 75.16, y: 24.72},
'Nightstone': {x: 41.64, y: 80.93},
'Noanar\'s Hold': {x: 55.63, y: 44.28},
'Olostin\'s Hold': {x: 61.41, y: 36.65},
#'One Stone': {x: 64.61, y: 14.51},
'Orlbar': {x: 71.41, y: 70.11},
'Parnast': {x: 84.84, y: 78.23},
'<NAME>alin': {x: 32.19, y: 52.15},
'Port Llast': {x: 24.45, y: 35.55},
'Rassalantar': {x: 37.89, y: 74.54},
#'Raven Rock': {x: 25.7, y: 10.95},
'Red Larch': {x: 40.47, y: 60.76},
'Rivermoot': {x: 53.28, y: 24.23},
'Secomber': {x: 56.02, y: 81.43},
'Senteq\'s Hut': {x: 72, y: 41.79},
'Shadowtop Cathedral': {x: 59.38, y: 43.79},
#'Shining White': {x: 45.63, y: 25.22},
'Silverymoon': {x: 62.19, y: 28.17},
'Starmetal Hills': {x: 37.5, y: 35},
'Stone Bridge': {x: 45.23, y: 56.46},
#'Stone Stand': {x: 70, y: 34.44},
'S<NAME>ar': {x: 72.03, y: 27.06},
#'<NAME>': {x: 6.8, y: 8.61},
'<NAME>hold': {x: 31.64, y: 65.56},
'Triboar': {x: 41.88, y: 44.9},
'<NAME>': {x: 54.3, y: 73.43},
'Waterdeep': {x: 38.98, y: 79.46},
'Way Inn': {x: 48.75, y: 96.68},
'Westbridge': {x: 41.72, y: 52.52},
'Womford': {x: 43.91, y: 68.14},
'<NAME>': {x: 36.02, y: 21.89},
'<NAME>': {x: 46.25, y: 44.4},
'<NAME>': {x: 61.64, y: 77.74},
'<NAME>': {x: 57.66, y: 25.34},
}
addMarkerToCurrentLocation = () ->
coordinates = deNormalizeCoordinates(coordinateMatrix[currentLocation])
$marker.css('left', "#{coordinates.x}px")
$marker.css('top', "#{coordinates.y}px")
$marker.fadeIn('fast')
showLocationCoordinates = (e) ->
location = prompt("What's the name of this location?")
coordinates = relativeClickingCoordinates(e)
if location
alert("'#{location}': {x: #{coordinates.x}, y: #{coordinates.y}},")
relativeClickingCoordinates = (e) ->
# Normalized percentual coordinates [x,y] in [0..100]²
x = normalizeCoordinate(e.offsetX / e.target.clientWidth)
y = normalizeCoordinate(e.offsetY / e.target.clientHeight)
{x: x, y: y}
normalizeCoordinate = (coordinate) ->
_.round(coordinate * 100, 2)
deNormalizeCoordinates = (percentualCoordinates) ->
{
x: percentualCoordinates.x * $mapImage[0].clientWidth / 100,
y: percentualCoordinates.y * $mapImage[0].clientHeight / 100
}
initialize()
# Ideas:
# walking days
# riding days
# flight time
true
| true | ---
---
up.compiler '.location-map', ($map, currentLocation) ->
ZOOMFACTOR = 2.5
isZoomed = false
initialize = ->
initializeDom()
initializeHandlers()
initializeDom = ->
@$wrapper = $("<div class='location-map--image-wrapper'></div>")
@$mapImage = $('<img src="/images/karte/faerun_north.jpg" class="location-map--image">')
.load ->
addMarkerToCurrentLocation()
@$marker = $("<div class='location-map--marker pin' style='display: none;'></div>")
$wrapper.append($mapImage)
$wrapper.append($marker)
$map.append($wrapper)
initializeHandlers = ->
$wrapper.on('dblclick dbltap', toggleZoom)
$(window).resize(addMarkerToCurrentLocation)
# Uncomment the following lines to get the coordinates of a new location
# by just clicking on it. Usefull for adding new locations.
# $mapImage.on('click', showLocationCoordinates)
toggleZoom = (e) ->
isZoomed = !isZoomed
if isZoomed
$wrapper.on('click tap', navigateOnMap)
navigateOnMap(e)
else
$wrapper[0].style.transform = "scale(1)"
$wrapper.off('click tap', navigateOnMap)
navigateOnMap = (e) ->
offset = zoomImageOffset(e)
$wrapper[0].style.transform = "scale(#{ZOOMFACTOR}) translateX(#{offset.x}px) translateY(#{offset.y}px)"
zoomImageOffset = (e) ->
# The top left ankered zoom rectangle should be aligned with the now
# ZOOMFACTOR smaller image.
xOffset = mapBoundryOffset(e.target.clientWidth, e.offsetX)
yOffset = mapBoundryOffset(e.target.clientHeight, e.offsetY)
{x: xOffset, y: yOffset}
mapBoundryOffset = (maxLength, offset) ->
zoomedBoxLength = maxLength / ZOOMFACTOR
leftBorder = 0
rightBorder = maxLength - zoomedBoxLength
zoomedOffset = offset - zoomedBoxLength / 2
- Math.min(Math.max(zoomedOffset, leftBorder), rightBorder)
coordinateMatrix = {
'PI:NAME:<NAME>END_PI': {x: 38.67, y: 71.96},
'PI:NAME:<NAME>END_PI': {x: 88.44, y: 24.11},
'PI:NAME:<NAME>END_PI': {x: 4.77, y: 15.87},
'PI:NAME:<NAME>END_PI': {x: 43.28, y: 67.9},
'PI:NAME:<NAME>END_PI': {x: 46.33, y: 57.2},
#'PI:NAME:<NAME>END_PI': {x: 67.89, y: 8.49},
'PI:NAME:<NAME>END_PI': {x: 53.91, y: 42.07},
'PI:NAME:<NAME>END_PI': {x: 32.66, y: 61.25},
'PI:NAME:<NAME>END_PI': {x: 83.59, y: 16.73},
'PI:NAME:<NAME>END_PI': {x: 71.02, y: 20.05},
'PI:NAME:<NAME>END_PI': {x: 45.63, y: 87.95},
'PI:NAME:<NAME>END_PI': {x: 79.38, y: 21.53},
#'PI:NAME:<NAME>END_PI': {x: 78.83, y: 63.96},
'PI:NAME:<NAME>END_PI': {x: 63.05, y: 32.96},
'PI:NAME:<NAME>END_PI des PI:NAME:<NAME>END_PI': {x: 42.73, y: 6.27},
'PI:NAME:<NAME>END_PI': {x: 14.22, y: 19.8},
#'PI:NAME:<NAME>END_PIock': {x: 49.14, y: 38.38},
'Gauntlgrym': {x: 29.06, y: 30.63},
'Goldenfields': {x: 42.58, y: 71.96},
#'Grandfather Tree': {x: 65.39, y: 42.07},
#'Great Worm Cavern': {x: 46.64, y: 7.13},
'Griffon\'s Nest': {x: 43.52, y: 27.55},
#'PI:NAME:<NAME>END_PI': {x: 48.28, y: 64.7},
'Hawk\'s Nest': {x: 66.33, y: 24.97},
'Helm\'s Hold': {x: 26.72, y: 40.84},
'Hundelstone': {x: 16.09, y: 13.16},
'Ironmaster': {x: 13.83, y: 10.7},
'IPI:NAME:<NAME>END_PIslPI:NAME:<NAME>END_PI': {x: 76.88, y: 9.47},
#'PI:NAME:<NAME>END_PIymrPI:NAME:<NAME>END_PI\'s Lair': {x: 90.86, y: 21.89},
'PI:NAME:<NAME>END_PI': {x: 51.33, y: 85.61},
'PI:NAME:<NAME>END_PI': {x: 36.72, y: 63.84},
'Leilon': {x: 29.61, y: 55.23},
'Llorkh': {x: 76.02, y: 74.54},
'Longsaddle': {x: 38.98, y: 31.73},
'Loudwater': {x: 66.25, y: 73.43},
'Lurkwood': {x: 37.38, y: 20.62},
'PI:NAME:<NAME>END_PI': {x: 23.13, y: 24.35},
'Mines of Mirabar': {x: 32.89, y: 12.67},
'Mirabar': {x: 34.38, y: 15.62},
'Mithral Hall': {x: 54.37, y: 20.54},
#'Morgur\'s Mound': {x: 32.5, y: 25.09},
'Mornbryn\'s Shield': {x: 46.41, y: 35.18},
'PI:NAME:<NAME>END_PI': {x: 51.17, y: 27.43},
'Neverwinter': {x: 26.02, y: 40.22},
'Newfort': {x: 75.16, y: 24.72},
'Nightstone': {x: 41.64, y: 80.93},
'Noanar\'s Hold': {x: 55.63, y: 44.28},
'Olostin\'s Hold': {x: 61.41, y: 36.65},
#'One Stone': {x: 64.61, y: 14.51},
'Orlbar': {x: 71.41, y: 70.11},
'Parnast': {x: 84.84, y: 78.23},
'PI:NAME:<NAME>END_PIalin': {x: 32.19, y: 52.15},
'Port Llast': {x: 24.45, y: 35.55},
'Rassalantar': {x: 37.89, y: 74.54},
#'Raven Rock': {x: 25.7, y: 10.95},
'Red Larch': {x: 40.47, y: 60.76},
'Rivermoot': {x: 53.28, y: 24.23},
'Secomber': {x: 56.02, y: 81.43},
'Senteq\'s Hut': {x: 72, y: 41.79},
'Shadowtop Cathedral': {x: 59.38, y: 43.79},
#'Shining White': {x: 45.63, y: 25.22},
'Silverymoon': {x: 62.19, y: 28.17},
'Starmetal Hills': {x: 37.5, y: 35},
'Stone Bridge': {x: 45.23, y: 56.46},
#'Stone Stand': {x: 70, y: 34.44},
'SPI:NAME:<NAME>END_PIar': {x: 72.03, y: 27.06},
#'PI:NAME:<NAME>END_PI': {x: 6.8, y: 8.61},
'PI:NAME:<NAME>END_PIhold': {x: 31.64, y: 65.56},
'Triboar': {x: 41.88, y: 44.9},
'PI:NAME:<NAME>END_PI': {x: 54.3, y: 73.43},
'Waterdeep': {x: 38.98, y: 79.46},
'Way Inn': {x: 48.75, y: 96.68},
'Westbridge': {x: 41.72, y: 52.52},
'Womford': {x: 43.91, y: 68.14},
'PI:NAME:<NAME>END_PI': {x: 36.02, y: 21.89},
'PI:NAME:<NAME>END_PI': {x: 46.25, y: 44.4},
'PI:NAME:<NAME>END_PI': {x: 61.64, y: 77.74},
'PI:NAME:<NAME>END_PI': {x: 57.66, y: 25.34},
}
addMarkerToCurrentLocation = () ->
coordinates = deNormalizeCoordinates(coordinateMatrix[currentLocation])
$marker.css('left', "#{coordinates.x}px")
$marker.css('top', "#{coordinates.y}px")
$marker.fadeIn('fast')
showLocationCoordinates = (e) ->
location = prompt("What's the name of this location?")
coordinates = relativeClickingCoordinates(e)
if location
alert("'#{location}': {x: #{coordinates.x}, y: #{coordinates.y}},")
relativeClickingCoordinates = (e) ->
# Normalized percentual coordinates [x,y] in [0..100]²
x = normalizeCoordinate(e.offsetX / e.target.clientWidth)
y = normalizeCoordinate(e.offsetY / e.target.clientHeight)
{x: x, y: y}
normalizeCoordinate = (coordinate) ->
_.round(coordinate * 100, 2)
deNormalizeCoordinates = (percentualCoordinates) ->
{
x: percentualCoordinates.x * $mapImage[0].clientWidth / 100,
y: percentualCoordinates.y * $mapImage[0].clientHeight / 100
}
initialize()
# Ideas:
# walking days
# riding days
# flight time
true
|
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.9998916387557983,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhakim RAFIK\n * @date \t\tMar 2021\n###\n\n###\n\tCreate demo data fo",
"end": 129,
"score": 0.9998772740364075,
"start": 113,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "\t\ti = 0\n\t\twhile i < 8\n\t\t\tdemoData.push\n\t\t\t\tname: \"Pharmacie #{++i}\"\n\t\t\t\taddresse: 'example city'\n\t\t\t\temail: '",
"end": 887,
"score": 0.9997338056564331,
"start": 878,
"tag": "NAME",
"value": "Pharmacie"
},
{
"context": " #{++i}\"\n\t\t\t\taddresse: 'example city'\n\t\t\t\temail: 'example@example.com'\n\t\t\t\twebSite: 'example.com'\n\t\t\t\tphone: '052456987",
"end": 956,
"score": 0.9999175071716309,
"start": 937,
"tag": "EMAIL",
"value": "example@example.com"
}
] | src/database/seeders/2021032400000-demo-pharmacy.coffee | AbdelhakimRafik/Pharmalogy-API | 0 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date Mar 2021
###
###
Create demo data for pharmacies
###
module.exports =
up: (queryInterface, Sequelize) ->
# create demo data
demoData = []
cities = ["Fez","Tangier","Marrakesh","Salé","Meknes","Rabat","Oujda","Kenitra"]
cord = [
longitude: 31.6450271
latitude: -7.9889703
,
longitude: 31.6540752
latitude: -7.9955977
,
longitude: 31.6448553
latitude: -8.0116763
,
longitude: 31.6337242
latitude: -8.0137297
,
longitude: 31.6217731
latitude: -8.0071646
,
longitude: 31.6188537
latitude: -7.983246
,
longitude: 31.6181156
latitude: -7.9639804
,
longitude: 31.6318514
latitude: -7.9508216
]
i = 0
while i < 8
demoData.push
name: "Pharmacie #{++i}"
addresse: 'example city'
email: 'example@example.com'
webSite: 'example.com'
phone: '0524569875'
city: 'Marrakech'
country: 'Morocco'
longitude: cord[i-1].latitude
latitude: cord[i-1].longitude
status: 1
createdAt: new Date()
updatedAt: new Date()
# add data to database
queryInterface.bulkInsert 'Pharmacies', demoData
down: (queryInterface, Sequelize) ->
queryInterface.dropTable 'Sale-attachments' | 162339 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date Mar 2021
###
###
Create demo data for pharmacies
###
module.exports =
up: (queryInterface, Sequelize) ->
# create demo data
demoData = []
cities = ["Fez","Tangier","Marrakesh","Salé","Meknes","Rabat","Oujda","Kenitra"]
cord = [
longitude: 31.6450271
latitude: -7.9889703
,
longitude: 31.6540752
latitude: -7.9955977
,
longitude: 31.6448553
latitude: -8.0116763
,
longitude: 31.6337242
latitude: -8.0137297
,
longitude: 31.6217731
latitude: -8.0071646
,
longitude: 31.6188537
latitude: -7.983246
,
longitude: 31.6181156
latitude: -7.9639804
,
longitude: 31.6318514
latitude: -7.9508216
]
i = 0
while i < 8
demoData.push
name: "<NAME> #{++i}"
addresse: 'example city'
email: '<EMAIL>'
webSite: 'example.com'
phone: '0524569875'
city: 'Marrakech'
country: 'Morocco'
longitude: cord[i-1].latitude
latitude: cord[i-1].longitude
status: 1
createdAt: new Date()
updatedAt: new Date()
# add data to database
queryInterface.bulkInsert 'Pharmacies', demoData
down: (queryInterface, Sequelize) ->
queryInterface.dropTable 'Sale-attachments' | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date Mar 2021
###
###
Create demo data for pharmacies
###
module.exports =
up: (queryInterface, Sequelize) ->
# create demo data
demoData = []
cities = ["Fez","Tangier","Marrakesh","Salé","Meknes","Rabat","Oujda","Kenitra"]
cord = [
longitude: 31.6450271
latitude: -7.9889703
,
longitude: 31.6540752
latitude: -7.9955977
,
longitude: 31.6448553
latitude: -8.0116763
,
longitude: 31.6337242
latitude: -8.0137297
,
longitude: 31.6217731
latitude: -8.0071646
,
longitude: 31.6188537
latitude: -7.983246
,
longitude: 31.6181156
latitude: -7.9639804
,
longitude: 31.6318514
latitude: -7.9508216
]
i = 0
while i < 8
demoData.push
name: "PI:NAME:<NAME>END_PI #{++i}"
addresse: 'example city'
email: 'PI:EMAIL:<EMAIL>END_PI'
webSite: 'example.com'
phone: '0524569875'
city: 'Marrakech'
country: 'Morocco'
longitude: cord[i-1].latitude
latitude: cord[i-1].longitude
status: 1
createdAt: new Date()
updatedAt: new Date()
# add data to database
queryInterface.bulkInsert 'Pharmacies', demoData
down: (queryInterface, Sequelize) ->
queryInterface.dropTable 'Sale-attachments' |
[
{
"context": "------------------------------------------\n* Name: Jednotka - Multipurpose Website HTML Template\n* Author: ht",
"end": 139,
"score": 0.997738242149353,
"start": 131,
"tag": "NAME",
"value": "Jednotka"
},
{
"context": "ML Template\n* Author: http://themeforest.net/user/BublinaStudio\n* Version: 1.6\n* --------------------------------",
"end": 228,
"score": 0.9995338320732117,
"start": 215,
"tag": "USERNAME",
"value": "BublinaStudio"
}
] | assets/scripts/_theme.js.coffee | vlp-dev/vlp-theme | 0 | ###
* --------------------------------------------------------------------------------------------------------------------
* Name: Jednotka - Multipurpose Website HTML Template
* Author: http://themeforest.net/user/BublinaStudio
* Version: 1.6
* --------------------------------------------------------------------------------------------------------------------
###
$(document).ready ->
setValidateForm()
setIEHelperClassses()
###
* --------------------------------------------------------------------------------------------------------------------
* Fixed header
* --------------------------------------------------------------------------------------------------------------------
###
$header = $("#header")
$carousel = $(".hero-carousel")
$main = $("#main")
if $header.attr("fixed")
$header.addClass("header--default")
$(window).scroll ->
if $(window).scrollTop() >= $carousel.height() - 150
$header.addClass("header--fixed")
$main.addClass("main--header-fixed")
else
$header.removeClass("header--fixed")
$main.removeClass("main--header-fixed")
if $(window).scrollTop() > $carousel.height()
$header.addClass("header--visible")
else
$header.removeClass("header--visible")
###
* --------------------------------------------------------------------------------------------------------------------
* bootstrap carousel definition
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().carousel
$('.carousel.carousel-auto').carousel()
$('.carousel.carousel-auto').on "swipeleft", (e) ->
$(this).carousel('next')
$('.carousel.carousel-auto').on "swiperight", (e) ->
$(this).carousel('prev')
###
* --------------------------------------------------------------------------------------------------------------------
* circle statistics
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().knob
$("[data-stat='circle']").each (i, el) ->
$(el).knob()
# --------------------------------------------------------------------------------------------------------------------
###
* --------------------------------------------------------------------------------------------------------------------
* setting up bootstrap tooltips
* --------------------------------------------------------------------------------------------------------------------
###
touch = false
if window.Modernizr
touch = Modernizr.touch
unless touch
$("body").on "mouseenter", ".has-tooltip", ->
el = $(this)
if el.data("tooltip") is `undefined`
el.tooltip
placement: el.data("placement") or "top"
container: "body"
el.tooltip "show"
$("body").on "mouseleave", ".has-tooltip", ->
$(this).tooltip "hide"
###
* --------------------------------------------------------------------------------------------------------------------
* replacing *.svg images for *.png for browsers without *.svg support
* --------------------------------------------------------------------------------------------------------------------
###
if window.Modernizr && Modernizr.svg == false
$("img[src*=\"svg\"]").attr "src", ->
$(this).attr("src").replace ".svg", ".png"
###
* --------------------------------------------------------------------------------------------------------------------
* setting placeholders for browsers without placeholder support
* --------------------------------------------------------------------------------------------------------------------
###
if window.Modernizr && Modernizr.input.placeholder == false
$("[placeholder]").focus(->
input = $(this)
if input.val() is input.attr("placeholder")
input.val ""
input.removeClass "placeholder"
).blur(->
input = $(this)
if input.val() is "" or input.val() is input.attr("placeholder")
input.addClass "placeholder"
input.val input.attr("placeholder")
).blur()
$("[placeholder]").parents("form").submit ->
$(this).find("[placeholder]").each ->
input = $(this)
input.val "" if input.val() is input.attr("placeholder")
###
* --------------------------------------------------------------------------------------------------------------------
* flexslider
* --------------------------------------------------------------------------------------------------------------------
###
$(window).load ->
if jQuery().flexslider
$flexslider = $('.flexslider')
$allSlides = $flexslider.find('.item')
$flexslider.addClass("fade-loading")
$('.flexslider').flexslider
animation: 'fade'
pauseOnHover: true
slideshowSpeed: 5000
animationSpeed: 400
prevText: ''
nextText: ''
before: (slider) ->
$activeSlide = $flexslider.find('.flex-active-slide')
if $activeSlide.index() == $allSlides.length - 1
$allSlides.eq(0).find('.animate').children().addClass("animate").removeClass("animated")
$allSlides.not('.flex-active-slide').find('.animate').children().addClass("animate").removeClass("animated")
else
$allSlides.not('.flex-active-slide').find('.animate').children().addClass("animate").removeClass("animated")
setTimeout (->
$allSlides.eq(slider.animatingTo).find('.animate').children().addClass("animated").removeClass("animate")
), 50
###
* --------------------------------------------------------------------------------------------------------------------
* setting up countdown plugin
* --------------------------------------------------------------------------------------------------------------------
###
$("[data-countdown]").countdown() if jQuery().countdown
# --------------------------------------------------------------------------------------------------------------------
###
* --------------------------------------------------------------------------------------------------------------------
* Fixed panel
* --------------------------------------------------------------------------------------------------------------------
###
$sidebar = $(".sidebar", "#main-content")
contentTop = $("#main-content").offset().top
paddingTop = $("#main-content").css("paddingTop")
padding = parseInt(paddingTop.substr(0, paddingTop.length-2))
scrollHeight = $("#main-content").outerHeight() - $sidebar.outerHeight() + padding
if $sidebar.hasClass("sidebar-fixed")
$sidebar.parent().css({ position: "relative"})
$sidebar.css({ position: "absolute"})
$(window).scroll ->
if ($(this).scrollTop() >= contentTop && $(this).scrollTop() <= scrollHeight)
top = $(window).scrollTop() - contentTop
$sidebar.css({ top: top })
if $(this).scrollTop() < contentTop
$sidebar.css({ top: 0 })
if $(this).scrollTop() > scrollHeight
$sidebar.css({ top: scrollHeight - contentTop })
###
* --------------------------------------------------------------------------------------------------------------------
* scroll top button
* --------------------------------------------------------------------------------------------------------------------
###
$("#scroll-to-top").on "click", (e) ->
$("body, html").animate
scrollTop: 0
, 800
false
$(window).load ->
$scrollToTop = $("#scroll-to-top")
defaultBottom = $scrollToTop.css("bottom")
scrollArea = ->
$(document).outerHeight() - $("#footer").outerHeight() - $(window).outerHeight()
if $('body').hasClass("boxed")
$(window).scroll ->
if $(this).scrollTop() > 500
$scrollToTop.addClass "in"
else
$scrollToTop.removeClass "in"
else
$(window).scroll ->
if $(this).scrollTop() > 500
$scrollToTop.addClass "in"
else
$scrollToTop.removeClass "in"
if $(this).scrollTop() >= scrollArea()
$scrollToTop.css bottom: $(this).scrollTop() - scrollArea() + 10
else
$scrollToTop.css bottom: defaultBottom
###
* --------------------------------------------------------------------------------------------------------------------
* setting up nivo lightbox
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().nivoLightbox
$("[data-lightbox]").nivoLightbox()
###
* --------------------------------------------------------------------------------------------------------------------
* ajax contact form
* --------------------------------------------------------------------------------------------------------------------
###
$(".form-contact").on "submit", (e) ->
if $(this).valid()
e.preventDefault()
submit = $(this).find(".form-contact-submit")
submit.button("loading")
success = $(this).find(".form-contact-success")
error = $(this).find(".form-contact-error")
inputs = $(this).find("input, textarea")
$.ajax
type: "POST"
url: "contact.php"
data: $(this).serialize()
success: (data) ->
if data is "success"
success.removeClass "hidden"
error.addClass "hidden"
inputs.val ""
else
error.removeClass "hidden"
success.addClass "hidden"
complete: ->
submit.button("reset")
###
* --------------------------------------------------------------------------------------------------------------------
* form validation
* --------------------------------------------------------------------------------------------------------------------
###
@setValidateForm = (selector = $(".form-validation")) ->
if jQuery().validate
selector.each (i, elem) ->
$(elem).validate
errorElement: "span"
errorClass: "help-block has-error"
errorPlacement: (err, e) ->
e.closest('.control-group').append err
highlight: (e) ->
$(e).closest('.control-group').addClass('has-error')
unhighlight: (e) ->
$(e).closest('.control-group').removeClass('has-error')
###
* --------------------------------------------------------------------------------------------------------------------
* internet explorer helpers classes :last-child, :nth-child
* --------------------------------------------------------------------------------------------------------------------
###
@setIEHelperClassses = ->
if /msie/.test(navigator.userAgent.toLowerCase())
$('*:last-child').addClass "last-child"
$('*:nth-child(odd)').addClass "nth-child-odd"
$('*:nth-child(even)').addClass "nth-child-even" | 90782 | ###
* --------------------------------------------------------------------------------------------------------------------
* Name: <NAME> - Multipurpose Website HTML Template
* Author: http://themeforest.net/user/BublinaStudio
* Version: 1.6
* --------------------------------------------------------------------------------------------------------------------
###
$(document).ready ->
setValidateForm()
setIEHelperClassses()
###
* --------------------------------------------------------------------------------------------------------------------
* Fixed header
* --------------------------------------------------------------------------------------------------------------------
###
$header = $("#header")
$carousel = $(".hero-carousel")
$main = $("#main")
if $header.attr("fixed")
$header.addClass("header--default")
$(window).scroll ->
if $(window).scrollTop() >= $carousel.height() - 150
$header.addClass("header--fixed")
$main.addClass("main--header-fixed")
else
$header.removeClass("header--fixed")
$main.removeClass("main--header-fixed")
if $(window).scrollTop() > $carousel.height()
$header.addClass("header--visible")
else
$header.removeClass("header--visible")
###
* --------------------------------------------------------------------------------------------------------------------
* bootstrap carousel definition
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().carousel
$('.carousel.carousel-auto').carousel()
$('.carousel.carousel-auto').on "swipeleft", (e) ->
$(this).carousel('next')
$('.carousel.carousel-auto').on "swiperight", (e) ->
$(this).carousel('prev')
###
* --------------------------------------------------------------------------------------------------------------------
* circle statistics
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().knob
$("[data-stat='circle']").each (i, el) ->
$(el).knob()
# --------------------------------------------------------------------------------------------------------------------
###
* --------------------------------------------------------------------------------------------------------------------
* setting up bootstrap tooltips
* --------------------------------------------------------------------------------------------------------------------
###
touch = false
if window.Modernizr
touch = Modernizr.touch
unless touch
$("body").on "mouseenter", ".has-tooltip", ->
el = $(this)
if el.data("tooltip") is `undefined`
el.tooltip
placement: el.data("placement") or "top"
container: "body"
el.tooltip "show"
$("body").on "mouseleave", ".has-tooltip", ->
$(this).tooltip "hide"
###
* --------------------------------------------------------------------------------------------------------------------
* replacing *.svg images for *.png for browsers without *.svg support
* --------------------------------------------------------------------------------------------------------------------
###
if window.Modernizr && Modernizr.svg == false
$("img[src*=\"svg\"]").attr "src", ->
$(this).attr("src").replace ".svg", ".png"
###
* --------------------------------------------------------------------------------------------------------------------
* setting placeholders for browsers without placeholder support
* --------------------------------------------------------------------------------------------------------------------
###
if window.Modernizr && Modernizr.input.placeholder == false
$("[placeholder]").focus(->
input = $(this)
if input.val() is input.attr("placeholder")
input.val ""
input.removeClass "placeholder"
).blur(->
input = $(this)
if input.val() is "" or input.val() is input.attr("placeholder")
input.addClass "placeholder"
input.val input.attr("placeholder")
).blur()
$("[placeholder]").parents("form").submit ->
$(this).find("[placeholder]").each ->
input = $(this)
input.val "" if input.val() is input.attr("placeholder")
###
* --------------------------------------------------------------------------------------------------------------------
* flexslider
* --------------------------------------------------------------------------------------------------------------------
###
$(window).load ->
if jQuery().flexslider
$flexslider = $('.flexslider')
$allSlides = $flexslider.find('.item')
$flexslider.addClass("fade-loading")
$('.flexslider').flexslider
animation: 'fade'
pauseOnHover: true
slideshowSpeed: 5000
animationSpeed: 400
prevText: ''
nextText: ''
before: (slider) ->
$activeSlide = $flexslider.find('.flex-active-slide')
if $activeSlide.index() == $allSlides.length - 1
$allSlides.eq(0).find('.animate').children().addClass("animate").removeClass("animated")
$allSlides.not('.flex-active-slide').find('.animate').children().addClass("animate").removeClass("animated")
else
$allSlides.not('.flex-active-slide').find('.animate').children().addClass("animate").removeClass("animated")
setTimeout (->
$allSlides.eq(slider.animatingTo).find('.animate').children().addClass("animated").removeClass("animate")
), 50
###
* --------------------------------------------------------------------------------------------------------------------
* setting up countdown plugin
* --------------------------------------------------------------------------------------------------------------------
###
$("[data-countdown]").countdown() if jQuery().countdown
# --------------------------------------------------------------------------------------------------------------------
###
* --------------------------------------------------------------------------------------------------------------------
* Fixed panel
* --------------------------------------------------------------------------------------------------------------------
###
$sidebar = $(".sidebar", "#main-content")
contentTop = $("#main-content").offset().top
paddingTop = $("#main-content").css("paddingTop")
padding = parseInt(paddingTop.substr(0, paddingTop.length-2))
scrollHeight = $("#main-content").outerHeight() - $sidebar.outerHeight() + padding
if $sidebar.hasClass("sidebar-fixed")
$sidebar.parent().css({ position: "relative"})
$sidebar.css({ position: "absolute"})
$(window).scroll ->
if ($(this).scrollTop() >= contentTop && $(this).scrollTop() <= scrollHeight)
top = $(window).scrollTop() - contentTop
$sidebar.css({ top: top })
if $(this).scrollTop() < contentTop
$sidebar.css({ top: 0 })
if $(this).scrollTop() > scrollHeight
$sidebar.css({ top: scrollHeight - contentTop })
###
* --------------------------------------------------------------------------------------------------------------------
* scroll top button
* --------------------------------------------------------------------------------------------------------------------
###
$("#scroll-to-top").on "click", (e) ->
$("body, html").animate
scrollTop: 0
, 800
false
$(window).load ->
$scrollToTop = $("#scroll-to-top")
defaultBottom = $scrollToTop.css("bottom")
scrollArea = ->
$(document).outerHeight() - $("#footer").outerHeight() - $(window).outerHeight()
if $('body').hasClass("boxed")
$(window).scroll ->
if $(this).scrollTop() > 500
$scrollToTop.addClass "in"
else
$scrollToTop.removeClass "in"
else
$(window).scroll ->
if $(this).scrollTop() > 500
$scrollToTop.addClass "in"
else
$scrollToTop.removeClass "in"
if $(this).scrollTop() >= scrollArea()
$scrollToTop.css bottom: $(this).scrollTop() - scrollArea() + 10
else
$scrollToTop.css bottom: defaultBottom
###
* --------------------------------------------------------------------------------------------------------------------
* setting up nivo lightbox
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().nivoLightbox
$("[data-lightbox]").nivoLightbox()
###
* --------------------------------------------------------------------------------------------------------------------
* ajax contact form
* --------------------------------------------------------------------------------------------------------------------
###
$(".form-contact").on "submit", (e) ->
if $(this).valid()
e.preventDefault()
submit = $(this).find(".form-contact-submit")
submit.button("loading")
success = $(this).find(".form-contact-success")
error = $(this).find(".form-contact-error")
inputs = $(this).find("input, textarea")
$.ajax
type: "POST"
url: "contact.php"
data: $(this).serialize()
success: (data) ->
if data is "success"
success.removeClass "hidden"
error.addClass "hidden"
inputs.val ""
else
error.removeClass "hidden"
success.addClass "hidden"
complete: ->
submit.button("reset")
###
* --------------------------------------------------------------------------------------------------------------------
* form validation
* --------------------------------------------------------------------------------------------------------------------
###
@setValidateForm = (selector = $(".form-validation")) ->
if jQuery().validate
selector.each (i, elem) ->
$(elem).validate
errorElement: "span"
errorClass: "help-block has-error"
errorPlacement: (err, e) ->
e.closest('.control-group').append err
highlight: (e) ->
$(e).closest('.control-group').addClass('has-error')
unhighlight: (e) ->
$(e).closest('.control-group').removeClass('has-error')
###
* --------------------------------------------------------------------------------------------------------------------
* internet explorer helpers classes :last-child, :nth-child
* --------------------------------------------------------------------------------------------------------------------
###
@setIEHelperClassses = ->
if /msie/.test(navigator.userAgent.toLowerCase())
$('*:last-child').addClass "last-child"
$('*:nth-child(odd)').addClass "nth-child-odd"
$('*:nth-child(even)').addClass "nth-child-even" | true | ###
* --------------------------------------------------------------------------------------------------------------------
* Name: PI:NAME:<NAME>END_PI - Multipurpose Website HTML Template
* Author: http://themeforest.net/user/BublinaStudio
* Version: 1.6
* --------------------------------------------------------------------------------------------------------------------
###
$(document).ready ->
setValidateForm()
setIEHelperClassses()
###
* --------------------------------------------------------------------------------------------------------------------
* Fixed header
* --------------------------------------------------------------------------------------------------------------------
###
$header = $("#header")
$carousel = $(".hero-carousel")
$main = $("#main")
if $header.attr("fixed")
$header.addClass("header--default")
$(window).scroll ->
if $(window).scrollTop() >= $carousel.height() - 150
$header.addClass("header--fixed")
$main.addClass("main--header-fixed")
else
$header.removeClass("header--fixed")
$main.removeClass("main--header-fixed")
if $(window).scrollTop() > $carousel.height()
$header.addClass("header--visible")
else
$header.removeClass("header--visible")
###
* --------------------------------------------------------------------------------------------------------------------
* bootstrap carousel definition
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().carousel
$('.carousel.carousel-auto').carousel()
$('.carousel.carousel-auto').on "swipeleft", (e) ->
$(this).carousel('next')
$('.carousel.carousel-auto').on "swiperight", (e) ->
$(this).carousel('prev')
###
* --------------------------------------------------------------------------------------------------------------------
* circle statistics
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().knob
$("[data-stat='circle']").each (i, el) ->
$(el).knob()
# --------------------------------------------------------------------------------------------------------------------
###
* --------------------------------------------------------------------------------------------------------------------
* setting up bootstrap tooltips
* --------------------------------------------------------------------------------------------------------------------
###
touch = false
if window.Modernizr
touch = Modernizr.touch
unless touch
$("body").on "mouseenter", ".has-tooltip", ->
el = $(this)
if el.data("tooltip") is `undefined`
el.tooltip
placement: el.data("placement") or "top"
container: "body"
el.tooltip "show"
$("body").on "mouseleave", ".has-tooltip", ->
$(this).tooltip "hide"
###
* --------------------------------------------------------------------------------------------------------------------
* replacing *.svg images for *.png for browsers without *.svg support
* --------------------------------------------------------------------------------------------------------------------
###
if window.Modernizr && Modernizr.svg == false
$("img[src*=\"svg\"]").attr "src", ->
$(this).attr("src").replace ".svg", ".png"
###
* --------------------------------------------------------------------------------------------------------------------
* setting placeholders for browsers without placeholder support
* --------------------------------------------------------------------------------------------------------------------
###
if window.Modernizr && Modernizr.input.placeholder == false
$("[placeholder]").focus(->
input = $(this)
if input.val() is input.attr("placeholder")
input.val ""
input.removeClass "placeholder"
).blur(->
input = $(this)
if input.val() is "" or input.val() is input.attr("placeholder")
input.addClass "placeholder"
input.val input.attr("placeholder")
).blur()
$("[placeholder]").parents("form").submit ->
$(this).find("[placeholder]").each ->
input = $(this)
input.val "" if input.val() is input.attr("placeholder")
###
* --------------------------------------------------------------------------------------------------------------------
* flexslider
* --------------------------------------------------------------------------------------------------------------------
###
$(window).load ->
if jQuery().flexslider
$flexslider = $('.flexslider')
$allSlides = $flexslider.find('.item')
$flexslider.addClass("fade-loading")
$('.flexslider').flexslider
animation: 'fade'
pauseOnHover: true
slideshowSpeed: 5000
animationSpeed: 400
prevText: ''
nextText: ''
before: (slider) ->
$activeSlide = $flexslider.find('.flex-active-slide')
if $activeSlide.index() == $allSlides.length - 1
$allSlides.eq(0).find('.animate').children().addClass("animate").removeClass("animated")
$allSlides.not('.flex-active-slide').find('.animate').children().addClass("animate").removeClass("animated")
else
$allSlides.not('.flex-active-slide').find('.animate').children().addClass("animate").removeClass("animated")
setTimeout (->
$allSlides.eq(slider.animatingTo).find('.animate').children().addClass("animated").removeClass("animate")
), 50
###
* --------------------------------------------------------------------------------------------------------------------
* setting up countdown plugin
* --------------------------------------------------------------------------------------------------------------------
###
$("[data-countdown]").countdown() if jQuery().countdown
# --------------------------------------------------------------------------------------------------------------------
###
* --------------------------------------------------------------------------------------------------------------------
* Fixed panel
* --------------------------------------------------------------------------------------------------------------------
###
$sidebar = $(".sidebar", "#main-content")
contentTop = $("#main-content").offset().top
paddingTop = $("#main-content").css("paddingTop")
padding = parseInt(paddingTop.substr(0, paddingTop.length-2))
scrollHeight = $("#main-content").outerHeight() - $sidebar.outerHeight() + padding
if $sidebar.hasClass("sidebar-fixed")
$sidebar.parent().css({ position: "relative"})
$sidebar.css({ position: "absolute"})
$(window).scroll ->
if ($(this).scrollTop() >= contentTop && $(this).scrollTop() <= scrollHeight)
top = $(window).scrollTop() - contentTop
$sidebar.css({ top: top })
if $(this).scrollTop() < contentTop
$sidebar.css({ top: 0 })
if $(this).scrollTop() > scrollHeight
$sidebar.css({ top: scrollHeight - contentTop })
###
* --------------------------------------------------------------------------------------------------------------------
* scroll top button
* --------------------------------------------------------------------------------------------------------------------
###
$("#scroll-to-top").on "click", (e) ->
$("body, html").animate
scrollTop: 0
, 800
false
$(window).load ->
$scrollToTop = $("#scroll-to-top")
defaultBottom = $scrollToTop.css("bottom")
scrollArea = ->
$(document).outerHeight() - $("#footer").outerHeight() - $(window).outerHeight()
if $('body').hasClass("boxed")
$(window).scroll ->
if $(this).scrollTop() > 500
$scrollToTop.addClass "in"
else
$scrollToTop.removeClass "in"
else
$(window).scroll ->
if $(this).scrollTop() > 500
$scrollToTop.addClass "in"
else
$scrollToTop.removeClass "in"
if $(this).scrollTop() >= scrollArea()
$scrollToTop.css bottom: $(this).scrollTop() - scrollArea() + 10
else
$scrollToTop.css bottom: defaultBottom
###
* --------------------------------------------------------------------------------------------------------------------
* setting up nivo lightbox
* --------------------------------------------------------------------------------------------------------------------
###
if jQuery().nivoLightbox
$("[data-lightbox]").nivoLightbox()
###
* --------------------------------------------------------------------------------------------------------------------
* ajax contact form
* --------------------------------------------------------------------------------------------------------------------
###
$(".form-contact").on "submit", (e) ->
if $(this).valid()
e.preventDefault()
submit = $(this).find(".form-contact-submit")
submit.button("loading")
success = $(this).find(".form-contact-success")
error = $(this).find(".form-contact-error")
inputs = $(this).find("input, textarea")
$.ajax
type: "POST"
url: "contact.php"
data: $(this).serialize()
success: (data) ->
if data is "success"
success.removeClass "hidden"
error.addClass "hidden"
inputs.val ""
else
error.removeClass "hidden"
success.addClass "hidden"
complete: ->
submit.button("reset")
###
* --------------------------------------------------------------------------------------------------------------------
* form validation
* --------------------------------------------------------------------------------------------------------------------
###
@setValidateForm = (selector = $(".form-validation")) ->
if jQuery().validate
selector.each (i, elem) ->
$(elem).validate
errorElement: "span"
errorClass: "help-block has-error"
errorPlacement: (err, e) ->
e.closest('.control-group').append err
highlight: (e) ->
$(e).closest('.control-group').addClass('has-error')
unhighlight: (e) ->
$(e).closest('.control-group').removeClass('has-error')
###
* --------------------------------------------------------------------------------------------------------------------
* internet explorer helpers classes :last-child, :nth-child
* --------------------------------------------------------------------------------------------------------------------
###
@setIEHelperClassses = ->
if /msie/.test(navigator.userAgent.toLowerCase())
$('*:last-child').addClass "last-child"
$('*:nth-child(odd)').addClass "nth-child-odd"
$('*:nth-child(even)').addClass "nth-child-even" |
[
{
"context": "pect(model.set).toHaveBeenCalledWith\n name: 'New Name'\n contains: 'Old Name'\n category_i",
"end": 2946,
"score": 0.6847773194313049,
"start": 2943,
"tag": "NAME",
"value": "New"
},
{
"context": "l = new DotLedger.Models.SortingRule\n name: 'Foobar'\n contains: 'Barfoo'\n category_id: '22'",
"end": 3175,
"score": 0.9333117008209229,
"start": 3169,
"tag": "NAME",
"value": "Foobar"
}
] | spec/javascripts/dot_ledger/views/sorting_rule/form_spec.js.coffee | malclocke/dotledger | 0 | describe "DotLedger.Views.SortingRules.Form", ->
createView = (model = new DotLedger.Models.SortingRule())->
categories = new DotLedger.Collections.Categories [
{
id: 11
name: 'Category One'
type: 'Essential'
}
{
id: 22
name: 'Category Two'
type: 'Flexible'
}
{
id: 33
name: 'Category Three'
type: 'Income'
}
{
id: 44
name: 'Transfer In'
type: 'Transfer'
}
]
view = new DotLedger.Views.SortingRules.Form
model: model
categories: categories
view
it "should be defined", ->
expect(DotLedger.Views.SortingRules.Form).toBeDefined()
it "should use the correct template", ->
expect(DotLedger.Views.SortingRules.Form).toUseTemplate('sorting_rules/form')
it "can be rendered", ->
view = createView()
expect(view.render).not.toThrow()
it "renders the form fields", ->
view = createView().render()
expect(view.$el).toContainElement('input[name=name]')
expect(view.$el).toContainElement('input[name=contains]')
expect(view.$el).toContainElement('select[name=category]')
expect(view.$el).toContainElement('option[value=11]')
expect(view.$el).toContainElement('option[value=22]')
expect(view.$el).toContainElement('option[value=33]')
expect(view.$el).toContainElement('option[value=44]')
expect(view.$el).toContainElement('optgroup[label=Essential]')
expect(view.$el).toContainElement('optgroup[label=Flexible]')
expect(view.$el).toContainElement('optgroup[label=Income]')
expect(view.$el).toContainElement('optgroup[label=Transfer]')
expect(view.$el).toContainElement('select[name=review]')
expect(view.$el).toContainElement('option[value=true]')
expect(view.$el).toContainElement('option[value=false]')
expect(view.$el).toContainElement('input[name=tags]')
it "renders the heading for new sorting_rule", ->
view = createView().render()
expect(view.$el).toHaveText(/New Sorting Rule/)
it "renders the heading for existing sorting_rule", ->
model = new DotLedger.Models.SortingRule
name: 'Some SortingRule'
view = createView(model).render()
expect(view.$el).toHaveText(/Some SortingRule/)
it "renders the cancel link", ->
view = createView().render()
expect(view.$el).toContainElement('a[href="/sorting-rules"]')
it "should set the values on the model when update is called", ->
model = new DotLedger.Models.SortingRule()
view = createView(model).render()
view.$el.find('input[name=name]').val('New Name')
view.$el.find('input[name=contains]').val('Old Name')
view.$el.find('select[name=category]').val('11')
view.$el.find('select[name=review]').val('true')
view.$el.find('input[name=tags]').val('Foo, Bar, Baz')
spyOn(model, 'set')
view.update()
expect(model.set).toHaveBeenCalledWith
name: 'New Name'
contains: 'Old Name'
category_id: '11'
review: 'true'
tags: 'Foo, Bar, Baz'
it "renders the form fields with the model values", ->
model = new DotLedger.Models.SortingRule
name: 'Foobar'
contains: 'Barfoo'
category_id: '22'
review: 'true'
tag_list: ['Foo', 'Bar', 'Baz']
view = createView(model).render()
expect(view.$el.find('input[name=name]')).toHaveValue('Foobar')
expect(view.$el.find('input[name=contains]')).toHaveValue('Barfoo')
expect(view.$el.find('select[name=category]')).toHaveValue('22')
expect(view.$el.find('select[name=review]')).toHaveValue('true')
expect(view.$el.find('input[name=tags]')).toHaveValue('Foo, Bar, Baz')
| 120932 | describe "DotLedger.Views.SortingRules.Form", ->
createView = (model = new DotLedger.Models.SortingRule())->
categories = new DotLedger.Collections.Categories [
{
id: 11
name: 'Category One'
type: 'Essential'
}
{
id: 22
name: 'Category Two'
type: 'Flexible'
}
{
id: 33
name: 'Category Three'
type: 'Income'
}
{
id: 44
name: 'Transfer In'
type: 'Transfer'
}
]
view = new DotLedger.Views.SortingRules.Form
model: model
categories: categories
view
it "should be defined", ->
expect(DotLedger.Views.SortingRules.Form).toBeDefined()
it "should use the correct template", ->
expect(DotLedger.Views.SortingRules.Form).toUseTemplate('sorting_rules/form')
it "can be rendered", ->
view = createView()
expect(view.render).not.toThrow()
it "renders the form fields", ->
view = createView().render()
expect(view.$el).toContainElement('input[name=name]')
expect(view.$el).toContainElement('input[name=contains]')
expect(view.$el).toContainElement('select[name=category]')
expect(view.$el).toContainElement('option[value=11]')
expect(view.$el).toContainElement('option[value=22]')
expect(view.$el).toContainElement('option[value=33]')
expect(view.$el).toContainElement('option[value=44]')
expect(view.$el).toContainElement('optgroup[label=Essential]')
expect(view.$el).toContainElement('optgroup[label=Flexible]')
expect(view.$el).toContainElement('optgroup[label=Income]')
expect(view.$el).toContainElement('optgroup[label=Transfer]')
expect(view.$el).toContainElement('select[name=review]')
expect(view.$el).toContainElement('option[value=true]')
expect(view.$el).toContainElement('option[value=false]')
expect(view.$el).toContainElement('input[name=tags]')
it "renders the heading for new sorting_rule", ->
view = createView().render()
expect(view.$el).toHaveText(/New Sorting Rule/)
it "renders the heading for existing sorting_rule", ->
model = new DotLedger.Models.SortingRule
name: 'Some SortingRule'
view = createView(model).render()
expect(view.$el).toHaveText(/Some SortingRule/)
it "renders the cancel link", ->
view = createView().render()
expect(view.$el).toContainElement('a[href="/sorting-rules"]')
it "should set the values on the model when update is called", ->
model = new DotLedger.Models.SortingRule()
view = createView(model).render()
view.$el.find('input[name=name]').val('New Name')
view.$el.find('input[name=contains]').val('Old Name')
view.$el.find('select[name=category]').val('11')
view.$el.find('select[name=review]').val('true')
view.$el.find('input[name=tags]').val('Foo, Bar, Baz')
spyOn(model, 'set')
view.update()
expect(model.set).toHaveBeenCalledWith
name: '<NAME> Name'
contains: 'Old Name'
category_id: '11'
review: 'true'
tags: 'Foo, Bar, Baz'
it "renders the form fields with the model values", ->
model = new DotLedger.Models.SortingRule
name: '<NAME>'
contains: 'Barfoo'
category_id: '22'
review: 'true'
tag_list: ['Foo', 'Bar', 'Baz']
view = createView(model).render()
expect(view.$el.find('input[name=name]')).toHaveValue('Foobar')
expect(view.$el.find('input[name=contains]')).toHaveValue('Barfoo')
expect(view.$el.find('select[name=category]')).toHaveValue('22')
expect(view.$el.find('select[name=review]')).toHaveValue('true')
expect(view.$el.find('input[name=tags]')).toHaveValue('Foo, Bar, Baz')
| true | describe "DotLedger.Views.SortingRules.Form", ->
createView = (model = new DotLedger.Models.SortingRule())->
categories = new DotLedger.Collections.Categories [
{
id: 11
name: 'Category One'
type: 'Essential'
}
{
id: 22
name: 'Category Two'
type: 'Flexible'
}
{
id: 33
name: 'Category Three'
type: 'Income'
}
{
id: 44
name: 'Transfer In'
type: 'Transfer'
}
]
view = new DotLedger.Views.SortingRules.Form
model: model
categories: categories
view
it "should be defined", ->
expect(DotLedger.Views.SortingRules.Form).toBeDefined()
it "should use the correct template", ->
expect(DotLedger.Views.SortingRules.Form).toUseTemplate('sorting_rules/form')
it "can be rendered", ->
view = createView()
expect(view.render).not.toThrow()
it "renders the form fields", ->
view = createView().render()
expect(view.$el).toContainElement('input[name=name]')
expect(view.$el).toContainElement('input[name=contains]')
expect(view.$el).toContainElement('select[name=category]')
expect(view.$el).toContainElement('option[value=11]')
expect(view.$el).toContainElement('option[value=22]')
expect(view.$el).toContainElement('option[value=33]')
expect(view.$el).toContainElement('option[value=44]')
expect(view.$el).toContainElement('optgroup[label=Essential]')
expect(view.$el).toContainElement('optgroup[label=Flexible]')
expect(view.$el).toContainElement('optgroup[label=Income]')
expect(view.$el).toContainElement('optgroup[label=Transfer]')
expect(view.$el).toContainElement('select[name=review]')
expect(view.$el).toContainElement('option[value=true]')
expect(view.$el).toContainElement('option[value=false]')
expect(view.$el).toContainElement('input[name=tags]')
it "renders the heading for new sorting_rule", ->
view = createView().render()
expect(view.$el).toHaveText(/New Sorting Rule/)
it "renders the heading for existing sorting_rule", ->
model = new DotLedger.Models.SortingRule
name: 'Some SortingRule'
view = createView(model).render()
expect(view.$el).toHaveText(/Some SortingRule/)
it "renders the cancel link", ->
view = createView().render()
expect(view.$el).toContainElement('a[href="/sorting-rules"]')
it "should set the values on the model when update is called", ->
model = new DotLedger.Models.SortingRule()
view = createView(model).render()
view.$el.find('input[name=name]').val('New Name')
view.$el.find('input[name=contains]').val('Old Name')
view.$el.find('select[name=category]').val('11')
view.$el.find('select[name=review]').val('true')
view.$el.find('input[name=tags]').val('Foo, Bar, Baz')
spyOn(model, 'set')
view.update()
expect(model.set).toHaveBeenCalledWith
name: 'PI:NAME:<NAME>END_PI Name'
contains: 'Old Name'
category_id: '11'
review: 'true'
tags: 'Foo, Bar, Baz'
it "renders the form fields with the model values", ->
model = new DotLedger.Models.SortingRule
name: 'PI:NAME:<NAME>END_PI'
contains: 'Barfoo'
category_id: '22'
review: 'true'
tag_list: ['Foo', 'Bar', 'Baz']
view = createView(model).render()
expect(view.$el.find('input[name=name]')).toHaveValue('Foobar')
expect(view.$el.find('input[name=contains]')).toHaveValue('Barfoo')
expect(view.$el.find('select[name=category]')).toHaveValue('22')
expect(view.$el.find('select[name=review]')).toHaveValue('true')
expect(view.$el.find('input[name=tags]')).toHaveValue('Foo, Bar, Baz')
|
[
{
"context": "# copyright 2015 by mike lodato (zvxryb@gmail.com)\n# this work is subject to the ",
"end": 31,
"score": 0.9998587369918823,
"start": 20,
"tag": "NAME",
"value": "mike lodato"
},
{
"context": "# copyright 2015 by mike lodato (zvxryb@gmail.com)\n# this work is subject to the terms of the MIT l",
"end": 49,
"score": 0.9999282360076904,
"start": 33,
"tag": "EMAIL",
"value": "zvxryb@gmail.com"
}
] | src/math/system.coffee | zvxryb/openjscad-solve | 1 | # copyright 2015 by mike lodato (zvxryb@gmail.com)
# this work is subject to the terms of the MIT license
define ['math/expr'], (Expr) ->
class SolveError
constructor: (@message, @cause) ->
@name = @constructor.name
@stack = (new Error()).stack
class OverdeterminedError extends SolveError
constructor: ->
super('overdetermined', null)
class UnderdeterminedError extends SolveError
constructor: ->
super('underdetermined', null)
unique = (array) ->
return array.slice().sort().filter((y) ->
isUnique = y isnt @last
@last = y
return isUnique
, {last: null})
class System
constructor: -> @exprList = []
add: (expr) ->
@exprList.push(switch
when expr instanceof Expr then expr
when typeof expr is 'string' then Expr.parse(expr)
)
vars: ->
vars = []
vars.push(expr.vars...) for expr in @exprList
return unique(vars)
@solve: (x, expr, exprList) ->
if expr.vars.length is 1 and expr.vars[0] is x
return (y.approx() for y in expr.solve(x))
for e0 in exprList
common = expr.common(e0)
continue unless common.length > 0
for y in common
continue if y is x
results = []
for e1 in e0.solve(y)
results.push(@solve(x,
expr.sub([[y, e1]]),
exprList.filter((e2) -> e2 isnt e0))...)
return results if results.length > 0
return []
solve: (x) ->
vars = @vars()
n = vars.length
throw new UnderdeterminedError() if @exprList.length < n
throw new OverdeterminedError() if @exprList.length > n
results = []
for expr in @exprList
continue unless expr.has(x)
remaining = @exprList.filter((e) -> e isnt expr)
results.push(@constructor.solve(x, expr, remaining)...)
return unique(results)
System
| 33592 | # copyright 2015 by <NAME> (<EMAIL>)
# this work is subject to the terms of the MIT license
define ['math/expr'], (Expr) ->
class SolveError
constructor: (@message, @cause) ->
@name = @constructor.name
@stack = (new Error()).stack
class OverdeterminedError extends SolveError
constructor: ->
super('overdetermined', null)
class UnderdeterminedError extends SolveError
constructor: ->
super('underdetermined', null)
unique = (array) ->
return array.slice().sort().filter((y) ->
isUnique = y isnt @last
@last = y
return isUnique
, {last: null})
class System
constructor: -> @exprList = []
add: (expr) ->
@exprList.push(switch
when expr instanceof Expr then expr
when typeof expr is 'string' then Expr.parse(expr)
)
vars: ->
vars = []
vars.push(expr.vars...) for expr in @exprList
return unique(vars)
@solve: (x, expr, exprList) ->
if expr.vars.length is 1 and expr.vars[0] is x
return (y.approx() for y in expr.solve(x))
for e0 in exprList
common = expr.common(e0)
continue unless common.length > 0
for y in common
continue if y is x
results = []
for e1 in e0.solve(y)
results.push(@solve(x,
expr.sub([[y, e1]]),
exprList.filter((e2) -> e2 isnt e0))...)
return results if results.length > 0
return []
solve: (x) ->
vars = @vars()
n = vars.length
throw new UnderdeterminedError() if @exprList.length < n
throw new OverdeterminedError() if @exprList.length > n
results = []
for expr in @exprList
continue unless expr.has(x)
remaining = @exprList.filter((e) -> e isnt expr)
results.push(@constructor.solve(x, expr, remaining)...)
return unique(results)
System
| true | # copyright 2015 by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# this work is subject to the terms of the MIT license
define ['math/expr'], (Expr) ->
class SolveError
constructor: (@message, @cause) ->
@name = @constructor.name
@stack = (new Error()).stack
class OverdeterminedError extends SolveError
constructor: ->
super('overdetermined', null)
class UnderdeterminedError extends SolveError
constructor: ->
super('underdetermined', null)
unique = (array) ->
return array.slice().sort().filter((y) ->
isUnique = y isnt @last
@last = y
return isUnique
, {last: null})
class System
constructor: -> @exprList = []
add: (expr) ->
@exprList.push(switch
when expr instanceof Expr then expr
when typeof expr is 'string' then Expr.parse(expr)
)
vars: ->
vars = []
vars.push(expr.vars...) for expr in @exprList
return unique(vars)
@solve: (x, expr, exprList) ->
if expr.vars.length is 1 and expr.vars[0] is x
return (y.approx() for y in expr.solve(x))
for e0 in exprList
common = expr.common(e0)
continue unless common.length > 0
for y in common
continue if y is x
results = []
for e1 in e0.solve(y)
results.push(@solve(x,
expr.sub([[y, e1]]),
exprList.filter((e2) -> e2 isnt e0))...)
return results if results.length > 0
return []
solve: (x) ->
vars = @vars()
n = vars.length
throw new UnderdeterminedError() if @exprList.length < n
throw new OverdeterminedError() if @exprList.length > n
results = []
for expr in @exprList
continue unless expr.has(x)
remaining = @exprList.filter((e) -> e isnt expr)
results.push(@constructor.solve(x, expr, remaining)...)
return unique(results)
System
|
[
{
"context": "# @fileoverview Tests for eol-last rule.\n# @author Nodeca Team <https://github.com/nodeca>\n###\n'use strict'\n\n#--",
"end": 67,
"score": 0.9897341132164001,
"start": 56,
"tag": "NAME",
"value": "Nodeca Team"
},
{
"context": "t rule.\n# @author Nodeca Team <https://github.com/nodeca>\n###\n'use strict'\n\n#-----------------------------",
"end": 94,
"score": 0.999600887298584,
"start": 88,
"tag": "USERNAME",
"value": "nodeca"
}
] | src/tests/rules/eol-last.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for eol-last rule.
# @author Nodeca Team <https://github.com/nodeca>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/eol-last'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'eol-last', rule,
valid: [
''
'\n'
'a = 123\n'
'a = 123\n\n'
'a = 123\n \n'
'\r\n'
'a = 123\r\n'
'a = 123\r\n\r\n'
'a = 123\r\n \r\n'
,
code: 'a = 123', options: ['never']
,
code: 'a = 123\nb = 456', options: ['never']
,
code: 'a = 123\r\nb = 456', options: ['never']
,
# Deprecated: `"unix"` parameter
code: '', options: ['unix']
,
code: '\n', options: ['unix']
,
code: 'a = 123\n', options: ['unix']
,
code: 'a = 123\n\n', options: ['unix']
,
code: 'a = 123\n \n', options: ['unix']
,
# Deprecated: `"windows"` parameter
code: '', options: ['windows']
,
code: '\n', options: ['windows']
,
code: '\r\n', options: ['windows']
,
code: 'a = 123\r\n', options: ['windows']
,
code: 'a = 123\r\n\r\n', options: ['windows']
,
code: 'a = 123\r\n \r\n', options: ['windows']
]
invalid: [
code: 'a = 123'
output: 'a = 123\n'
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n '
output: 'a = 123\n \n'
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\n\r\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\nb = 456\n'
output: 'a = 123\nb = 456'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\nb = 456\r\n'
output: 'a = 123\r\nb = 456'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\n\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
# Deprecated: `"unix"` parameter
code: 'a = 123'
output: 'a = 123\n'
options: ['unix']
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n '
output: 'a = 123\n \n'
options: ['unix']
errors: [messageId: 'missing', type: 'Program']
,
# Deprecated: `"windows"` parameter
code: 'a = 123'
output: 'a = 123\r\n'
options: ['windows']
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\r\n '
output: 'a = 123\r\n \r\n'
options: ['windows']
errors: [messageId: 'missing', type: 'Program']
]
| 211430 | ###*
# @fileoverview Tests for eol-last rule.
# @author <NAME> <https://github.com/nodeca>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/eol-last'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'eol-last', rule,
valid: [
''
'\n'
'a = 123\n'
'a = 123\n\n'
'a = 123\n \n'
'\r\n'
'a = 123\r\n'
'a = 123\r\n\r\n'
'a = 123\r\n \r\n'
,
code: 'a = 123', options: ['never']
,
code: 'a = 123\nb = 456', options: ['never']
,
code: 'a = 123\r\nb = 456', options: ['never']
,
# Deprecated: `"unix"` parameter
code: '', options: ['unix']
,
code: '\n', options: ['unix']
,
code: 'a = 123\n', options: ['unix']
,
code: 'a = 123\n\n', options: ['unix']
,
code: 'a = 123\n \n', options: ['unix']
,
# Deprecated: `"windows"` parameter
code: '', options: ['windows']
,
code: '\n', options: ['windows']
,
code: '\r\n', options: ['windows']
,
code: 'a = 123\r\n', options: ['windows']
,
code: 'a = 123\r\n\r\n', options: ['windows']
,
code: 'a = 123\r\n \r\n', options: ['windows']
]
invalid: [
code: 'a = 123'
output: 'a = 123\n'
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n '
output: 'a = 123\n \n'
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\n\r\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\nb = 456\n'
output: 'a = 123\nb = 456'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\nb = 456\r\n'
output: 'a = 123\r\nb = 456'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\n\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
# Deprecated: `"unix"` parameter
code: 'a = 123'
output: 'a = 123\n'
options: ['unix']
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n '
output: 'a = 123\n \n'
options: ['unix']
errors: [messageId: 'missing', type: 'Program']
,
# Deprecated: `"windows"` parameter
code: 'a = 123'
output: 'a = 123\r\n'
options: ['windows']
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\r\n '
output: 'a = 123\r\n \r\n'
options: ['windows']
errors: [messageId: 'missing', type: 'Program']
]
| true | ###*
# @fileoverview Tests for eol-last rule.
# @author PI:NAME:<NAME>END_PI <https://github.com/nodeca>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/eol-last'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'eol-last', rule,
valid: [
''
'\n'
'a = 123\n'
'a = 123\n\n'
'a = 123\n \n'
'\r\n'
'a = 123\r\n'
'a = 123\r\n\r\n'
'a = 123\r\n \r\n'
,
code: 'a = 123', options: ['never']
,
code: 'a = 123\nb = 456', options: ['never']
,
code: 'a = 123\r\nb = 456', options: ['never']
,
# Deprecated: `"unix"` parameter
code: '', options: ['unix']
,
code: '\n', options: ['unix']
,
code: 'a = 123\n', options: ['unix']
,
code: 'a = 123\n\n', options: ['unix']
,
code: 'a = 123\n \n', options: ['unix']
,
# Deprecated: `"windows"` parameter
code: '', options: ['windows']
,
code: '\n', options: ['windows']
,
code: '\r\n', options: ['windows']
,
code: 'a = 123\r\n', options: ['windows']
,
code: 'a = 123\r\n\r\n', options: ['windows']
,
code: 'a = 123\r\n \r\n', options: ['windows']
]
invalid: [
code: 'a = 123'
output: 'a = 123\n'
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n '
output: 'a = 123\n \n'
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\n\r\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\nb = 456\n'
output: 'a = 123\nb = 456'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\r\nb = 456\r\n'
output: 'a = 123\r\nb = 456'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
code: 'a = 123\n\n'
output: 'a = 123'
options: ['never']
errors: [messageId: 'unexpected', type: 'Program']
,
# Deprecated: `"unix"` parameter
code: 'a = 123'
output: 'a = 123\n'
options: ['unix']
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\n '
output: 'a = 123\n \n'
options: ['unix']
errors: [messageId: 'missing', type: 'Program']
,
# Deprecated: `"windows"` parameter
code: 'a = 123'
output: 'a = 123\r\n'
options: ['windows']
errors: [messageId: 'missing', type: 'Program']
,
code: 'a = 123\r\n '
output: 'a = 123\r\n \r\n'
options: ['windows']
errors: [messageId: 'missing', type: 'Program']
]
|
[
{
"context": "#\n# Copyright (C) 2016 yanni4night.com\n# a.coffee\n#\n# changelog\n# 2016-08-18[12:50:49]:r",
"end": 38,
"score": 0.9955409169197083,
"start": 23,
"tag": "EMAIL",
"value": "yanni4night.com"
},
{
"context": "angelog\n# 2016-08-18[12:50:49]:revised\n#\n# @author yanni4night@gmail.com\n# @version 0.1.0\n# @since 0.1.0\n#\n",
"end": 128,
"score": 0.99992835521698,
"start": 107,
"tag": "EMAIL",
"value": "yanni4night@gmail.com"
}
] | test/fixtures/a.coffee | pantojs/time-panto | 0 | #
# Copyright (C) 2016 yanni4night.com
# a.coffee
#
# changelog
# 2016-08-18[12:50:49]:revised
#
# @author yanni4night@gmail.com
# @version 0.1.0
# @since 0.1.0
#
| 49877 | #
# Copyright (C) 2016 <EMAIL>
# a.coffee
#
# changelog
# 2016-08-18[12:50:49]:revised
#
# @author <EMAIL>
# @version 0.1.0
# @since 0.1.0
#
| true | #
# Copyright (C) 2016 PI:EMAIL:<EMAIL>END_PI
# a.coffee
#
# changelog
# 2016-08-18[12:50:49]:revised
#
# @author PI:EMAIL:<EMAIL>END_PI
# @version 0.1.0
# @since 0.1.0
#
|
[
{
"context": "\n bar:\n fruit: apple\n name: steve\n sport: baseball\n \"\"\"\n .toE",
"end": 1415,
"score": 0.9985499382019043,
"start": 1410,
"tag": "NAME",
"value": "steve"
},
{
"context": "ual foo: 'whatever', bar: (fruit: 'apple', name: 'steve', sport: 'baseball')\n\n\n it 'can be mixed mappi",
"end": 1521,
"score": 0.9985055923461914,
"start": 1516,
"tag": "NAME",
"value": "steve"
},
{
"context": " -\n fruit: apple\n name: steve\n sport: baseball\n - more\n ",
"end": 1702,
"score": 0.9982976913452148,
"start": 1697,
"tag": "NAME",
"value": "steve"
},
{
"context": "ever', bar: [\n (fruit: 'apple', name: 'steve', sport: 'baseball'),\n 'more',\n ",
"end": 1928,
"score": 0.9989557266235352,
"start": 1923,
"tag": "NAME",
"value": "steve"
},
{
"context": "ct YAML.parse \"\"\"\n mapping:\n name: Joe\n job: Accountant\n <<:\n ",
"end": 2629,
"score": 0.999666690826416,
"start": 2626,
"tag": "NAME",
"value": "Joe"
},
{
"context": " .toEqual mapping:\n name: 'Joe'\n job: 'Accountant'\n ",
"end": 2758,
"score": 0.9996340274810791,
"start": 2755,
"tag": "NAME",
"value": "Joe"
},
{
"context": "t YAML.parse \"\"\"\n ---\n hash: { name: Steve, foo: bar }\n \"\"\"\n .toEqual hash: (n",
"end": 3145,
"score": 0.9998583197593689,
"start": 3140,
"tag": "NAME",
"value": "Steve"
},
{
"context": " bar }\n \"\"\"\n .toEqual hash: (name: 'Steve', foo: 'bar')\n\n\n it 'can be nested inline hash",
"end": 3206,
"score": 0.9991508722305298,
"start": 3201,
"tag": "NAME",
"value": "Steve"
},
{
"context": "', ->\n\n expect YAML.parse \"\"\"\n name: Mr. Show\n hosted by: Bob and David\n date of ",
"end": 7893,
"score": 0.9839575290679932,
"start": 7885,
"tag": "NAME",
"value": "Mr. Show"
},
{
"context": "arse \"\"\"\n name: Mr. Show\n hosted by: Bob and David\n date of next season: ~\n ",
"end": 7916,
"score": 0.9997105002403259,
"start": 7913,
"tag": "NAME",
"value": "Bob"
},
{
"context": "\n name: Mr. Show\n hosted by: Bob and David\n date of next season: ~\n \"\"\"\n ",
"end": 7926,
"score": 0.9918132424354553,
"start": 7921,
"tag": "NAME",
"value": "David"
},
{
"context": " \"\"\"\n .toEqual (\n 'name': 'Mr. Show'\n 'hosted by': 'Bob and David'\n ",
"end": 8018,
"score": 0.9898173809051514,
"start": 8010,
"tag": "NAME",
"value": "Mr. Show"
},
{
"context": " 'name': 'Mr. Show'\n 'hosted by': 'Bob and David'\n 'date of next season': null\n ",
"end": 8053,
"score": 0.8604630827903748,
"start": 8046,
"tag": "NAME",
"value": "Bob and"
},
{
"context": "ame': 'Mr. Show'\n 'hosted by': 'Bob and David'\n 'date of next season': null\n ",
"end": 8059,
"score": 0.8950785994529724,
"start": 8054,
"tag": "NAME",
"value": "David"
},
{
"context": "parse \"\"\"\n ---\n quote: >\n Mark McGwire's\n year was crippled\n by a kn",
"end": 12550,
"score": 0.96185302734375,
"start": 12536,
"tag": "NAME",
"value": "Mark McGwire's"
},
{
"context": " \"\"\"\n .toEqual (\n 'quote': \"Mark McGwire's year was crippled by a knee injury.\\n\"\n ",
"end": 12697,
"score": 0.9995898008346558,
"start": 12685,
"tag": "NAME",
"value": "Mark McGwire"
},
{
"context": "\n\n expect YAML.parse \"\"\"\n - &showell Steve\n - Clark\n - Brian\n - Oren\n ",
"end": 15273,
"score": 0.9986168146133423,
"start": 15268,
"tag": "NAME",
"value": "Steve"
},
{
"context": " YAML.parse \"\"\"\n - &showell Steve\n - Clark\n - Brian\n - Oren\n - *showell",
"end": 15289,
"score": 0.99946129322052,
"start": 15284,
"tag": "NAME",
"value": "Clark"
},
{
"context": " - &showell Steve\n - Clark\n - Brian\n - Oren\n - *showell\n \"\"\"\n ",
"end": 15305,
"score": 0.9995707273483276,
"start": 15300,
"tag": "NAME",
"value": "Brian"
},
{
"context": "ll Steve\n - Clark\n - Brian\n - Oren\n - *showell\n \"\"\"\n .toEqual [",
"end": 15320,
"score": 0.9994379281997681,
"start": 15316,
"tag": "NAME",
"value": "Oren"
},
{
"context": " - *showell\n \"\"\"\n .toEqual ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'ca",
"end": 15376,
"score": 0.9995582103729248,
"start": 15371,
"tag": "NAME",
"value": "Steve"
},
{
"context": " *showell\n \"\"\"\n .toEqual ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alia",
"end": 15385,
"score": 0.9997586607933044,
"start": 15380,
"tag": "NAME",
"value": "Clark"
},
{
"context": "\n \"\"\"\n .toEqual ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alias of a ma",
"end": 15394,
"score": 0.9997327327728271,
"start": 15389,
"tag": "NAME",
"value": "Brian"
},
{
"context": "\"\"\"\n .toEqual ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alias of a mapping', ",
"end": 15402,
"score": 0.9996553659439087,
"start": 15398,
"tag": "NAME",
"value": "Oren"
},
{
"context": " .toEqual ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alias of a mapping', ->\n\n ",
"end": 15411,
"score": 0.9996163845062256,
"start": 15406,
"tag": "NAME",
"value": "Steve"
},
{
"context": "\n bar:\n fruit: apple\n name: steve\n sport: baseball\n \"\"\"\n .toE",
"end": 18025,
"score": 0.9990496635437012,
"start": 18020,
"tag": "NAME",
"value": "steve"
},
{
"context": "ump foo: 'whatever', bar: (fruit: 'apple', name: 'steve', sport: 'baseball')\n\n\n it 'can be mixed mappi",
"end": 18152,
"score": 0.9983826875686646,
"start": 18147,
"tag": "NAME",
"value": "steve"
},
{
"context": " -\n fruit: apple\n name: steve\n sport: baseball\n - more\n ",
"end": 18333,
"score": 0.9986625909805298,
"start": 18328,
"tag": "NAME",
"value": "steve"
},
{
"context": "ever', bar: [\n (fruit: 'apple', name: 'steve', sport: 'baseball'),\n 'more',\n ",
"end": 18580,
"score": 0.9986990690231323,
"start": 18575,
"tag": "NAME",
"value": "steve"
},
{
"context": "ct YAML.parse \"\"\"\n mapping:\n name: Joe\n job: Accountant\n <<:\n ",
"end": 19323,
"score": 0.9995030164718628,
"start": 19320,
"tag": "NAME",
"value": "Joe"
},
{
"context": "rse YAML.dump mapping:\n name: 'Joe'\n job: 'Accountant'\n ",
"end": 19473,
"score": 0.9995450973510742,
"start": 19470,
"tag": "NAME",
"value": "Joe"
},
{
"context": "t YAML.parse \"\"\"\n ---\n hash: { name: Steve, foo: bar }\n \"\"\"\n .toEqual YAML.par",
"end": 19881,
"score": 0.999854564666748,
"start": 19876,
"tag": "NAME",
"value": "Steve"
},
{
"context": " .toEqual YAML.parse YAML.dump hash: (name: 'Steve', foo: 'bar')\n\n\n it 'can be multi-line inline ",
"end": 19963,
"score": 0.9988077282905579,
"start": 19958,
"tag": "NAME",
"value": "Steve"
},
{
"context": "', ->\n\n expect YAML.parse \"\"\"\n name: Mr. Show\n hosted by: Bob and David\n date of ",
"end": 24837,
"score": 0.8200947642326355,
"start": 24829,
"tag": "NAME",
"value": "Mr. Show"
},
{
"context": "arse \"\"\"\n name: Mr. Show\n hosted by: Bob and David\n date of next season: ~\n ",
"end": 24860,
"score": 0.99940025806427,
"start": 24857,
"tag": "NAME",
"value": "Bob"
},
{
"context": "\n name: Mr. Show\n hosted by: Bob and David\n date of next season: ~\n \"\"\"\n ",
"end": 24870,
"score": 0.9864835143089294,
"start": 24865,
"tag": "NAME",
"value": "David"
},
{
"context": "Equal YAML.parse YAML.dump (\n 'name': 'Mr. Show'\n 'hosted by': 'Bob and David'\n ",
"end": 24983,
"score": 0.806630551815033,
"start": 24975,
"tag": "NAME",
"value": "Mr. Show"
},
{
"context": " 'name': 'Mr. Show'\n 'hosted by': 'Bob and David'\n 'date of next season': null\n ",
"end": 25018,
"score": 0.7970213890075684,
"start": 25011,
"tag": "NAME",
"value": "Bob and"
},
{
"context": "ame': 'Mr. Show'\n 'hosted by': 'Bob and David'\n 'date of next season': null\n ",
"end": 25024,
"score": 0.8375778794288635,
"start": 25019,
"tag": "NAME",
"value": "David"
},
{
"context": "parse \"\"\"\n ---\n quote: >\n Mark McGwire's\n year was crippled\n by a ",
"end": 29723,
"score": 0.9995229840278625,
"start": 29711,
"tag": "NAME",
"value": "Mark McGwire"
},
{
"context": "qual YAML.parse YAML.dump (\n 'quote': \"Mark McGwire's year was crippled by a knee injury.\\n\"\n ",
"end": 29893,
"score": 0.9991884231567383,
"start": 29881,
"tag": "NAME",
"value": "Mark McGwire"
},
{
"context": "\n\n expect YAML.parse \"\"\"\n - &showell Steve\n - Clark\n - Brian\n - Oren\n ",
"end": 31655,
"score": 0.998622715473175,
"start": 31650,
"tag": "NAME",
"value": "Steve"
},
{
"context": " YAML.parse \"\"\"\n - &showell Steve\n - Clark\n - Brian\n - Oren\n - *showell",
"end": 31671,
"score": 0.9990955591201782,
"start": 31666,
"tag": "NAME",
"value": "Clark"
},
{
"context": " - &showell Steve\n - Clark\n - Brian\n - Oren\n - *showell\n \"\"\"\n ",
"end": 31687,
"score": 0.9995222091674805,
"start": 31682,
"tag": "NAME",
"value": "Brian"
},
{
"context": "ll Steve\n - Clark\n - Brian\n - Oren\n - *showell\n \"\"\"\n .toEqual Y",
"end": 31702,
"score": 0.9992929697036743,
"start": 31698,
"tag": "NAME",
"value": "Oren"
},
{
"context": "ark\n - Brian\n - Oren\n - *showell\n \"\"\"\n .toEqual YAML.parse YAML.dump",
"end": 31721,
"score": 0.9996874332427979,
"start": 31718,
"tag": "NAME",
"value": "ell"
},
{
"context": " \"\"\"\n .toEqual YAML.parse YAML.dump ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'ca",
"end": 31779,
"score": 0.9998040199279785,
"start": 31774,
"tag": "NAME",
"value": "Steve"
},
{
"context": "\n .toEqual YAML.parse YAML.dump ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alia",
"end": 31788,
"score": 0.9997481107711792,
"start": 31783,
"tag": "NAME",
"value": "Clark"
},
{
"context": ".toEqual YAML.parse YAML.dump ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alias of a ma",
"end": 31797,
"score": 0.9997760057449341,
"start": 31792,
"tag": "NAME",
"value": "Brian"
},
{
"context": "YAML.parse YAML.dump ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alias of a mapping', ",
"end": 31805,
"score": 0.9997973442077637,
"start": 31801,
"tag": "NAME",
"value": "Oren"
},
{
"context": "se YAML.dump ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']\n\n\n it 'can be alias of a mapping', ->\n\n ",
"end": 31814,
"score": 0.9997216463088989,
"start": 31809,
"tag": "NAME",
"value": "Steve"
}
] | node_modules/docpad/node_modules/yamljs/test/spec/YamlSpec.coffee | Vladimir37/Sanelotto_site | 0 |
unless YAML?
YAML = require '../../src/Yaml'
# Parsing
#
describe 'Parsed YAML Collections', ->
it 'can be simple sequence', ->
expect YAML.parse """
- apple
- banana
- carrot
"""
.toEqual ['apple', 'banana', 'carrot']
it 'can be nested sequences', ->
expect YAML.parse """
-
- foo
- bar
- baz
"""
.toEqual [['foo', 'bar', 'baz']]
it 'can be mixed sequences', ->
expect YAML.parse """
- apple
-
- foo
- bar
- x123
- banana
- carrot
"""
.toEqual ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot']
it 'can be deeply nested sequences', ->
expect YAML.parse """
-
-
- uno
- dos
"""
.toEqual [[['uno', 'dos']]]
it 'can be simple mapping', ->
expect YAML.parse """
foo: whatever
bar: stuff
"""
.toEqual foo: 'whatever', bar: 'stuff'
it 'can be sequence in a mapping', ->
expect YAML.parse """
foo: whatever
bar:
- uno
- dos
"""
.toEqual foo: 'whatever', bar: ['uno', 'dos']
it 'can be nested mappings', ->
expect YAML.parse """
foo: whatever
bar:
fruit: apple
name: steve
sport: baseball
"""
.toEqual foo: 'whatever', bar: (fruit: 'apple', name: 'steve', sport: 'baseball')
it 'can be mixed mapping', ->
expect YAML.parse """
foo: whatever
bar:
-
fruit: apple
name: steve
sport: baseball
- more
-
python: rocks
perl: papers
ruby: scissorses
"""
.toEqual foo: 'whatever', bar: [
(fruit: 'apple', name: 'steve', sport: 'baseball'),
'more',
(python: 'rocks', perl: 'papers', ruby: 'scissorses')
]
it 'can have mapping-in-sequence shortcut', ->
expect YAML.parse """
- work on YAML.py:
- work on Store
"""
.toEqual [('work on YAML.py': ['work on Store'])]
it 'can have unindented sequence-in-mapping shortcut', ->
expect YAML.parse """
allow:
- 'localhost'
- '%.sourceforge.net'
- '%.freepan.org'
"""
.toEqual (allow: ['localhost', '%.sourceforge.net', '%.freepan.org'])
it 'can merge key', ->
expect YAML.parse """
mapping:
name: Joe
job: Accountant
<<:
age: 38
"""
.toEqual mapping:
name: 'Joe'
job: 'Accountant'
age: 38
describe 'Parsed YAML Inline Collections', ->
it 'can be simple inline array', ->
expect YAML.parse """
---
seq: [ a, b, c ]
"""
.toEqual seq: ['a', 'b', 'c']
it 'can be simple inline hash', ->
expect YAML.parse """
---
hash: { name: Steve, foo: bar }
"""
.toEqual hash: (name: 'Steve', foo: 'bar')
it 'can be nested inline hash', ->
expect YAML.parse """
---
hash: { val1: "string", val2: { v2k1: "v2k1v" } }
"""
.toEqual hash: (val1: 'string', val2: (v2k1: 'v2k1v'))
it 'can be multi-line inline collections', ->
expect YAML.parse """
languages: [ Ruby,
Perl,
Python ]
websites: { YAML: yaml.org,
Ruby: ruby-lang.org,
Python: python.org,
Perl: use.perl.org }
"""
.toEqual (
languages: ['Ruby', 'Perl', 'Python']
websites:
YAML: 'yaml.org'
Ruby: 'ruby-lang.org'
Python: 'python.org'
Perl: 'use.perl.org'
)
describe 'Parsed YAML Basic Types', ->
it 'can be strings', ->
expect YAML.parse """
---
String
"""
.toEqual 'String'
it 'can be double-quoted strings with backslashes', ->
expect YAML.parse """
str:
"string with \\\\ inside"
"""
.toEqual str: 'string with \\ inside'
it 'can be single-quoted strings with backslashes', ->
expect YAML.parse """
str:
'string with \\\\ inside'
"""
.toEqual str: 'string with \\\\ inside'
it 'can be double-quoted strings with line breaks', ->
expect YAML.parse """
str:
"string with \\n inside"
"""
.toEqual str: 'string with \n inside'
it 'can be single-quoted strings with escaped line breaks', ->
expect YAML.parse """
str:
'string with \\n inside'
"""
.toEqual str: 'string with \\n inside'
it 'can be double-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
"string with \\n inside and \\\\ also"
"""
.toEqual str: 'string with \n inside and \\ also'
it 'can be single-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
'string with \\n inside and \\\\ also'
"""
.toEqual str: 'string with \\n inside and \\\\ also'
it 'can have string characters in sequences', ->
expect YAML.parse """
- What's Yaml?
- It's for writing data structures in plain text.
- And?
- And what? That's not good enough for you?
- No, I mean, "And what about Yaml?"
- Oh, oh yeah. Uh.. Yaml for JavaScript.
"""
.toEqual [
"What's Yaml?",
"It's for writing data structures in plain text.",
"And?",
"And what? That's not good enough for you?",
"No, I mean, \"And what about Yaml?\"",
"Oh, oh yeah. Uh.. Yaml for JavaScript."
]
it 'can have indicators in strings', ->
expect YAML.parse """
the colon followed by space is an indicator: but is a string:right here
same for the pound sign: here we have it#in a string
the comma can, honestly, be used in most cases: [ but not in, inline collections ]
"""
.toEqual (
'the colon followed by space is an indicator': 'but is a string:right here',
'same for the pound sign': 'here we have it#in a string',
'the comma can, honestly, be used in most cases': ['but not in', 'inline collections']
)
it 'can force strings', ->
expect YAML.parse """
date string: !str 2001-08-01
number string: !str 192
date string 2: !!str 2001-08-01
number string 2: !!str 192
"""
.toEqual (
'date string': '2001-08-01',
'number string': '192' ,
'date string 2': '2001-08-01',
'number string 2': '192'
)
it 'can be single-quoted strings', ->
expect YAML.parse """
all my favorite symbols: '#:!/%.)'
a few i hate: '&(*'
why do i hate them?: 'it''s very hard to explain'
"""
.toEqual (
'all my favorite symbols': '#:!/%.)',
'a few i hate': '&(*',
'why do i hate them?': 'it\'s very hard to explain'
)
it 'can be double-quoted strings', ->
expect YAML.parse """
i know where i want my line breaks: "one here\\nand another here\\n"
"""
.toEqual (
'i know where i want my line breaks': "one here\nand another here\n"
)
it 'can be null', ->
expect YAML.parse """
name: Mr. Show
hosted by: Bob and David
date of next season: ~
"""
.toEqual (
'name': 'Mr. Show'
'hosted by': 'Bob and David'
'date of next season': null
)
it 'can be boolean', ->
expect YAML.parse """
Is Gus a Liar?: true
Do I rely on Gus for Sustenance?: false
"""
.toEqual (
'Is Gus a Liar?': true
'Do I rely on Gus for Sustenance?': false
)
it 'can be integers', ->
expect YAML.parse """
zero: 0
simple: 12
one-thousand: 1,000
negative one-thousand: -1,000
"""
.toEqual (
'zero': 0
'simple': 12
'one-thousand': 1000
'negative one-thousand': -1000
)
it 'can be integers as map keys', ->
expect YAML.parse """
1: one
2: two
3: three
"""
.toEqual (
1: 'one'
2: 'two'
3: 'three'
)
it 'can be floats', ->
expect YAML.parse """
a simple float: 2.00
larger float: 1,000.09
scientific notation: 1.00009e+3
"""
.toEqual (
'a simple float': 2.0
'larger float': 1000.09
'scientific notation': 1000.09
)
it 'can be time', ->
iso8601Date = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
iso8601Date.setTime iso8601Date.getTime() - 5 * 3600 * 1000
spaceSeparatedDate = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
spaceSeparatedDate.setTime spaceSeparatedDate.getTime() - 5 * 3600 * 1000
withDatesToTime = (input) ->
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
iso8601: 2001-12-14t21:59:43.10-05:00
space seperated: 2001-12-14 21:59:43.10 -05:00
""")
.toEqual withDatesToTime (
'iso8601': iso8601Date
'space seperated': spaceSeparatedDate
)
it 'can be date', ->
aDate = new Date Date.UTC(1976, 7-1, 31, 0, 0, 0, 0)
withDatesToTime = (input) ->
return input
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
date: 1976-07-31
""")
.toEqual withDatesToTime (
'date': aDate
)
describe 'Parsed YAML Blocks', ->
it 'can be single ending newline', ->
expect YAML.parse """
---
this: |
Foo
Bar
"""
.toEqual 'this': "Foo\nBar\n"
it 'can be single ending newline with \'+\' indicator', ->
expect YAML.parse """
normal: |
extra new lines not kept
preserving: |+
extra new lines are kept
dummy: value
"""
.toEqual (
'normal': "extra new lines not kept\n"
'preserving': "extra new lines are kept\n\n\n"
'dummy': 'value'
)
it 'can be multi-line block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: |
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: |-
This has no newline.
same as "stripped" above: "This has no newline."
kept: |+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped':'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be folded block in a sequence', ->
expect YAML.parse """
---
- apple
- banana
- >
can't you see
the beauty of yaml?
hmm
- dog
"""
.toEqual [
'apple',
'banana',
"can't you see the beauty of yaml? hmm\n",
'dog'
]
it 'can be folded block as a mapping value', ->
expect YAML.parse """
---
quote: >
Mark McGwire's
year was crippled
by a knee injury.
source: espn
"""
.toEqual (
'quote': "Mark McGwire's year was crippled by a knee injury.\n"
'source': 'espn'
)
it 'can be folded block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: >
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: >-
This has no newline.
same as "stripped" above: "This has no newline."
kept: >+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped': 'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be the whole document as intented block', ->
expect YAML.parse """
---
foo: "bar"
baz:
- "qux"
- "quxx"
corge: null
"""
.toEqual (
'foo': "bar"
'baz': ['qux', 'quxx']
'corge': null
)
describe 'Parsed YAML Comments', ->
it 'can begin the document', ->
expect YAML.parse """
# This is a comment
hello: world
"""
.toEqual (
hello: 'world'
)
it 'can be less indented in mapping', ->
expect YAML.parse """
parts:
a: 'b'
# normally indented comment
c: 'd'
# less indented comment
e: 'f'
"""
.toEqual (
parts: {a: 'b', c: 'd', e: 'f'}
)
it 'can be less indented in sequence', ->
expect YAML.parse """
list-header:
- item1
# - item2
- item3
# - item4
"""
.toEqual (
'list-header': ['item1', 'item3']
)
it 'can finish a line', ->
expect YAML.parse """
hello: world # This is a comment
"""
.toEqual (
hello: 'world'
)
it 'can end the document', ->
expect YAML.parse """
hello: world
# This is a comment
"""
.toEqual (
hello: 'world'
)
describe 'Parsed YAML Aliases and Anchors', ->
it 'can be simple alias', ->
expect YAML.parse """
- &showell Steve
- Clark
- Brian
- Oren
- *showell
"""
.toEqual ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']
it 'can be alias of a mapping', ->
expect YAML.parse """
- &hello
Meat: pork
Starch: potato
- banana
- *hello
"""
.toEqual [
Meat: 'pork', Starch: 'potato'
,
'banana'
,
Meat: 'pork', Starch: 'potato'
]
describe 'Parsed YAML Documents', ->
it 'can have YAML header', ->
expect YAML.parse """
--- %YAML:1.0
foo: 1
bar: 2
"""
.toEqual (
foo: 1
bar: 2
)
it 'can have leading document separator', ->
expect YAML.parse """
---
- foo: 1
bar: 2
"""
.toEqual [(
foo: 1
bar: 2
)]
it 'can have multiple document separators in block', ->
expect YAML.parse """
foo: |
---
foo: bar
---
yo: baz
bar: |
fooness
"""
.toEqual (
foo: "---\nfoo: bar\n---\nyo: baz\n"
bar: "fooness\n"
)
# Dumping
#
describe 'Dumped YAML Collections', ->
it 'can be simple sequence', ->
expect YAML.parse """
- apple
- banana
- carrot
"""
.toEqual YAML.parse YAML.dump ['apple', 'banana', 'carrot']
it 'can be nested sequences', ->
expect YAML.parse """
-
- foo
- bar
- baz
"""
.toEqual YAML.parse YAML.dump [['foo', 'bar', 'baz']]
it 'can be mixed sequences', ->
expect YAML.parse """
- apple
-
- foo
- bar
- x123
- banana
- carrot
"""
.toEqual YAML.parse YAML.dump ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot']
it 'can be deeply nested sequences', ->
expect YAML.parse """
-
-
- uno
- dos
"""
.toEqual YAML.parse YAML.dump [[['uno', 'dos']]]
it 'can be simple mapping', ->
expect YAML.parse """
foo: whatever
bar: stuff
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: 'stuff'
it 'can be sequence in a mapping', ->
expect YAML.parse """
foo: whatever
bar:
- uno
- dos
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: ['uno', 'dos']
it 'can be nested mappings', ->
expect YAML.parse """
foo: whatever
bar:
fruit: apple
name: steve
sport: baseball
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: (fruit: 'apple', name: 'steve', sport: 'baseball')
it 'can be mixed mapping', ->
expect YAML.parse """
foo: whatever
bar:
-
fruit: apple
name: steve
sport: baseball
- more
-
python: rocks
perl: papers
ruby: scissorses
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: [
(fruit: 'apple', name: 'steve', sport: 'baseball'),
'more',
(python: 'rocks', perl: 'papers', ruby: 'scissorses')
]
it 'can have mapping-in-sequence shortcut', ->
expect YAML.parse """
- work on YAML.py:
- work on Store
"""
.toEqual YAML.parse YAML.dump [('work on YAML.py': ['work on Store'])]
it 'can have unindented sequence-in-mapping shortcut', ->
expect YAML.parse """
allow:
- 'localhost'
- '%.sourceforge.net'
- '%.freepan.org'
"""
.toEqual YAML.parse YAML.dump (allow: ['localhost', '%.sourceforge.net', '%.freepan.org'])
it 'can merge key', ->
expect YAML.parse """
mapping:
name: Joe
job: Accountant
<<:
age: 38
"""
.toEqual YAML.parse YAML.dump mapping:
name: 'Joe'
job: 'Accountant'
age: 38
describe 'Dumped YAML Inline Collections', ->
it 'can be simple inline array', ->
expect YAML.parse """
---
seq: [ a, b, c ]
"""
.toEqual YAML.parse YAML.dump seq: ['a', 'b', 'c']
it 'can be simple inline hash', ->
expect YAML.parse """
---
hash: { name: Steve, foo: bar }
"""
.toEqual YAML.parse YAML.dump hash: (name: 'Steve', foo: 'bar')
it 'can be multi-line inline collections', ->
expect YAML.parse """
languages: [ Ruby,
Perl,
Python ]
websites: { YAML: yaml.org,
Ruby: ruby-lang.org,
Python: python.org,
Perl: use.perl.org }
"""
.toEqual YAML.parse YAML.dump (
languages: ['Ruby', 'Perl', 'Python']
websites:
YAML: 'yaml.org'
Ruby: 'ruby-lang.org'
Python: 'python.org'
Perl: 'use.perl.org'
)
it 'can be dumped empty sequences in mappings', ->
expect YAML.parse(YAML.dump({key:[]}))
.toEqual({key:[]})
describe 'Dumped YAML Basic Types', ->
it 'can be strings', ->
expect YAML.parse """
---
String
"""
.toEqual YAML.parse YAML.dump 'String'
it 'can be double-quoted strings with backslashes', ->
expect YAML.parse """
str:
"string with \\\\ inside"
"""
.toEqual YAML.parse YAML.dump str: 'string with \\ inside'
it 'can be single-quoted strings with backslashes', ->
expect YAML.parse """
str:
'string with \\\\ inside'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\\\ inside'
it 'can be double-quoted strings with line breaks', ->
expect YAML.parse """
str:
"string with \\n inside"
"""
.toEqual YAML.parse YAML.dump str: 'string with \n inside'
it 'can be double-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
"string with \\n inside and \\\\ also"
"""
.toEqual YAML.parse YAML.dump str: 'string with \n inside and \\ also'
it 'can be single-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
'string with \\n inside and \\\\ also'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\n inside and \\\\ also'
it 'can be single-quoted strings with escaped line breaks', ->
expect YAML.parse """
str:
'string with \\n inside'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\n inside'
it 'can have string characters in sequences', ->
expect YAML.parse """
- What's Yaml?
- It's for writing data structures in plain text.
- And?
- And what? That's not good enough for you?
- No, I mean, "And what about Yaml?"
- Oh, oh yeah. Uh.. Yaml for JavaScript.
"""
.toEqual YAML.parse YAML.dump [
"What's Yaml?",
"It's for writing data structures in plain text.",
"And?",
"And what? That's not good enough for you?",
"No, I mean, \"And what about Yaml?\"",
"Oh, oh yeah. Uh.. Yaml for JavaScript."
]
it 'can have indicators in strings', ->
expect YAML.parse """
the colon followed by space is an indicator: but is a string:right here
same for the pound sign: here we have it#in a string
the comma can, honestly, be used in most cases: [ but not in, inline collections ]
"""
.toEqual YAML.parse YAML.dump (
'the colon followed by space is an indicator': 'but is a string:right here',
'same for the pound sign': 'here we have it#in a string',
'the comma can, honestly, be used in most cases': ['but not in', 'inline collections']
)
it 'can force strings', ->
expect YAML.parse """
date string: !str 2001-08-01
number string: !str 192
date string 2: !!str 2001-08-01
number string 2: !!str 192
"""
.toEqual YAML.parse YAML.dump (
'date string': '2001-08-01',
'number string': '192' ,
'date string 2': '2001-08-01',
'number string 2': '192'
)
it 'can be single-quoted strings', ->
expect YAML.parse """
all my favorite symbols: '#:!/%.)'
a few i hate: '&(*'
why do i hate them?: 'it''s very hard to explain'
"""
.toEqual YAML.parse YAML.dump (
'all my favorite symbols': '#:!/%.)',
'a few i hate': '&(*',
'why do i hate them?': 'it\'s very hard to explain'
)
it 'can be double-quoted strings', ->
expect YAML.parse """
i know where i want my line breaks: "one here\\nand another here\\n"
"""
.toEqual YAML.parse YAML.dump (
'i know where i want my line breaks': "one here\nand another here\n"
)
it 'can be null', ->
expect YAML.parse """
name: Mr. Show
hosted by: Bob and David
date of next season: ~
"""
.toEqual YAML.parse YAML.dump (
'name': 'Mr. Show'
'hosted by': 'Bob and David'
'date of next season': null
)
it 'can be boolean', ->
expect YAML.parse """
Is Gus a Liar?: true
Do I rely on Gus for Sustenance?: false
"""
.toEqual YAML.parse YAML.dump (
'Is Gus a Liar?': true
'Do I rely on Gus for Sustenance?': false
)
it 'can be integers', ->
expect YAML.parse """
zero: 0
simple: 12
one-thousand: 1,000
negative one-thousand: -1,000
"""
.toEqual YAML.parse YAML.dump (
'zero': 0
'simple': 12
'one-thousand': 1000
'negative one-thousand': -1000
)
it 'can be integers as map keys', ->
expect YAML.parse """
1: one
2: two
3: three
"""
.toEqual YAML.parse YAML.dump (
1: 'one'
2: 'two'
3: 'three'
)
it 'can be floats', ->
expect YAML.parse """
a simple float: 2.00
larger float: 1,000.09
scientific notation: 1.00009e+3
"""
.toEqual YAML.parse YAML.dump (
'a simple float': 2.0
'larger float': 1000.09
'scientific notation': 1000.09
)
it 'can be time', ->
iso8601Date = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
iso8601Date.setTime iso8601Date.getTime() - 5 * 3600 * 1000
spaceSeparatedDate = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
spaceSeparatedDate.setTime spaceSeparatedDate.getTime() - 5 * 3600 * 1000
withDatesToTime = (input) ->
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
iso8601: 2001-12-14t21:59:43.10-05:00
space seperated: 2001-12-14 21:59:43.10 -05:00
""")
.toEqual YAML.parse YAML.dump withDatesToTime (
'iso8601': iso8601Date
'space seperated': spaceSeparatedDate
)
it 'can be date', ->
aDate = new Date Date.UTC(1976, 7-1, 31, 0, 0, 0, 0)
withDatesToTime = (input) ->
return input
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
date: 1976-07-31
""")
.toEqual YAML.parse YAML.dump withDatesToTime (
'date': aDate
)
describe 'Dumped YAML Blocks', ->
it 'can be single ending newline', ->
expect YAML.parse """
---
this: |
Foo
Bar
"""
.toEqual YAML.parse YAML.dump 'this': "Foo\nBar\n"
it 'can be single ending newline with \'+\' indicator', ->
expect YAML.parse """
normal: |
extra new lines not kept
preserving: |+
extra new lines are kept
dummy: value
"""
.toEqual YAML.parse YAML.dump (
'normal': "extra new lines not kept\n"
'preserving': "extra new lines are kept\n\n\n"
'dummy': 'value'
)
it 'can be multi-line block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: |
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: |-
This has no newline.
same as "stripped" above: "This has no newline."
kept: |+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual YAML.parse YAML.dump (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped':'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be folded block in a sequence', ->
expect YAML.parse """
---
- apple
- banana
- >
can't you see
the beauty of yaml?
hmm
- dog
"""
.toEqual YAML.parse YAML.dump [
'apple',
'banana',
"can't you see the beauty of yaml? hmm\n",
'dog'
]
it 'can be folded block as a mapping value', ->
expect YAML.parse """
---
quote: >
Mark McGwire's
year was crippled
by a knee injury.
source: espn
"""
.toEqual YAML.parse YAML.dump (
'quote': "Mark McGwire's year was crippled by a knee injury.\n"
'source': 'espn'
)
it 'can be folded block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: >
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: >-
This has no newline.
same as "stripped" above: "This has no newline."
kept: >+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual YAML.parse YAML.dump (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped': 'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
describe 'Dumped YAML Comments', ->
it 'can begin the document', ->
expect YAML.parse """
# This is a comment
hello: world
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
it 'can finish a line', ->
expect YAML.parse """
hello: world # This is a comment
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
it 'can end the document', ->
expect YAML.parse """
hello: world
# This is a comment
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
describe 'Dumped YAML Aliases and Anchors', ->
it 'can be simple alias', ->
expect YAML.parse """
- &showell Steve
- Clark
- Brian
- Oren
- *showell
"""
.toEqual YAML.parse YAML.dump ['Steve', 'Clark', 'Brian', 'Oren', 'Steve']
it 'can be alias of a mapping', ->
expect YAML.parse """
- &hello
Meat: pork
Starch: potato
- banana
- *hello
"""
.toEqual YAML.parse YAML.dump [
Meat: 'pork', Starch: 'potato'
,
'banana'
,
Meat: 'pork', Starch: 'potato'
]
describe 'Dumped YAML Documents', ->
it 'can have YAML header', ->
expect YAML.parse """
--- %YAML:1.0
foo: 1
bar: 2
"""
.toEqual YAML.parse YAML.dump (
foo: 1
bar: 2
)
it 'can have leading document separator', ->
expect YAML.parse """
---
- foo: 1
bar: 2
"""
.toEqual YAML.parse YAML.dump [(
foo: 1
bar: 2
)]
it 'can have multiple document separators in block', ->
expect YAML.parse """
foo: |
---
foo: bar
---
yo: baz
bar: |
fooness
"""
.toEqual YAML.parse YAML.dump (
foo: "---\nfoo: bar\n---\nyo: baz\n"
bar: "fooness\n"
)
# Loading
# (disable test when running locally from file)
#
url = document?.location?.href
if not(url?) or url.indexOf('file://') is -1
examplePath = 'spec/example.yml'
if __dirname?
examplePath = __dirname+'/example.yml'
describe 'YAML loading', ->
it 'can be done synchronously', ->
expect(YAML.load(examplePath)).toEqual (
this: 'is'
a: ['YAML', 'example']
)
it 'can be done asynchronously', (done) ->
YAML.load examplePath, (result) ->
expect(result).toEqual (
this: 'is'
a: ['YAML', 'example']
)
done()
| 93779 |
unless YAML?
YAML = require '../../src/Yaml'
# Parsing
#
describe 'Parsed YAML Collections', ->
it 'can be simple sequence', ->
expect YAML.parse """
- apple
- banana
- carrot
"""
.toEqual ['apple', 'banana', 'carrot']
it 'can be nested sequences', ->
expect YAML.parse """
-
- foo
- bar
- baz
"""
.toEqual [['foo', 'bar', 'baz']]
it 'can be mixed sequences', ->
expect YAML.parse """
- apple
-
- foo
- bar
- x123
- banana
- carrot
"""
.toEqual ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot']
it 'can be deeply nested sequences', ->
expect YAML.parse """
-
-
- uno
- dos
"""
.toEqual [[['uno', 'dos']]]
it 'can be simple mapping', ->
expect YAML.parse """
foo: whatever
bar: stuff
"""
.toEqual foo: 'whatever', bar: 'stuff'
it 'can be sequence in a mapping', ->
expect YAML.parse """
foo: whatever
bar:
- uno
- dos
"""
.toEqual foo: 'whatever', bar: ['uno', 'dos']
it 'can be nested mappings', ->
expect YAML.parse """
foo: whatever
bar:
fruit: apple
name: <NAME>
sport: baseball
"""
.toEqual foo: 'whatever', bar: (fruit: 'apple', name: '<NAME>', sport: 'baseball')
it 'can be mixed mapping', ->
expect YAML.parse """
foo: whatever
bar:
-
fruit: apple
name: <NAME>
sport: baseball
- more
-
python: rocks
perl: papers
ruby: scissorses
"""
.toEqual foo: 'whatever', bar: [
(fruit: 'apple', name: '<NAME>', sport: 'baseball'),
'more',
(python: 'rocks', perl: 'papers', ruby: 'scissorses')
]
it 'can have mapping-in-sequence shortcut', ->
expect YAML.parse """
- work on YAML.py:
- work on Store
"""
.toEqual [('work on YAML.py': ['work on Store'])]
it 'can have unindented sequence-in-mapping shortcut', ->
expect YAML.parse """
allow:
- 'localhost'
- '%.sourceforge.net'
- '%.freepan.org'
"""
.toEqual (allow: ['localhost', '%.sourceforge.net', '%.freepan.org'])
it 'can merge key', ->
expect YAML.parse """
mapping:
name: <NAME>
job: Accountant
<<:
age: 38
"""
.toEqual mapping:
name: '<NAME>'
job: 'Accountant'
age: 38
describe 'Parsed YAML Inline Collections', ->
it 'can be simple inline array', ->
expect YAML.parse """
---
seq: [ a, b, c ]
"""
.toEqual seq: ['a', 'b', 'c']
it 'can be simple inline hash', ->
expect YAML.parse """
---
hash: { name: <NAME>, foo: bar }
"""
.toEqual hash: (name: '<NAME>', foo: 'bar')
it 'can be nested inline hash', ->
expect YAML.parse """
---
hash: { val1: "string", val2: { v2k1: "v2k1v" } }
"""
.toEqual hash: (val1: 'string', val2: (v2k1: 'v2k1v'))
it 'can be multi-line inline collections', ->
expect YAML.parse """
languages: [ Ruby,
Perl,
Python ]
websites: { YAML: yaml.org,
Ruby: ruby-lang.org,
Python: python.org,
Perl: use.perl.org }
"""
.toEqual (
languages: ['Ruby', 'Perl', 'Python']
websites:
YAML: 'yaml.org'
Ruby: 'ruby-lang.org'
Python: 'python.org'
Perl: 'use.perl.org'
)
# Scalar parsing: plain/quoted/forced strings, escape handling, null, booleans,
# integers (with thousands separators), floats, timestamps and dates.
describe 'Parsed YAML Basic Types', ->
it 'can be strings', ->
expect YAML.parse """
---
String
"""
.toEqual 'String'
it 'can be double-quoted strings with backslashes', ->
expect YAML.parse """
str:
"string with \\\\ inside"
"""
.toEqual str: 'string with \\ inside'
it 'can be single-quoted strings with backslashes', ->
expect YAML.parse """
str:
'string with \\\\ inside'
"""
.toEqual str: 'string with \\\\ inside'
it 'can be double-quoted strings with line breaks', ->
expect YAML.parse """
str:
"string with \\n inside"
"""
.toEqual str: 'string with \n inside'
it 'can be single-quoted strings with escaped line breaks', ->
expect YAML.parse """
str:
'string with \\n inside'
"""
.toEqual str: 'string with \\n inside'
it 'can be double-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
"string with \\n inside and \\\\ also"
"""
.toEqual str: 'string with \n inside and \\ also'
it 'can be single-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
'string with \\n inside and \\\\ also'
"""
.toEqual str: 'string with \\n inside and \\\\ also'
it 'can have string characters in sequences', ->
expect YAML.parse """
- What's Yaml?
- It's for writing data structures in plain text.
- And?
- And what? That's not good enough for you?
- No, I mean, "And what about Yaml?"
- Oh, oh yeah. Uh.. Yaml for JavaScript.
"""
.toEqual [
"What's Yaml?",
"It's for writing data structures in plain text.",
"And?",
"And what? That's not good enough for you?",
"No, I mean, \"And what about Yaml?\"",
"Oh, oh yeah. Uh.. Yaml for JavaScript."
]
it 'can have indicators in strings', ->
expect YAML.parse """
the colon followed by space is an indicator: but is a string:right here
same for the pound sign: here we have it#in a string
the comma can, honestly, be used in most cases: [ but not in, inline collections ]
"""
.toEqual (
'the colon followed by space is an indicator': 'but is a string:right here',
'same for the pound sign': 'here we have it#in a string',
'the comma can, honestly, be used in most cases': ['but not in', 'inline collections']
)
it 'can force strings', ->
expect YAML.parse """
date string: !str 2001-08-01
number string: !str 192
date string 2: !!str 2001-08-01
number string 2: !!str 192
"""
.toEqual (
'date string': '2001-08-01',
'number string': '192' ,
'date string 2': '2001-08-01',
'number string 2': '192'
)
it 'can be single-quoted strings', ->
expect YAML.parse """
all my favorite symbols: '#:!/%.)'
a few i hate: '&(*'
why do i hate them?: 'it''s very hard to explain'
"""
.toEqual (
'all my favorite symbols': '#:!/%.)',
'a few i hate': '&(*',
'why do i hate them?': 'it\'s very hard to explain'
)
it 'can be double-quoted strings', ->
expect YAML.parse """
i know where i want my line breaks: "one here\\nand another here\\n"
"""
.toEqual (
'i know where i want my line breaks': "one here\nand another here\n"
)
it 'can be null', ->
expect YAML.parse """
name: <NAME>
hosted by: <NAME> and <NAME>
date of next season: ~
"""
.toEqual (
'name': '<NAME>'
'hosted by': '<NAME> <NAME>'
'date of next season': null
)
it 'can be boolean', ->
expect YAML.parse """
Is Gus a Liar?: true
Do I rely on Gus for Sustenance?: false
"""
.toEqual (
'Is Gus a Liar?': true
'Do I rely on Gus for Sustenance?': false
)
it 'can be integers', ->
expect YAML.parse """
zero: 0
simple: 12
one-thousand: 1,000
negative one-thousand: -1,000
"""
.toEqual (
'zero': 0
'simple': 12
'one-thousand': 1000
'negative one-thousand': -1000
)
it 'can be integers as map keys', ->
expect YAML.parse """
1: one
2: two
3: three
"""
.toEqual (
1: 'one'
2: 'two'
3: 'three'
)
it 'can be floats', ->
expect YAML.parse """
a simple float: 2.00
larger float: 1,000.09
scientific notation: 1.00009e+3
"""
.toEqual (
'a simple float': 2.0
'larger float': 1000.09
'scientific notation': 1000.09
)
it 'can be time', ->
# Both sides of the comparison are normalized to second precision via
# withDatesToTime so sub-second parsing differences don't fail the test.
iso8601Date = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
iso8601Date.setTime iso8601Date.getTime() - 5 * 3600 * 1000
spaceSeparatedDate = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
spaceSeparatedDate.setTime spaceSeparatedDate.getTime() - 5 * 3600 * 1000
withDatesToTime = (input) ->
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
iso8601: 2001-12-14t21:59:43.10-05:00
space seperated: 2001-12-14 21:59:43.10 -05:00
""")
.toEqual withDatesToTime (
'iso8601': iso8601Date
'space seperated': spaceSeparatedDate
)
it 'can be date', ->
aDate = new Date Date.UTC(1976, 7-1, 31, 0, 0, 0, 0)
# NOTE(review): the early `return input` turns this normalizer into an
# identity function; the lines after it are unreachable (apparently a
# deliberately disabled normalization — confirm before removing).
withDatesToTime = (input) ->
return input
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
date: 1976-07-31
""")
.toEqual withDatesToTime (
'date': aDate
)
# Block scalars: literal (|) and folded (>) styles with the clip (default),
# strip (-) and keep (+) chomping indicators for trailing newlines.
describe 'Parsed YAML Blocks', ->
it 'can be single ending newline', ->
expect YAML.parse """
---
this: |
Foo
Bar
"""
.toEqual 'this': "Foo\nBar\n"
it 'can be single ending newline with \'+\' indicator', ->
expect YAML.parse """
normal: |
extra new lines not kept
preserving: |+
extra new lines are kept
dummy: value
"""
.toEqual (
'normal': "extra new lines not kept\n"
'preserving': "extra new lines are kept\n\n\n"
'dummy': 'value'
)
it 'can be multi-line block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: |
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: |-
This has no newline.
same as "stripped" above: "This has no newline."
kept: |+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped':'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be folded block in a sequence', ->
expect YAML.parse """
---
- apple
- banana
- >
can't you see
the beauty of yaml?
hmm
- dog
"""
.toEqual [
'apple',
'banana',
"can't you see the beauty of yaml? hmm\n",
'dog'
]
it 'can be folded block as a mapping value', ->
expect YAML.parse """
---
quote: >
<NAME>
year was crippled
by a knee injury.
source: espn
"""
.toEqual (
'quote': "<NAME>'s year was crippled by a knee injury.\n"
'source': 'espn'
)
it 'can be folded block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: >
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: >-
This has no newline.
same as "stripped" above: "This has no newline."
kept: >+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped': 'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be the whole document as intented block', ->
expect YAML.parse """
---
foo: "bar"
baz:
- "qux"
- "quxx"
corge: null
"""
.toEqual (
'foo': "bar"
'baz': ['qux', 'quxx']
'corge': null
)
# Comment handling: '#' comments at document start/end, inline after a value,
# and at lower indentation levels inside mappings and sequences.
describe 'Parsed YAML Comments', ->
it 'can begin the document', ->
expect YAML.parse """
# This is a comment
hello: world
"""
.toEqual (
hello: 'world'
)
it 'can be less indented in mapping', ->
expect YAML.parse """
parts:
a: 'b'
# normally indented comment
c: 'd'
# less indented comment
e: 'f'
"""
.toEqual (
parts: {a: 'b', c: 'd', e: 'f'}
)
it 'can be less indented in sequence', ->
expect YAML.parse """
list-header:
- item1
# - item2
- item3
# - item4
"""
.toEqual (
'list-header': ['item1', 'item3']
)
it 'can finish a line', ->
expect YAML.parse """
hello: world # This is a comment
"""
.toEqual (
hello: 'world'
)
it 'can end the document', ->
expect YAML.parse """
hello: world
# This is a comment
"""
.toEqual (
hello: 'world'
)
describe 'Parsed YAML Aliases and Anchors', ->
it 'can be simple alias', ->
expect YAML.parse """
- &showell <NAME>
- <NAME>
- <NAME>
- <NAME>
- *showell
"""
.toEqual ['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>']
it 'can be alias of a mapping', ->
expect YAML.parse """
- &hello
Meat: pork
Starch: potato
- banana
- *hello
"""
.toEqual [
Meat: 'pork', Starch: 'potato'
,
'banana'
,
Meat: 'pork', Starch: 'potato'
]
# Document markers: %YAML directives, leading '---' separators, and '---'
# appearing literally inside a block scalar (must not split the document).
describe 'Parsed YAML Documents', ->
it 'can have YAML header', ->
expect YAML.parse """
--- %YAML:1.0
foo: 1
bar: 2
"""
.toEqual (
foo: 1
bar: 2
)
it 'can have leading document separator', ->
expect YAML.parse """
---
- foo: 1
bar: 2
"""
.toEqual [(
foo: 1
bar: 2
)]
it 'can have multiple document separators in block', ->
expect YAML.parse """
foo: |
---
foo: bar
---
yo: baz
bar: |
fooness
"""
.toEqual (
foo: "---\nfoo: bar\n---\nyo: baz\n"
bar: "fooness\n"
)
# Dumping
#
# Round-trip tests: parse a hand-written fixture and compare against
# parse(dump(value)) — verifies the dumper emits parseable, equivalent YAML.
describe 'Dumped YAML Collections', ->
it 'can be simple sequence', ->
expect YAML.parse """
- apple
- banana
- carrot
"""
.toEqual YAML.parse YAML.dump ['apple', 'banana', 'carrot']
it 'can be nested sequences', ->
expect YAML.parse """
-
- foo
- bar
- baz
"""
.toEqual YAML.parse YAML.dump [['foo', 'bar', 'baz']]
it 'can be mixed sequences', ->
expect YAML.parse """
- apple
-
- foo
- bar
- x123
- banana
- carrot
"""
.toEqual YAML.parse YAML.dump ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot']
it 'can be deeply nested sequences', ->
expect YAML.parse """
-
-
- uno
- dos
"""
.toEqual YAML.parse YAML.dump [[['uno', 'dos']]]
it 'can be simple mapping', ->
expect YAML.parse """
foo: whatever
bar: stuff
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: 'stuff'
it 'can be sequence in a mapping', ->
expect YAML.parse """
foo: whatever
bar:
- uno
- dos
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: ['uno', 'dos']
it 'can be nested mappings', ->
expect YAML.parse """
foo: whatever
bar:
fruit: apple
name: <NAME>
sport: baseball
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: (fruit: 'apple', name: '<NAME>', sport: 'baseball')
it 'can be mixed mapping', ->
expect YAML.parse """
foo: whatever
bar:
-
fruit: apple
name: <NAME>
sport: baseball
- more
-
python: rocks
perl: papers
ruby: scissorses
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: [
(fruit: 'apple', name: '<NAME>', sport: 'baseball'),
'more',
(python: 'rocks', perl: 'papers', ruby: 'scissorses')
]
it 'can have mapping-in-sequence shortcut', ->
expect YAML.parse """
- work on YAML.py:
- work on Store
"""
.toEqual YAML.parse YAML.dump [('work on YAML.py': ['work on Store'])]
it 'can have unindented sequence-in-mapping shortcut', ->
expect YAML.parse """
allow:
- 'localhost'
- '%.sourceforge.net'
- '%.freepan.org'
"""
.toEqual YAML.parse YAML.dump (allow: ['localhost', '%.sourceforge.net', '%.freepan.org'])
it 'can merge key', ->
expect YAML.parse """
mapping:
name: <NAME>
job: Accountant
<<:
age: 38
"""
.toEqual YAML.parse YAML.dump mapping:
name: '<NAME>'
job: 'Accountant'
age: 38
describe 'Dumped YAML Inline Collections', ->
it 'can be simple inline array', ->
expect YAML.parse """
---
seq: [ a, b, c ]
"""
.toEqual YAML.parse YAML.dump seq: ['a', 'b', 'c']
it 'can be simple inline hash', ->
expect YAML.parse """
---
hash: { name: <NAME>, foo: bar }
"""
.toEqual YAML.parse YAML.dump hash: (name: '<NAME>', foo: 'bar')
it 'can be multi-line inline collections', ->
expect YAML.parse """
languages: [ Ruby,
Perl,
Python ]
websites: { YAML: yaml.org,
Ruby: ruby-lang.org,
Python: python.org,
Perl: use.perl.org }
"""
.toEqual YAML.parse YAML.dump (
languages: ['Ruby', 'Perl', 'Python']
websites:
YAML: 'yaml.org'
Ruby: 'ruby-lang.org'
Python: 'python.org'
Perl: 'use.perl.org'
)
it 'can be dumped empty sequences in mappings', ->
expect YAML.parse(YAML.dump({key:[]}))
.toEqual({key:[]})
# Round-trip tests for scalar types (mirrors 'Parsed YAML Basic Types' but
# compares fixtures against parse(dump(value))).
describe 'Dumped YAML Basic Types', ->
it 'can be strings', ->
expect YAML.parse """
---
String
"""
.toEqual YAML.parse YAML.dump 'String'
it 'can be double-quoted strings with backslashes', ->
expect YAML.parse """
str:
"string with \\\\ inside"
"""
.toEqual YAML.parse YAML.dump str: 'string with \\ inside'
it 'can be single-quoted strings with backslashes', ->
expect YAML.parse """
str:
'string with \\\\ inside'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\\\ inside'
it 'can be double-quoted strings with line breaks', ->
expect YAML.parse """
str:
"string with \\n inside"
"""
.toEqual YAML.parse YAML.dump str: 'string with \n inside'
it 'can be double-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
"string with \\n inside and \\\\ also"
"""
.toEqual YAML.parse YAML.dump str: 'string with \n inside and \\ also'
it 'can be single-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
'string with \\n inside and \\\\ also'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\n inside and \\\\ also'
it 'can be single-quoted strings with escaped line breaks', ->
expect YAML.parse """
str:
'string with \\n inside'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\n inside'
it 'can have string characters in sequences', ->
expect YAML.parse """
- What's Yaml?
- It's for writing data structures in plain text.
- And?
- And what? That's not good enough for you?
- No, I mean, "And what about Yaml?"
- Oh, oh yeah. Uh.. Yaml for JavaScript.
"""
.toEqual YAML.parse YAML.dump [
"What's Yaml?",
"It's for writing data structures in plain text.",
"And?",
"And what? That's not good enough for you?",
"No, I mean, \"And what about Yaml?\"",
"Oh, oh yeah. Uh.. Yaml for JavaScript."
]
it 'can have indicators in strings', ->
expect YAML.parse """
the colon followed by space is an indicator: but is a string:right here
same for the pound sign: here we have it#in a string
the comma can, honestly, be used in most cases: [ but not in, inline collections ]
"""
.toEqual YAML.parse YAML.dump (
'the colon followed by space is an indicator': 'but is a string:right here',
'same for the pound sign': 'here we have it#in a string',
'the comma can, honestly, be used in most cases': ['but not in', 'inline collections']
)
it 'can force strings', ->
expect YAML.parse """
date string: !str 2001-08-01
number string: !str 192
date string 2: !!str 2001-08-01
number string 2: !!str 192
"""
.toEqual YAML.parse YAML.dump (
'date string': '2001-08-01',
'number string': '192' ,
'date string 2': '2001-08-01',
'number string 2': '192'
)
it 'can be single-quoted strings', ->
expect YAML.parse """
all my favorite symbols: '#:!/%.)'
a few i hate: '&(*'
why do i hate them?: 'it''s very hard to explain'
"""
.toEqual YAML.parse YAML.dump (
'all my favorite symbols': '#:!/%.)',
'a few i hate': '&(*',
'why do i hate them?': 'it\'s very hard to explain'
)
it 'can be double-quoted strings', ->
expect YAML.parse """
i know where i want my line breaks: "one here\\nand another here\\n"
"""
.toEqual YAML.parse YAML.dump (
'i know where i want my line breaks': "one here\nand another here\n"
)
it 'can be null', ->
expect YAML.parse """
name: <NAME>
hosted by: <NAME> and <NAME>
date of next season: ~
"""
.toEqual YAML.parse YAML.dump (
'name': '<NAME>'
'hosted by': '<NAME> <NAME>'
'date of next season': null
)
it 'can be boolean', ->
expect YAML.parse """
Is Gus a Liar?: true
Do I rely on Gus for Sustenance?: false
"""
.toEqual YAML.parse YAML.dump (
'Is Gus a Liar?': true
'Do I rely on Gus for Sustenance?': false
)
it 'can be integers', ->
expect YAML.parse """
zero: 0
simple: 12
one-thousand: 1,000
negative one-thousand: -1,000
"""
.toEqual YAML.parse YAML.dump (
'zero': 0
'simple': 12
'one-thousand': 1000
'negative one-thousand': -1000
)
it 'can be integers as map keys', ->
expect YAML.parse """
1: one
2: two
3: three
"""
.toEqual YAML.parse YAML.dump (
1: 'one'
2: 'two'
3: 'three'
)
it 'can be floats', ->
expect YAML.parse """
a simple float: 2.00
larger float: 1,000.09
scientific notation: 1.00009e+3
"""
.toEqual YAML.parse YAML.dump (
'a simple float': 2.0
'larger float': 1000.09
'scientific notation': 1000.09
)
it 'can be time', ->
# Second-precision normalization on both sides, as in the parsing suite.
iso8601Date = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
iso8601Date.setTime iso8601Date.getTime() - 5 * 3600 * 1000
spaceSeparatedDate = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
spaceSeparatedDate.setTime spaceSeparatedDate.getTime() - 5 * 3600 * 1000
withDatesToTime = (input) ->
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
iso8601: 2001-12-14t21:59:43.10-05:00
space seperated: 2001-12-14 21:59:43.10 -05:00
""")
.toEqual YAML.parse YAML.dump withDatesToTime (
'iso8601': iso8601Date
'space seperated': spaceSeparatedDate
)
it 'can be date', ->
aDate = new Date Date.UTC(1976, 7-1, 31, 0, 0, 0, 0)
# NOTE(review): `return input` makes this an identity function; the code
# after it is unreachable (same disabled normalization as in the parsing
# suite — confirm before removing).
withDatesToTime = (input) ->
return input
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
date: 1976-07-31
""")
.toEqual YAML.parse YAML.dump withDatesToTime (
'date': aDate
)
# Round-trip tests for literal (|) and folded (>) block scalars with clip,
# strip (-) and keep (+) chomping indicators.
describe 'Dumped YAML Blocks', ->
it 'can be single ending newline', ->
expect YAML.parse """
---
this: |
Foo
Bar
"""
.toEqual YAML.parse YAML.dump 'this': "Foo\nBar\n"
it 'can be single ending newline with \'+\' indicator', ->
expect YAML.parse """
normal: |
extra new lines not kept
preserving: |+
extra new lines are kept
dummy: value
"""
.toEqual YAML.parse YAML.dump (
'normal': "extra new lines not kept\n"
'preserving': "extra new lines are kept\n\n\n"
'dummy': 'value'
)
it 'can be multi-line block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: |
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: |-
This has no newline.
same as "stripped" above: "This has no newline."
kept: |+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual YAML.parse YAML.dump (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped':'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be folded block in a sequence', ->
expect YAML.parse """
---
- apple
- banana
- >
can't you see
the beauty of yaml?
hmm
- dog
"""
.toEqual YAML.parse YAML.dump [
'apple',
'banana',
"can't you see the beauty of yaml? hmm\n",
'dog'
]
it 'can be folded block as a mapping value', ->
expect YAML.parse """
---
quote: >
<NAME>'s
year was crippled
by a knee injury.
source: espn
"""
.toEqual YAML.parse YAML.dump (
'quote': "<NAME>'s year was crippled by a knee injury.\n"
'source': 'espn'
)
it 'can be folded block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: >
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: >-
This has no newline.
same as "stripped" above: "This has no newline."
kept: >+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual YAML.parse YAML.dump (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped': 'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
# Round-trip tests: comments in the fixture must be ignored so the parse
# still equals parse(dump(value)).
describe 'Dumped YAML Comments', ->
it 'can begin the document', ->
expect YAML.parse """
# This is a comment
hello: world
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
it 'can finish a line', ->
expect YAML.parse """
hello: world # This is a comment
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
it 'can end the document', ->
expect YAML.parse """
hello: world
# This is a comment
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
describe 'Dumped YAML Aliases and Anchors', ->
it 'can be simple alias', ->
expect YAML.parse """
- &showell <NAME>
- <NAME>
- <NAME>
- <NAME>
- *show<NAME>
"""
.toEqual YAML.parse YAML.dump ['<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>']
it 'can be alias of a mapping', ->
expect YAML.parse """
- &hello
Meat: pork
Starch: potato
- banana
- *hello
"""
.toEqual YAML.parse YAML.dump [
Meat: 'pork', Starch: 'potato'
,
'banana'
,
Meat: 'pork', Starch: 'potato'
]
# Round-trip tests for document headers, leading separators, and '---'
# embedded inside block scalars.
describe 'Dumped YAML Documents', ->
it 'can have YAML header', ->
expect YAML.parse """
--- %YAML:1.0
foo: 1
bar: 2
"""
.toEqual YAML.parse YAML.dump (
foo: 1
bar: 2
)
it 'can have leading document separator', ->
expect YAML.parse """
---
- foo: 1
bar: 2
"""
.toEqual YAML.parse YAML.dump [(
foo: 1
bar: 2
)]
it 'can have multiple document separators in block', ->
expect YAML.parse """
foo: |
---
foo: bar
---
yo: baz
bar: |
fooness
"""
.toEqual YAML.parse YAML.dump (
foo: "---\nfoo: bar\n---\nyo: baz\n"
bar: "fooness\n"
)
# Loading
# (disable test when running locally from file)
#
# File-loading tests (sync and async). Skipped when the page is served from
# a file:// URL, where XHR to a relative path is unavailable; `document` is
# absent under Node, so the suite runs there with a __dirname-based path.
url = document?.location?.href
if not(url?) or url.indexOf('file://') is -1
examplePath = 'spec/example.yml'
if __dirname?
examplePath = __dirname+'/example.yml'
describe 'YAML loading', ->
it 'can be done synchronously', ->
expect(YAML.load(examplePath)).toEqual (
this: 'is'
a: ['YAML', 'example']
)
it 'can be done asynchronously', (done) ->
YAML.load examplePath, (result) ->
expect(result).toEqual (
this: 'is'
a: ['YAML', 'example']
)
done()
| true |
unless YAML?
YAML = require '../../src/Yaml'
# Parsing
#
# Block-style collection parsing: sequences, mappings, nesting, shortcuts,
# and the '<<' merge key.
describe 'Parsed YAML Collections', ->
it 'can be simple sequence', ->
expect YAML.parse """
- apple
- banana
- carrot
"""
.toEqual ['apple', 'banana', 'carrot']
it 'can be nested sequences', ->
expect YAML.parse """
-
- foo
- bar
- baz
"""
.toEqual [['foo', 'bar', 'baz']]
it 'can be mixed sequences', ->
expect YAML.parse """
- apple
-
- foo
- bar
- x123
- banana
- carrot
"""
.toEqual ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot']
it 'can be deeply nested sequences', ->
expect YAML.parse """
-
-
- uno
- dos
"""
.toEqual [[['uno', 'dos']]]
it 'can be simple mapping', ->
expect YAML.parse """
foo: whatever
bar: stuff
"""
.toEqual foo: 'whatever', bar: 'stuff'
it 'can be sequence in a mapping', ->
expect YAML.parse """
foo: whatever
bar:
- uno
- dos
"""
.toEqual foo: 'whatever', bar: ['uno', 'dos']
it 'can be nested mappings', ->
expect YAML.parse """
foo: whatever
bar:
fruit: apple
name: PI:NAME:<NAME>END_PI
sport: baseball
"""
.toEqual foo: 'whatever', bar: (fruit: 'apple', name: 'PI:NAME:<NAME>END_PI', sport: 'baseball')
it 'can be mixed mapping', ->
expect YAML.parse """
foo: whatever
bar:
-
fruit: apple
name: PI:NAME:<NAME>END_PI
sport: baseball
- more
-
python: rocks
perl: papers
ruby: scissorses
"""
.toEqual foo: 'whatever', bar: [
(fruit: 'apple', name: 'PI:NAME:<NAME>END_PI', sport: 'baseball'),
'more',
(python: 'rocks', perl: 'papers', ruby: 'scissorses')
]
it 'can have mapping-in-sequence shortcut', ->
expect YAML.parse """
- work on YAML.py:
- work on Store
"""
.toEqual [('work on YAML.py': ['work on Store'])]
it 'can have unindented sequence-in-mapping shortcut', ->
expect YAML.parse """
allow:
- 'localhost'
- '%.sourceforge.net'
- '%.freepan.org'
"""
.toEqual (allow: ['localhost', '%.sourceforge.net', '%.freepan.org'])
it 'can merge key', ->
expect YAML.parse """
mapping:
name: PI:NAME:<NAME>END_PI
job: Accountant
<<:
age: 38
"""
.toEqual mapping:
name: 'PI:NAME:<NAME>END_PI'
job: 'Accountant'
age: 38
describe 'Parsed YAML Inline Collections', ->
it 'can be simple inline array', ->
expect YAML.parse """
---
seq: [ a, b, c ]
"""
.toEqual seq: ['a', 'b', 'c']
it 'can be simple inline hash', ->
expect YAML.parse """
---
hash: { name: PI:NAME:<NAME>END_PI, foo: bar }
"""
.toEqual hash: (name: 'PI:NAME:<NAME>END_PI', foo: 'bar')
it 'can be nested inline hash', ->
expect YAML.parse """
---
hash: { val1: "string", val2: { v2k1: "v2k1v" } }
"""
.toEqual hash: (val1: 'string', val2: (v2k1: 'v2k1v'))
it 'can be multi-line inline collections', ->
expect YAML.parse """
languages: [ Ruby,
Perl,
Python ]
websites: { YAML: yaml.org,
Ruby: ruby-lang.org,
Python: python.org,
Perl: use.perl.org }
"""
.toEqual (
languages: ['Ruby', 'Perl', 'Python']
websites:
YAML: 'yaml.org'
Ruby: 'ruby-lang.org'
Python: 'python.org'
Perl: 'use.perl.org'
)
# Scalar parsing: strings (plain/quoted/forced), escape handling, null,
# booleans, numbers, timestamps and dates.
describe 'Parsed YAML Basic Types', ->
it 'can be strings', ->
expect YAML.parse """
---
String
"""
.toEqual 'String'
it 'can be double-quoted strings with backslashes', ->
expect YAML.parse """
str:
"string with \\\\ inside"
"""
.toEqual str: 'string with \\ inside'
it 'can be single-quoted strings with backslashes', ->
expect YAML.parse """
str:
'string with \\\\ inside'
"""
.toEqual str: 'string with \\\\ inside'
it 'can be double-quoted strings with line breaks', ->
expect YAML.parse """
str:
"string with \\n inside"
"""
.toEqual str: 'string with \n inside'
it 'can be single-quoted strings with escaped line breaks', ->
expect YAML.parse """
str:
'string with \\n inside'
"""
.toEqual str: 'string with \\n inside'
it 'can be double-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
"string with \\n inside and \\\\ also"
"""
.toEqual str: 'string with \n inside and \\ also'
it 'can be single-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
'string with \\n inside and \\\\ also'
"""
.toEqual str: 'string with \\n inside and \\\\ also'
it 'can have string characters in sequences', ->
expect YAML.parse """
- What's Yaml?
- It's for writing data structures in plain text.
- And?
- And what? That's not good enough for you?
- No, I mean, "And what about Yaml?"
- Oh, oh yeah. Uh.. Yaml for JavaScript.
"""
.toEqual [
"What's Yaml?",
"It's for writing data structures in plain text.",
"And?",
"And what? That's not good enough for you?",
"No, I mean, \"And what about Yaml?\"",
"Oh, oh yeah. Uh.. Yaml for JavaScript."
]
it 'can have indicators in strings', ->
expect YAML.parse """
the colon followed by space is an indicator: but is a string:right here
same for the pound sign: here we have it#in a string
the comma can, honestly, be used in most cases: [ but not in, inline collections ]
"""
.toEqual (
'the colon followed by space is an indicator': 'but is a string:right here',
'same for the pound sign': 'here we have it#in a string',
'the comma can, honestly, be used in most cases': ['but not in', 'inline collections']
)
it 'can force strings', ->
expect YAML.parse """
date string: !str 2001-08-01
number string: !str 192
date string 2: !!str 2001-08-01
number string 2: !!str 192
"""
.toEqual (
'date string': '2001-08-01',
'number string': '192' ,
'date string 2': '2001-08-01',
'number string 2': '192'
)
it 'can be single-quoted strings', ->
expect YAML.parse """
all my favorite symbols: '#:!/%.)'
a few i hate: '&(*'
why do i hate them?: 'it''s very hard to explain'
"""
.toEqual (
'all my favorite symbols': '#:!/%.)',
'a few i hate': '&(*',
'why do i hate them?': 'it\'s very hard to explain'
)
it 'can be double-quoted strings', ->
expect YAML.parse """
i know where i want my line breaks: "one here\\nand another here\\n"
"""
.toEqual (
'i know where i want my line breaks': "one here\nand another here\n"
)
it 'can be null', ->
expect YAML.parse """
name: PI:NAME:<NAME>END_PI
hosted by: PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI
date of next season: ~
"""
.toEqual (
'name': 'PI:NAME:<NAME>END_PI'
'hosted by': 'PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI'
'date of next season': null
)
it 'can be boolean', ->
expect YAML.parse """
Is Gus a Liar?: true
Do I rely on Gus for Sustenance?: false
"""
.toEqual (
'Is Gus a Liar?': true
'Do I rely on Gus for Sustenance?': false
)
it 'can be integers', ->
expect YAML.parse """
zero: 0
simple: 12
one-thousand: 1,000
negative one-thousand: -1,000
"""
.toEqual (
'zero': 0
'simple': 12
'one-thousand': 1000
'negative one-thousand': -1000
)
it 'can be integers as map keys', ->
expect YAML.parse """
1: one
2: two
3: three
"""
.toEqual (
1: 'one'
2: 'two'
3: 'three'
)
it 'can be floats', ->
expect YAML.parse """
a simple float: 2.00
larger float: 1,000.09
scientific notation: 1.00009e+3
"""
.toEqual (
'a simple float': 2.0
'larger float': 1000.09
'scientific notation': 1000.09
)
it 'can be time', ->
# Both sides normalized to second precision via withDatesToTime.
iso8601Date = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
iso8601Date.setTime iso8601Date.getTime() - 5 * 3600 * 1000
spaceSeparatedDate = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
spaceSeparatedDate.setTime spaceSeparatedDate.getTime() - 5 * 3600 * 1000
withDatesToTime = (input) ->
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
iso8601: 2001-12-14t21:59:43.10-05:00
space seperated: 2001-12-14 21:59:43.10 -05:00
""")
.toEqual withDatesToTime (
'iso8601': iso8601Date
'space seperated': spaceSeparatedDate
)
it 'can be date', ->
aDate = new Date Date.UTC(1976, 7-1, 31, 0, 0, 0, 0)
# NOTE(review): `return input` makes this an identity function; the lines
# after it are unreachable (apparently a disabled normalization).
withDatesToTime = (input) ->
return input
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
date: 1976-07-31
""")
.toEqual withDatesToTime (
'date': aDate
)
# Block scalars: literal (|) and folded (>) styles with clip (default),
# strip (-) and keep (+) chomping indicators.
describe 'Parsed YAML Blocks', ->
it 'can be single ending newline', ->
expect YAML.parse """
---
this: |
Foo
Bar
"""
.toEqual 'this': "Foo\nBar\n"
it 'can be single ending newline with \'+\' indicator', ->
expect YAML.parse """
normal: |
extra new lines not kept
preserving: |+
extra new lines are kept
dummy: value
"""
.toEqual (
'normal': "extra new lines not kept\n"
'preserving': "extra new lines are kept\n\n\n"
'dummy': 'value'
)
it 'can be multi-line block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: |
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: |-
This has no newline.
same as "stripped" above: "This has no newline."
kept: |+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped':'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be folded block in a sequence', ->
expect YAML.parse """
---
- apple
- banana
- >
can't you see
the beauty of yaml?
hmm
- dog
"""
.toEqual [
'apple',
'banana',
"can't you see the beauty of yaml? hmm\n",
'dog'
]
it 'can be folded block as a mapping value', ->
expect YAML.parse """
---
quote: >
PI:NAME:<NAME>END_PI
year was crippled
by a knee injury.
source: espn
"""
.toEqual (
'quote': "PI:NAME:<NAME>END_PI's year was crippled by a knee injury.\n"
'source': 'espn'
)
it 'can be folded block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: >
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: >-
This has no newline.
same as "stripped" above: "This has no newline."
kept: >+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped': 'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be the whole document as intented block', ->
expect YAML.parse """
---
foo: "bar"
baz:
- "qux"
- "quxx"
corge: null
"""
.toEqual (
'foo': "bar"
'baz': ['qux', 'quxx']
'corge': null
)
# Comment handling: '#' comments at document start/end, inline after a value,
# and at lower indentation levels inside mappings and sequences.
describe 'Parsed YAML Comments', ->
it 'can begin the document', ->
expect YAML.parse """
# This is a comment
hello: world
"""
.toEqual (
hello: 'world'
)
it 'can be less indented in mapping', ->
expect YAML.parse """
parts:
a: 'b'
# normally indented comment
c: 'd'
# less indented comment
e: 'f'
"""
.toEqual (
parts: {a: 'b', c: 'd', e: 'f'}
)
it 'can be less indented in sequence', ->
expect YAML.parse """
list-header:
- item1
# - item2
- item3
# - item4
"""
.toEqual (
'list-header': ['item1', 'item3']
)
it 'can finish a line', ->
expect YAML.parse """
hello: world # This is a comment
"""
.toEqual (
hello: 'world'
)
it 'can end the document', ->
expect YAML.parse """
hello: world
# This is a comment
"""
.toEqual (
hello: 'world'
)
describe 'Parsed YAML Aliases and Anchors', ->
it 'can be simple alias', ->
expect YAML.parse """
- &showell PI:NAME:<NAME>END_PI
- PI:NAME:<NAME>END_PI
- PI:NAME:<NAME>END_PI
- PI:NAME:<NAME>END_PI
- *showell
"""
.toEqual ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'can be alias of a mapping', ->
expect YAML.parse """
- &hello
Meat: pork
Starch: potato
- banana
- *hello
"""
.toEqual [
Meat: 'pork', Starch: 'potato'
,
'banana'
,
Meat: 'pork', Starch: 'potato'
]
# Document markers: %YAML directives, leading '---' separators, and '---'
# appearing literally inside a block scalar.
describe 'Parsed YAML Documents', ->
it 'can have YAML header', ->
expect YAML.parse """
--- %YAML:1.0
foo: 1
bar: 2
"""
.toEqual (
foo: 1
bar: 2
)
it 'can have leading document separator', ->
expect YAML.parse """
---
- foo: 1
bar: 2
"""
.toEqual [(
foo: 1
bar: 2
)]
it 'can have multiple document separators in block', ->
expect YAML.parse """
foo: |
---
foo: bar
---
yo: baz
bar: |
fooness
"""
.toEqual (
foo: "---\nfoo: bar\n---\nyo: baz\n"
bar: "fooness\n"
)
# Dumping
#
# Round-trip tests: parse a hand-written fixture and compare against
# parse(dump(value)) — verifies the dumper emits parseable, equivalent YAML.
describe 'Dumped YAML Collections', ->
it 'can be simple sequence', ->
expect YAML.parse """
- apple
- banana
- carrot
"""
.toEqual YAML.parse YAML.dump ['apple', 'banana', 'carrot']
it 'can be nested sequences', ->
expect YAML.parse """
-
- foo
- bar
- baz
"""
.toEqual YAML.parse YAML.dump [['foo', 'bar', 'baz']]
it 'can be mixed sequences', ->
expect YAML.parse """
- apple
-
- foo
- bar
- x123
- banana
- carrot
"""
.toEqual YAML.parse YAML.dump ['apple', ['foo', 'bar', 'x123'], 'banana', 'carrot']
it 'can be deeply nested sequences', ->
expect YAML.parse """
-
-
- uno
- dos
"""
.toEqual YAML.parse YAML.dump [[['uno', 'dos']]]
it 'can be simple mapping', ->
expect YAML.parse """
foo: whatever
bar: stuff
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: 'stuff'
it 'can be sequence in a mapping', ->
expect YAML.parse """
foo: whatever
bar:
- uno
- dos
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: ['uno', 'dos']
it 'can be nested mappings', ->
expect YAML.parse """
foo: whatever
bar:
fruit: apple
name: PI:NAME:<NAME>END_PI
sport: baseball
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: (fruit: 'apple', name: 'PI:NAME:<NAME>END_PI', sport: 'baseball')
it 'can be mixed mapping', ->
expect YAML.parse """
foo: whatever
bar:
-
fruit: apple
name: PI:NAME:<NAME>END_PI
sport: baseball
- more
-
python: rocks
perl: papers
ruby: scissorses
"""
.toEqual YAML.parse YAML.dump foo: 'whatever', bar: [
(fruit: 'apple', name: 'PI:NAME:<NAME>END_PI', sport: 'baseball'),
'more',
(python: 'rocks', perl: 'papers', ruby: 'scissorses')
]
it 'can have mapping-in-sequence shortcut', ->
expect YAML.parse """
- work on YAML.py:
- work on Store
"""
.toEqual YAML.parse YAML.dump [('work on YAML.py': ['work on Store'])]
it 'can have unindented sequence-in-mapping shortcut', ->
expect YAML.parse """
allow:
- 'localhost'
- '%.sourceforge.net'
- '%.freepan.org'
"""
.toEqual YAML.parse YAML.dump (allow: ['localhost', '%.sourceforge.net', '%.freepan.org'])
it 'can merge key', ->
expect YAML.parse """
mapping:
name: PI:NAME:<NAME>END_PI
job: Accountant
<<:
age: 38
"""
.toEqual YAML.parse YAML.dump mapping:
name: 'PI:NAME:<NAME>END_PI'
job: 'Accountant'
age: 38
describe 'Dumped YAML Inline Collections', ->
it 'can be simple inline array', ->
expect YAML.parse """
---
seq: [ a, b, c ]
"""
.toEqual YAML.parse YAML.dump seq: ['a', 'b', 'c']
it 'can be simple inline hash', ->
expect YAML.parse """
---
hash: { name: PI:NAME:<NAME>END_PI, foo: bar }
"""
.toEqual YAML.parse YAML.dump hash: (name: 'PI:NAME:<NAME>END_PI', foo: 'bar')
it 'can be multi-line inline collections', ->
expect YAML.parse """
languages: [ Ruby,
Perl,
Python ]
websites: { YAML: yaml.org,
Ruby: ruby-lang.org,
Python: python.org,
Perl: use.perl.org }
"""
.toEqual YAML.parse YAML.dump (
languages: ['Ruby', 'Perl', 'Python']
websites:
YAML: 'yaml.org'
Ruby: 'ruby-lang.org'
Python: 'python.org'
Perl: 'use.perl.org'
)
it 'can be dumped empty sequences in mappings', ->
expect YAML.parse(YAML.dump({key:[]}))
.toEqual({key:[]})
describe 'Dumped YAML Basic Types', ->
it 'can be strings', ->
expect YAML.parse """
---
String
"""
.toEqual YAML.parse YAML.dump 'String'
it 'can be double-quoted strings with backslashes', ->
expect YAML.parse """
str:
"string with \\\\ inside"
"""
.toEqual YAML.parse YAML.dump str: 'string with \\ inside'
it 'can be single-quoted strings with backslashes', ->
expect YAML.parse """
str:
'string with \\\\ inside'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\\\ inside'
it 'can be double-quoted strings with line breaks', ->
expect YAML.parse """
str:
"string with \\n inside"
"""
.toEqual YAML.parse YAML.dump str: 'string with \n inside'
it 'can be double-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
"string with \\n inside and \\\\ also"
"""
.toEqual YAML.parse YAML.dump str: 'string with \n inside and \\ also'
it 'can be single-quoted strings with line breaks and backslashes', ->
expect YAML.parse """
str:
'string with \\n inside and \\\\ also'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\n inside and \\\\ also'
it 'can be single-quoted strings with escaped line breaks', ->
expect YAML.parse """
str:
'string with \\n inside'
"""
.toEqual YAML.parse YAML.dump str: 'string with \\n inside'
it 'can have string characters in sequences', ->
expect YAML.parse """
- What's Yaml?
- It's for writing data structures in plain text.
- And?
- And what? That's not good enough for you?
- No, I mean, "And what about Yaml?"
- Oh, oh yeah. Uh.. Yaml for JavaScript.
"""
.toEqual YAML.parse YAML.dump [
"What's Yaml?",
"It's for writing data structures in plain text.",
"And?",
"And what? That's not good enough for you?",
"No, I mean, \"And what about Yaml?\"",
"Oh, oh yeah. Uh.. Yaml for JavaScript."
]
it 'can have indicators in strings', ->
expect YAML.parse """
the colon followed by space is an indicator: but is a string:right here
same for the pound sign: here we have it#in a string
the comma can, honestly, be used in most cases: [ but not in, inline collections ]
"""
.toEqual YAML.parse YAML.dump (
'the colon followed by space is an indicator': 'but is a string:right here',
'same for the pound sign': 'here we have it#in a string',
'the comma can, honestly, be used in most cases': ['but not in', 'inline collections']
)
it 'can force strings', ->
expect YAML.parse """
date string: !str 2001-08-01
number string: !str 192
date string 2: !!str 2001-08-01
number string 2: !!str 192
"""
.toEqual YAML.parse YAML.dump (
'date string': '2001-08-01',
'number string': '192' ,
'date string 2': '2001-08-01',
'number string 2': '192'
)
it 'can be single-quoted strings', ->
expect YAML.parse """
all my favorite symbols: '#:!/%.)'
a few i hate: '&(*'
why do i hate them?: 'it''s very hard to explain'
"""
.toEqual YAML.parse YAML.dump (
'all my favorite symbols': '#:!/%.)',
'a few i hate': '&(*',
'why do i hate them?': 'it\'s very hard to explain'
)
it 'can be double-quoted strings', ->
expect YAML.parse """
i know where i want my line breaks: "one here\\nand another here\\n"
"""
.toEqual YAML.parse YAML.dump (
'i know where i want my line breaks': "one here\nand another here\n"
)
it 'can be null', ->
expect YAML.parse """
name: PI:NAME:<NAME>END_PI
hosted by: PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI
date of next season: ~
"""
.toEqual YAML.parse YAML.dump (
'name': 'PI:NAME:<NAME>END_PI'
'hosted by': 'PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI'
'date of next season': null
)
it 'can be boolean', ->
expect YAML.parse """
Is Gus a Liar?: true
Do I rely on Gus for Sustenance?: false
"""
.toEqual YAML.parse YAML.dump (
'Is Gus a Liar?': true
'Do I rely on Gus for Sustenance?': false
)
it 'can be integers', ->
expect YAML.parse """
zero: 0
simple: 12
one-thousand: 1,000
negative one-thousand: -1,000
"""
.toEqual YAML.parse YAML.dump (
'zero': 0
'simple': 12
'one-thousand': 1000
'negative one-thousand': -1000
)
it 'can be integers as map keys', ->
expect YAML.parse """
1: one
2: two
3: three
"""
.toEqual YAML.parse YAML.dump (
1: 'one'
2: 'two'
3: 'three'
)
it 'can be floats', ->
expect YAML.parse """
a simple float: 2.00
larger float: 1,000.09
scientific notation: 1.00009e+3
"""
.toEqual YAML.parse YAML.dump (
'a simple float': 2.0
'larger float': 1000.09
'scientific notation': 1000.09
)
it 'can be time', ->
iso8601Date = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
iso8601Date.setTime iso8601Date.getTime() - 5 * 3600 * 1000
spaceSeparatedDate = new Date Date.UTC(2001, 12-1, 14, 21, 59, 43, 10)
spaceSeparatedDate.setTime spaceSeparatedDate.getTime() - 5 * 3600 * 1000
withDatesToTime = (input) ->
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
iso8601: 2001-12-14t21:59:43.10-05:00
space seperated: 2001-12-14 21:59:43.10 -05:00
""")
.toEqual YAML.parse YAML.dump withDatesToTime (
'iso8601': iso8601Date
'space seperated': spaceSeparatedDate
)
it 'can be date', ->
aDate = new Date Date.UTC(1976, 7-1, 31, 0, 0, 0, 0)
withDatesToTime = (input) ->
return input
res = {}
for key, val of input
res[key] = Math.round(val.getTime() / 1000) * 1000
return res
expect withDatesToTime(YAML.parse """
date: 1976-07-31
""")
.toEqual YAML.parse YAML.dump withDatesToTime (
'date': aDate
)
describe 'Dumped YAML Blocks', ->
it 'can be single ending newline', ->
expect YAML.parse """
---
this: |
Foo
Bar
"""
.toEqual YAML.parse YAML.dump 'this': "Foo\nBar\n"
it 'can be single ending newline with \'+\' indicator', ->
expect YAML.parse """
normal: |
extra new lines not kept
preserving: |+
extra new lines are kept
dummy: value
"""
.toEqual YAML.parse YAML.dump (
'normal': "extra new lines not kept\n"
'preserving': "extra new lines are kept\n\n\n"
'dummy': 'value'
)
it 'can be multi-line block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: |
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: |-
This has no newline.
same as "stripped" above: "This has no newline."
kept: |+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual YAML.parse YAML.dump (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped':'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
it 'can be folded block in a sequence', ->
expect YAML.parse """
---
- apple
- banana
- >
can't you see
the beauty of yaml?
hmm
- dog
"""
.toEqual YAML.parse YAML.dump [
'apple',
'banana',
"can't you see the beauty of yaml? hmm\n",
'dog'
]
it 'can be folded block as a mapping value', ->
expect YAML.parse """
---
quote: >
PI:NAME:<NAME>END_PI's
year was crippled
by a knee injury.
source: espn
"""
.toEqual YAML.parse YAML.dump (
'quote': "PI:NAME:<NAME>END_PI's year was crippled by a knee injury.\n"
'source': 'espn'
)
it 'can be folded block handling trailing newlines in function of \'+\', \'-\' indicators', ->
expect YAML.parse """
clipped: >
This has one newline.
same as "clipped" above: "This has one newline.\\n"
stripped: >-
This has no newline.
same as "stripped" above: "This has no newline."
kept: >+
This has four newlines.
same as "kept" above: "This has four newlines.\\n\\n\\n\\n"
"""
.toEqual YAML.parse YAML.dump (
'clipped': "This has one newline.\n"
'same as "clipped" above': "This has one newline.\n"
'stripped': 'This has no newline.'
'same as "stripped" above': 'This has no newline.'
'kept': "This has four newlines.\n\n\n\n"
'same as "kept" above': "This has four newlines.\n\n\n\n"
)
describe 'Dumped YAML Comments', ->
it 'can begin the document', ->
expect YAML.parse """
# This is a comment
hello: world
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
it 'can finish a line', ->
expect YAML.parse """
hello: world # This is a comment
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
it 'can end the document', ->
expect YAML.parse """
hello: world
# This is a comment
"""
.toEqual YAML.parse YAML.dump (
hello: 'world'
)
describe 'Dumped YAML Aliases and Anchors', ->
it 'can be simple alias', ->
expect YAML.parse """
- &showell PI:NAME:<NAME>END_PI
- PI:NAME:<NAME>END_PI
- PI:NAME:<NAME>END_PI
- PI:NAME:<NAME>END_PI
- *showPI:NAME:<NAME>END_PI
"""
.toEqual YAML.parse YAML.dump ['PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI']
it 'can be alias of a mapping', ->
expect YAML.parse """
- &hello
Meat: pork
Starch: potato
- banana
- *hello
"""
.toEqual YAML.parse YAML.dump [
Meat: 'pork', Starch: 'potato'
,
'banana'
,
Meat: 'pork', Starch: 'potato'
]
describe 'Dumped YAML Documents', ->
it 'can have YAML header', ->
expect YAML.parse """
--- %YAML:1.0
foo: 1
bar: 2
"""
.toEqual YAML.parse YAML.dump (
foo: 1
bar: 2
)
it 'can have leading document separator', ->
expect YAML.parse """
---
- foo: 1
bar: 2
"""
.toEqual YAML.parse YAML.dump [(
foo: 1
bar: 2
)]
it 'can have multiple document separators in block', ->
expect YAML.parse """
foo: |
---
foo: bar
---
yo: baz
bar: |
fooness
"""
.toEqual YAML.parse YAML.dump (
foo: "---\nfoo: bar\n---\nyo: baz\n"
bar: "fooness\n"
)
# Loading
# (disable test when running locally from file)
#
url = document?.location?.href
if not(url?) or url.indexOf('file://') is -1
examplePath = 'spec/example.yml'
if __dirname?
examplePath = __dirname+'/example.yml'
describe 'YAML loading', ->
it 'can be done synchronously', ->
expect(YAML.load(examplePath)).toEqual (
this: 'is'
a: ['YAML', 'example']
)
it 'can be done asynchronously', (done) ->
YAML.load examplePath, (result) ->
expect(result).toEqual (
this: 'is'
a: ['YAML', 'example']
)
done()
|
[
{
"context": " backbone-orm.js 0.7.14\n Copyright (c) 2013-2016 Vidigami\n License: MIT (http://www.opensource.org/license",
"end": 63,
"score": 0.9998770952224731,
"start": 55,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": "ses/mit-license.php)\n Source: https://github.com/vidigami/backbone-orm\n Dependencies: Backbone.js and Unde",
"end": 169,
"score": 0.9858354926109314,
"start": 161,
"tag": "USERNAME",
"value": "vidigami"
}
] | src/lib/iteration_utils.coffee | dk-dev/backbone-orm | 54 | ###
backbone-orm.js 0.7.14
Copyright (c) 2013-2016 Vidigami
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/vidigami/backbone-orm
Dependencies: Backbone.js and Underscore.js.
###
# @nodoc
nextTick = process?.nextTick or (require 'underscore').defer
module.exports = class IterationUtils
@MAX_ITERATION_COUNT: 300
##############################
# Iterating
##############################
# @nodoc
@eachDone: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> iterator array[index++], (err, done) ->
return callback(err) if err or (index >= count) or done
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
# @nodoc
@each: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> iterator array[index++], (err) ->
return callback(err) if err or (index >= count)
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
# @nodoc
@popEach: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> index++; iterator array.pop(), (err) ->
return callback(err) if err or (index >= count) or (array.length is 0)
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
| 64068 | ###
backbone-orm.js 0.7.14
Copyright (c) 2013-2016 <NAME>
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/vidigami/backbone-orm
Dependencies: Backbone.js and Underscore.js.
###
# @nodoc
nextTick = process?.nextTick or (require 'underscore').defer
module.exports = class IterationUtils
@MAX_ITERATION_COUNT: 300
##############################
# Iterating
##############################
# @nodoc
@eachDone: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> iterator array[index++], (err, done) ->
return callback(err) if err or (index >= count) or done
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
# @nodoc
@each: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> iterator array[index++], (err) ->
return callback(err) if err or (index >= count)
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
# @nodoc
@popEach: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> index++; iterator array.pop(), (err) ->
return callback(err) if err or (index >= count) or (array.length is 0)
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
| true | ###
backbone-orm.js 0.7.14
Copyright (c) 2013-2016 PI:NAME:<NAME>END_PI
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/vidigami/backbone-orm
Dependencies: Backbone.js and Underscore.js.
###
# @nodoc
nextTick = process?.nextTick or (require 'underscore').defer
module.exports = class IterationUtils
@MAX_ITERATION_COUNT: 300
##############################
# Iterating
##############################
# @nodoc
@eachDone: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> iterator array[index++], (err, done) ->
return callback(err) if err or (index >= count) or done
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
# @nodoc
@each: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> iterator array[index++], (err) ->
return callback(err) if err or (index >= count)
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
# @nodoc
@popEach: (array, iterator, callback) =>
return callback() unless count = array.length
index = 0
iterate = -> index++; iterator array.pop(), (err) ->
return callback(err) if err or (index >= count) or (array.length is 0)
if index and (index % IterationUtils.MAX_ITERATION_COUNT is 0) then nextTick(iterate) else iterate()
iterate()
|
[
{
"context": " (name, age) ->\n user = {\n name: name,\n age: age\n }\n \n ",
"end": 144,
"score": 0.9957616329193115,
"start": 140,
"tag": "NAME",
"value": "name"
}
] | src/models/Users.coffee | leonardolopesinf/coffee-script-server | 0 | import Database from '../database/Database'
db = new Database()
class Users
create: (name, age) ->
user = {
name: name,
age: age
}
return db.insert('users', user)
update: (columns, id) -> db.update('users', columns, id)
delete: (id) -> db.delete('users', id)
get: (id) -> db.select('users', id)
export default Users | 121419 | import Database from '../database/Database'
db = new Database()
class Users
create: (name, age) ->
user = {
name: <NAME>,
age: age
}
return db.insert('users', user)
update: (columns, id) -> db.update('users', columns, id)
delete: (id) -> db.delete('users', id)
get: (id) -> db.select('users', id)
export default Users | true | import Database from '../database/Database'
db = new Database()
class Users
create: (name, age) ->
user = {
name: PI:NAME:<NAME>END_PI,
age: age
}
return db.insert('users', user)
update: (columns, id) -> db.update('users', columns, id)
delete: (id) -> db.delete('users', id)
get: (id) -> db.select('users', id)
export default Users |
[
{
"context": ": 'String'\n email: 'String'\n password: 'String'\n apiSecret: 'String'\n countryCode: 'St",
"end": 319,
"score": 0.9957296252250671,
"start": 313,
"tag": "PASSWORD",
"value": "String"
},
{
"context": " return next(err)\n user.password = hash\n next()\n\n model = dal.modelFactory modelD",
"end": 1318,
"score": 0.9911106824874878,
"start": 1314,
"tag": "PASSWORD",
"value": "hash"
},
{
"context": " if json.password\n user.password = json.password\n user.save (err, savedUser) ->\n ",
"end": 2616,
"score": 0.9990784525871277,
"start": 2603,
"tag": "PASSWORD",
"value": "json.password"
},
{
"context": "savedUser.password\n json.password = savedUser.password\n crud.update id, json, callback\n ",
"end": 2759,
"score": 0.9633216857910156,
"start": 2741,
"tag": "PASSWORD",
"value": "savedUser.password"
}
] | server/models/users.coffee | GoIncremental/gi-security | 0 | crypto = require 'crypto'
bcrypt = require 'bcrypt'
gi = require 'gi-util'
module.exports = (dal, options) ->
SALT_WORK_FACTOR = 10
modelDefinition =
name: 'User'
schemaDefinition:
systemId: 'ObjectId'
firstName: 'String'
lastName: 'String'
email: 'String'
password: 'String'
apiSecret: 'String'
countryCode: 'String'
userIds: [{provider: 'String', providerId: 'String'}]
roles: [{type: 'ObjectId', ref: 'Role'}]
options:
strict: false
schema = dal.schemaFactory modelDefinition
modelDefinition.schema = schema
schema.virtual('name').get () ->
@firstName + ' ' + @lastName
schema.virtual('name').set (name) ->
split = name.split ' '
@firstName = split[0]
@lastName = split[1]
schema.methods.resetAPISecret = (callback) ->
crypto.randomBytes 18, (err, buf) =>
if err
callback err
else
@apiSecret = buf.toString 'base64'
@save callback
schema.pre 'save', (next) ->
user = @
@confirm = ""
if not @isModified('password')
return next()
bcrypt.genSalt SALT_WORK_FACTOR, (err, salt) ->
if err
return next(err)
bcrypt.hash user.password, salt, (err, hash) ->
if err
return next(err)
user.password = hash
next()
model = dal.modelFactory modelDefinition
crud = dal.crudFactory model
sendResetInstructions = (resetObj, cb) ->
if options.sendResetInstructions?
options.sendResetInstructions resetObj, cb
else
cb "sendResetInstructions function not defined"
generateToken = (callback) ->
crypto.randomBytes 18, (err, buf) =>
if err
callback err
else
token = buf.toString 'base64'
callback null, token
comparePassword = (user, candidate, callback) ->
if model.comparePassword?
model.comparePassword(user, candidate, callback)
else
if candidate?
if user.password?
bcrypt.compare candidate, user.password, (err, isMatch) ->
if err
return callback(err)
callback null, isMatch
else
callback 'password authentication is not enabled for this user', false
else
callback 'password does not meet minimum requirements', false
update = (id, json, callback) ->
delete json.confirm
crud.findById id, json.systemId, (err, user) ->
if err
callback err, null
else
if user
#call save in case the password has changed
if json.password
user.password = json.password
user.save (err, savedUser) ->
if savedUser and savedUser.password
json.password = savedUser.password
crud.update id, json, callback
else
delete json.password
crud.update id, json, callback
else
callback 'user not found', null
findOneByProviderId = (id, systemId, callback) ->
crud.findOneBy 'userIds.providerId', id, systemId, callback
findOrCreate = (json, callback) ->
delete json.confirm
findOneByProviderId json.providerId, json.systemId (err, user) ->
if user
callback err, user
else
crud.create json, (err, user) ->
callback err, user
resetAPISecret = (id, systemId, callback) ->
crud.findById id, systemId, (err, user) ->
if err
callback err
else if user?
user.resetAPISecret callback
else
callback 'cannot find user'
create = (json, callback) ->
delete json.confirm
crud.findOneBy 'email', json.email, json.systemId , (err, user) ->
if err and err isnt "Cannot find User"
callback err, null
else if user?.email is json.email
callback 'Username already exists'
else
crud.create json, callback
updateQuery = (query, change, callback) ->
delete change.confirm
if not query.systemId?
callback 'SystemId not specified'
else
model.update query, change, {multi: true}, callback
exports = gi.common.extend {}, crud
exports.update = update
exports.updateQuery = updateQuery
exports.findOrCreate = findOrCreate
exports.findOneByProviderId = findOneByProviderId
exports.resetAPISecret = resetAPISecret
exports.comparePassword = comparePassword
exports.create = create
exports.generateToken = generateToken
exports.sendResetInstructions = sendResetInstructions
exports
| 33073 | crypto = require 'crypto'
bcrypt = require 'bcrypt'
gi = require 'gi-util'
module.exports = (dal, options) ->
SALT_WORK_FACTOR = 10
modelDefinition =
name: 'User'
schemaDefinition:
systemId: 'ObjectId'
firstName: 'String'
lastName: 'String'
email: 'String'
password: '<PASSWORD>'
apiSecret: 'String'
countryCode: 'String'
userIds: [{provider: 'String', providerId: 'String'}]
roles: [{type: 'ObjectId', ref: 'Role'}]
options:
strict: false
schema = dal.schemaFactory modelDefinition
modelDefinition.schema = schema
schema.virtual('name').get () ->
@firstName + ' ' + @lastName
schema.virtual('name').set (name) ->
split = name.split ' '
@firstName = split[0]
@lastName = split[1]
schema.methods.resetAPISecret = (callback) ->
crypto.randomBytes 18, (err, buf) =>
if err
callback err
else
@apiSecret = buf.toString 'base64'
@save callback
schema.pre 'save', (next) ->
user = @
@confirm = ""
if not @isModified('password')
return next()
bcrypt.genSalt SALT_WORK_FACTOR, (err, salt) ->
if err
return next(err)
bcrypt.hash user.password, salt, (err, hash) ->
if err
return next(err)
user.password = <PASSWORD>
next()
model = dal.modelFactory modelDefinition
crud = dal.crudFactory model
sendResetInstructions = (resetObj, cb) ->
if options.sendResetInstructions?
options.sendResetInstructions resetObj, cb
else
cb "sendResetInstructions function not defined"
generateToken = (callback) ->
crypto.randomBytes 18, (err, buf) =>
if err
callback err
else
token = buf.toString 'base64'
callback null, token
comparePassword = (user, candidate, callback) ->
if model.comparePassword?
model.comparePassword(user, candidate, callback)
else
if candidate?
if user.password?
bcrypt.compare candidate, user.password, (err, isMatch) ->
if err
return callback(err)
callback null, isMatch
else
callback 'password authentication is not enabled for this user', false
else
callback 'password does not meet minimum requirements', false
update = (id, json, callback) ->
delete json.confirm
crud.findById id, json.systemId, (err, user) ->
if err
callback err, null
else
if user
#call save in case the password has changed
if json.password
user.password = <PASSWORD>
user.save (err, savedUser) ->
if savedUser and savedUser.password
json.password = <PASSWORD>
crud.update id, json, callback
else
delete json.password
crud.update id, json, callback
else
callback 'user not found', null
findOneByProviderId = (id, systemId, callback) ->
crud.findOneBy 'userIds.providerId', id, systemId, callback
findOrCreate = (json, callback) ->
delete json.confirm
findOneByProviderId json.providerId, json.systemId (err, user) ->
if user
callback err, user
else
crud.create json, (err, user) ->
callback err, user
resetAPISecret = (id, systemId, callback) ->
crud.findById id, systemId, (err, user) ->
if err
callback err
else if user?
user.resetAPISecret callback
else
callback 'cannot find user'
create = (json, callback) ->
delete json.confirm
crud.findOneBy 'email', json.email, json.systemId , (err, user) ->
if err and err isnt "Cannot find User"
callback err, null
else if user?.email is json.email
callback 'Username already exists'
else
crud.create json, callback
updateQuery = (query, change, callback) ->
delete change.confirm
if not query.systemId?
callback 'SystemId not specified'
else
model.update query, change, {multi: true}, callback
exports = gi.common.extend {}, crud
exports.update = update
exports.updateQuery = updateQuery
exports.findOrCreate = findOrCreate
exports.findOneByProviderId = findOneByProviderId
exports.resetAPISecret = resetAPISecret
exports.comparePassword = comparePassword
exports.create = create
exports.generateToken = generateToken
exports.sendResetInstructions = sendResetInstructions
exports
| true | crypto = require 'crypto'
bcrypt = require 'bcrypt'
gi = require 'gi-util'
module.exports = (dal, options) ->
SALT_WORK_FACTOR = 10
modelDefinition =
name: 'User'
schemaDefinition:
systemId: 'ObjectId'
firstName: 'String'
lastName: 'String'
email: 'String'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
apiSecret: 'String'
countryCode: 'String'
userIds: [{provider: 'String', providerId: 'String'}]
roles: [{type: 'ObjectId', ref: 'Role'}]
options:
strict: false
schema = dal.schemaFactory modelDefinition
modelDefinition.schema = schema
schema.virtual('name').get () ->
@firstName + ' ' + @lastName
schema.virtual('name').set (name) ->
split = name.split ' '
@firstName = split[0]
@lastName = split[1]
schema.methods.resetAPISecret = (callback) ->
crypto.randomBytes 18, (err, buf) =>
if err
callback err
else
@apiSecret = buf.toString 'base64'
@save callback
schema.pre 'save', (next) ->
user = @
@confirm = ""
if not @isModified('password')
return next()
bcrypt.genSalt SALT_WORK_FACTOR, (err, salt) ->
if err
return next(err)
bcrypt.hash user.password, salt, (err, hash) ->
if err
return next(err)
user.password = PI:PASSWORD:<PASSWORD>END_PI
next()
model = dal.modelFactory modelDefinition
crud = dal.crudFactory model
sendResetInstructions = (resetObj, cb) ->
if options.sendResetInstructions?
options.sendResetInstructions resetObj, cb
else
cb "sendResetInstructions function not defined"
generateToken = (callback) ->
crypto.randomBytes 18, (err, buf) =>
if err
callback err
else
token = buf.toString 'base64'
callback null, token
comparePassword = (user, candidate, callback) ->
if model.comparePassword?
model.comparePassword(user, candidate, callback)
else
if candidate?
if user.password?
bcrypt.compare candidate, user.password, (err, isMatch) ->
if err
return callback(err)
callback null, isMatch
else
callback 'password authentication is not enabled for this user', false
else
callback 'password does not meet minimum requirements', false
update = (id, json, callback) ->
delete json.confirm
crud.findById id, json.systemId, (err, user) ->
if err
callback err, null
else
if user
#call save in case the password has changed
if json.password
user.password = PI:PASSWORD:<PASSWORD>END_PI
user.save (err, savedUser) ->
if savedUser and savedUser.password
json.password = PI:PASSWORD:<PASSWORD>END_PI
crud.update id, json, callback
else
delete json.password
crud.update id, json, callback
else
callback 'user not found', null
findOneByProviderId = (id, systemId, callback) ->
crud.findOneBy 'userIds.providerId', id, systemId, callback
findOrCreate = (json, callback) ->
delete json.confirm
findOneByProviderId json.providerId, json.systemId (err, user) ->
if user
callback err, user
else
crud.create json, (err, user) ->
callback err, user
resetAPISecret = (id, systemId, callback) ->
crud.findById id, systemId, (err, user) ->
if err
callback err
else if user?
user.resetAPISecret callback
else
callback 'cannot find user'
create = (json, callback) ->
delete json.confirm
crud.findOneBy 'email', json.email, json.systemId , (err, user) ->
if err and err isnt "Cannot find User"
callback err, null
else if user?.email is json.email
callback 'Username already exists'
else
crud.create json, callback
updateQuery = (query, change, callback) ->
delete change.confirm
if not query.systemId?
callback 'SystemId not specified'
else
model.update query, change, {multi: true}, callback
exports = gi.common.extend {}, crud
exports.update = update
exports.updateQuery = updateQuery
exports.findOrCreate = findOrCreate
exports.findOneByProviderId = findOneByProviderId
exports.resetAPISecret = resetAPISecret
exports.comparePassword = comparePassword
exports.create = create
exports.generateToken = generateToken
exports.sendResetInstructions = sendResetInstructions
exports
|
[
{
"context": "s.env.HUBOT_QQ_ID or 2769546520\n password: process.env.HUBOT_QQ_PASS\n groupname: process.env.HUBOT_QQ_GROUP or 'q",
"end": 654,
"score": 0.9993192553520203,
"start": 629,
"tag": "PASSWORD",
"value": "process.env.HUBOT_QQ_PASS"
}
] | src/hubot-qq.coffee | lsc20051426/qqbot | 2 | {Robot, Adapter, EnterMessage, LeaveMessage, TextMessage} = require('hubot')
auth = require "../src/qqauth"
api = require "../src/qqapi"
QQBot= require "../src/qqbot"
defaults = require "../src/defaults"
class QQHubotAdapter extends Adapter
send: (envelope, strings...) ->
@robot.logger.info "hubot is sending #{strings}"
@group.send str for str in strings
reply: (user, strings...) ->
@send user, strings...
emote: (envelope, strings...) ->
@send envelope, "* #{str}" for str in strings
run: ->
self = @
options =
account: process.env.HUBOT_QQ_ID or 2769546520
password: process.env.HUBOT_QQ_PASS
groupname: process.env.HUBOT_QQ_GROUP or 'qqbot群'
port: process.env.HUBOT_QQ_IMGPORT or 3000
host: process.env.HUBOT_QQ_IMGHOST or 'localhost'
plugins: ['help']
skip_login = process.env.HUBOT_QQ_SKIP_LOGIN is 'true'
unless options.account? and options.password? and options.groupname?
@robot.logger.error "请配置qq 密码 和监听群名字,具体查阅帮助"
process.exit(1)
# TODO: login failed callback
@login_qq skip_login,options,(cookies,auth_info)=>
@qqbot = new QQBot(cookies,auth_info,options)
@qqbot.update_buddy_list (ret,error)->
log.info '√ buddy list fetched' if ret
@qqbot.listen_group options.groupname , (@group,error)=>
@robot.logger.info "enter long poll mode, have fun"
@qqbot.runloop()
@emit "connected"
@group.on_message (content ,send, robot, message)=>
@robot.logger.info "#{message.from_user.nick} : #{content}"
# uin changed every-time
user = @robot.brain.userForId message.from_uin , name:message.from_user.nick , room:options.groupname
@receive new TextMessage user, content, message.uid
# @callback (cookies,auth_info)
login_qq: (skip_login, options,callback)->
defaults.set_path '/tmp/store.json'
if skip_login
cookies = defaults.data 'qq-cookies'
auth_info = defaults.data 'qq-auth'
@robot.logger.info "skip login",auth_info
callback(cookies , auth_info )
else
auth.login options , (cookies,auth_info)=>
if process.env.HUBOT_QQ_DEBUG?
defaults.data 'qq-cookies', cookies
defaults.data 'qq-auth' , auth_info
defaults.save()
callback(cookies,auth_info)
exports.use = (robot) ->
new QQHubotAdapter robot
| 148025 | {Robot, Adapter, EnterMessage, LeaveMessage, TextMessage} = require('hubot')
auth = require "../src/qqauth"
api = require "../src/qqapi"
QQBot= require "../src/qqbot"
defaults = require "../src/defaults"
class QQHubotAdapter extends Adapter
send: (envelope, strings...) ->
@robot.logger.info "hubot is sending #{strings}"
@group.send str for str in strings
reply: (user, strings...) ->
@send user, strings...
emote: (envelope, strings...) ->
@send envelope, "* #{str}" for str in strings
run: ->
self = @
options =
account: process.env.HUBOT_QQ_ID or 2769546520
password: <PASSWORD>
groupname: process.env.HUBOT_QQ_GROUP or 'qqbot群'
port: process.env.HUBOT_QQ_IMGPORT or 3000
host: process.env.HUBOT_QQ_IMGHOST or 'localhost'
plugins: ['help']
skip_login = process.env.HUBOT_QQ_SKIP_LOGIN is 'true'
unless options.account? and options.password? and options.groupname?
@robot.logger.error "请配置qq 密码 和监听群名字,具体查阅帮助"
process.exit(1)
# TODO: login failed callback
@login_qq skip_login,options,(cookies,auth_info)=>
@qqbot = new QQBot(cookies,auth_info,options)
@qqbot.update_buddy_list (ret,error)->
log.info '√ buddy list fetched' if ret
@qqbot.listen_group options.groupname , (@group,error)=>
@robot.logger.info "enter long poll mode, have fun"
@qqbot.runloop()
@emit "connected"
@group.on_message (content ,send, robot, message)=>
@robot.logger.info "#{message.from_user.nick} : #{content}"
# uin changed every-time
user = @robot.brain.userForId message.from_uin , name:message.from_user.nick , room:options.groupname
@receive new TextMessage user, content, message.uid
# @callback (cookies,auth_info)
login_qq: (skip_login, options,callback)->
defaults.set_path '/tmp/store.json'
if skip_login
cookies = defaults.data 'qq-cookies'
auth_info = defaults.data 'qq-auth'
@robot.logger.info "skip login",auth_info
callback(cookies , auth_info )
else
auth.login options , (cookies,auth_info)=>
if process.env.HUBOT_QQ_DEBUG?
defaults.data 'qq-cookies', cookies
defaults.data 'qq-auth' , auth_info
defaults.save()
callback(cookies,auth_info)
exports.use = (robot) ->
new QQHubotAdapter robot
| true | {Robot, Adapter, EnterMessage, LeaveMessage, TextMessage} = require('hubot')
auth = require "../src/qqauth"
api = require "../src/qqapi"
QQBot= require "../src/qqbot"
defaults = require "../src/defaults"
class QQHubotAdapter extends Adapter
send: (envelope, strings...) ->
@robot.logger.info "hubot is sending #{strings}"
@group.send str for str in strings
reply: (user, strings...) ->
@send user, strings...
emote: (envelope, strings...) ->
@send envelope, "* #{str}" for str in strings
run: ->
self = @
options =
account: process.env.HUBOT_QQ_ID or 2769546520
password: PI:PASSWORD:<PASSWORD>END_PI
groupname: process.env.HUBOT_QQ_GROUP or 'qqbot群'
port: process.env.HUBOT_QQ_IMGPORT or 3000
host: process.env.HUBOT_QQ_IMGHOST or 'localhost'
plugins: ['help']
skip_login = process.env.HUBOT_QQ_SKIP_LOGIN is 'true'
unless options.account? and options.password? and options.groupname?
@robot.logger.error "请配置qq 密码 和监听群名字,具体查阅帮助"
process.exit(1)
# TODO: login failed callback
@login_qq skip_login,options,(cookies,auth_info)=>
@qqbot = new QQBot(cookies,auth_info,options)
@qqbot.update_buddy_list (ret,error)->
log.info '√ buddy list fetched' if ret
@qqbot.listen_group options.groupname , (@group,error)=>
@robot.logger.info "enter long poll mode, have fun"
@qqbot.runloop()
@emit "connected"
@group.on_message (content ,send, robot, message)=>
@robot.logger.info "#{message.from_user.nick} : #{content}"
# uin changed every-time
user = @robot.brain.userForId message.from_uin , name:message.from_user.nick , room:options.groupname
@receive new TextMessage user, content, message.uid
# @callback (cookies,auth_info)
login_qq: (skip_login, options,callback)->
defaults.set_path '/tmp/store.json'
if skip_login
cookies = defaults.data 'qq-cookies'
auth_info = defaults.data 'qq-auth'
@robot.logger.info "skip login",auth_info
callback(cookies , auth_info )
else
auth.login options , (cookies,auth_info)=>
if process.env.HUBOT_QQ_DEBUG?
defaults.data 'qq-cookies', cookies
defaults.data 'qq-auth' , auth_info
defaults.save()
callback(cookies,auth_info)
exports.use = (robot) ->
new QQHubotAdapter robot
|
[
{
"context": "> path.join component, file\n\t\t\t\t\tkey = path.join component, componentType\n\t\t\t\t\tcompo[key] = [] if ",
"end": 1137,
"score": 0.8148002624511719,
"start": 1128,
"tag": "KEY",
"value": "path.join"
}
] | tasks/bower/bower.coffee | webmaster89898/CaryLandholt-fatarrow | 0 | fs = require 'fs'
q = require 'q'
path = require 'path'
{BOWER_DIRECTORY, BOWER_FILE} = require '../constants'
{BOWER_COMPONENTS} = require '../../config/bower'
pkg = require '../../package.json'
module.exports = (gulp, plugins) -> ->
{onError} = require('../events') plugins
# we only want the bower task to run ones
unless require('../options').firstRun
deferred = q.defer()
deferred.resolve()
return deferred
components = []
urlExpression = /[-a-zA-Z0-9@:%_\+.~#?&//=]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?/gi
urlRegEx = new RegExp urlExpression
do ->
bowerJson =
_comment: 'THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.'
name: pkg.name
version: pkg.version
devDependencies: {}
compo = {}
for component, value of BOWER_COMPONENTS
for version, componentTypes of value
bowerJson.devDependencies[component] = version
for componentType, files of componentTypes
isArray = Array.isArray files
filesToAdd = if isArray then files else [files]
filesToAdd = filesToAdd.map (file) -> path.join component, file
key = path.join component, componentType
compo[key] = [] if not compo[key]
compo[key] = compo[key].concat filesToAdd
fs.writeFile BOWER_FILE, JSON.stringify bowerJson, {}, '\t'
| 190494 | fs = require 'fs'
q = require 'q'
path = require 'path'
{BOWER_DIRECTORY, BOWER_FILE} = require '../constants'
{BOWER_COMPONENTS} = require '../../config/bower'
pkg = require '../../package.json'
module.exports = (gulp, plugins) -> ->
{onError} = require('../events') plugins
# we only want the bower task to run ones
unless require('../options').firstRun
deferred = q.defer()
deferred.resolve()
return deferred
components = []
urlExpression = /[-a-zA-Z0-9@:%_\+.~#?&//=]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?/gi
urlRegEx = new RegExp urlExpression
do ->
bowerJson =
_comment: 'THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.'
name: pkg.name
version: pkg.version
devDependencies: {}
compo = {}
for component, value of BOWER_COMPONENTS
for version, componentTypes of value
bowerJson.devDependencies[component] = version
for componentType, files of componentTypes
isArray = Array.isArray files
filesToAdd = if isArray then files else [files]
filesToAdd = filesToAdd.map (file) -> path.join component, file
key = <KEY> component, componentType
compo[key] = [] if not compo[key]
compo[key] = compo[key].concat filesToAdd
fs.writeFile BOWER_FILE, JSON.stringify bowerJson, {}, '\t'
| true | fs = require 'fs'
q = require 'q'
path = require 'path'
{BOWER_DIRECTORY, BOWER_FILE} = require '../constants'
{BOWER_COMPONENTS} = require '../../config/bower'
pkg = require '../../package.json'
module.exports = (gulp, plugins) -> ->
{onError} = require('../events') plugins
# we only want the bower task to run ones
unless require('../options').firstRun
deferred = q.defer()
deferred.resolve()
return deferred
components = []
urlExpression = /[-a-zA-Z0-9@:%_\+.~#?&//=]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?/gi
urlRegEx = new RegExp urlExpression
do ->
bowerJson =
_comment: 'THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.'
name: pkg.name
version: pkg.version
devDependencies: {}
compo = {}
for component, value of BOWER_COMPONENTS
for version, componentTypes of value
bowerJson.devDependencies[component] = version
for componentType, files of componentTypes
isArray = Array.isArray files
filesToAdd = if isArray then files else [files]
filesToAdd = filesToAdd.map (file) -> path.join component, file
key = PI:KEY:<KEY>END_PI component, componentType
compo[key] = [] if not compo[key]
compo[key] = compo[key].concat filesToAdd
fs.writeFile BOWER_FILE, JSON.stringify bowerJson, {}, '\t'
|
[
{
"context": "ipple extends Skill\n target: TARGET_DIR8\n key: 'cripple'\n name: 'cripple'\n mp: 0\n cooldown: 10\n\n run:",
"end": 65,
"score": 0.9956731796264648,
"start": 58,
"tag": "KEY",
"value": "cripple"
},
{
"context": "l\n target: TARGET_DIR8\n key: 'cripple'\n name: 'cripple'\n mp: 0\n cooldown: 10\n\n run: (dir) ->\n true",
"end": 83,
"score": 0.7077762484550476,
"start": 76,
"tag": "NAME",
"value": "cripple"
}
] | js/skills/cripple.coffee | ktchernov/7drl-lion.github.io | 27 | class Cripple extends Skill
target: TARGET_DIR8
key: 'cripple'
name: 'cripple'
mp: 0
cooldown: 10
run: (dir) ->
true
# register_skill 'cripple', Cripple
| 165993 | class Cripple extends Skill
target: TARGET_DIR8
key: '<KEY>'
name: '<KEY>'
mp: 0
cooldown: 10
run: (dir) ->
true
# register_skill 'cripple', Cripple
| true | class Cripple extends Skill
target: TARGET_DIR8
key: 'PI:KEY:<KEY>END_PI'
name: 'PI:NAME:<KEY>END_PI'
mp: 0
cooldown: 10
run: (dir) ->
true
# register_skill 'cripple', Cripple
|
[
{
"context": "nts(show_move_comments, {\n csrfToken: \"foobar\",\n target: \"/foo/\"\n })\n\n aft",
"end": 490,
"score": 0.6345034837722778,
"start": 484,
"tag": "PASSWORD",
"value": "foobar"
}
] | game/static/spirit/scripts/test/suites/move_comments-spec.coffee | Yoann-Vie/esgi-hearthstone | 3 | describe "move_comments plugin tests", ->
show_move_comments = null
plugin_move_comments = null
isHidden = stModules.utils.isHidden
beforeEach ->
fixtures = jasmine.getFixtures()
fixtures.fixturesPath = 'base/test/fixtures/'
loadFixtures('move_comments.html')
show_move_comments = document.querySelectorAll('.js-show-move-comments')
plugin_move_comments = stModules.moveComments(show_move_comments, {
csrfToken: "foobar",
target: "/foo/"
})
afterEach ->
# Fixture will only remove itself not nodes appended to body
# so we have to manually remove forms
Array.from(document.querySelectorAll('.js-move-comment-form')).forEach((elm) ->
elm.parentNode.removeChild(elm)
)
it "shows the move form on click", ->
expect(isHidden(document.querySelectorAll(".move-comments"))).toEqual(true)
expect(document.querySelectorAll(".move-comment-checkbox").length).toEqual(0)
show_move_comments[0].click()
expect(isHidden(document.querySelectorAll(".move-comments"))).toEqual(false)
expect(document.querySelectorAll(".move-comment-checkbox").length).toEqual(2)
it "prevents the default click behaviour on show move comments", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
show_move_comments[0].dispatchEvent(evt)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "prevents the default click behaviour on submit", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
document.querySelector(".js-move-comments").dispatchEvent(evt)
expect(submit.calls.count()).toEqual(1)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "submits the form", ->
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
document.querySelector(".js-show-move-comments").click()
document.querySelector(".js-move-comments").click()
form = document.querySelector(".js-move-comment-form")
expect(submit.calls.count()).toEqual(1)
expect(form.getAttribute('action')).toEqual("/foo/")
expect(isHidden([form])).toEqual(true)
expect(form.querySelector("input[name=csrfmiddlewaretoken]").value).toEqual("foobar")
expect(form.querySelector("input[name=topic]").value).toEqual("10")
expect(form.querySelectorAll("input[name=comments]").length).toEqual(2)
| 107065 | describe "move_comments plugin tests", ->
show_move_comments = null
plugin_move_comments = null
isHidden = stModules.utils.isHidden
beforeEach ->
fixtures = jasmine.getFixtures()
fixtures.fixturesPath = 'base/test/fixtures/'
loadFixtures('move_comments.html')
show_move_comments = document.querySelectorAll('.js-show-move-comments')
plugin_move_comments = stModules.moveComments(show_move_comments, {
csrfToken: "<PASSWORD>",
target: "/foo/"
})
afterEach ->
# Fixture will only remove itself not nodes appended to body
# so we have to manually remove forms
Array.from(document.querySelectorAll('.js-move-comment-form')).forEach((elm) ->
elm.parentNode.removeChild(elm)
)
it "shows the move form on click", ->
expect(isHidden(document.querySelectorAll(".move-comments"))).toEqual(true)
expect(document.querySelectorAll(".move-comment-checkbox").length).toEqual(0)
show_move_comments[0].click()
expect(isHidden(document.querySelectorAll(".move-comments"))).toEqual(false)
expect(document.querySelectorAll(".move-comment-checkbox").length).toEqual(2)
it "prevents the default click behaviour on show move comments", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
show_move_comments[0].dispatchEvent(evt)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "prevents the default click behaviour on submit", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
document.querySelector(".js-move-comments").dispatchEvent(evt)
expect(submit.calls.count()).toEqual(1)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "submits the form", ->
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
document.querySelector(".js-show-move-comments").click()
document.querySelector(".js-move-comments").click()
form = document.querySelector(".js-move-comment-form")
expect(submit.calls.count()).toEqual(1)
expect(form.getAttribute('action')).toEqual("/foo/")
expect(isHidden([form])).toEqual(true)
expect(form.querySelector("input[name=csrfmiddlewaretoken]").value).toEqual("foobar")
expect(form.querySelector("input[name=topic]").value).toEqual("10")
expect(form.querySelectorAll("input[name=comments]").length).toEqual(2)
| true | describe "move_comments plugin tests", ->
show_move_comments = null
plugin_move_comments = null
isHidden = stModules.utils.isHidden
beforeEach ->
fixtures = jasmine.getFixtures()
fixtures.fixturesPath = 'base/test/fixtures/'
loadFixtures('move_comments.html')
show_move_comments = document.querySelectorAll('.js-show-move-comments')
plugin_move_comments = stModules.moveComments(show_move_comments, {
csrfToken: "PI:PASSWORD:<PASSWORD>END_PI",
target: "/foo/"
})
afterEach ->
# Fixture will only remove itself not nodes appended to body
# so we have to manually remove forms
Array.from(document.querySelectorAll('.js-move-comment-form')).forEach((elm) ->
elm.parentNode.removeChild(elm)
)
it "shows the move form on click", ->
expect(isHidden(document.querySelectorAll(".move-comments"))).toEqual(true)
expect(document.querySelectorAll(".move-comment-checkbox").length).toEqual(0)
show_move_comments[0].click()
expect(isHidden(document.querySelectorAll(".move-comments"))).toEqual(false)
expect(document.querySelectorAll(".move-comment-checkbox").length).toEqual(2)
it "prevents the default click behaviour on show move comments", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
show_move_comments[0].dispatchEvent(evt)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "prevents the default click behaviour on submit", ->
evt = document.createEvent("HTMLEvents")
evt.initEvent("click", false, true)
stopPropagation = spyOn(evt, 'stopPropagation')
preventDefault = spyOn(evt, 'preventDefault')
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
document.querySelector(".js-move-comments").dispatchEvent(evt)
expect(submit.calls.count()).toEqual(1)
expect(stopPropagation).toHaveBeenCalled()
expect(preventDefault).toHaveBeenCalled()
it "submits the form", ->
submit = spyOn(window.HTMLFormElement.prototype, 'submit')
submit.and.callFake( -> )
document.querySelector(".js-show-move-comments").click()
document.querySelector(".js-move-comments").click()
form = document.querySelector(".js-move-comment-form")
expect(submit.calls.count()).toEqual(1)
expect(form.getAttribute('action')).toEqual("/foo/")
expect(isHidden([form])).toEqual(true)
expect(form.querySelector("input[name=csrfmiddlewaretoken]").value).toEqual("foobar")
expect(form.querySelector("input[name=topic]").value).toEqual("10")
expect(form.querySelectorAll("input[name=comments]").length).toEqual(2)
|
[
{
"context": " name:\n en: 'hello'\n de: 'Hallo'\n expect(json).toEqual [ 'ciao', 'Hallo' ]\n\n",
"end": 1539,
"score": 0.714374840259552,
"start": 1534,
"tag": "NAME",
"value": "Hallo"
},
{
"context": " 'nl': 'slak'\n name:\n 'nl-BE': 'alee'\n 'de': 'hallo'\n expect(json).toEqu",
"end": 1987,
"score": 0.9617511034011841,
"start": 1983,
"tag": "NAME",
"value": "alee"
},
{
"context": " name:\n 'nl-BE': 'alee'\n 'de': 'hallo'\n expect(json).toEqual [ 'slak', 'alee' ]\n\n ",
"end": 2011,
"score": 0.9650855660438538,
"start": 2006,
"tag": "NAME",
"value": "hallo"
},
{
"context": "'\n en: 'hi'\n name:\n en: 'hello'\n de: 'Hallo'\n expect(json).toEqual",
"end": 2452,
"score": 0.6330479979515076,
"start": 2447,
"tag": "NAME",
"value": "hello"
},
{
"context": " name:\n en: 'hello'\n de: 'Hallo'\n expect(json).toEqual [ '', '' ]\n\n it 's",
"end": 2474,
"score": 0.991767406463623,
"start": 2469,
"tag": "NAME",
"value": "Hallo"
}
] | src/spec/csv/exportmapping.spec.coffee | celeste-horgan/sphere-category-sync | 0 | _ = require 'underscore'
ExportMapping = require '../../lib/csv/exportmapping'
describe 'ExportMapping', ->
describe '#constructor', ->
it 'should initialize', ->
expect(-> new ExportMapping()).toBeDefined()
describe '#validate', ->
it 'should map a simple entry', ->
ex = new ExportMapping [ 'id' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'foo'
expect(json).toEqual [ 'foo' ]
it 'should map parentId entry', ->
ex = new ExportMapping [ 'parentId' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
parent:
type: 'category'
id: 'root'
expect(json).toEqual [ 'root' ]
it 'should not map empty parentId entry', ->
ex = new ExportMapping [ 'parentId' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 123
expect(json).toEqual [ '' ]
it 'should map a localized entry', ->
ex = new ExportMapping [ 'slug.it', 'name.de' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
it: 'ciao'
en: 'hi'
name:
en: 'hello'
de: 'Hallo'
expect(json).toEqual [ 'ciao', 'Hallo' ]
it 'should support region subtags', ->
ex = new ExportMapping [ 'slug.nl', 'name.nl-BE' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
'en-US': 'ciao'
'nl': 'slak'
name:
'nl-BE': 'alee'
'de': 'hallo'
expect(json).toEqual [ 'slak', 'alee' ]
it 'should not map an empty localized entry', ->
ex = new ExportMapping [ 'slug.de', 'name.it' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
it: 'ciao'
en: 'hi'
name:
en: 'hello'
de: 'Hallo'
expect(json).toEqual [ '', '' ]
it 'should map to undefined for any unknown header', ->
ex = new ExportMapping [ 'foo.en', 'bar' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV {}
expect(json).toEqual [ undefined, undefined ]
it 'should map externalId into parentId if requested', ->
ex = new ExportMapping [ 'parentId' ], parentBy: 'externalId'
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'i1'
externalId: 'e1'
parent:
type: 'category'
id: 'i2'
obj:
id: 'i2'
externalId: 'e2'
expect(json).toEqual [ 'e2' ]
it 'should map slug into parentId if requested', ->
ex = new ExportMapping [ 'parentId' ], { language: 'en', parentBy: 'slug' }
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'i3'
externalId: 'e3'
parent:
type: 'category'
id: 'i4'
obj:
id: 'i4'
externalId: 'e4'
slug:
en: 'slug-4'
expect(json).toEqual [ 'slug-4' ]
| 128189 | _ = require 'underscore'
ExportMapping = require '../../lib/csv/exportmapping'
describe 'ExportMapping', ->
describe '#constructor', ->
it 'should initialize', ->
expect(-> new ExportMapping()).toBeDefined()
describe '#validate', ->
it 'should map a simple entry', ->
ex = new ExportMapping [ 'id' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'foo'
expect(json).toEqual [ 'foo' ]
it 'should map parentId entry', ->
ex = new ExportMapping [ 'parentId' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
parent:
type: 'category'
id: 'root'
expect(json).toEqual [ 'root' ]
it 'should not map empty parentId entry', ->
ex = new ExportMapping [ 'parentId' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 123
expect(json).toEqual [ '' ]
it 'should map a localized entry', ->
ex = new ExportMapping [ 'slug.it', 'name.de' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
it: 'ciao'
en: 'hi'
name:
en: 'hello'
de: '<NAME>'
expect(json).toEqual [ 'ciao', 'Hallo' ]
it 'should support region subtags', ->
ex = new ExportMapping [ 'slug.nl', 'name.nl-BE' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
'en-US': 'ciao'
'nl': 'slak'
name:
'nl-BE': '<NAME>'
'de': '<NAME>'
expect(json).toEqual [ 'slak', 'alee' ]
it 'should not map an empty localized entry', ->
ex = new ExportMapping [ 'slug.de', 'name.it' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
it: 'ciao'
en: 'hi'
name:
en: '<NAME>'
de: '<NAME>'
expect(json).toEqual [ '', '' ]
it 'should map to undefined for any unknown header', ->
ex = new ExportMapping [ 'foo.en', 'bar' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV {}
expect(json).toEqual [ undefined, undefined ]
it 'should map externalId into parentId if requested', ->
ex = new ExportMapping [ 'parentId' ], parentBy: 'externalId'
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'i1'
externalId: 'e1'
parent:
type: 'category'
id: 'i2'
obj:
id: 'i2'
externalId: 'e2'
expect(json).toEqual [ 'e2' ]
it 'should map slug into parentId if requested', ->
ex = new ExportMapping [ 'parentId' ], { language: 'en', parentBy: 'slug' }
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'i3'
externalId: 'e3'
parent:
type: 'category'
id: 'i4'
obj:
id: 'i4'
externalId: 'e4'
slug:
en: 'slug-4'
expect(json).toEqual [ 'slug-4' ]
| true | _ = require 'underscore'
ExportMapping = require '../../lib/csv/exportmapping'
describe 'ExportMapping', ->
describe '#constructor', ->
it 'should initialize', ->
expect(-> new ExportMapping()).toBeDefined()
describe '#validate', ->
it 'should map a simple entry', ->
ex = new ExportMapping [ 'id' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'foo'
expect(json).toEqual [ 'foo' ]
it 'should map parentId entry', ->
ex = new ExportMapping [ 'parentId' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
parent:
type: 'category'
id: 'root'
expect(json).toEqual [ 'root' ]
it 'should not map empty parentId entry', ->
ex = new ExportMapping [ 'parentId' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 123
expect(json).toEqual [ '' ]
it 'should map a localized entry', ->
ex = new ExportMapping [ 'slug.it', 'name.de' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
it: 'ciao'
en: 'hi'
name:
en: 'hello'
de: 'PI:NAME:<NAME>END_PI'
expect(json).toEqual [ 'ciao', 'Hallo' ]
it 'should support region subtags', ->
ex = new ExportMapping [ 'slug.nl', 'name.nl-BE' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
'en-US': 'ciao'
'nl': 'slak'
name:
'nl-BE': 'PI:NAME:<NAME>END_PI'
'de': 'PI:NAME:<NAME>END_PI'
expect(json).toEqual [ 'slak', 'alee' ]
it 'should not map an empty localized entry', ->
ex = new ExportMapping [ 'slug.de', 'name.it' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV
slug:
it: 'ciao'
en: 'hi'
name:
en: 'PI:NAME:<NAME>END_PI'
de: 'PI:NAME:<NAME>END_PI'
expect(json).toEqual [ '', '' ]
it 'should map to undefined for any unknown header', ->
ex = new ExportMapping [ 'foo.en', 'bar' ]
ex.validate()
expect(_.size ex.index2CsvFn).toBe 2
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
expect(_.isFunction(ex.index2CsvFn[1])).toBe true
json = ex.toCSV {}
expect(json).toEqual [ undefined, undefined ]
it 'should map externalId into parentId if requested', ->
ex = new ExportMapping [ 'parentId' ], parentBy: 'externalId'
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'i1'
externalId: 'e1'
parent:
type: 'category'
id: 'i2'
obj:
id: 'i2'
externalId: 'e2'
expect(json).toEqual [ 'e2' ]
it 'should map slug into parentId if requested', ->
ex = new ExportMapping [ 'parentId' ], { language: 'en', parentBy: 'slug' }
ex.validate()
expect(_.size ex.index2CsvFn).toBe 1
expect(_.isFunction(ex.index2CsvFn[0])).toBe true
json = ex.toCSV
id: 'i3'
externalId: 'e3'
parent:
type: 'category'
id: 'i4'
obj:
id: 'i4'
externalId: 'e4'
slug:
en: 'slug-4'
expect(json).toEqual [ 'slug-4' ]
|
[
{
"context": " requireTLS: true\n authMethod: ''\n user: 'user'\n password: 'password'\n recipients: []\n\n s",
"end": 232,
"score": 0.7480193376541138,
"start": 228,
"tag": "USERNAME",
"value": "user"
},
{
"context": " authMethod: ''\n user: 'user'\n password: 'password'\n recipients: []\n\n slack:\n webhook_url: ''",
"end": 257,
"score": 0.99932861328125,
"start": 249,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " 5432\n database: \"mydb\" \n user: \"user\"\n password: \"password\"\n staging:\n dr",
"end": 3158,
"score": 0.6069768667221069,
"start": 3154,
"tag": "USERNAME",
"value": "user"
},
{
"context": "mydb\" \n user: \"user\"\n password: \"password\"\n staging:\n driver: \"postgresql\"\n ho",
"end": 3185,
"score": 0.9995605945587158,
"start": 3177,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "\" \n user: \"user\"\n password: \"password\"\n production:\n driver: \"postgresql\"\n ",
"end": 3347,
"score": 0.9996166229248047,
"start": 3339,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "\" \n user: \"user\"\n password: \"password\"\ndefaults:\n database: \"mydb\"\n environment: \"dev",
"end": 3512,
"score": 0.9996100068092346,
"start": 3504,
"tag": "PASSWORD",
"value": "password"
}
] | src/bin/recipes/config.workshop.template.cson | assignittous/knodeo_workshop | 0 | version: "0.1.9"
logging:
events: ["info","error","warn","debug","info","error","warn","shell"]
notifications:
email:
port: 465
host: 'localhost'
secure: true
requireTLS: true
authMethod: ''
user: 'user'
password: 'password'
recipients: []
slack:
webhook_url: ''
liquibase:
working_directory: ""
notifications:
development:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
staging:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
production:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
scriptella:
working_directory: ""
notifications:
development:
slack: ""
email: []
events: []
staging:
slack: ""
email: []
events: []
production:
slack: ""
email: []
events: []
etl_properties:
environments:
development:
data_working_directory: "{{cwd}}"
staging:
data_working_directory: "{{cwd}}"
production:
data_working_directory: "{{cwd}}"
cloud:
asana:
key: ""
datafile_pattern: ""
data_path: "_data/asana"
output_formats: ["csv","xlsx"]
basecamp:
email: ""
password: ""
datafile_pattern: ""
data_path: "_data/basecamp"
output_formats: ["csv","xlsx"]
fogbugz:
host: "{{subdomain}}.fogbugz.com"
username: ""
password: ""
filter: "etl"
datafile_pattern: ""
data_path: "_data/fogbugz"
output_formats: ["csv","xlsx"]
freshbooks:
api_url: "https://{{subdomain}}.freshbooks.com/api/2.1/xml-in"
api_token: ""
datafile_pattern: ""
data_path: "_data/freshbooks"
output_formats: ["csv","xlsx"]
github:
token: ""
datafile_pattern: ""
data_path: "_data/github"
output_formats: ["csv","xlsx"]
google_analytics:
service_email: ""
pem_path: ""
profile: ""
datafile_pattern: ""
data_path: "_data/google-analytics"
output_formats: ["csv","xlsx"]
harvest:
subdomain: ""
email: ""
password: ""
datafile_pattern: ""
data_path: "_data/harvest"
output_formats: ["csv","xlsx"]
highrise:
username: ''
token: ''
datafile_pattern: ""
data_path: "_data/highrise"
output_formats: ["csv","xlsx"]
mailchimp:
token: ""
datafile_pattern: ""
data_path: "_data/mailchimp"
output_formats: ["csv","xlsx"]
open_exchange_rates:
app_id: '='
base: "USD"
plan: "free"
currencies: []
datafile_pattern: ""
data_path: "_data/open-exchange-rates"
output_formats: ["csv","xlsx"]
trello:
key: ""
token: ""
datafile_pattern: ""
data_path: "_data/trello"
output_formats: ["csv","xlsx"]
databases:
drivers:
postgresql:
class: "org.postgresql.Driver"
classPath: "{{cwd}}/_workshop/drivers/postgresql-9.3-1103.jdbc4.jar"
baseUrl: "jdbc:postgresql://"
mydb:
development:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "password"
staging:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "password"
production:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "password"
defaults:
database: "mydb"
environment: "development" | 146743 | version: "0.1.9"
logging:
events: ["info","error","warn","debug","info","error","warn","shell"]
notifications:
email:
port: 465
host: 'localhost'
secure: true
requireTLS: true
authMethod: ''
user: 'user'
password: '<PASSWORD>'
recipients: []
slack:
webhook_url: ''
liquibase:
working_directory: ""
notifications:
development:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
staging:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
production:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
scriptella:
working_directory: ""
notifications:
development:
slack: ""
email: []
events: []
staging:
slack: ""
email: []
events: []
production:
slack: ""
email: []
events: []
etl_properties:
environments:
development:
data_working_directory: "{{cwd}}"
staging:
data_working_directory: "{{cwd}}"
production:
data_working_directory: "{{cwd}}"
cloud:
asana:
key: ""
datafile_pattern: ""
data_path: "_data/asana"
output_formats: ["csv","xlsx"]
basecamp:
email: ""
password: ""
datafile_pattern: ""
data_path: "_data/basecamp"
output_formats: ["csv","xlsx"]
fogbugz:
host: "{{subdomain}}.fogbugz.com"
username: ""
password: ""
filter: "etl"
datafile_pattern: ""
data_path: "_data/fogbugz"
output_formats: ["csv","xlsx"]
freshbooks:
api_url: "https://{{subdomain}}.freshbooks.com/api/2.1/xml-in"
api_token: ""
datafile_pattern: ""
data_path: "_data/freshbooks"
output_formats: ["csv","xlsx"]
github:
token: ""
datafile_pattern: ""
data_path: "_data/github"
output_formats: ["csv","xlsx"]
google_analytics:
service_email: ""
pem_path: ""
profile: ""
datafile_pattern: ""
data_path: "_data/google-analytics"
output_formats: ["csv","xlsx"]
harvest:
subdomain: ""
email: ""
password: ""
datafile_pattern: ""
data_path: "_data/harvest"
output_formats: ["csv","xlsx"]
highrise:
username: ''
token: ''
datafile_pattern: ""
data_path: "_data/highrise"
output_formats: ["csv","xlsx"]
mailchimp:
token: ""
datafile_pattern: ""
data_path: "_data/mailchimp"
output_formats: ["csv","xlsx"]
open_exchange_rates:
app_id: '='
base: "USD"
plan: "free"
currencies: []
datafile_pattern: ""
data_path: "_data/open-exchange-rates"
output_formats: ["csv","xlsx"]
trello:
key: ""
token: ""
datafile_pattern: ""
data_path: "_data/trello"
output_formats: ["csv","xlsx"]
databases:
drivers:
postgresql:
class: "org.postgresql.Driver"
classPath: "{{cwd}}/_workshop/drivers/postgresql-9.3-1103.jdbc4.jar"
baseUrl: "jdbc:postgresql://"
mydb:
development:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "<PASSWORD>"
staging:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "<PASSWORD>"
production:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "<PASSWORD>"
defaults:
database: "mydb"
environment: "development" | true | version: "0.1.9"
logging:
events: ["info","error","warn","debug","info","error","warn","shell"]
notifications:
email:
port: 465
host: 'localhost'
secure: true
requireTLS: true
authMethod: ''
user: 'user'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
recipients: []
slack:
webhook_url: ''
liquibase:
working_directory: ""
notifications:
development:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
staging:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
production:
slack: ""
email: []
events: ["info","error","warn","debug","shell", "exec"]
scriptella:
working_directory: ""
notifications:
development:
slack: ""
email: []
events: []
staging:
slack: ""
email: []
events: []
production:
slack: ""
email: []
events: []
etl_properties:
environments:
development:
data_working_directory: "{{cwd}}"
staging:
data_working_directory: "{{cwd}}"
production:
data_working_directory: "{{cwd}}"
cloud:
asana:
key: ""
datafile_pattern: ""
data_path: "_data/asana"
output_formats: ["csv","xlsx"]
basecamp:
email: ""
password: ""
datafile_pattern: ""
data_path: "_data/basecamp"
output_formats: ["csv","xlsx"]
fogbugz:
host: "{{subdomain}}.fogbugz.com"
username: ""
password: ""
filter: "etl"
datafile_pattern: ""
data_path: "_data/fogbugz"
output_formats: ["csv","xlsx"]
freshbooks:
api_url: "https://{{subdomain}}.freshbooks.com/api/2.1/xml-in"
api_token: ""
datafile_pattern: ""
data_path: "_data/freshbooks"
output_formats: ["csv","xlsx"]
github:
token: ""
datafile_pattern: ""
data_path: "_data/github"
output_formats: ["csv","xlsx"]
google_analytics:
service_email: ""
pem_path: ""
profile: ""
datafile_pattern: ""
data_path: "_data/google-analytics"
output_formats: ["csv","xlsx"]
harvest:
subdomain: ""
email: ""
password: ""
datafile_pattern: ""
data_path: "_data/harvest"
output_formats: ["csv","xlsx"]
highrise:
username: ''
token: ''
datafile_pattern: ""
data_path: "_data/highrise"
output_formats: ["csv","xlsx"]
mailchimp:
token: ""
datafile_pattern: ""
data_path: "_data/mailchimp"
output_formats: ["csv","xlsx"]
open_exchange_rates:
app_id: '='
base: "USD"
plan: "free"
currencies: []
datafile_pattern: ""
data_path: "_data/open-exchange-rates"
output_formats: ["csv","xlsx"]
trello:
key: ""
token: ""
datafile_pattern: ""
data_path: "_data/trello"
output_formats: ["csv","xlsx"]
databases:
drivers:
postgresql:
class: "org.postgresql.Driver"
classPath: "{{cwd}}/_workshop/drivers/postgresql-9.3-1103.jdbc4.jar"
baseUrl: "jdbc:postgresql://"
mydb:
development:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "PI:PASSWORD:<PASSWORD>END_PI"
staging:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "PI:PASSWORD:<PASSWORD>END_PI"
production:
driver: "postgresql"
host: "localhost"
port: 5432
database: "mydb"
user: "user"
password: "PI:PASSWORD:<PASSWORD>END_PI"
defaults:
database: "mydb"
environment: "development" |
[
{
"context": "'\n 'Wharrimean is:'\n 'Examples:'\n 'Givun '\n 'Youse know when youse got '\n 'Wun '",
"end": 197,
"score": 0.9942393898963928,
"start": 192,
"tag": "NAME",
"value": "Givun"
}
] | settings/language-gherkin_en-Scouse.cson | mackoj/language-gherkin-i18n | 17 | '.text.gherkin.feature.en-Scouse':
'editor':
'completions': [
'Feature:'
'Dis is what went down:'
'The thing of it is:'
'Wharrimean is:'
'Examples:'
'Givun '
'Youse know when youse got '
'Wun '
'Youse know like when '
'Dun '
'Den youse gotta '
'Buh '
'An '
]
'increaseIndentPattern': 'The thing of it is: .*'
'commentStart': '# '
| 194927 | '.text.gherkin.feature.en-Scouse':
'editor':
'completions': [
'Feature:'
'Dis is what went down:'
'The thing of it is:'
'Wharrimean is:'
'Examples:'
'<NAME> '
'Youse know when youse got '
'Wun '
'Youse know like when '
'Dun '
'Den youse gotta '
'Buh '
'An '
]
'increaseIndentPattern': 'The thing of it is: .*'
'commentStart': '# '
| true | '.text.gherkin.feature.en-Scouse':
'editor':
'completions': [
'Feature:'
'Dis is what went down:'
'The thing of it is:'
'Wharrimean is:'
'Examples:'
'PI:NAME:<NAME>END_PI '
'Youse know when youse got '
'Wun '
'Youse know like when '
'Dun '
'Den youse gotta '
'Buh '
'An '
]
'increaseIndentPattern': 'The thing of it is: .*'
'commentStart': '# '
|
[
{
"context": "ubot\n# pipeline.\n#\n# @author Kevin Netherton\n#\n# @requires NPM:request-promise\n# @requir",
"end": 134,
"score": 0.9998505711555481,
"start": 119,
"tag": "NAME",
"value": "Kevin Netherton"
},
{
"context": " this:\n # docker-registry.default.svc:5000/databcdc/bcdc-test-dev@sha256:edcf5c6221be569a366cc0903",
"end": 11382,
"score": 0.9146645665168762,
"start": 11377,
"tag": "USERNAME",
"value": "datab"
},
{
"context": " # docker-registry.default.svc:5000/databcdc/bcdc-test:latest\n #",
"end": 13350,
"score": 0.8425378203392029,
"start": 13345,
"tag": "USERNAME",
"value": "datab"
},
{
"context": "oks like:\n # docker-registry.default.svc:5000/databcdc/datapusher@sha256:2eff082c999cbe0eff08816d2b8d",
"end": 16905,
"score": 0.6819789409637451,
"start": 16900,
"tag": "USERNAME",
"value": "datab"
}
] | scripts/request.coffee | bcgov/pipeline-bot | 7 | ###*
# @fileOverview Wrapper methods to openshift to be used in a hubot
# pipeline.
#
# @author Kevin Netherton
#
# @requires NPM:request-promise
# @requires NPM:lodash
# @requires NPM:oboe
#
###
request = require('request-promise')
_ = require('lodash')
oboe = require('oboe')
###*
# Class used to wrap up various openshift methods with the goal
# of making it easy for hubot calls to interact with openshift.
###
class exports.OCAPI
  domain = null
  protocol = 'https'
  buildStatus = 'NOT STARTED'
  deployStatus = 'NOT STARTED'
  # ckan build takes around 10 minutes.. a 30 minute timeout should be adequate
  requestTimeoutSeconds = 60 * 30
  ###*
  # @param {string} domain - The domain to use in the url when communicating with
  #                          openshift.
  # @param {string} apikey - The api key (bearer token) to use when making api calls
  ###
  constructor : (domain, apikey=null) ->
    @domain = domain
    @protocol = protocol
    @apikey = apikey
    @statuses = new OCStatus()
  ###*
  # Joins the protocol 'https' with the domain to form the root
  # of the url
  #
  # @returns {string} the domain and protocol joined together.
  ###
  baseUrl : ->
    return "#{protocol}://#{@domain}"
  ###*
  # returns a basic request object that other methods can then add
  # to, always requires the addition of the uri
  #
  # @returns {Object} a basic request-promise options object with some
  #                   commonly used parameters (json, GET, Accept header,
  #                   and the Authorization header when an apikey was given)
  ###
  getCoreRequest : ->
    reqObj = {
      json : true,
      method: 'GET',
      headers: {
        Accept: 'application/json, */*'
      }
    }
    if this.apikey?
      reqObj.headers.Authorization = "Bearer #{this.apikey}"
    return reqObj
  ###*
  # queries openshift to get a json struct that describes the end
  # points supported by the openshift instance
  #
  # @returns {Promise} a promise that will return the api end points
  #                    available for the openshift api.
  ###
  getAPIEndPoints : ->
    urldomain = this.baseUrl()
    apiEndPoints = '/oapi/v1/'
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj = this.getCoreRequest()
    reqObj.uri = urlString
    return request reqObj
    .then (response) ->
      console.log response.resources.length
      json = response
    .catch (err) ->
      # BUGFIX: err is an Error object, not an api payload; referencing
      # err.resources.length here raised a TypeError inside the catch.
      console.log '------- error called -------' + err
      json = err
  ###*
  # starts a build in the project specified using the build config
  #
  # @param {string} ocProject - openshift project
  # @param {string} ocBuildConfigName - openshift build config name that is to be built
  #
  # @returns {Promise} a promise that will return the payload returned by the
  #                    build instantiate event (resolves to undefined on error,
  #                    which is logged and swallowed)
  ###
  startBuild : (ocProject, ocBuildConfigName ) ->
    urldomain = this.baseUrl()
    initBuildPath = "/apis/build.openshift.io/v1/namespaces/#{ocProject}/buildconfigs/#{ocBuildConfigName}/instantiate"
    urlString = "#{urldomain}#{initBuildPath}"
    reqObj = this.getCoreRequest()
    reqObj.uri = urlString
    reqObj.method = 'POST'
    reqObj.body = {
      kind: "BuildRequest",
      apiVersion: "build.openshift.io/v1",
      metadata: {
        name: ocBuildConfigName,
        creationTimestamp: null
      },
      triggeredBy: [
        {
          message: "Triggered with coffee"
        }
      ],
      dockerStrategyOptions: {},
      sourceStrategyOptions: {}
    }
    return request reqObj
    .then (response) ->
      return response
    .catch (err) ->
      console.log "------- error: #{err}-------"
      console.log err.stack
  ###*
  # Gets the data from a build instantiate event (type: build), and extracts the build config name
  # to define the end point for the build that is to be watched.
  #
  # @param {string} ocProject - openshift project
  # @param {Object} buildData - the payload returned by the instantiate (start build) event
  #
  # @returns {Promise} a promise that will ultimately yield the results of the watch
  #                    event that concludes the build, the promise will return a list
  #                    with the following elements
  #                       1. record type: (MODIFIED|ADDED|?)
  #                       2. phase: (Complete|Cancelled|Failed)
  #                       3. build name: the unique name that is assigned to this
  #                          build attempt
  ###
  watchBuild : (ocProject, buildData)->
    urldomain = this.baseUrl()
    reqObj = this.getCoreRequest()
    # the watch end point streams chunks of json; oboe parses the stream, so
    # request-promise's json mode must be disabled
    delete reqObj.json
    watchBuildUrl = "#{urldomain}/apis/build.openshift.io/v1/watch/namespaces/#{ocProject}/builds/#{buildData.metadata.name}"
    watchBuildUrl = watchBuildUrl + "?timeoutSeconds=#{requestTimeoutSeconds}"
    reqObj.url = watchBuildUrl
    oboePromise = new Promise (resolve) ->
      recordtype = undefined
      phase = undefined
      buildname = undefined
      oboeRequest = oboe(reqObj)
      .node('*', (node, path) ->
        console.log "path: #{path}, #{typeof path}, #{path.length}, #{Array.isArray(path)}"
        # extracting the required data from the stream
        if ( path.length == 1) and path[0] == 'type'
          # type is the first value of the object; reset the other values so
          # they can be repopulated — this condition indicates a new record
          # (BUGFIX: removed `cnt = cnt + 1` on an undeclared, unused variable)
          phase = undefined
          buildname = undefined
          recordtype = node
        else if (path.length == 3) and _.isEqual(path, ["object", "status", "phase"])
          # extracting the phase value
          phase = node
          console.log "-------- phase: #{phase}"
        else if (path.length == 3) and _.isEqual(path, ["object", "metadata", "name"])
          buildname = node
          console.log "-------- buildname: #{buildname}"
        # Evaluating the extracted data; first make sure we have read enough
        # from the stream, then resolve on a terminal phase.
        if (buildname != undefined and phase != undefined) and \
            recordtype == 'MODIFIED' and ( phase in ['Complete', 'Cancelled', 'Failed'])
          console.log "returning data: #{recordtype} #{phase}"
          # `this` is the oboe instance inside a node callback; abort stops the stream
          this.abort()
          resolve [recordtype, phase, buildname]
      )
      .fail( ( errorReport ) ->
        console.log "status code: #{errorReport.statusCode}"
      )
      .done( () ->
        console.log "done")
    return oboePromise
  ###*
  # hits the api and returns the json that is used to describe the
  # provided build name.
  #
  # @param {string} ocProject - the openshift project name
  # @param {string} ocBuildName - the unique build name to query
  #
  # @return {Promise} - a request promise that will ultimately yield the
  #                     json describing the named build
  ###
  getBuildStatus : (ocProject, ocBuildName) ->
    # calls build list on the specific build, returns the promise
    # that will yield the payload
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/builds/#{ocBuildName}"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    return request reqObj
    .then (response) ->
      console.log "response is: #{response}"
      return response
    .catch (err) ->
      console.log '------- error called -------' + err
      json = err
  ###*
  # Initiates and monitors the build for the specified project / buildconfig
  # and returns a status object
  #
  # @param {string} ocProject - openshift project
  # @param {string} ocBuildConfigName - the build config that is to be run and monitored
  # @returns {Promise} - resolves to the OCStatus object with the properties
  #                      associated with this build
  ###
  buildSync : (ocProject, ocBuildConfigName) ->
    try
      console.log "ocProject: #{ocProject}, buildconfig: #{ocBuildConfigName}"
      watchBuildStatus = undefined
      buildPayload = await this.startBuild(ocProject, ocBuildConfigName)
      this.statuses.updateStatus('build', 'initiated', buildPayload)
      watchBuildStatus = await this.watchBuild(ocProject, buildPayload)
      this.statuses.updateStatus('build', watchBuildStatus[1])
      console.log "---watchBuild---: #{watchBuildStatus} #{typeof watchBuildStatus}"
      buildStatus = await this.getBuildStatus(ocProject, watchBuildStatus[2])
      console.log "buildstatus kind: #{buildStatus.kind}"
      # put the update into a promise to ensure it gets completed before the
      # status object is returned.
      return await this.statuses.updateStatusAsync('build', buildStatus.status.phase, buildStatus)
    catch err
      console.log "error encountered in buildSync: #{err}"
      console.log err.stack
      return await this.statuses.updateStatusAsync('build', 'error', err)
  ###*
  # Gets the latest image that was built using the specified
  # buildconfig name
  #
  # - iterates over all builds in the project
  # - finds builds that used the build config name provided as arg
  # - checks the build dates, to get the last image built
  #
  # @param {string} ocProject - The name of the oc project
  # @param {string} ocBuildConfigName - The name of the build config
  # @return {Promise} - yields the name of the build image, something like:
  #   docker-registry.default.svc:5000/<project>/<name>@sha256:<digest>
  #   or undefined when no completed build exists for the build config
  ###
  getLatestBuildImage : (ocProject, ocBuildConfigName) ->
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/builds/"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    return request reqObj
    .then (response) ->
      console.log "build request ----- "
      latestBuild = undefined
      latestBuildDate = undefined
      imageName = undefined
      for item in response.items
        console.log "buildconfig: #{item.metadata.labels.buildconfig}"
        console.log "phase: #{item.status.phase}"
        console.log "phase: #{item.metadata.labels.buildconfig}"
        if item.metadata.labels.buildconfig == ocBuildConfigName and \
            item.status.phase == 'Complete'
          console.log "passed conditional"
          curentBuildDate = new Date(item.status.completionTimestamp)
          if latestBuildDate == undefined
            latestBuildDate = curentBuildDate
            latestBuild = item.status.outputDockerImageReference
            imageDigest = item.status.output.to.imageDigest
          else if curentBuildDate > latestBuildDate
            console.log "found the the build: #{latestBuildDate} #{curentBuildDate}"
            latestBuildDate = curentBuildDate
            latestBuild = item.status.outputDockerImageReference
            imageDigest = item.status.output.to.imageDigest
      # ROBUSTNESS: no completed build for this build config; calling
      # .search() on undefined below would raise a TypeError.
      if latestBuild == undefined
        console.log "no completed builds found for #{ocBuildConfigName}"
        return undefined
      # latest build is something like this:
      #    docker-registry.default.svc:5000/<project>/<name>:latest
      # need to combine with the property status.output.to.imageDigest
      # to create <registry>/<project>/<name>@sha256:<digest>
      re = new RegExp('\:latest$')
      endPos = latestBuild.search(re)
      console.log "latestBuild: #{latestBuild}"
      if endPos != -1
        # BUGFIX: removed a log of imageName that ran before it was assigned
        imageName = "#{latestBuild.slice(0, endPos)}@#{imageDigest}"
      else
        # NOTE(review): when the reference has no ':latest' suffix imageName
        # stays undefined — presumably intentional, verify against callers
        console.log "#{latestBuild} - #{imageDigest}"
      console.log "imagename: #{imageName}"
      return imageName
    .catch (err) ->
      console.log '------- error called -------' + err
      json = err
  ###*
  # Gets the name of the last image that was built and the image
  # that is currently deployed, compares the names and returns
  # true or false indicating whether the latest image has been
  # deployed.
  #
  # Will return true even if the replication is only part way complete.
  #
  # @param {string} ocBuildProject - the name of the oc project the build ran in
  # @param {string} buildConifg - the name of the build config
  # @param {string} ocDeployProject - the name of the oc project deployed to
  # @param {string} deployConfig - the name of the deploy config
  # @return {Promise} - resolves to a boolean indicating whether the latest
  #                     image built has been or is currently being deployed.
  ###
  isLatestImageDeployed : (ocBuildProject, buildConifg, ocDeployProject, deployConfig) ->
    mostRecentlyBuildImage = await this.getLatestBuildImage(ocBuildProject, buildConifg)
    currentDeployedImage = await this.getDeployedImage(ocDeployProject, deployConfig)
    console.log "#{currentDeployedImage} currentDeployedImage"
    console.log "#{mostRecentlyBuildImage} mostRecentlyBuildImage"
    return currentDeployedImage == mostRecentlyBuildImage
  ###*
  # Hits the deployment config status end point and returns json
  # @param {string} ocProject - openshift project name
  # @param {string} deployConfig - the name of the deployment config
  # @return {Promise} - yields the json object returned by the end point
  ###
  getDeploymentStatus: (ocProject, deployConfig) ->
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/status"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    console.log "getting: #{urlString}"
    return request reqObj
    .then (response) ->
      console.log('getDeploymentStatus called')
      return response
    .catch (err) ->
      console.log "caught error #{err}"
      console.log "#{err.stack}"
  ###*
  # Gets the currently deployed image name... looks like:
  #    <registry>/<project>/<name>@sha256:<digest>
  #
  # does this by getting the latestVersion property from the deploy config
  # status end point, then appends the latestVersion to the end of the
  # deployconfig name to get the replicationcontroller name, and finally
  # queries the replication controller end point to get the image that
  # the latest replication controller deployed.
  #
  # @param {string} ocProject - the name of the oc project
  # @param {string} deployConfig - the name of the deploy config
  # @return {Promise} - will yield the name of the image
  ###
  getDeployedImage : (ocProject, deployConfig) ->
    imageName = undefined
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/status"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    console.log "getting: #{urlString}"
    # this first request gets the "latestVersion" which oc uses to name the
    # replication controller, then queries the status of the replication controller.
    return request reqObj
    .then (response) ->
      replicationController = "#{deployConfig}-#{response.status.latestVersion}"
      console.log "replication controller: #{replicationController}"
      apiEndPoints = "/api/v1/namespaces/#{ocProject}/replicationcontrollers/#{replicationController}"
      urlString = "#{urldomain}#{apiEndPoints}"
      # NOTE(review): repContReq aliases reqObj rather than copying it; works
      # here because reqObj is not reused afterwards
      repContReq = reqObj
      repContReq.uri = urlString
      repControllerRequest = request repContReq
      .then (response) ->
        containers = response.spec.template.spec.containers
        for container in containers
          console.log "container name: #{container.name}"
          if container.name == deployConfig
            imageName = container.image
            console.log "image name: #{imageName}"
        return imageName
      .catch (err) ->
        console.log "Error: Unable to get the replication controller: #{replicationController}"
        console.log "response was: #{JSON.stringify(response)}"
    .catch (err) ->
      console.log '------- error called -------'
      console.log "error: #{err}"
      console.log "request: #{JSON.stringify(reqObj)}"
  ###*
  # Calls the deployment instantiation end point and returns the json
  # data
  #
  # @param {string} ocProject - the openshift project
  # @param {string} deployConfig - The deploy config to be deployed
  # @return {Promise} - promise that will yield the payload from the deployment
  #                     instantiation event
  ###
  deploy : (ocProject, deployConfig) ->
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/apis/apps.openshift.io/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/instantiate"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    reqObj.method = 'POST'
    # BUGFIX: `reqObj.headers.Content-type = ...` compiled to a subtraction
    # expression (headers.Content minus a new `type` variable), so the
    # Content-Type header was never actually set.
    reqObj.headers['Content-Type'] = "application/json"
    reqObj.body = {
      "kind":"DeploymentRequest",
      "apiVersion":"apps.openshift.io/v1",
      "name":"#{deployConfig}",
      "latest":true,
      "force":true}
    return request reqObj
  ###*
  # returns a promise that will wait for the specified amount of time to
  # complete
  #
  # @param {number} waittime - the amount of time in milliseconds to wait
  # @return {Promise} - that resolves after the set amount of time
  ###
  delay : (waittime) ->
    new Promise (resolve) ->
      setTimeout(resolve, waittime)
  ###*
  # Queries the status of the replication controller and retrieves the desired
  # number of replicas from `spec.replicas` and compares against
  # `status.replicas`. Stops when they are either equal, or the maximum
  # number of recursions is exceeded.
  #
  # When desired replicas vs existing replicas is not equal will wait for
  # 5 seconds then check again.
  #
  # @param {string} ocProject - The openshift project
  # @param {string} replicationControllerName - name of the replication controller
  # @param {number} cnt - Leave this parameter, it is used internally to manage
  #                       recursion depth. Used to cancel out beyond a set
  #                       number of iterations
  # @return {Promise} - resolves to the final replication controller payload
  ###
  deployWatch : (ocProject, replicationControllerName, cnt=0) ->
    maxIterations = 5
    timeBetweenIterations = 5000
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/api/v1/namespaces/#{ocProject}/replicationcontrollers/#{replicationControllerName}"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    repQuery = await request reqObj
    this.statuses.updateStatus('deploy', 'deploying', repQuery)
    console.log "requested replicas: #{repQuery.spec.replicas} ?= existing replicas: #{repQuery.status.replicas}"
    console.log "kubectl.kubernetes.io/desired-replicas: #{repQuery.metadata.annotations['kubectl.kubernetes.io/desired-replicas']}"
    # Monitoring the target replicas vs the actual replicas... waits until
    # they are equal or the retry budget is spent.
    if repQuery.spec.replicas == repQuery.status.replicas
      console.log "requested replicas are up"
      this.statuses.updateStatus('deploy', 'success', repQuery)
      return repQuery
    else if cnt > maxIterations
      console.log("max attempts exceeded #{cnt}")
      this.statuses.updateStatus('deploy', 'failed', repQuery)
      return repQuery
    else
      cnt = cnt + 1
      console.log("attempting await")
      await this.delay(timeBetweenIterations)
      console.log("await complete")
      # explicit return so the recursive promise chains to the caller
      return this.deployWatch(ocProject, replicationControllerName, cnt)
  ###*
  # Checks to see if the latest build is the version that is
  # currently deployed, and if it is stops, otherwise proceeds
  # with a deployment.
  #
  # @param {string} ocBuildProject - The name of the openshift project built in
  # @param {string} buildConfig - The build config name
  # @param {string} ocDeployProject - The name of the openshift project deployed to
  # @param {string} deployConfig - The deployment config name
  # @return {Promise} - resolves to the OCStatus object for this deployment
  ###
  deployLatest : (ocBuildProject, buildConfig, ocDeployProject, deployConfig) ->
    replicationController = undefined
    this.statuses.updateStatus('deploy', 'checking')
    try
      console.log "getting latest..."
      isLatest = await this.isLatestImageDeployed(ocBuildProject, buildConfig, ocDeployProject, deployConfig)
      if !isLatest
        console.log "instantiating a deploy..."
        this.statuses.updateStatus('deploy', 'initiated')
        deployObj = await this.deploy(ocDeployProject, deployConfig)
        replicationController = "#{deployObj.metadata.name}-#{deployObj.status.latestVersion}"
        this.statuses.updateStatus('deploy', 'initiated', deployObj)
      if replicationController == undefined
        # Getting the name of the replication controller that is doing
        # the rollout for the deployment
        console.log "getting the replication controller name..."
        this.statuses.updateStatus('deploy', 'initiated')
        latestDeployment = await this.getDeploymentStatus(ocDeployProject, deployConfig)
        replicationController = "#{deployConfig}-#{latestDeployment.status.latestVersion}"
      # isLatest only indicates that the deployment has already been triggered;
      # code below will monitor for its completion.
      this.statuses.updateStatus('deploy', 'deploying')
      deployStatus = await this.deployWatch(ocDeployProject, replicationController)
      # BUGFIX: this log line previously sat after the return and was unreachable
      console.log "----------Deploy complete ----------"
      return this.statuses
    catch err
      console.log "error encountered in attempt to deploy..", err
      return await this.statuses.updateStatusAsync('deploy', 'error', err)
###*
# Class that keeps track of what actions have been performed and what their
# statuses are.
# action:
# - build
# - buildwatch
# - deploy
# - deploywatch
#
# status:
# - completed
# - cancelled
# - failed
# - running
# - initiated
#
# payload:
# The last js object returned by the last operation relating to the action
###
class OCStatus
  ###*
  # Creates an empty registry of action statuses.
  ###
  constructor: () ->
    @statuses = {}
  ###*
  # Records (or replaces) the status for an action; the action entry is
  # created on first use.
  #
  # @param {string} action - a string describing the action (build | deploy)
  # @param {string} status - a string describing the status of the action
  #                          (completed | cancelled | failed | running | initiated)
  # @param {Object} [payload] - optional payload describing the action; only
  #                             stored when supplied
  ###
  updateStatus : (action, status, payload=undefined) ->
    @statuses[action] = {} if @statuses[action] == undefined
    record = @statuses[action]
    record.status = status
    record.payload = payload unless payload == undefined
  ###*
  # Replaces the payload stored for an existing action entry.
  #
  # @param {string} action - a string describing the action (build | deploy)
  # @param {Object} payload - an object that describes the action.. typically
  #                           this is json data returned by an oc endpoint
  ###
  setPayload : (action, payload) ->
    @statuses[action].payload = payload
  ###*
  # Promise-wrapped variant of updateStatus.
  #
  # @param {string} action - a string describing the action (build | deploy)
  # @param {string} status - a string describing the status of the action
  # @param {Object} [payload] - optional payload describing the action
  # @return {Promise} - a promise that will resolve to a reference to this
  #                     status object
  ###
  updateStatusAsync : (action, status, payload=undefined) ->
    new Promise (resolve) =>
      @updateStatus(action, status, payload)
      resolve this
| 148038 | ###*
# @fileOverview Wrapper methods to openshift to be used in a hubot
# pipeline.
#
# @author <NAME>
#
# @requires NPM:request-promise
# @requires NPM:lodash
# @requires NPM:oboe
#
###
request = require('request-promise')
_ = require('lodash')
oboe = require('oboe')
###*
# Class used to wrap up various openshift methods with the goal
# of making it easy for hubot calls to interact with openshift.
###
class exports.OCAPI
domain = null
protocol = 'https'
buildStatus = 'NOT STARTED'
deployStatus = 'NOT STARTED'
# ckan build taks around 10 minutes.. making timeout 20 should be adequate
requestTimeoutSeconds = 60 * 30
###*
# @param {string} domain - The domain to use in the url when communicating with
# openshift.
# @param {string} apikey - The api key to use when making api calls
###
  constructor : (domain, apikey=null) ->
    @domain = domain
    # NOTE(review): copies the class-level `protocol` variable ('https')
    # onto the instance; baseUrl() reads the class-level variable, not
    # this instance property -- confirm before relying on @protocol.
    @protocol = protocol
    # Sent as a Bearer token by getCoreRequest() when non-null.
    @apikey = apikey
    # Per-instance tracker of build/deploy action statuses.
    @statuses = new OCStatus()
###*
# Joins the protocol 'https' with the domain to form the root
# of the url
#
# @returns {url} the domain and protocol joined together.
###
  baseUrl : ->
    # Uses the class-level `protocol` variable (always 'https'),
    # not the @protocol instance property.
    return "#{protocol}://#{@domain}"
###*
# returns a basic request object that other methods can then add
# to, always required will the addition of the uri
#
# @returns {reqObj} a basic request object with some commonly used
# parameters
###
getCoreRequest : ->
reqObj = {
json : true,
method: 'GET',
headers: {
Accept: 'application/json, */*'
}
}
if this.apikey?
reqObj.headers.Authorization = "Bearer #{this.apikey}"
return reqObj
###*
# queries openshift to get a json struct that describes the end
# points supported by the openshift instance
#
# @returns {reqObj} a promise that will return the api end points
# available for the openshift api.
###
getAPIEndPoints : ->
urldomain = this.baseUrl()
apiEndPoints = '/oapi/v1/'
urlString = "#{urldomain}#{apiEndPoints}"
reqObj = this.getCoreRequest()
reqObj.uri = urlString
return request reqObj
.then (response) ->
console.log response.resources.length
json = response
.catch (err) ->
console.log '------- error called -------' + err.resources.length
json = err
###*
# starts a build in the project specified using the build config
#
# @param {ocProject} openshift project
# @param {ocBuildConfigName} openshift build config name that is to be built
#
  # @returns {reqObj} a promise that will return the payload returned by the start build event
###
  startBuild : (ocProject, ocBuildConfigName ) ->
    urldomain = this.baseUrl()
    # POSTing to the buildconfig's `instantiate` sub-resource kicks off
    # a new build.
    initBuildPath = "/apis/build.openshift.io/v1/namespaces/#{ocProject}/buildconfigs/#{ocBuildConfigName}/instantiate"
    urlString = "#{urldomain}#{initBuildPath}"
    reqObj = this.getCoreRequest()
    reqObj.uri = urlString
    reqObj.method = 'POST'
    # BuildRequest payload; `triggeredBy` is informational only.
    reqObj.body = {
      kind: "BuildRequest",
      apiVersion: "build.openshift.io/v1",
      metadata: {
        name: ocBuildConfigName,
        creationTimestamp: null
      },
      triggeredBy: [
        {
          message: "Triggered with coffee"
        }
      ],
      dockerStrategyOptions: {},
      sourceStrategyOptions: {}
    }
    return request reqObj
    .then (response) ->
      #console.log JSON.stringify(response, undefined, 2)
      return response
    .catch (err) ->
      # NOTE(review): errors are logged and swallowed, so the returned
      # promise resolves to undefined on failure -- callers must cope.
      console.log "------- error: #{err}-------"
      console.log err.stack
###*
# Gets the data from a build instantiate event (type: build), and extracts the build config name
# to define the end point for the build that is to be watched.
#
# @param {string} ocProject- openshift project
# @param {string} buildData - the payload returned by the instantiate (start build) event
#
  # @returns {Promise} a promise that will ultimately yield the results of the watch
# event that concludes the build, the promise will return a list
# with the following elements
# 1. record type: (MODIFIED|ADDED|?)
# 2. phase: (completed|cancelled|failed)
# 3. build name: the unique name that is assigned to this
# build attempt
###
watchBuild : (ocProject, buildData)->
urldomain = this.baseUrl()
reqObj = this.getCoreRequest()
delete reqObj.json
watchBuildUrl = "#{urldomain}/apis/build.openshift.io/v1/watch/namespaces/#{ocProject}/builds/#{buildData.metadata.name}"
watchBuildUrl = watchBuildUrl + "?timeoutSeconds=#{requestTimeoutSeconds}"
reqObj.url = watchBuildUrl
oboePromise = new Promise (resolve) ->
recordtype = undefined
phase = undefined
buildname = undefined
oboeRequest = oboe(reqObj)
.node('*', (node, path) ->
console.log "path: #{path}, #{typeof path}, #{path.length}, #{Array.isArray(path)}"
# extracting the required data from the stream
if ( path.length == 1) and path[0] == 'type'
# type is the first value of the object so putting
# other values to undefined so they can be repopulated
# if this condition is satisfied it indicates a new record has
phase = undefined
buildname = undefined
cnt = cnt + 1
recordtype = node
else if (path.length == 3) and _.isEqual(path, ["object", "status", "phase"])
# extracting the phase value
phase = node
console.log "-------- phase: #{phase}"
else if (path.length == 3) and _.isEqual(path, ["object", "metadata", "name"])
buildname = node
console.log "-------- buildname: #{buildname}"
# Evaluating the extracted data.
#if recordtype == 'ADDED' and phase == 'New'
# First make sure we have read enough from the stream
if (buildname != undefined and phase != undefined) and \
recordtype == 'MODIFIED' and ( phase in ['Complete', 'Cancelled', 'Failed'])
console.log "returning data: #{recordtype} #{phase}"
this.abort()
resolve [recordtype, phase, buildname]
#this.done()
)
.fail( ( errorReport ) ->
console.log "status code: #{errorReport.statusCode}"
)
.done( () ->
console.log "done")
return oboePromise
###*
# hits the api and returns the json that is used to describe the
# provided build name.
#
# @param {string} ocProject - the openshift project name
# @param {object} buildData - the json returned by the instantiate build api call
#
# @return {Promise} - a request promise that will ultimately yield the
# concluding event to associated with the build
###
  getBuildStatus : (ocProject, ocBuildName) ->
    # calls build list on the specific build, returns the promise
    # that will yield the payload
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/builds/#{ocBuildName}"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    return request reqObj
    .then (response) ->
      console.log "response is: #{response}"
      #console.log typeof response, JSON.stringify(response)
      return response
    .catch (err) ->
      # NOTE(review): on failure the promise resolves (not rejects) with
      # the error object -- callers see the error as the "status".
      console.log '------- error called -------' + err
      json = err
###*
# Initates and monitors the build for the specified project / buildconfig
# and returns a status object
#
# @param {ocProject} openshift project
# @param {ocBuildConfigName} the build config that is to be run and monitored
# @returns {OCStatus} status- a status object with the properties associated
# with this build
###
  buildSync : (ocProject, ocBuildConfigName) ->
    try
      console.log "ocProject: #{ocProject}, buildconfig: #{ocBuildConfigName}"
      watchBuildStatus = undefined
      # Kick off the build and record the instantiate payload.
      buildPayload = await this.startBuild(ocProject, ocBuildConfigName)
      this.statuses.updateStatus('build', 'initiated', buildPayload)
      # Block on the watch stream until the build concludes; yields
      # [recordtype, phase, buildname].
      watchBuildStatus = await this.watchBuild(ocProject, buildPayload)
      this.statuses.updateStatus('build', watchBuildStatus[1])
      console.log "---watchBuild---: #{watchBuildStatus} #{typeof watchBuildStatus}"
      #console.log JSON.stringify(watchBuildStatus)
      # Fetch the final build record for the concluded build name.
      buildStatus = await this.getBuildStatus(ocProject, watchBuildStatus[2])
      console.log "buildstatus kind: #{buildStatus.kind}"
      #console.log "buildstatus: #{JSON.stringify(buildStatus)}"
      # put the update into a promise to ensure it gets completed before the status
      # object is returned.
      return await this.statuses.updateStatusAsync('build', buildStatus.status.phase, buildStatus)
      # create a promise in the status object and return that, with an await
      #resolve this.statuses
    catch err
      console.log "error encountered in buildSync: #{err}"
      console.log err.stack
      return await this.statuses.updateStatusAsync('build', 'error', err)
###*
# Gets the latest image that was built using the specified
# buildconfig name
#
# - iterates over all builds in the project
# - finds builds that used the build config name provided as arg
# - checks the build dates, to get the last image built
#
# @param {string} ocProject - The name of the oc project
# @param {string} ocBuildConfigName - The name of the build config
# @return {promise} - yields the name of the build image
###
getLatestBuildImage : (ocProject, ocBuildConfigName) ->
# ocProject: name of the openshift project
# ocBuildConfigName: build config name
#
# returns: a promise with the most recently build image
# name / identifier.. looks something like this:
# docker-registry.default.svc:5000/databcdc/bcdc-test-dev@sha256:edcf5c6221be569a366cc09034bfdc2986f37d18c6b269790b0185b238f19c81
#
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/builds/"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
return request reqObj
.then (response) ->
console.log "build request ----- "
latestBuild = undefined
latestBuildDate = undefined
imageName = undefined
for item in response.items
#console.log "item: #{JSON.stringify(item)}"
console.log "buildconfig: #{item.metadata.labels.buildconfig}"
console.log "phase: #{item.status.phase}"
console.log "phase: #{item.metadata.labels.buildconfig}"
if item.metadata.labels.buildconfig == ocBuildConfigName and \
item.status.phase == 'Complete'
console.log "passed conditional"
curentBuildDate = new Date(item.status.completionTimestamp)
if latestBuildDate == undefined
latestBuildDate = curentBuildDate
latestBuild = item.status.outputDockerImageReference
imageDigest = item.status.output.to.imageDigest
else if curentBuildDate > latestBuildDate
console.log "found the the build: #{latestBuildDate} #{curentBuildDate}"
latestBuildDate = curentBuildDate
latestBuild = item.status.outputDockerImageReference
imageDigest = item.status.output.to.imageDigest
# latest build is something like this:
# docker-registry.default.svc:5000/databcdc/bcdc-test:latest
# need to combine with the property status.output.to.imageDigest
# to create docker-registry.default.svc:5000/databcdc/pipeline-bot@sha256:56f2a697134f04e1d519e7d063c0c0da7832e5fe0f3f007d10edf5f1b05b8724
re = new RegExp('\:latest$')
endPos = latestBuild.search(re)
console.log "latestBuild: #{latestBuild}"
if endPos != -1
console.log "imageName: #{imageName}"
imageName = "#{latestBuild.slice(0, endPos)}@#{imageDigest}"
else
console.log "#{latestBuild} - #{imageDigest}"
console.log "imagename: #{imageName}"
return imageName
.catch (err) ->
console.log '------- error called -------' + err
json = err
###*
# Gets the name of the last image that was built and the image
# that is currently deployed, compares the names and returns
# true or false indicating whether the latest image has been
# deployed
#
# @param {string} ocProject - the name of the oc project
# @param {string} buildConifg - the name of the build config
# @param {string} deployConfig - the name of the deploy config
# @return {boolean} - indicator of whether the latest image built has been
# or is currently being deployed.
###
  isLatestImageDeployed : (ocBuildProject, buildConifg, ocDeployProject, deployConfig) ->
    # Using the build config, identifies the last image that was built;
    # using the deploy config, identifies the image that is currently
    # deployed.  Returns true if they are the same, false otherwise.
    # Will return true even if the replication is only part way complete.
    mostRecentlyBuildImage = await this.getLatestBuildImage(ocBuildProject, buildConifg)
    currentDeployedImage = await this.getDeployedImage(ocDeployProject, deployConfig)
    console.log "#{currentDeployedImage} currentDeployedImage"
    console.log "#{mostRecentlyBuildImage} mostRecentlyBuildImage"
    return currentDeployedImage == mostRecentlyBuildImage
###*
# Hits the deployment config status end point and returns json
# @param {string} ocProject - openshift project name
# @param {string} deployConfig - the name of the deployment config
# @return {object} - the json object that is returned by the end point
###
  getDeploymentStatus: (ocProject, deployConfig) ->
    # NOTE(review): imageName is never used in this method.
    imageName = undefined
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/status"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    console.log "getting: #{urlString}"
    return request reqObj
    .then (response) ->
      console.log('getDeploymentStatus called')
      return response
    .catch (err) ->
      # NOTE(review): errors are logged and swallowed; the promise then
      # resolves to undefined.
      console.log "caught error #{err}"
      console.log "#{err.stack}"
###*
# Gets the currently configured image name... looks like:
# docker-registry.default.svc:5000/databcdc/datapusher@sha256:2eff082c999cbe0eff08816d2b8d4d7b97e6e7d5825ca85ef3714990752b1c7c
#
# does this by getting the latestVersion property from the deploy config
# status end point, then appends the latestVersion to the end of the
# deployconfig name to get the replicationcontroller name
# queries the replication controller end point to get the image that
# that the latest replication controller deployed.
#
# @param {string} ocProject - the name of the oc project
# @param {string} deployConfig - the name of the deploy config
# @return {promise} - will yield the name of the image
#
###
  getDeployedImage : (ocProject, deployConfig) ->
    imageName = undefined
    reqObj = this.getCoreRequest()
    urldomain = this.baseUrl()
    apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/status"
    urlString = "#{urldomain}#{apiEndPoints}"
    reqObj.uri = urlString
    console.log "getting: #{urlString}"
    # this first request gets the "latestVersion" which oc uses to name the
    # replication controller, then queries the status of the replication controller.
    return request reqObj
    .then (response) ->
      # OC names the rollout's replication controller
      # <deployConfig>-<latestVersion>.
      replicationController = "#{deployConfig}-#{response.status.latestVersion}"
      console.log "replication controller: #{replicationController}"
      apiEndPoints = "/api/v1/namespaces/#{ocProject}/replicationcontrollers/#{replicationController}"
      urlString = "#{urldomain}#{apiEndPoints}"
      # NOTE(review): repContReq aliases (does not copy) reqObj, so this
      # mutates the outer request object's uri.
      repContReq = reqObj
      repContReq.uri = urlString
      repControllerRequest = request repContReq
      .then (response) ->
        containers = response.spec.template.spec.containers
        #console.log "containers: #{containers}"
        # The container whose name matches the deploy config carries the
        # image reference that is actually deployed.
        for container in containers
          console.log "container name: #{container.name}"
          #console.log "container: #{JSON.stringify(container)}"
          if container.name == deployConfig
            imageName = container.image
            console.log "image name: #{imageName}"
        return imageName
      .catch (err) ->
        console.log "Error: Unable to get the replication controller: #{replicationController}"
        console.log "response was: #{JSON.stringify(response)}"
    .catch (err) ->
      console.log '------- error called -------'
      console.log "error: #{err}"
      console.log "request: #{JSON.stringify(reqObj)}"
###*
# Calls the deployment instantiation end point and returns the json
# data
#
# @param {string} ocProject - the openshift project
# @param {deployConfig} deployConfig - The deploy config
# @return {promise} - promise that will yield the payload from the deployment
# instantiation event
###
deploy : (ocProject, deployConfig) ->
# ocProject: the openshift project
# deployConfig: the deployment config to be deployed
#
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/apis/apps.openshift.io/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/instantiate"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
reqObj.method = 'POST'
reqObj.headers.Content-type = "application/json"
reqObj.body = {
"kind":"DeploymentRequest",
"apiVersion":"apps.openshift.io/v1",
"name":"#{deployConfig}",
"latest":true,
"force":true}
return request reqObj
###*
# returns a promise that will wait for the specified number or seconds to
# complete
#
# @param {number} waittime - the amount of time in milliseconds to wait
# @return {promise} - that waits for a set amount of time
###
delay : (waittime) ->
ms = new Promise (resolve) ->
setTimeout(resolve, waittime)
###*
# Queries the status of the replication controller and retrieves the desired
# number of controllers from `spec.replicas` and compares against
# status.replicas. Cancels out when they are either equal, or the maxuimum
# number of recursions is exceeded.
#
# When desired replicas vs existing replicas is not equal will wait for
# 5 seconds then check again.
#
# @param {string} ocProject - The openshift project
# @param {replicationControllerName} - name of the replication controller
# @param {number} cnt - Leave this parameter, it is used internally to manage
# recursion depth. Used to cancel out beyond a set
# number of iterations
#
###
deployWatch : (ocProject, replicationControllerName, cnt=0) ->
maxIterations = 5
timeBetweenIterations = 5000
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/api/v1/namespaces/#{ocProject}/replicationcontrollers/#{replicationControllerName}"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
repQuery = await request reqObj
this.statuses.updateStatus('deploy', 'deploying', repQuery)
#console.log "repQuery: #{JSON.stringify(repQuery)}"
console.log "requested replicas: #{repQuery.spec.replicas} ?= existing replicas: #{repQuery.status.replicas}"
console.log "kubectl.kubernetes.io/desired-replicas: #{repQuery.metadata.annotations['kubectl.kubernetes.io/desired-replicas']}"
# Code below is monitoring the target replicas vs the actual
# replicas... waits until they are equal
#
# Possible source of target pods: repQuery.metadata.annotations["kubectl.kubernetes.io/desired-replicas"]
# and.. repQuery.spec.replicas
if repQuery.spec.replicas == repQuery.status.replicas
console.log "requested replicas are up"
this.statuses.updateStatus('deploy', 'success', repQuery)
return repQuery
else if cnt > maxIterations
console.log("max attempts exceeded #{cnt}")
this.statuses.updateStatus('deploy', 'failed', repQuery)
return repQuery
else
cnt = cnt + 1
console.log("attempting await")
await this.delay(timeBetweenIterations)
console.log("await complete")
this.deployWatch(ocProject, replicationControllerName, cnt)
###*
#
# Checks to see if the latest build is the version that is
# currently deployed, and if it is stops, otherwise proceeds
# with a deployment.
# @param {string} ocProject - The name of the openshift project
# @param {string} buildConfig - The build config name
# @return {object} - returns a OCStatus object
#
###
deployLatest : (ocBuildProject, buildConfig, ocDeployProject, deployConfig) ->
#
replicationController = undefined
this.statuses.updateStatus('deploy', 'checking')
try
console.log "getting latest..."
isLatest = await this.isLatestImageDeployed(ocBuildProject, buildConfig, ocDeployProject, deployConfig)
if !isLatest
console.log "instantiating a deploy..."
this.statuses.updateStatus('deploy', 'initiated')
deployObj = await this.deploy(ocDeployProject, deployConfig)
replicationController = "#{deployObj.metadata.name}-#{deployObj.status.latestVersion}"
this.statuses.updateStatus('deploy', 'initiated', deployObj)
if replicationController == undefined
# Getting the name of the replication controller that is doing
# the rollout for the depolyment
console.log "getting the replication controller name..."
this.statuses.updateStatus('deploy', 'initiated')
# should get the actual object instead of just the status, then could
# update the status object
latestDeployment = await this.getDeploymentStatus(ocDeployProject, deployConfig)
replicationController = "#{deployConfig}-#{latestDeployment.status.latestVersion}"
# is latest only indicates that the deployment has already been triggered
# code below will monitor for its completion.
this.statuses.updateStatus('deploy', 'deploying')
deployStatus = await this.deployWatch(ocDeployProject, replicationController)
return this.statuses
console.log "----------Deploy complete ----------"
catch err
console.log "error encountered in attempt to deploy..", err
return await this.statuses.updateStatusAsync('deploy', 'error', err)
###*
# Class that keeps track of what actions have been performed and what their
# statuses are.
# action:
# - build
# - buildwatch
# - deploy
# - deploywatch
#
# status:
# - completed
# - cancelled
# - failed
# - running
# - initiated
#
# payload:
# The last js object returned by the last operation relating to the action
###
class OCStatus
  constructor: () ->
    # Map of action name -> {status, payload}.
    @statuses = {}
  ###*
  # updates the status of an action, if the action has not been defined then
  # it gets added to this object. OC status object is used to collect the
  # statuses of a number of different actions
  #
  # @param {string} action - a string describing the action (build | deploy)
  # @param {string} status - a string describing the status of the action (completed | cancelled | failed | running | instantiated )
  # @param {object} [payload] - optional data describing the action
  ###
  updateStatus : (action, status, payload=undefined) ->
    if @statuses[action] == undefined
      @statuses[action] = {}
    @statuses[action]['status'] = status
    if payload != undefined
      @statuses[action]['payload'] = payload
  ###*
  # Finds the status record that aligns with the action and updates the payload
  # associated with that action
  #
  # @param {string} action - a string describing the action (build | deploy)
  # @param {object} payload - an object that describes the action.. typically this
  #                           is json data returned by an oc endpoint
  ###
  setPayload : (action, payload) ->
    # Fix: guard against the action never having been registered, which
    # previously raised a TypeError on the undefined record.
    if @statuses[action] == undefined
      @statuses[action] = {}
    @statuses[action]['payload'] = payload
  ###*
  # @param {string} action - a string describing the action (build | deploy)
  # @param {string} status - a string describing the status of the
  #                          action (completed | cancelled | failed | running |
  #                          instantiated )
  # @return {Promise} - a promise that will resolve to a reference to this
  #                     status object
  ###
  updateStatusAsync : (action, status, payload=undefined) ->
    objref = this
    return new Promise (resolve) ->
      objref.updateStatus(action, status, payload)
      resolve objref
| true | ###*
# @fileOverview Wrapper methods to openshift to be used in a hubot
# pipeline.
#
# @author PI:NAME:<NAME>END_PI
#
# @requires NPM:request-promise
# @requires NPM:lodash
# @requires NPM:oboe
#
###
request = require('request-promise')
_ = require('lodash')
oboe = require('oboe')
###*
# Class used to wrap up various openshift methods with the goal
# of making it easy for hubot calls to interact with openshift.
###
class exports.OCAPI
domain = null
protocol = 'https'
buildStatus = 'NOT STARTED'
deployStatus = 'NOT STARTED'
# ckan build taks around 10 minutes.. making timeout 20 should be adequate
requestTimeoutSeconds = 60 * 30
###*
# @param {string} domain - The domain to use in the url when communicating with
# openshift.
# @param {string} apikey - The api key to use when making api calls
###
constructor : (domain, apikey=null) ->
@domain = domain
@protocol = protocol
@apikey = apikey
@statuses = new OCStatus()
###*
# Joins the protocol 'https' with the domain to form the root
# of the url
#
# @returns {url} the domain and protocol joined together.
###
baseUrl : ->
return "#{protocol}://#{@domain}"
###*
# returns a basic request object that other methods can then add
# to, always required will the addition of the uri
#
# @returns {reqObj} a basic request object with some commonly used
# parameters
###
getCoreRequest : ->
reqObj = {
json : true,
method: 'GET',
headers: {
Accept: 'application/json, */*'
}
}
if this.apikey?
reqObj.headers.Authorization = "Bearer #{this.apikey}"
return reqObj
###*
# queries openshift to get a json struct that describes the end
# points supported by the openshift instance
#
# @returns {reqObj} a promise that will return the api end points
# available for the openshift api.
###
getAPIEndPoints : ->
urldomain = this.baseUrl()
apiEndPoints = '/oapi/v1/'
urlString = "#{urldomain}#{apiEndPoints}"
reqObj = this.getCoreRequest()
reqObj.uri = urlString
return request reqObj
.then (response) ->
console.log response.resources.length
json = response
.catch (err) ->
console.log '------- error called -------' + err.resources.length
json = err
###*
# starts a build in the project specified using the build config
#
# @param {ocProject} openshift project
# @param {ocBuildConfigName} openshift build config name that is to be built
#
# @returns {reqObj} a promise that will return the payload retured by the start build event
###
startBuild : (ocProject, ocBuildConfigName ) ->
urldomain = this.baseUrl()
initBuildPath = "/apis/build.openshift.io/v1/namespaces/#{ocProject}/buildconfigs/#{ocBuildConfigName}/instantiate"
urlString = "#{urldomain}#{initBuildPath}"
reqObj = this.getCoreRequest()
reqObj.uri = urlString
reqObj.method = 'POST'
reqObj.body = {
kind: "BuildRequest",
apiVersion: "build.openshift.io/v1",
metadata: {
name: ocBuildConfigName,
creationTimestamp: null
},
triggeredBy: [
{
message: "Triggered with coffee"
}
],
dockerStrategyOptions: {},
sourceStrategyOptions: {}
}
return request reqObj
.then (response) ->
#console.log JSON.stringify(response, undefined, 2)
return response
.catch (err) ->
console.log "------- error: #{err}-------"
console.log err.stack
###*
# Gets the data from a build instantiate event (type: build), and extracts the build config name
# to define the end point for the build that is to be watched.
#
# @param {string} ocProject- openshift project
# @param {string} buildData - the payload returned by the instantiate (start build) event
#
# @returns {Promise} a promise that will untimately yield the results of the watch
# event that concludes the build, the promise will return a list
# with the following elements
# 1. record type: (MODIFIED|ADDED|?)
# 2. phase: (completed|cancelled|failed)
# 3. build name: the unique name that is assigned to this
# build attempt
###
watchBuild : (ocProject, buildData)->
urldomain = this.baseUrl()
reqObj = this.getCoreRequest()
delete reqObj.json
watchBuildUrl = "#{urldomain}/apis/build.openshift.io/v1/watch/namespaces/#{ocProject}/builds/#{buildData.metadata.name}"
watchBuildUrl = watchBuildUrl + "?timeoutSeconds=#{requestTimeoutSeconds}"
reqObj.url = watchBuildUrl
oboePromise = new Promise (resolve) ->
recordtype = undefined
phase = undefined
buildname = undefined
oboeRequest = oboe(reqObj)
.node('*', (node, path) ->
console.log "path: #{path}, #{typeof path}, #{path.length}, #{Array.isArray(path)}"
# extracting the required data from the stream
if ( path.length == 1) and path[0] == 'type'
# type is the first value of the object so putting
# other values to undefined so they can be repopulated
# if this condition is satisfied it indicates a new record has
phase = undefined
buildname = undefined
cnt = cnt + 1
recordtype = node
else if (path.length == 3) and _.isEqual(path, ["object", "status", "phase"])
# extracting the phase value
phase = node
console.log "-------- phase: #{phase}"
else if (path.length == 3) and _.isEqual(path, ["object", "metadata", "name"])
buildname = node
console.log "-------- buildname: #{buildname}"
# Evaluating the extracted data.
#if recordtype == 'ADDED' and phase == 'New'
# First make sure we have read enough from the stream
if (buildname != undefined and phase != undefined) and \
recordtype == 'MODIFIED' and ( phase in ['Complete', 'Cancelled', 'Failed'])
console.log "returning data: #{recordtype} #{phase}"
this.abort()
resolve [recordtype, phase, buildname]
#this.done()
)
.fail( ( errorReport ) ->
console.log "status code: #{errorReport.statusCode}"
)
.done( () ->
console.log "done")
return oboePromise
###*
# hits the api and returns the json that is used to describe the
# provided build name.
#
# @param {string} ocProject - the openshift project name
# @param {object} buildData - the json returned by the instantiate build api call
#
# @return {Promise} - a request promise that will ultimately yield the
# concluding event to associated with the build
###
getBuildStatus : (ocProject, ocBuildName) ->
# calls build list on the specific build, returns the promise
# that will yield the payload
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/builds/#{ocBuildName}"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
return request reqObj
.then (response) ->
console.log "response is: #{response}"
#console.log typeof response, JSON.stringify(response)
return response
.catch (err) ->
console.log '------- error called -------' + err
json = err
###*
# Initates and monitors the build for the specified project / buildconfig
# and returns a status object
#
# @param {ocProject} openshift project
# @param {ocBuildConfigName} the build config that is to be run and monitored
# @returns {OCStatus} status- a status object with the properties associated
# with this build
###
buildSync : (ocProject, ocBuildConfigName) ->
try
console.log "ocProject: #{ocProject}, buildconfig: #{ocBuildConfigName}"
watchBuildStatus = undefined
buildPayload = await this.startBuild(ocProject, ocBuildConfigName)
this.statuses.updateStatus('build', 'initiated', buildPayload)
watchBuildStatus = await this.watchBuild(ocProject, buildPayload)
this.statuses.updateStatus('build', watchBuildStatus[1])
console.log "---watchBuild---: #{watchBuildStatus} #{typeof watchBuildStatus}"
#console.log JSON.stringify(watchBuildStatus)
buildStatus = await this.getBuildStatus(ocProject, watchBuildStatus[2])
console.log "buildstatus kind: #{buildStatus.kind}"
#console.log "buildstatus: #{JSON.stringify(buildStatus)}"
# put the update into a promise to ensure it gets completed before the status
# object is returned.
return await this.statuses.updateStatusAsync('build', buildStatus.status.phase, buildStatus)
# create a promise in the status object and return that, with an await
#resolve this.statuses
catch err
console.log "error encountered in buildSync: #{err}"
console.log err.stack
return await this.statuses.updateStatusAsync('build', 'error', err)
###*
# Gets the latest image that was built using the specified
# buildconfig name
#
# - iterates over all builds in the project
# - finds builds that used the build config name provided as arg
# - checks the build dates, to get the last image built
#
# @param {string} ocProject - The name of the oc project
# @param {string} ocBuildConfigName - The name of the build config
# @return {promise} - yields the name of the build image
###
getLatestBuildImage : (ocProject, ocBuildConfigName) ->
# ocProject: name of the openshift project
# ocBuildConfigName: build config name
#
# returns: a promise with the most recently build image
# name / identifier.. looks something like this:
# docker-registry.default.svc:5000/databcdc/bcdc-test-dev@sha256:edcf5c6221be569a366cc09034bfdc2986f37d18c6b269790b0185b238f19c81
#
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/builds/"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
return request reqObj
.then (response) ->
console.log "build request ----- "
latestBuild = undefined
latestBuildDate = undefined
imageName = undefined
for item in response.items
#console.log "item: #{JSON.stringify(item)}"
console.log "buildconfig: #{item.metadata.labels.buildconfig}"
console.log "phase: #{item.status.phase}"
console.log "phase: #{item.metadata.labels.buildconfig}"
if item.metadata.labels.buildconfig == ocBuildConfigName and \
item.status.phase == 'Complete'
console.log "passed conditional"
curentBuildDate = new Date(item.status.completionTimestamp)
if latestBuildDate == undefined
latestBuildDate = curentBuildDate
latestBuild = item.status.outputDockerImageReference
imageDigest = item.status.output.to.imageDigest
else if curentBuildDate > latestBuildDate
console.log "found the the build: #{latestBuildDate} #{curentBuildDate}"
latestBuildDate = curentBuildDate
latestBuild = item.status.outputDockerImageReference
imageDigest = item.status.output.to.imageDigest
# latest build is something like this:
# docker-registry.default.svc:5000/databcdc/bcdc-test:latest
# need to combine with the property status.output.to.imageDigest
# to create docker-registry.default.svc:5000/databcdc/pipeline-bot@sha256:56f2a697134f04e1d519e7d063c0c0da7832e5fe0f3f007d10edf5f1b05b8724
re = new RegExp('\:latest$')
endPos = latestBuild.search(re)
console.log "latestBuild: #{latestBuild}"
if endPos != -1
console.log "imageName: #{imageName}"
imageName = "#{latestBuild.slice(0, endPos)}@#{imageDigest}"
else
console.log "#{latestBuild} - #{imageDigest}"
console.log "imagename: #{imageName}"
return imageName
.catch (err) ->
console.log '------- error called -------' + err
json = err
###*
# Gets the name of the last image that was built and the image
# that is currently deployed, compares the names and returns
# true or false indicating whether the latest image has been
# deployed
#
# @param {string} ocProject - the name of the oc project
# @param {string} buildConifg - the name of the build config
# @param {string} deployConfig - the name of the deploy config
# @return {boolean} - indicator of whether the latest image built has been
# or is currently being deployed.
###
isLatestImageDeployed : (ocBuildProject, buildConifg, ocDeployProject, deployConfig) ->
# ocProject: the openshift project
# ocBuildConfigName: a build config name
# deployConfig: a deployment config
#
# using the build config, identifies the last image that was build
# using the depoly config identifies the image that is currently
# deployed. If they are the same returns true otherwise returns
# false.
# Will return true even if the replication is only part way complete
mostRecentlyBuildImage = await this.getLatestBuildImage(ocBuildProject, buildConifg)
currentDeployedImage = await this.getDeployedImage(ocDeployProject, deployConfig)
console.log "#{currentDeployedImage} currentDeployedImage"
console.log "#{mostRecentlyBuildImage} mostRecentlyBuildImage"
return currentDeployedImage == mostRecentlyBuildImage
###*
# Hits the deployment config status end point and returns json
# @param {string} ocProject - openshift project name
# @param {string} deployConfig - the name of the deployment config
# @return {object} - the json object that is returned by the end point
###
getDeploymentStatus: (ocProject, deployConfig) ->
imageName = undefined
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/status"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
console.log "getting: #{urlString}"
return request reqObj
.then (response) ->
console.log('getDeploymentStatus called')
return response
.catch (err) ->
console.log "caught error #{err}"
console.log "#{err.stack}"
###*
# Gets the currently configured image name... looks like:
# docker-registry.default.svc:5000/databcdc/datapusher@sha256:2eff082c999cbe0eff08816d2b8d4d7b97e6e7d5825ca85ef3714990752b1c7c
#
# does this by getting the latestVersion property from the deploy config
# status end point, then appends the latestVersion to the end of the
# deployconfig name to get the replicationcontroller name
# queries the replication controller end point to get the image that
# that the latest replication controller deployed.
#
# @param {string} ocProject - the name of the oc project
# @param {string} deployConfig - the name of the deploy config
# @return {promise} - will yield the name of the image
#
###
getDeployedImage : (ocProject, deployConfig) ->
imageName = undefined
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/oapi/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/status"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
console.log "getting: #{urlString}"
# this first request gets the "latestVersion" which oc uses to name the
# replication controller, then queries the status of the replication controller.
return request reqObj
.then (response) ->
replicationController = "#{deployConfig}-#{response.status.latestVersion}"
console.log "replication controller: #{replicationController}"
apiEndPoints = "/api/v1/namespaces/#{ocProject}/replicationcontrollers/#{replicationController}"
urlString = "#{urldomain}#{apiEndPoints}"
repContReq = reqObj
repContReq.uri = urlString
repControllerRequest = request repContReq
.then (response) ->
containers = response.spec.template.spec.containers
#console.log "containers: #{containers}"
for container in containers
console.log "container name: #{container.name}"
#console.log "container: #{JSON.stringify(container)}"
if container.name == deployConfig
imageName = container.image
console.log "image name: #{imageName}"
return imageName
.catch (err) ->
console.log "Error: Unable to get the replication controller: #{replicationController}"
console.log "response was: #{JSON.stringify(response)}"
.catch (err) ->
console.log '------- error called -------'
console.log "error: #{err}"
console.log "request: #{JSON.stringify(reqObj)}"
###*
# Calls the deployment instantiation end point and returns the json
# data
#
# @param {string} ocProject - the openshift project
# @param {deployConfig} deployConfig - The deploy config
# @return {promise} - promise that will yield the payload from the deployment
# instantiation event
###
deploy : (ocProject, deployConfig) ->
# ocProject: the openshift project
# deployConfig: the deployment config to be deployed
#
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/apis/apps.openshift.io/v1/namespaces/#{ocProject}/deploymentconfigs/#{deployConfig}/instantiate"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
reqObj.method = 'POST'
reqObj.headers.Content-type = "application/json"
reqObj.body = {
"kind":"DeploymentRequest",
"apiVersion":"apps.openshift.io/v1",
"name":"#{deployConfig}",
"latest":true,
"force":true}
return request reqObj
###*
# returns a promise that will wait for the specified number or seconds to
# complete
#
# @param {number} waittime - the amount of time in milliseconds to wait
# @return {promise} - that waits for a set amount of time
###
delay : (waittime) ->
ms = new Promise (resolve) ->
setTimeout(resolve, waittime)
###*
# Queries the status of the replication controller and retrieves the desired
# number of controllers from `spec.replicas` and compares against
# status.replicas. Cancels out when they are either equal, or the maxuimum
# number of recursions is exceeded.
#
# When desired replicas vs existing replicas is not equal will wait for
# 5 seconds then check again.
#
# @param {string} ocProject - The openshift project
# @param {replicationControllerName} - name of the replication controller
# @param {number} cnt - Leave this parameter, it is used internally to manage
# recursion depth. Used to cancel out beyond a set
# number of iterations
#
###
deployWatch : (ocProject, replicationControllerName, cnt=0) ->
maxIterations = 5
timeBetweenIterations = 5000
reqObj = this.getCoreRequest()
urldomain = this.baseUrl()
apiEndPoints = "/api/v1/namespaces/#{ocProject}/replicationcontrollers/#{replicationControllerName}"
urlString = "#{urldomain}#{apiEndPoints}"
reqObj.uri = urlString
repQuery = await request reqObj
this.statuses.updateStatus('deploy', 'deploying', repQuery)
#console.log "repQuery: #{JSON.stringify(repQuery)}"
console.log "requested replicas: #{repQuery.spec.replicas} ?= existing replicas: #{repQuery.status.replicas}"
console.log "kubectl.kubernetes.io/desired-replicas: #{repQuery.metadata.annotations['kubectl.kubernetes.io/desired-replicas']}"
# Code below is monitoring the target replicas vs the actual
# replicas... waits until they are equal
#
# Possible source of target pods: repQuery.metadata.annotations["kubectl.kubernetes.io/desired-replicas"]
# and.. repQuery.spec.replicas
if repQuery.spec.replicas == repQuery.status.replicas
console.log "requested replicas are up"
this.statuses.updateStatus('deploy', 'success', repQuery)
return repQuery
else if cnt > maxIterations
console.log("max attempts exceeded #{cnt}")
this.statuses.updateStatus('deploy', 'failed', repQuery)
return repQuery
else
cnt = cnt + 1
console.log("attempting await")
await this.delay(timeBetweenIterations)
console.log("await complete")
this.deployWatch(ocProject, replicationControllerName, cnt)
###*
#
# Checks to see if the latest build is the version that is
# currently deployed, and if it is stops, otherwise proceeds
# with a deployment.
# @param {string} ocProject - The name of the openshift project
# @param {string} buildConfig - The build config name
# @return {object} - returns a OCStatus object
#
###
deployLatest : (ocBuildProject, buildConfig, ocDeployProject, deployConfig) ->
#
replicationController = undefined
this.statuses.updateStatus('deploy', 'checking')
try
console.log "getting latest..."
isLatest = await this.isLatestImageDeployed(ocBuildProject, buildConfig, ocDeployProject, deployConfig)
if !isLatest
console.log "instantiating a deploy..."
this.statuses.updateStatus('deploy', 'initiated')
deployObj = await this.deploy(ocDeployProject, deployConfig)
replicationController = "#{deployObj.metadata.name}-#{deployObj.status.latestVersion}"
this.statuses.updateStatus('deploy', 'initiated', deployObj)
if replicationController == undefined
# Getting the name of the replication controller that is doing
# the rollout for the depolyment
console.log "getting the replication controller name..."
this.statuses.updateStatus('deploy', 'initiated')
# should get the actual object instead of just the status, then could
# update the status object
latestDeployment = await this.getDeploymentStatus(ocDeployProject, deployConfig)
replicationController = "#{deployConfig}-#{latestDeployment.status.latestVersion}"
# is latest only indicates that the deployment has already been triggered
# code below will monitor for its completion.
this.statuses.updateStatus('deploy', 'deploying')
deployStatus = await this.deployWatch(ocDeployProject, replicationController)
return this.statuses
console.log "----------Deploy complete ----------"
catch err
console.log "error encountered in attempt to deploy..", err
return await this.statuses.updateStatusAsync('deploy', 'error', err)
###*
# Class that keeps track of what actions have been performed and what their
# statuses are.
# action:
# - build
# - buildwatch
# - deploy
# - deploywatch
#
# status:
# - completed
# - cancelled
# - failed
# - running
# - initiated
#
# payload:
# The last js object returned by the last operation relating to the action
###
class OCStatus
constructor: () ->
@statuses = {}
###*
# updates the status of an action, if the action has not been defined then
# it gets added to this object. OC status object is used to collect the
# statuses of a number of different actions
#
# @param {string} action - a string describing the action (build | deploy)
# @param {string} status - a string describing the status of the action (completed | cancelled | failed | running | instantiated )
###
updateStatus : (action, status, payload=undefined) ->
if @statuses[action] == undefined
@statuses[action] = {}
@statuses[action]['status'] = status
if payload != undefined
@statuses[action]['payload'] = payload
###*
# Finds the status record that alignes with the action and updates the payload
# associated with that action
#
# @param {string} action - a string describing the action (build | deploy)
# @param {object} payload - an object that describes the action.. typically this
# is json data returned by an oc endpoint
###
setPayload : (action, payload) ->
@statuses[action]['payload'] = payload
###*
# @param {string} action - a string describing the action (build | deploy)
# @param {string} status - status - a string describing the status of the
# action (completed | cancelled | failed | running |
# instantiated )
# @return {Promise} - a promise that will resolve to a reference to this
# status object
###
updateStatusAsync : (action, status, payload=undefined) ->
objref = this
return val = new Promise (resolve) ->
objref.updateStatus(action, status, payload)
resolve objref
|
[
{
"context": " pet_status: \"petStatus\"\n pet_name: \"petName\"\n\n expect(@result.messagePropertyMap).",
"end": 3275,
"score": 0.7801046371459961,
"start": 3272,
"tag": "NAME",
"value": "pet"
},
{
"context": "h ->\n @parameters = [\n { name: \"Bandit\" }\n { name: \"mastiff\" }\n { ",
"end": 3777,
"score": 0.9794925451278687,
"start": 3771,
"tag": "NAME",
"value": "Bandit"
},
{
"context": " { name: \"Bandit\" }\n { name: \"mastiff\" }\n { name: \"bandit_captain\" }\n ",
"end": 3809,
"score": 0.9871435165405273,
"start": 3802,
"tag": "NAME",
"value": "mastiff"
},
{
"context": "h ->\n @parameters = [\n { name: \"Bandit\" }\n { name: \"mastiff\" }\n { ",
"end": 4232,
"score": 0.9682815074920654,
"start": 4226,
"tag": "NAME",
"value": "Bandit"
},
{
"context": " { name: \"Bandit\" }\n { name: \"mastiff\" }\n { name: \"bandit_captain\" }\n ",
"end": 4264,
"score": 0.9145700335502625,
"start": 4257,
"tag": "NAME",
"value": "mastiff"
},
{
"context": "h ->\n @parameters = [\n { name: \"Bandit\" }\n { name: \"mastiff\" }\n { ",
"end": 4673,
"score": 0.9652990698814392,
"start": 4667,
"tag": "NAME",
"value": "Bandit"
},
{
"context": " { name: \"Bandit\" }\n { name: \"mastiff\" }\n { name: \"bandit_captain\" }\n ",
"end": 4705,
"score": 0.9728521108627319,
"start": 4698,
"tag": "NAME",
"value": "mastiff"
},
{
"context": "h ->\n @parameters = [\n { name: \"Bandit\" }\n { name: \"mastiff\" }\n { ",
"end": 5536,
"score": 0.9564736485481262,
"start": 5530,
"tag": "NAME",
"value": "Bandit"
},
{
"context": " { name: \"Bandit\" }\n { name: \"mastiff\" }\n { name: \"bandit_captain\" }\n ",
"end": 5568,
"score": 0.9394408464431763,
"start": 5561,
"tag": "NAME",
"value": "mastiff"
},
{
"context": " { name: \"bandit_captain\" }\n { name: \"stats\", schema:\n allOf: [\n ",
"end": 5637,
"score": 0.8197252750396729,
"start": 5632,
"tag": "NAME",
"value": "stats"
}
] | test/parser/swagger-2-to-proxy-config-spec.coffee | octoblu/swagger-device-generator | 0 | _ = require 'lodash'
Swagger2ToProxyConfig = require '../../parser/swagger-2-to-proxy-config'
describe 'Swagger2ToProxyConfig', ->
beforeEach ->
@petsSwagger = require '../samples/swagger/pets-resolved.json'
@sut = new Swagger2ToProxyConfig @petsSwagger
it 'should exist', ->
expect(@sut).to.exist
describe '.generateProxyConfig', ->
describe 'when called', ->
beforeEach ->
@result = @sut.generateProxyConfig()
it 'should return an object with requestOptions', ->
expect(@result.requestOptions).to.exist
it 'should return an object with requestOptions for each action', ->
expect(_.keys @result.requestOptions).to.deep.equal [
"getAllPets"
"createPet"
"deletePet"
"getPetById"
]
it 'should add a uri property to getAllPets', ->
expect(@result.requestOptions.getAllPets.uri).to.exist
describe '.generateProxyActionConfig', ->
it 'should exist', ->
expect(@sut.generateProxyActionConfig).to.exist
describe 'when called with an action name', ->
beforeEach ->
@result = @sut.generateProxyActionConfig 'getPetById'
it 'should return the proxy config uri', ->
expect(@result.uri).to.equal '\"http://petstore.swagger.wordnik.com/api/pets/#{options.id}\"'
it 'should return the method', ->
expect(@result.method).to.equal 'GET'
it 'should not return body params', ->
expect(@result.body).to.deep.equal []
describe 'when called with an action name with query and body params', ->
beforeEach ->
@petsSwagger.paths['/pets'].get.parameters = [
{ name: "pet_status", in: "query"}
{ name: "pet_name", in: "query"}
{ name: "pet_type", in: "body"}
{ name: "pet_age", in: "body"}
]
@result = @sut.generateProxyActionConfig 'getAllPets'
it 'should return a proxy config with query parameters', ->
expect(@result.qs).to.exist
it 'should return an array containing the query parameter names', ->
expect(@result.qs).to.deep.equal ['pet_status', 'pet_name']
it 'should return a proxy config with body parameters', ->
expect(@result.body).to.deep.equal ['pet_type', 'pet_age']
it 'should return a proxy config with query parameters that map to message properties', ->
messagePropertyMap =
pet_status: "petStatus"
pet_name: "petName"
pet_type: "petType"
pet_age: "petAge"
expect(@result.messagePropertyMap).to.deep.equal messagePropertyMap
describe 'when called with an action name with post data', ->
beforeEach ->
@petsSwagger.paths['/pets'].post.parameters = [
{ name: "pet_status", in: "query"}
{ name: "pet_name", in: "query"}
]
@result = @sut.generateProxyActionConfig 'createPet'
console.log JSON.stringify @result, null, 2
it 'should return a proxy config with body parameters', ->
expect(@result.messagePropertyMap).to.exist
it 'should return a proxy config with body parameters that map to message properties', ->
messagePropertyMap =
pet_status: "petStatus"
pet_name: "petName"
expect(@result.messagePropertyMap).to.deep.equal messagePropertyMap
describe '.getParameterNameMap', ->
describe 'when called with parameters with the names we want', ->
beforeEach ->
@result = @sut.getParameterNameMap [ name: "id" ]
it 'should return an empty map', ->
expect(@result).to.deep.equal {}
describe "when called with parameters with the names we don't want", ->
beforeEach ->
@parameters = [
{ name: "Bandit" }
{ name: "mastiff" }
{ name: "bandit_captain" }
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
}
describe "when called with parameters with the names we don't want", ->
beforeEach ->
@parameters = [
{ name: "Bandit" }
{ name: "mastiff" }
{ name: "bandit_captain" }
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
}
describe "when called with parameters with a schema", ->
beforeEach ->
@parameters = [
{ name: "Bandit" }
{ name: "mastiff" }
{ name: "bandit_captain" }
{ name: "stats", schema:
allOf: [
{
properties:
monster_id: true
bravery_level: true
strength: true
dexterity: true
}
]
}
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
"monsterId": "monster_id"
"braveryLevel": "bravery_level"
}
describe "when called with parameters with a schema with nested properties", ->
beforeEach ->
@parameters = [
{ name: "Bandit" }
{ name: "mastiff" }
{ name: "bandit_captain" }
{ name: "stats", schema:
allOf: [
{
properties:
nation:
type: "object"
properties:
population:
type: "integer"
average_education_level:
type: "integer"
monster_id: true
bravery_level: true
strength: true
dexterity: true
}
]
}
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
"monsterId": "monster_id"
"braveryLevel": "bravery_level"
"averageEducationLevel": "average_education_level"
}
describe 'getParameterTypeMap', ->
describe 'when called with some youtube parameters', ->
beforeEach ->
@parameters = [
{ in: "query", name: "part" }
{ in: "query", name: "onBehalfOfContentOwner" }
{ in: "query", name: "onBehalfOfContentOwnerChannel" }
{
in: "body"
name: "body"
schema:
properties:
contentDetails:
properties:
itemCount:
type: "integer"
type: "object"
etag:
type: "string"
id:
type: "string"
kind:
type: "string"
status:
properties:
privacyStatus:
type: "string"
type: "object"
type: "object"
}
]
@result = @sut.getParameterTypeMap @parameters
it 'should return an object with qs and body keys', ->
expect(@result.qs).to.exist
expect(@result.body).to.exist
it 'should have all the query param names in qs', ->
expect(@result.qs).to.deep.equal [
"part"
"onBehalfOfContentOwner"
"onBehalfOfContentOwnerChannel"
]
it 'should contain all the body params in body', ->
expect(@result.body).to.deep.equal [
"contentDetails"
"etag"
"id"
"kind"
"status"
]
describe 'when called with parameters that have allOf in their schemas', ->
beforeEach ->
@parameters = [{
in: "body"
name: "pet"
schema:
allOf: [{
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
}
},
"required": [
"id",
"name"
]
}]
}]
@result = @sut.getParameterTypeMap @parameters
it 'should contain all the body params in body', ->
expect(@result.body).to.deep.equal [
"id"
"name"
]
| 134489 | _ = require 'lodash'
Swagger2ToProxyConfig = require '../../parser/swagger-2-to-proxy-config'
describe 'Swagger2ToProxyConfig', ->
beforeEach ->
@petsSwagger = require '../samples/swagger/pets-resolved.json'
@sut = new Swagger2ToProxyConfig @petsSwagger
it 'should exist', ->
expect(@sut).to.exist
describe '.generateProxyConfig', ->
describe 'when called', ->
beforeEach ->
@result = @sut.generateProxyConfig()
it 'should return an object with requestOptions', ->
expect(@result.requestOptions).to.exist
it 'should return an object with requestOptions for each action', ->
expect(_.keys @result.requestOptions).to.deep.equal [
"getAllPets"
"createPet"
"deletePet"
"getPetById"
]
it 'should add a uri property to getAllPets', ->
expect(@result.requestOptions.getAllPets.uri).to.exist
describe '.generateProxyActionConfig', ->
it 'should exist', ->
expect(@sut.generateProxyActionConfig).to.exist
describe 'when called with an action name', ->
beforeEach ->
@result = @sut.generateProxyActionConfig 'getPetById'
it 'should return the proxy config uri', ->
expect(@result.uri).to.equal '\"http://petstore.swagger.wordnik.com/api/pets/#{options.id}\"'
it 'should return the method', ->
expect(@result.method).to.equal 'GET'
it 'should not return body params', ->
expect(@result.body).to.deep.equal []
describe 'when called with an action name with query and body params', ->
beforeEach ->
@petsSwagger.paths['/pets'].get.parameters = [
{ name: "pet_status", in: "query"}
{ name: "pet_name", in: "query"}
{ name: "pet_type", in: "body"}
{ name: "pet_age", in: "body"}
]
@result = @sut.generateProxyActionConfig 'getAllPets'
it 'should return a proxy config with query parameters', ->
expect(@result.qs).to.exist
it 'should return an array containing the query parameter names', ->
expect(@result.qs).to.deep.equal ['pet_status', 'pet_name']
it 'should return a proxy config with body parameters', ->
expect(@result.body).to.deep.equal ['pet_type', 'pet_age']
it 'should return a proxy config with query parameters that map to message properties', ->
messagePropertyMap =
pet_status: "petStatus"
pet_name: "petName"
pet_type: "petType"
pet_age: "petAge"
expect(@result.messagePropertyMap).to.deep.equal messagePropertyMap
describe 'when called with an action name with post data', ->
beforeEach ->
@petsSwagger.paths['/pets'].post.parameters = [
{ name: "pet_status", in: "query"}
{ name: "pet_name", in: "query"}
]
@result = @sut.generateProxyActionConfig 'createPet'
console.log JSON.stringify @result, null, 2
it 'should return a proxy config with body parameters', ->
expect(@result.messagePropertyMap).to.exist
it 'should return a proxy config with body parameters that map to message properties', ->
messagePropertyMap =
pet_status: "petStatus"
pet_name: "<NAME>Name"
expect(@result.messagePropertyMap).to.deep.equal messagePropertyMap
describe '.getParameterNameMap', ->
describe 'when called with parameters with the names we want', ->
beforeEach ->
@result = @sut.getParameterNameMap [ name: "id" ]
it 'should return an empty map', ->
expect(@result).to.deep.equal {}
describe "when called with parameters with the names we don't want", ->
beforeEach ->
@parameters = [
{ name: "<NAME>" }
{ name: "<NAME>" }
{ name: "bandit_captain" }
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
}
describe "when called with parameters with the names we don't want", ->
beforeEach ->
@parameters = [
{ name: "<NAME>" }
{ name: "<NAME>" }
{ name: "bandit_captain" }
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
}
describe "when called with parameters with a schema", ->
beforeEach ->
@parameters = [
{ name: "<NAME>" }
{ name: "<NAME>" }
{ name: "bandit_captain" }
{ name: "stats", schema:
allOf: [
{
properties:
monster_id: true
bravery_level: true
strength: true
dexterity: true
}
]
}
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
"monsterId": "monster_id"
"braveryLevel": "bravery_level"
}
describe "when called with parameters with a schema with nested properties", ->
beforeEach ->
@parameters = [
{ name: "<NAME>" }
{ name: "<NAME>" }
{ name: "bandit_captain" }
{ name: "<NAME>", schema:
allOf: [
{
properties:
nation:
type: "object"
properties:
population:
type: "integer"
average_education_level:
type: "integer"
monster_id: true
bravery_level: true
strength: true
dexterity: true
}
]
}
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
"monsterId": "monster_id"
"braveryLevel": "bravery_level"
"averageEducationLevel": "average_education_level"
}
describe 'getParameterTypeMap', ->
describe 'when called with some youtube parameters', ->
beforeEach ->
@parameters = [
{ in: "query", name: "part" }
{ in: "query", name: "onBehalfOfContentOwner" }
{ in: "query", name: "onBehalfOfContentOwnerChannel" }
{
in: "body"
name: "body"
schema:
properties:
contentDetails:
properties:
itemCount:
type: "integer"
type: "object"
etag:
type: "string"
id:
type: "string"
kind:
type: "string"
status:
properties:
privacyStatus:
type: "string"
type: "object"
type: "object"
}
]
@result = @sut.getParameterTypeMap @parameters
it 'should return an object with qs and body keys', ->
expect(@result.qs).to.exist
expect(@result.body).to.exist
it 'should have all the query param names in qs', ->
expect(@result.qs).to.deep.equal [
"part"
"onBehalfOfContentOwner"
"onBehalfOfContentOwnerChannel"
]
it 'should contain all the body params in body', ->
expect(@result.body).to.deep.equal [
"contentDetails"
"etag"
"id"
"kind"
"status"
]
describe 'when called with parameters that have allOf in their schemas', ->
beforeEach ->
@parameters = [{
in: "body"
name: "pet"
schema:
allOf: [{
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
}
},
"required": [
"id",
"name"
]
}]
}]
@result = @sut.getParameterTypeMap @parameters
it 'should contain all the body params in body', ->
expect(@result.body).to.deep.equal [
"id"
"name"
]
| true | _ = require 'lodash'
Swagger2ToProxyConfig = require '../../parser/swagger-2-to-proxy-config'
describe 'Swagger2ToProxyConfig', ->
beforeEach ->
@petsSwagger = require '../samples/swagger/pets-resolved.json'
@sut = new Swagger2ToProxyConfig @petsSwagger
it 'should exist', ->
expect(@sut).to.exist
describe '.generateProxyConfig', ->
describe 'when called', ->
beforeEach ->
@result = @sut.generateProxyConfig()
it 'should return an object with requestOptions', ->
expect(@result.requestOptions).to.exist
it 'should return an object with requestOptions for each action', ->
expect(_.keys @result.requestOptions).to.deep.equal [
"getAllPets"
"createPet"
"deletePet"
"getPetById"
]
it 'should add a uri property to getAllPets', ->
expect(@result.requestOptions.getAllPets.uri).to.exist
describe '.generateProxyActionConfig', ->
it 'should exist', ->
expect(@sut.generateProxyActionConfig).to.exist
describe 'when called with an action name', ->
beforeEach ->
@result = @sut.generateProxyActionConfig 'getPetById'
it 'should return the proxy config uri', ->
expect(@result.uri).to.equal '\"http://petstore.swagger.wordnik.com/api/pets/#{options.id}\"'
it 'should return the method', ->
expect(@result.method).to.equal 'GET'
it 'should not return body params', ->
expect(@result.body).to.deep.equal []
describe 'when called with an action name with query and body params', ->
beforeEach ->
@petsSwagger.paths['/pets'].get.parameters = [
{ name: "pet_status", in: "query"}
{ name: "pet_name", in: "query"}
{ name: "pet_type", in: "body"}
{ name: "pet_age", in: "body"}
]
@result = @sut.generateProxyActionConfig 'getAllPets'
it 'should return a proxy config with query parameters', ->
expect(@result.qs).to.exist
it 'should return an array containing the query parameter names', ->
expect(@result.qs).to.deep.equal ['pet_status', 'pet_name']
it 'should return a proxy config with body parameters', ->
expect(@result.body).to.deep.equal ['pet_type', 'pet_age']
it 'should return a proxy config with query parameters that map to message properties', ->
messagePropertyMap =
pet_status: "petStatus"
pet_name: "petName"
pet_type: "petType"
pet_age: "petAge"
expect(@result.messagePropertyMap).to.deep.equal messagePropertyMap
describe 'when called with an action name with post data', ->
beforeEach ->
@petsSwagger.paths['/pets'].post.parameters = [
{ name: "pet_status", in: "query"}
{ name: "pet_name", in: "query"}
]
@result = @sut.generateProxyActionConfig 'createPet'
console.log JSON.stringify @result, null, 2
it 'should return a proxy config with body parameters', ->
expect(@result.messagePropertyMap).to.exist
it 'should return a proxy config with body parameters that map to message properties', ->
messagePropertyMap =
pet_status: "petStatus"
pet_name: "PI:NAME:<NAME>END_PIName"
expect(@result.messagePropertyMap).to.deep.equal messagePropertyMap
describe '.getParameterNameMap', ->
describe 'when called with parameters with the names we want', ->
beforeEach ->
@result = @sut.getParameterNameMap [ name: "id" ]
it 'should return an empty map', ->
expect(@result).to.deep.equal {}
describe "when called with parameters with the names we don't want", ->
beforeEach ->
@parameters = [
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "bandit_captain" }
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
}
describe "when called with parameters with the names we don't want", ->
beforeEach ->
@parameters = [
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "bandit_captain" }
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
}
describe "when called with parameters with a schema", ->
beforeEach ->
@parameters = [
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "bandit_captain" }
{ name: "stats", schema:
allOf: [
{
properties:
monster_id: true
bravery_level: true
strength: true
dexterity: true
}
]
}
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
"monsterId": "monster_id"
"braveryLevel": "bravery_level"
}
describe "when called with parameters with a schema with nested properties", ->
beforeEach ->
@parameters = [
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "PI:NAME:<NAME>END_PI" }
{ name: "bandit_captain" }
{ name: "PI:NAME:<NAME>END_PI", schema:
allOf: [
{
properties:
nation:
type: "object"
properties:
population:
type: "integer"
average_education_level:
type: "integer"
monster_id: true
bravery_level: true
strength: true
dexterity: true
}
]
}
]
@result = @sut.getParameterNameMap @parameters
it 'should return an empty map', ->
expect(@result).to.deep.equal {
"bandit": "Bandit"
"banditCaptain": "bandit_captain"
"monsterId": "monster_id"
"braveryLevel": "bravery_level"
"averageEducationLevel": "average_education_level"
}
describe 'getParameterTypeMap', ->
describe 'when called with some youtube parameters', ->
beforeEach ->
@parameters = [
{ in: "query", name: "part" }
{ in: "query", name: "onBehalfOfContentOwner" }
{ in: "query", name: "onBehalfOfContentOwnerChannel" }
{
in: "body"
name: "body"
schema:
properties:
contentDetails:
properties:
itemCount:
type: "integer"
type: "object"
etag:
type: "string"
id:
type: "string"
kind:
type: "string"
status:
properties:
privacyStatus:
type: "string"
type: "object"
type: "object"
}
]
@result = @sut.getParameterTypeMap @parameters
it 'should return an object with qs and body keys', ->
expect(@result.qs).to.exist
expect(@result.body).to.exist
it 'should have all the query param names in qs', ->
expect(@result.qs).to.deep.equal [
"part"
"onBehalfOfContentOwner"
"onBehalfOfContentOwnerChannel"
]
it 'should contain all the body params in body', ->
expect(@result.body).to.deep.equal [
"contentDetails"
"etag"
"id"
"kind"
"status"
]
describe 'when called with parameters that have allOf in their schemas', ->
beforeEach ->
@parameters = [{
in: "body"
name: "pet"
schema:
allOf: [{
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
}
},
"required": [
"id",
"name"
]
}]
}]
@result = @sut.getParameterTypeMap @parameters
it 'should contain all the body params in body', ->
expect(@result.body).to.deep.equal [
"id"
"name"
]
|
[
{
"context": "alStorageService) ->\n\n user = {}\n COOKIE_KEY = \"KIISSTOKEN\"\n\n handleLogin = (theUser) ->\n localStorageSe",
"end": 150,
"score": 0.9992319345474243,
"start": 140,
"tag": "KEY",
"value": "KIISSTOKEN"
}
] | js/services/user_service.coffee | c0ze/kiiss | 0 | ---
---
app.factory 'UserService', ['$rootScope', 'localStorageService', ($rootScope, localStorageService) ->
user = {}
COOKIE_KEY = "KIISSTOKEN"
handleLogin = (theUser) ->
localStorageService.set(COOKIE_KEY, theUser.getAccessToken())
user = theUser
$rootScope.$emit 'login', theUser
@logout = () ->
localStorageService.set(COOKIE_KEY, "")
KiiUser.logOut()
user = {}
@login = (user) ->
userKey = localStorageService.get(COOKIE_KEY)
if userKey
KiiUser.authenticateWithToken(userKey,
success: (theUser) ->
handleLogin(theUser)
failure: (user, errorString) ->
$rootScope.$emit 'failure', errorString
)
else
if user? and user.useremail? and user.password?
KiiUser.authenticate( user.useremail, user.password,
success: (theUser) ->
handleLogin(theUser)
failure: (theUser, errorString) ->
if (errorString.indexOf('invalid_grant') > -1)
$rootScope.$emit 'failure', "User or password incorrect"
else
$rootScope.$emit 'failure', errorString
)
else
$rootScope.$emit 'failure', "Not Logged in."
@register = (user) ->
username = user.useremail.replace /@/, ""
kiiUser = KiiUser.userWithEmailAddressAndUsername(user.useremail, username, user.password);
kiiUser.register(
success: (theUser) ->
handleLogin(theUser)
failure: (theUser, errorString) ->
$rootScope.$emit 'failure', "Error while registering: " + errorString
)
@
]
| 87757 | ---
---
app.factory 'UserService', ['$rootScope', 'localStorageService', ($rootScope, localStorageService) ->
user = {}
COOKIE_KEY = "<KEY>"
handleLogin = (theUser) ->
localStorageService.set(COOKIE_KEY, theUser.getAccessToken())
user = theUser
$rootScope.$emit 'login', theUser
@logout = () ->
localStorageService.set(COOKIE_KEY, "")
KiiUser.logOut()
user = {}
@login = (user) ->
userKey = localStorageService.get(COOKIE_KEY)
if userKey
KiiUser.authenticateWithToken(userKey,
success: (theUser) ->
handleLogin(theUser)
failure: (user, errorString) ->
$rootScope.$emit 'failure', errorString
)
else
if user? and user.useremail? and user.password?
KiiUser.authenticate( user.useremail, user.password,
success: (theUser) ->
handleLogin(theUser)
failure: (theUser, errorString) ->
if (errorString.indexOf('invalid_grant') > -1)
$rootScope.$emit 'failure', "User or password incorrect"
else
$rootScope.$emit 'failure', errorString
)
else
$rootScope.$emit 'failure', "Not Logged in."
@register = (user) ->
username = user.useremail.replace /@/, ""
kiiUser = KiiUser.userWithEmailAddressAndUsername(user.useremail, username, user.password);
kiiUser.register(
success: (theUser) ->
handleLogin(theUser)
failure: (theUser, errorString) ->
$rootScope.$emit 'failure', "Error while registering: " + errorString
)
@
]
| true | ---
---
app.factory 'UserService', ['$rootScope', 'localStorageService', ($rootScope, localStorageService) ->
user = {}
COOKIE_KEY = "PI:KEY:<KEY>END_PI"
handleLogin = (theUser) ->
localStorageService.set(COOKIE_KEY, theUser.getAccessToken())
user = theUser
$rootScope.$emit 'login', theUser
@logout = () ->
localStorageService.set(COOKIE_KEY, "")
KiiUser.logOut()
user = {}
@login = (user) ->
userKey = localStorageService.get(COOKIE_KEY)
if userKey
KiiUser.authenticateWithToken(userKey,
success: (theUser) ->
handleLogin(theUser)
failure: (user, errorString) ->
$rootScope.$emit 'failure', errorString
)
else
if user? and user.useremail? and user.password?
KiiUser.authenticate( user.useremail, user.password,
success: (theUser) ->
handleLogin(theUser)
failure: (theUser, errorString) ->
if (errorString.indexOf('invalid_grant') > -1)
$rootScope.$emit 'failure', "User or password incorrect"
else
$rootScope.$emit 'failure', errorString
)
else
$rootScope.$emit 'failure', "Not Logged in."
@register = (user) ->
username = user.useremail.replace /@/, ""
kiiUser = KiiUser.userWithEmailAddressAndUsername(user.useremail, username, user.password);
kiiUser.register(
success: (theUser) ->
handleLogin(theUser)
failure: (theUser, errorString) ->
$rootScope.$emit 'failure', "Error while registering: " + errorString
)
@
]
|
[
{
"context": "ple.com',\n GIT_BRANCH: \"master\",\n GIT_COMMIT: \"3f93f2e4ddcf5a216d314d507e8579e99b21c8fb\",\n BUILD_ID: \"1\",\n BUILD_NUMBER: \"2\"\n}\n\nmodule.",
"end": 124,
"score": 0.6682659387588501,
"start": 85,
"tag": "PASSWORD",
"value": "f93f2e4ddcf5a216d314d507e8579e99b21c8fb"
}
] | test/fixtures/jenkins.coffee | holyshared/ci-detector | 3 | env = {
JENKINS_URL: 'http://example.com',
GIT_BRANCH: "master",
GIT_COMMIT: "3f93f2e4ddcf5a216d314d507e8579e99b21c8fb",
BUILD_ID: "1",
BUILD_NUMBER: "2"
}
module.exports = env
| 197267 | env = {
JENKINS_URL: 'http://example.com',
GIT_BRANCH: "master",
GIT_COMMIT: "3<PASSWORD>",
BUILD_ID: "1",
BUILD_NUMBER: "2"
}
module.exports = env
| true | env = {
JENKINS_URL: 'http://example.com',
GIT_BRANCH: "master",
GIT_COMMIT: "3PI:PASSWORD:<PASSWORD>END_PI",
BUILD_ID: "1",
BUILD_NUMBER: "2"
}
module.exports = env
|
[
{
"context": "emory \" + @display_name()\n else if key == \"fs.disk.usage\"\n \"Disk Usage (%) on \" + br + @get(\"tags\"",
"end": 1716,
"score": 0.7349315881729126,
"start": 1706,
"tag": "KEY",
"value": "disk.usage"
}
] | app/assets/javascripts/models/metric.js.coffee | chetan/bixby-manager | 2 |
namespace 'Bixby.model', (exports, top) ->
class exports.Metric extends Stark.Model
@key: "metric"
params: [ { name: "metric", set_id: true }, "host" ]
urlRoot: ->
host_id = @host_id || @get("host_id")
"/rest/hosts/#{host_id}/metrics"
url: ->
s = super() + "?"
if @get("start")
s += "&start=" + @get("start")
if @get("end")
s += "&end=" + @get("end")
if @get("downsample")
s += "&downsample=" + @get("downsample")
return s
display_tags: ->
label = @get("label")
return if label? && label.match(/\$/) # skip labels with vars
tags = _.omit(@get("tags"), ["tenant_id", "org_id", "host_id", "host", "check_id"])
_.map(tags, (v, k) -> "#{k}=#{v}").join(", ")
# Get the display name
# e.g., "Disk Usage (%)"
display_name: ->
s = null
if @get("label")
tags = @get("tags")
l = @get("label")
matches = _.getMatches(l, /(^|[\b\s])\$([\w]+)\b/g, 2)
_.each matches, (m) ->
tag = tags[m]
if tag
l = l.replace("$#{m}", tag)
s = l
else
s = @get("name") || @get("desc")
if @get("unit")
s = s + " (" + @get("unit") + ")"
return s
# Create a condensed metric label with relevant tags included
custom_display_name: (add_br) ->
br = if add_br
"<br>"
else
""
key = @get("key")
label = if key.match(/^cpu.usage/)
"CPU " + @display_name()
else if key.match(/^cpu.loadavg/)
"CPU Load " + br + @display_name()
else if key == "mem.usage"
"Memory " + @display_name()
else if key == "fs.disk.usage"
"Disk Usage (%) on " + br + @get("tags").mount
else if key.match(/^net.[rt]x.bytes/)
@display_name() + " on " + @get("tags").interface
else
@display_name()
return label
# Get the range attribute for this metric
get_range: ->
if !@get("key").match(/^cpu.loadavg/)
return @get("range")
# custom range only for loadavg
# if all values < 1, returns "0..1", else null
larger = false
_.each @get("data"), (p) ->
if p.y > 1
larger = true
return
if larger
return null
else
return "0..1"
# Get a formatted value as a string for display
# e.g., "10.53% @ 2014/08/22 14:19:42"
format_value: (val, date) ->
unit_label = ""
unit = @get("unit")
if unit?
if unit == "%"
unit_label = "%"
else
unit_label = " " + unit
date = moment(date) if !moment.isMoment(date)
date.format("L HH:mm:ss")
val = Bixby.monitoring.Graph.format_value(val)
return _.str.sprintf("%s%s @ %s", val, unit_label, date)
# get only the metric attributes (the actual data elements)
# { key, tags, vals: [ {time, val}, ... ]}
metrics: ->
metrics = []
_.each @attributes, (v, k) ->
if _.isObject(v)
metrics.push(v)
return metrics
# Return list of tuples with time in millisec
tuples: ->
vals = _.map _.sortBy(@get("data"), "x"), (v) ->
[ new Date(v.x * 1000), v.y ]
class exports.MetricList extends Stark.Collection
model: exports.Metric
@key: "metrics"
url: -> "/rest/hosts/#{@host_id}/metrics"
params: [ "host" ]
comparator: (metric) ->
metric.display_name()
class exports.CheckMetricList extends exports.MetricList
params: [ { name: "metric", set_id: true }, "host", "check" ]
url: -> "/rest/hosts/#{@host_id}/checks/#{@check_id}/metrics"
class exports.HostSummaryMetricList extends exports.MetricList
url: -> "/rest/hosts/#{@host_id}/metrics/summary"
class exports.SummaryMetricList extends exports.MetricList
url: -> "/rest/metrics/summary"
| 47225 |
namespace 'Bixby.model', (exports, top) ->
class exports.Metric extends Stark.Model
@key: "metric"
params: [ { name: "metric", set_id: true }, "host" ]
urlRoot: ->
host_id = @host_id || @get("host_id")
"/rest/hosts/#{host_id}/metrics"
url: ->
s = super() + "?"
if @get("start")
s += "&start=" + @get("start")
if @get("end")
s += "&end=" + @get("end")
if @get("downsample")
s += "&downsample=" + @get("downsample")
return s
display_tags: ->
label = @get("label")
return if label? && label.match(/\$/) # skip labels with vars
tags = _.omit(@get("tags"), ["tenant_id", "org_id", "host_id", "host", "check_id"])
_.map(tags, (v, k) -> "#{k}=#{v}").join(", ")
# Get the display name
# e.g., "Disk Usage (%)"
display_name: ->
s = null
if @get("label")
tags = @get("tags")
l = @get("label")
matches = _.getMatches(l, /(^|[\b\s])\$([\w]+)\b/g, 2)
_.each matches, (m) ->
tag = tags[m]
if tag
l = l.replace("$#{m}", tag)
s = l
else
s = @get("name") || @get("desc")
if @get("unit")
s = s + " (" + @get("unit") + ")"
return s
# Create a condensed metric label with relevant tags included
custom_display_name: (add_br) ->
br = if add_br
"<br>"
else
""
key = @get("key")
label = if key.match(/^cpu.usage/)
"CPU " + @display_name()
else if key.match(/^cpu.loadavg/)
"CPU Load " + br + @display_name()
else if key == "mem.usage"
"Memory " + @display_name()
else if key == "fs.<KEY>"
"Disk Usage (%) on " + br + @get("tags").mount
else if key.match(/^net.[rt]x.bytes/)
@display_name() + " on " + @get("tags").interface
else
@display_name()
return label
# Get the range attribute for this metric
get_range: ->
if !@get("key").match(/^cpu.loadavg/)
return @get("range")
# custom range only for loadavg
# if all values < 1, returns "0..1", else null
larger = false
_.each @get("data"), (p) ->
if p.y > 1
larger = true
return
if larger
return null
else
return "0..1"
# Get a formatted value as a string for display
# e.g., "10.53% @ 2014/08/22 14:19:42"
format_value: (val, date) ->
unit_label = ""
unit = @get("unit")
if unit?
if unit == "%"
unit_label = "%"
else
unit_label = " " + unit
date = moment(date) if !moment.isMoment(date)
date.format("L HH:mm:ss")
val = Bixby.monitoring.Graph.format_value(val)
return _.str.sprintf("%s%s @ %s", val, unit_label, date)
# get only the metric attributes (the actual data elements)
# { key, tags, vals: [ {time, val}, ... ]}
metrics: ->
metrics = []
_.each @attributes, (v, k) ->
if _.isObject(v)
metrics.push(v)
return metrics
# Return list of tuples with time in millisec
tuples: ->
vals = _.map _.sortBy(@get("data"), "x"), (v) ->
[ new Date(v.x * 1000), v.y ]
class exports.MetricList extends Stark.Collection
model: exports.Metric
@key: "metrics"
url: -> "/rest/hosts/#{@host_id}/metrics"
params: [ "host" ]
comparator: (metric) ->
metric.display_name()
class exports.CheckMetricList extends exports.MetricList
params: [ { name: "metric", set_id: true }, "host", "check" ]
url: -> "/rest/hosts/#{@host_id}/checks/#{@check_id}/metrics"
class exports.HostSummaryMetricList extends exports.MetricList
url: -> "/rest/hosts/#{@host_id}/metrics/summary"
class exports.SummaryMetricList extends exports.MetricList
url: -> "/rest/metrics/summary"
| true |
namespace 'Bixby.model', (exports, top) ->
class exports.Metric extends Stark.Model
@key: "metric"
params: [ { name: "metric", set_id: true }, "host" ]
urlRoot: ->
host_id = @host_id || @get("host_id")
"/rest/hosts/#{host_id}/metrics"
url: ->
s = super() + "?"
if @get("start")
s += "&start=" + @get("start")
if @get("end")
s += "&end=" + @get("end")
if @get("downsample")
s += "&downsample=" + @get("downsample")
return s
display_tags: ->
label = @get("label")
return if label? && label.match(/\$/) # skip labels with vars
tags = _.omit(@get("tags"), ["tenant_id", "org_id", "host_id", "host", "check_id"])
_.map(tags, (v, k) -> "#{k}=#{v}").join(", ")
# Get the display name
# e.g., "Disk Usage (%)"
display_name: ->
s = null
if @get("label")
tags = @get("tags")
l = @get("label")
matches = _.getMatches(l, /(^|[\b\s])\$([\w]+)\b/g, 2)
_.each matches, (m) ->
tag = tags[m]
if tag
l = l.replace("$#{m}", tag)
s = l
else
s = @get("name") || @get("desc")
if @get("unit")
s = s + " (" + @get("unit") + ")"
return s
# Create a condensed metric label with relevant tags included
custom_display_name: (add_br) ->
br = if add_br
"<br>"
else
""
key = @get("key")
label = if key.match(/^cpu.usage/)
"CPU " + @display_name()
else if key.match(/^cpu.loadavg/)
"CPU Load " + br + @display_name()
else if key == "mem.usage"
"Memory " + @display_name()
else if key == "fs.PI:KEY:<KEY>END_PI"
"Disk Usage (%) on " + br + @get("tags").mount
else if key.match(/^net.[rt]x.bytes/)
@display_name() + " on " + @get("tags").interface
else
@display_name()
return label
# Get the range attribute for this metric
get_range: ->
if !@get("key").match(/^cpu.loadavg/)
return @get("range")
# custom range only for loadavg
# if all values < 1, returns "0..1", else null
larger = false
_.each @get("data"), (p) ->
if p.y > 1
larger = true
return
if larger
return null
else
return "0..1"
# Get a formatted value as a string for display
# e.g., "10.53% @ 2014/08/22 14:19:42"
format_value: (val, date) ->
unit_label = ""
unit = @get("unit")
if unit?
if unit == "%"
unit_label = "%"
else
unit_label = " " + unit
date = moment(date) if !moment.isMoment(date)
date.format("L HH:mm:ss")
val = Bixby.monitoring.Graph.format_value(val)
return _.str.sprintf("%s%s @ %s", val, unit_label, date)
# get only the metric attributes (the actual data elements)
# { key, tags, vals: [ {time, val}, ... ]}
metrics: ->
metrics = []
_.each @attributes, (v, k) ->
if _.isObject(v)
metrics.push(v)
return metrics
# Return list of tuples with time in millisec
tuples: ->
vals = _.map _.sortBy(@get("data"), "x"), (v) ->
[ new Date(v.x * 1000), v.y ]
class exports.MetricList extends Stark.Collection
model: exports.Metric
@key: "metrics"
url: -> "/rest/hosts/#{@host_id}/metrics"
params: [ "host" ]
comparator: (metric) ->
metric.display_name()
class exports.CheckMetricList extends exports.MetricList
params: [ { name: "metric", set_id: true }, "host", "check" ]
url: -> "/rest/hosts/#{@host_id}/checks/#{@check_id}/metrics"
class exports.HostSummaryMetricList extends exports.MetricList
url: -> "/rest/hosts/#{@host_id}/metrics/summary"
class exports.SummaryMetricList extends exports.MetricList
url: -> "/rest/metrics/summary"
|
[
{
"context": "# OT storage for CouchDB\n# Author: Max Ogden (@maxogden)\n#\n# The couchdb database contains two",
"end": 44,
"score": 0.9998608827590942,
"start": 35,
"tag": "NAME",
"value": "Max Ogden"
},
{
"context": "# OT storage for CouchDB\n# Author: Max Ogden (@maxogden)\n#\n# The couchdb database contains two kinds of d",
"end": 55,
"score": 0.9994131326675415,
"start": 45,
"tag": "USERNAME",
"value": "(@maxogden"
}
] | public/coffee/ide/editor/sharejs/vendor/server/db/couchdb.coffee | mickaobrien/web-sharelatex | 88 | # OT storage for CouchDB
# Author: Max Ogden (@maxogden)
#
# The couchdb database contains two kinds of documents:
#
# - Document snapshots have a key which is doc:the document name
# - Document ops have a random key, but docName: defined.
request = require('request').defaults json: true
# Helper method to parse errors out of couchdb. There's way more ways
# things can go wrong, but I think this catches all the ones I care about.
#
# callback(error) or callback()
parseError = (err, resp, body, callback) ->
body = body[0] if Array.isArray body and body.length >= 1
if err
# This indicates an HTTP error
callback err
else if resp.statusCode is 404
callback 'Document does not exist'
else if resp.statusCode is 403
callback 'forbidden'
else if typeof body is 'object'
if body.error is 'conflict'
callback 'Document already exists'
else if body.error
callback "#{body.error} reason: #{body.reason}"
else
callback()
else
callback()
module.exports = (options) ->
options ?= {}
db = options.uri or "http://localhost:5984/sharejs"
uriForDoc = (docName) -> "#{db}/doc:#{encodeURIComponent docName}"
uriForOps = (docName, start, end, include_docs) ->
startkey = encodeURIComponent(JSON.stringify [docName, start])
# {} is sorted after all numbers - so this will get all ops in the case that end is null.
endkey = encodeURIComponent(JSON.stringify [docName, end ? {}])
# Another way to write this method would be to use node's builtin uri-encoder.
extra = if include_docs then '&include_docs=true' else ''
"#{db}/_design/sharejs/_view/operations?startkey=#{startkey}&endkey=#{endkey}&inclusive_end=false#{extra}"
# Helper method to get the revision of a document snapshot.
getRev = (docName, dbMeta, callback) ->
if dbMeta?.rev
callback null, dbMeta.rev
else
# JSON defaults to true, and that makes request think I'm trying to sneak a request
# body in. Ugh.
request.head {uri:uriForDoc(docName), json:false}, (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
callback error
else
# The etag is the rev in quotes.
callback null, JSON.parse(resp.headers.etag)
writeSnapshotInternal = (docName, data, rev, callback) ->
body = data
body.fieldType = 'Document'
body._rev = rev if rev?
request.put uri:(uriForDoc docName), body:body, (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
#console.log 'create error'
# This will send write conflicts as 'document already exists'. Thats kinda wierd, but
# it shouldn't happen anyway
callback? error
else
# We pass the document revision back to the db cache so it can give it back to couchdb on subsequent requests.
callback? null, {rev: body.rev}
# getOps returns all ops between start and end. end can be null.
getOps: (docName, start, end, callback) ->
return callback null, [] if start == end
# Its a bit gross having this end parameter here....
endkey = if end? then [docName, end - 1]
request uriForOps(docName, start, end), (err, resp, body) ->
# Rows look like this:
# {"id":"<uuid>","key":["doc name",0],"value":{"op":[{"p":0,"i":"hi"}],"meta":{}}}
data = ({op: row.value.op, meta: row.value.meta} for row in body.rows)
callback null, data
# callback(error, db metadata)
create: (docName, data, callback) ->
writeSnapshotInternal docName, data, null, callback
delete: del = (docName, dbMeta, callback) ->
getRev docName, dbMeta, (error, rev) ->
return callback? error if error
docs = [{_id:"doc:#{docName}", _rev:rev, _deleted:true}]
# Its annoying, but we need to get the revision from the document. I don't think there's a simple way to do this.
# This request will get all the ops twice.
request uriForOps(docName, 0, null, true), (err, resp, body) ->
# Rows look like this:
# {"id":"<uuid>","key":["doc name",0],"value":{"op":[{"p":0,"i":"hi"}],"meta":{}},
# "doc":{"_id":"<uuid>","_rev":"1-21a40c56ebd5d424ffe56950e77bc847","op":[{"p":0,"i":"hi"}],"v":0,"meta":{},"docName":"doc6"}}
for row in body.rows
row.doc._deleted = true
docs.push row.doc
request.post url: "#{db}/_bulk_docs", body: {docs}, (err, resp, body) ->
if body[0].error is 'conflict'
# Somebody has edited the document since we did a GET on the revision information. Recurse.
# By passing null to dbMeta I'm forcing the revision information to be reacquired.
del docName, null, callback
else
parseError err, resp, body, (error) -> callback? error
writeOp: (docName, opData, callback) ->
body =
docName: docName
op: opData.op
v: opData.v
meta: opData.meta
request.post url:db, body:body, (err, resp, body) ->
parseError err, resp, body, callback
writeSnapshot: (docName, docData, dbMeta, callback) ->
getRev docName, dbMeta, (error, rev) ->
return callback? error if error
writeSnapshotInternal docName, docData, rev, callback
getSnapshot: (docName, callback) ->
request uriForDoc(docName), (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
callback error
else
callback null,
snapshot: body.snapshot
type: body.type
meta: body.meta
v: body.v
, {rev: body._rev} # dbMeta
close: ->
| 2582 | # OT storage for CouchDB
# Author: <NAME> (@maxogden)
#
# The couchdb database contains two kinds of documents:
#
# - Document snapshots have a key which is doc:the document name
# - Document ops have a random key, but docName: defined.
request = require('request').defaults json: true
# Helper method to parse errors out of couchdb. There's way more ways
# things can go wrong, but I think this catches all the ones I care about.
#
# callback(error) or callback()
parseError = (err, resp, body, callback) ->
body = body[0] if Array.isArray body and body.length >= 1
if err
# This indicates an HTTP error
callback err
else if resp.statusCode is 404
callback 'Document does not exist'
else if resp.statusCode is 403
callback 'forbidden'
else if typeof body is 'object'
if body.error is 'conflict'
callback 'Document already exists'
else if body.error
callback "#{body.error} reason: #{body.reason}"
else
callback()
else
callback()
module.exports = (options) ->
options ?= {}
db = options.uri or "http://localhost:5984/sharejs"
uriForDoc = (docName) -> "#{db}/doc:#{encodeURIComponent docName}"
uriForOps = (docName, start, end, include_docs) ->
startkey = encodeURIComponent(JSON.stringify [docName, start])
# {} is sorted after all numbers - so this will get all ops in the case that end is null.
endkey = encodeURIComponent(JSON.stringify [docName, end ? {}])
# Another way to write this method would be to use node's builtin uri-encoder.
extra = if include_docs then '&include_docs=true' else ''
"#{db}/_design/sharejs/_view/operations?startkey=#{startkey}&endkey=#{endkey}&inclusive_end=false#{extra}"
# Helper method to get the revision of a document snapshot.
getRev = (docName, dbMeta, callback) ->
if dbMeta?.rev
callback null, dbMeta.rev
else
# JSON defaults to true, and that makes request think I'm trying to sneak a request
# body in. Ugh.
request.head {uri:uriForDoc(docName), json:false}, (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
callback error
else
# The etag is the rev in quotes.
callback null, JSON.parse(resp.headers.etag)
writeSnapshotInternal = (docName, data, rev, callback) ->
body = data
body.fieldType = 'Document'
body._rev = rev if rev?
request.put uri:(uriForDoc docName), body:body, (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
#console.log 'create error'
# This will send write conflicts as 'document already exists'. Thats kinda wierd, but
# it shouldn't happen anyway
callback? error
else
# We pass the document revision back to the db cache so it can give it back to couchdb on subsequent requests.
callback? null, {rev: body.rev}
# getOps returns all ops between start and end. end can be null.
getOps: (docName, start, end, callback) ->
return callback null, [] if start == end
# Its a bit gross having this end parameter here....
endkey = if end? then [docName, end - 1]
request uriForOps(docName, start, end), (err, resp, body) ->
# Rows look like this:
# {"id":"<uuid>","key":["doc name",0],"value":{"op":[{"p":0,"i":"hi"}],"meta":{}}}
data = ({op: row.value.op, meta: row.value.meta} for row in body.rows)
callback null, data
# callback(error, db metadata)
create: (docName, data, callback) ->
writeSnapshotInternal docName, data, null, callback
delete: del = (docName, dbMeta, callback) ->
getRev docName, dbMeta, (error, rev) ->
return callback? error if error
docs = [{_id:"doc:#{docName}", _rev:rev, _deleted:true}]
# Its annoying, but we need to get the revision from the document. I don't think there's a simple way to do this.
# This request will get all the ops twice.
request uriForOps(docName, 0, null, true), (err, resp, body) ->
# Rows look like this:
# {"id":"<uuid>","key":["doc name",0],"value":{"op":[{"p":0,"i":"hi"}],"meta":{}},
# "doc":{"_id":"<uuid>","_rev":"1-21a40c56ebd5d424ffe56950e77bc847","op":[{"p":0,"i":"hi"}],"v":0,"meta":{},"docName":"doc6"}}
for row in body.rows
row.doc._deleted = true
docs.push row.doc
request.post url: "#{db}/_bulk_docs", body: {docs}, (err, resp, body) ->
if body[0].error is 'conflict'
# Somebody has edited the document since we did a GET on the revision information. Recurse.
# By passing null to dbMeta I'm forcing the revision information to be reacquired.
del docName, null, callback
else
parseError err, resp, body, (error) -> callback? error
writeOp: (docName, opData, callback) ->
body =
docName: docName
op: opData.op
v: opData.v
meta: opData.meta
request.post url:db, body:body, (err, resp, body) ->
parseError err, resp, body, callback
writeSnapshot: (docName, docData, dbMeta, callback) ->
getRev docName, dbMeta, (error, rev) ->
return callback? error if error
writeSnapshotInternal docName, docData, rev, callback
getSnapshot: (docName, callback) ->
request uriForDoc(docName), (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
callback error
else
callback null,
snapshot: body.snapshot
type: body.type
meta: body.meta
v: body.v
, {rev: body._rev} # dbMeta
close: ->
| true | # OT storage for CouchDB
# Author: PI:NAME:<NAME>END_PI (@maxogden)
#
# The couchdb database contains two kinds of documents:
#
# - Document snapshots have a key which is doc:the document name
# - Document ops have a random key, but docName: defined.
request = require('request').defaults json: true
# Helper method to parse errors out of couchdb. There's way more ways
# things can go wrong, but I think this catches all the ones I care about.
#
# callback(error) or callback()
parseError = (err, resp, body, callback) ->
body = body[0] if Array.isArray body and body.length >= 1
if err
# This indicates an HTTP error
callback err
else if resp.statusCode is 404
callback 'Document does not exist'
else if resp.statusCode is 403
callback 'forbidden'
else if typeof body is 'object'
if body.error is 'conflict'
callback 'Document already exists'
else if body.error
callback "#{body.error} reason: #{body.reason}"
else
callback()
else
callback()
module.exports = (options) ->
options ?= {}
db = options.uri or "http://localhost:5984/sharejs"
uriForDoc = (docName) -> "#{db}/doc:#{encodeURIComponent docName}"
uriForOps = (docName, start, end, include_docs) ->
startkey = encodeURIComponent(JSON.stringify [docName, start])
# {} is sorted after all numbers - so this will get all ops in the case that end is null.
endkey = encodeURIComponent(JSON.stringify [docName, end ? {}])
# Another way to write this method would be to use node's builtin uri-encoder.
extra = if include_docs then '&include_docs=true' else ''
"#{db}/_design/sharejs/_view/operations?startkey=#{startkey}&endkey=#{endkey}&inclusive_end=false#{extra}"
# Helper method to get the revision of a document snapshot.
getRev = (docName, dbMeta, callback) ->
if dbMeta?.rev
callback null, dbMeta.rev
else
# JSON defaults to true, and that makes request think I'm trying to sneak a request
# body in. Ugh.
request.head {uri:uriForDoc(docName), json:false}, (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
callback error
else
# The etag is the rev in quotes.
callback null, JSON.parse(resp.headers.etag)
writeSnapshotInternal = (docName, data, rev, callback) ->
body = data
body.fieldType = 'Document'
body._rev = rev if rev?
request.put uri:(uriForDoc docName), body:body, (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
#console.log 'create error'
# This will send write conflicts as 'document already exists'. Thats kinda wierd, but
# it shouldn't happen anyway
callback? error
else
# We pass the document revision back to the db cache so it can give it back to couchdb on subsequent requests.
callback? null, {rev: body.rev}
# getOps returns all ops between start and end. end can be null.
getOps: (docName, start, end, callback) ->
return callback null, [] if start == end
# Its a bit gross having this end parameter here....
endkey = if end? then [docName, end - 1]
request uriForOps(docName, start, end), (err, resp, body) ->
# Rows look like this:
# {"id":"<uuid>","key":["doc name",0],"value":{"op":[{"p":0,"i":"hi"}],"meta":{}}}
data = ({op: row.value.op, meta: row.value.meta} for row in body.rows)
callback null, data
# callback(error, db metadata)
create: (docName, data, callback) ->
writeSnapshotInternal docName, data, null, callback
delete: del = (docName, dbMeta, callback) ->
getRev docName, dbMeta, (error, rev) ->
return callback? error if error
docs = [{_id:"doc:#{docName}", _rev:rev, _deleted:true}]
# Its annoying, but we need to get the revision from the document. I don't think there's a simple way to do this.
# This request will get all the ops twice.
request uriForOps(docName, 0, null, true), (err, resp, body) ->
# Rows look like this:
# {"id":"<uuid>","key":["doc name",0],"value":{"op":[{"p":0,"i":"hi"}],"meta":{}},
# "doc":{"_id":"<uuid>","_rev":"1-21a40c56ebd5d424ffe56950e77bc847","op":[{"p":0,"i":"hi"}],"v":0,"meta":{},"docName":"doc6"}}
for row in body.rows
row.doc._deleted = true
docs.push row.doc
request.post url: "#{db}/_bulk_docs", body: {docs}, (err, resp, body) ->
if body[0].error is 'conflict'
# Somebody has edited the document since we did a GET on the revision information. Recurse.
# By passing null to dbMeta I'm forcing the revision information to be reacquired.
del docName, null, callback
else
parseError err, resp, body, (error) -> callback? error
writeOp: (docName, opData, callback) ->
body =
docName: docName
op: opData.op
v: opData.v
meta: opData.meta
request.post url:db, body:body, (err, resp, body) ->
parseError err, resp, body, callback
writeSnapshot: (docName, docData, dbMeta, callback) ->
getRev docName, dbMeta, (error, rev) ->
return callback? error if error
writeSnapshotInternal docName, docData, rev, callback
getSnapshot: (docName, callback) ->
request uriForDoc(docName), (err, resp, body) ->
parseError err, resp, body, (error) ->
if error
callback error
else
callback null,
snapshot: body.snapshot
type: body.type
meta: body.meta
v: body.v
, {rev: body._rev} # dbMeta
close: ->
|
[
{
"context": "/functions/basename\n # + original by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)\n # + ",
"end": 474,
"score": 0.9999046921730042,
"start": 455,
"tag": "NAME",
"value": "Kevin van Zonneveld"
},
{
"context": "n.vanzonneveld.net)\n # + improved by: Ash Searle (http://hexmen.com/blog/)\n # + impro",
"end": 548,
"score": 0.9999111294746399,
"start": 538,
"tag": "NAME",
"value": "Ash Searle"
},
{
"context": "//hexmen.com/blog/)\n # + improved by: Lincoln Ramsay\n # + improved by: djmix\n #\n ",
"end": 620,
"score": 0.9999036192893982,
"start": 606,
"tag": "NAME",
"value": "Lincoln Ramsay"
},
{
"context": "s.org/functions/dirname\n # + original by: Ozh\n # + improved by: XoraX (http://www.xora",
"end": 3856,
"score": 0.9945591688156128,
"start": 3853,
"tag": "USERNAME",
"value": "Ozh"
},
{
"context": " # + original by: Ozh\n # + improved by: XoraX (http://www.xorax.info)\n # * example 1",
"end": 3889,
"score": 0.7299383282661438,
"start": 3884,
"tag": "USERNAME",
"value": "XoraX"
}
] | views/util.coffee | dtjm/opensource.textdropapp.com | 1 | unless window.Util?
window.Util =
# Default options
# ---------------
defaults:
# What to insert when pressing `<Tab>`. (Used in the checkTab
# method.)
tab: " "
# ### basename
#
# Returns the filename component of the path
#
# version: 1008.1718
# discuss at: http://phpjs.org/functions/basename
# + original by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
# + improved by: Ash Searle (http://hexmen.com/blog/)
# + improved by: Lincoln Ramsay
# + improved by: djmix
#
# Examples:
#
# basename('/www/site/home.htm', '.htm');
# => 'home'
#
# basename('ecra.php?p=1')
# => 'ecra.php?p=1'
basename: (path, suffix) ->
b = path.replace /^.*[\/\\]/g, ''
if typeof(suffix) == 'string' && b.substr(b.length-suffix.length) == suffix
b = b.substr 0, b.length-suffix.length
return b
# ### checkTab
checkTab: (evt) ->
if evt.metaKey then return
tab = @defaults.tab
t = evt.target
ss = t.selectionStart
se = t.selectionEnd
# Tab key - insert tab expansion
if evt.keyCode == 9
evt.preventDefault()
# Special case of multi line selection
if (ss != se && t.value.slice(ss,se).indexOf("\n") != -1)
# In case selection was not of entire lines (e.g. selection
# begins in the middle of a line) we ought to tab at the
# beginning as well as at the start of every following line.
pre = t.value.slice 0, ss
sel = t.value.slice(ss,se).replace /\n/g, "\n#{tab}"
post = t.value.slice(se,t.value.length)
t.value = pre.concat(tab).concat(sel).concat(post)
t.selectionStart = ss + tab.length
t.selectionEnd = se + tab.length
# "Normal" case (no selection or selection on one line only)
else
t.value = t.value.slice(0,ss).concat(tab).concat(t.value.slice(ss,t.value.length))
if (ss == se)
t.selectionStart = t.selectionEnd = ss + tab.length
else
t.selectionStart = ss + tab.length
t.selectionEnd = se + tab.length
# Backspace key - delete preceding tab expansion, if exists
else if evt.keyCode==8 && t.value.slice(ss - 4,ss) == tab
evt.preventDefault()
t.value = t.value.slice(0,ss - 4).concat(t.value.slice(ss,t.value.length));
t.selectionStart = t.selectionEnd = ss - tab.length
# Delete key - delete following tab expansion, if exists
else if evt.keyCode==46 && t.value.slice(se,se + 4) == tab
evt.preventDefault()
t.value = t.value.slice(0,ss).concat(t.value.slice(ss + 4,t.value.length))
t.selectionStart = t.selectionEnd = ss
# Left/right arrow keys - move across the tab in one go
else if evt.keyCode == 37 && t.value.slice(ss - 4,ss) == tab
evt.preventDefault(); t.selectionStart = t.selectionEnd = ss - 4
else if (evt.keyCode == 39 && t.value.slice(ss,ss + 4) == tab)
evt.preventDefault(); t.selectionStart = t.selectionEnd = ss + 4
# Returns the directory name component of the path
#
# version: 1102.614
# discuss at: http://phpjs.org/functions/dirname
# + original by: Ozh
# + improved by: XoraX (http://www.xorax.info)
# * example 1: dirname('/etc/passwd');
# * returns 1: '/etc'
# * example 2: dirname('c:/Temp/x');
# * returns 2: 'c:/Temp'
# * example 3: dirname('/dir/test/');
# * returns 3: '/dir'
dirname: (path) ->
return "/" if path.indexOf("/") == -1
dirname = path.replace(/\\/g, '/').replace(/\/[^\/]*\/?$/, '')
return "/" if dirname == ""
return dirname
escapeHtml: (str) -> return $("<div/>").text(str).html()
escapeQuotes: (str) -> return str.replace /(['"])/, "\\$1"
# Check whether a string starts with another string
startsWith: (needle, haystack) ->
return haystack.length >= needle.length &&
haystack.substr(0, needle.length) == needle
| 120012 | unless window.Util?
window.Util =
# Default options
# ---------------
defaults:
# What to insert when pressing `<Tab>`. (Used in the checkTab
# method.)
tab: " "
# ### basename
#
# Returns the filename component of the path
#
# version: 1008.1718
# discuss at: http://phpjs.org/functions/basename
# + original by: <NAME> (http://kevin.vanzonneveld.net)
# + improved by: <NAME> (http://hexmen.com/blog/)
# + improved by: <NAME>
# + improved by: djmix
#
# Examples:
#
# basename('/www/site/home.htm', '.htm');
# => 'home'
#
# basename('ecra.php?p=1')
# => 'ecra.php?p=1'
basename: (path, suffix) ->
b = path.replace /^.*[\/\\]/g, ''
if typeof(suffix) == 'string' && b.substr(b.length-suffix.length) == suffix
b = b.substr 0, b.length-suffix.length
return b
# ### checkTab
checkTab: (evt) ->
if evt.metaKey then return
tab = @defaults.tab
t = evt.target
ss = t.selectionStart
se = t.selectionEnd
# Tab key - insert tab expansion
if evt.keyCode == 9
evt.preventDefault()
# Special case of multi line selection
if (ss != se && t.value.slice(ss,se).indexOf("\n") != -1)
# In case selection was not of entire lines (e.g. selection
# begins in the middle of a line) we ought to tab at the
# beginning as well as at the start of every following line.
pre = t.value.slice 0, ss
sel = t.value.slice(ss,se).replace /\n/g, "\n#{tab}"
post = t.value.slice(se,t.value.length)
t.value = pre.concat(tab).concat(sel).concat(post)
t.selectionStart = ss + tab.length
t.selectionEnd = se + tab.length
# "Normal" case (no selection or selection on one line only)
else
t.value = t.value.slice(0,ss).concat(tab).concat(t.value.slice(ss,t.value.length))
if (ss == se)
t.selectionStart = t.selectionEnd = ss + tab.length
else
t.selectionStart = ss + tab.length
t.selectionEnd = se + tab.length
# Backspace key - delete preceding tab expansion, if exists
else if evt.keyCode==8 && t.value.slice(ss - 4,ss) == tab
evt.preventDefault()
t.value = t.value.slice(0,ss - 4).concat(t.value.slice(ss,t.value.length));
t.selectionStart = t.selectionEnd = ss - tab.length
# Delete key - delete following tab expansion, if exists
else if evt.keyCode==46 && t.value.slice(se,se + 4) == tab
evt.preventDefault()
t.value = t.value.slice(0,ss).concat(t.value.slice(ss + 4,t.value.length))
t.selectionStart = t.selectionEnd = ss
# Left/right arrow keys - move across the tab in one go
else if evt.keyCode == 37 && t.value.slice(ss - 4,ss) == tab
evt.preventDefault(); t.selectionStart = t.selectionEnd = ss - 4
else if (evt.keyCode == 39 && t.value.slice(ss,ss + 4) == tab)
evt.preventDefault(); t.selectionStart = t.selectionEnd = ss + 4
# Returns the directory name component of the path
#
# version: 1102.614
# discuss at: http://phpjs.org/functions/dirname
# + original by: Ozh
# + improved by: XoraX (http://www.xorax.info)
# * example 1: dirname('/etc/passwd');
# * returns 1: '/etc'
# * example 2: dirname('c:/Temp/x');
# * returns 2: 'c:/Temp'
# * example 3: dirname('/dir/test/');
# * returns 3: '/dir'
dirname: (path) ->
return "/" if path.indexOf("/") == -1
dirname = path.replace(/\\/g, '/').replace(/\/[^\/]*\/?$/, '')
return "/" if dirname == ""
return dirname
escapeHtml: (str) -> return $("<div/>").text(str).html()
escapeQuotes: (str) -> return str.replace /(['"])/, "\\$1"
# Check whether a string starts with another string
startsWith: (needle, haystack) ->
return haystack.length >= needle.length &&
haystack.substr(0, needle.length) == needle
| true | unless window.Util?
window.Util =
# Default options
# ---------------
defaults:
# What to insert when pressing `<Tab>`. (Used in the checkTab
# method.)
tab: " "
# ### basename
#
# Returns the filename component of the path
#
# version: 1008.1718
# discuss at: http://phpjs.org/functions/basename
# + original by: PI:NAME:<NAME>END_PI (http://kevin.vanzonneveld.net)
# + improved by: PI:NAME:<NAME>END_PI (http://hexmen.com/blog/)
# + improved by: PI:NAME:<NAME>END_PI
# + improved by: djmix
#
# Examples:
#
# basename('/www/site/home.htm', '.htm');
# => 'home'
#
# basename('ecra.php?p=1')
# => 'ecra.php?p=1'
basename: (path, suffix) ->
b = path.replace /^.*[\/\\]/g, ''
if typeof(suffix) == 'string' && b.substr(b.length-suffix.length) == suffix
b = b.substr 0, b.length-suffix.length
return b
# ### checkTab
checkTab: (evt) ->
if evt.metaKey then return
tab = @defaults.tab
t = evt.target
ss = t.selectionStart
se = t.selectionEnd
# Tab key - insert tab expansion
if evt.keyCode == 9
evt.preventDefault()
# Special case of multi line selection
if (ss != se && t.value.slice(ss,se).indexOf("\n") != -1)
# In case selection was not of entire lines (e.g. selection
# begins in the middle of a line) we ought to tab at the
# beginning as well as at the start of every following line.
pre = t.value.slice 0, ss
sel = t.value.slice(ss,se).replace /\n/g, "\n#{tab}"
post = t.value.slice(se,t.value.length)
t.value = pre.concat(tab).concat(sel).concat(post)
t.selectionStart = ss + tab.length
t.selectionEnd = se + tab.length
# "Normal" case (no selection or selection on one line only)
else
t.value = t.value.slice(0,ss).concat(tab).concat(t.value.slice(ss,t.value.length))
if (ss == se)
t.selectionStart = t.selectionEnd = ss + tab.length
else
t.selectionStart = ss + tab.length
t.selectionEnd = se + tab.length
# Backspace key - delete preceding tab expansion, if exists
else if evt.keyCode==8 && t.value.slice(ss - 4,ss) == tab
evt.preventDefault()
t.value = t.value.slice(0,ss - 4).concat(t.value.slice(ss,t.value.length));
t.selectionStart = t.selectionEnd = ss - tab.length
# Delete key - delete following tab expansion, if exists
else if evt.keyCode==46 && t.value.slice(se,se + 4) == tab
evt.preventDefault()
t.value = t.value.slice(0,ss).concat(t.value.slice(ss + 4,t.value.length))
t.selectionStart = t.selectionEnd = ss
# Left/right arrow keys - move across the tab in one go
else if evt.keyCode == 37 && t.value.slice(ss - 4,ss) == tab
evt.preventDefault(); t.selectionStart = t.selectionEnd = ss - 4
else if (evt.keyCode == 39 && t.value.slice(ss,ss + 4) == tab)
evt.preventDefault(); t.selectionStart = t.selectionEnd = ss + 4
# Returns the directory name component of the path
#
# version: 1102.614
# discuss at: http://phpjs.org/functions/dirname
# + original by: Ozh
# + improved by: XoraX (http://www.xorax.info)
# * example 1: dirname('/etc/passwd');
# * returns 1: '/etc'
# * example 2: dirname('c:/Temp/x');
# * returns 2: 'c:/Temp'
# * example 3: dirname('/dir/test/');
# * returns 3: '/dir'
dirname: (path) ->
return "/" if path.indexOf("/") == -1
dirname = path.replace(/\\/g, '/').replace(/\/[^\/]*\/?$/, '')
return "/" if dirname == ""
return dirname
escapeHtml: (str) -> return $("<div/>").text(str).html()
escapeQuotes: (str) -> return str.replace /(['"])/, "\\$1"
# Check whether a string starts with another string
startsWith: (needle, haystack) ->
return haystack.length >= needle.length &&
haystack.substr(0, needle.length) == needle
|
[
{
"context": "#*\n# @fileoverview Tests for radix rule.\n# @author James Allardice\n###\n\n'use strict'\n\n#-----------------------------",
"end": 68,
"score": 0.9998774528503418,
"start": 53,
"tag": "NAME",
"value": "James Allardice"
}
] | src/tests/rules/radix.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for radix rule.
# @author James Allardice
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/radix'
{RuleTester} = require 'eslint'
path = require 'path'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'radix', rule,
valid: [
'parseInt("10", 10)'
'parseInt("10", foo)'
'Number.parseInt("10", foo)'
,
code: 'parseInt("10", 10)'
options: ['always']
,
code: 'parseInt("10")'
options: ['as-needed']
,
code: 'parseInt("10", 8)'
options: ['as-needed']
,
code: 'parseInt("10", foo)'
options: ['as-needed']
,
'parseInt'
'Number.foo()'
'Number[parseInt]()'
# Ignores if it's shadowed.
'''
parseInt = ->
parseInt()
'''
,
code: '''
parseInt = ->
parseInt(foo)
'''
options: ['always']
,
code: '''
parseInt = ->
parseInt foo, 10
'''
options: ['as-needed']
,
'''
Number = {}
Number.parseInt()
'''
,
code: '''
Number = {}
Number.parseInt(foo)
'''
options: ['always']
,
code: '''
Number = {}
Number.parseInt(foo, 10)
'''
options: ['as-needed']
]
invalid: [
code: 'parseInt()'
options: ['as-needed']
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'parseInt()'
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'parseInt "10"'
errors: [
message: 'Missing radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", null)'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", undefined)'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt "10", true'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", "foo")'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", "123")'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'Number.parseInt()'
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'Number.parseInt()'
options: ['as-needed']
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'Number.parseInt("10")'
errors: [
message: 'Missing radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", 10)'
options: ['as-needed']
errors: [
message: 'Redundant radix parameter.'
type: 'CallExpression'
]
]
| 39852 | ###*
# @fileoverview Tests for radix rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/radix'
{RuleTester} = require 'eslint'
path = require 'path'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'radix', rule,
valid: [
'parseInt("10", 10)'
'parseInt("10", foo)'
'Number.parseInt("10", foo)'
,
code: 'parseInt("10", 10)'
options: ['always']
,
code: 'parseInt("10")'
options: ['as-needed']
,
code: 'parseInt("10", 8)'
options: ['as-needed']
,
code: 'parseInt("10", foo)'
options: ['as-needed']
,
'parseInt'
'Number.foo()'
'Number[parseInt]()'
# Ignores if it's shadowed.
'''
parseInt = ->
parseInt()
'''
,
code: '''
parseInt = ->
parseInt(foo)
'''
options: ['always']
,
code: '''
parseInt = ->
parseInt foo, 10
'''
options: ['as-needed']
,
'''
Number = {}
Number.parseInt()
'''
,
code: '''
Number = {}
Number.parseInt(foo)
'''
options: ['always']
,
code: '''
Number = {}
Number.parseInt(foo, 10)
'''
options: ['as-needed']
]
invalid: [
code: 'parseInt()'
options: ['as-needed']
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'parseInt()'
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'parseInt "10"'
errors: [
message: 'Missing radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", null)'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", undefined)'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt "10", true'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", "foo")'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", "123")'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'Number.parseInt()'
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'Number.parseInt()'
options: ['as-needed']
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'Number.parseInt("10")'
errors: [
message: 'Missing radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", 10)'
options: ['as-needed']
errors: [
message: 'Redundant radix parameter.'
type: 'CallExpression'
]
]
| true | ###*
# @fileoverview Tests for radix rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/radix'
{RuleTester} = require 'eslint'
path = require 'path'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'radix', rule,
valid: [
'parseInt("10", 10)'
'parseInt("10", foo)'
'Number.parseInt("10", foo)'
,
code: 'parseInt("10", 10)'
options: ['always']
,
code: 'parseInt("10")'
options: ['as-needed']
,
code: 'parseInt("10", 8)'
options: ['as-needed']
,
code: 'parseInt("10", foo)'
options: ['as-needed']
,
'parseInt'
'Number.foo()'
'Number[parseInt]()'
# Ignores if it's shadowed.
'''
parseInt = ->
parseInt()
'''
,
code: '''
parseInt = ->
parseInt(foo)
'''
options: ['always']
,
code: '''
parseInt = ->
parseInt foo, 10
'''
options: ['as-needed']
,
'''
Number = {}
Number.parseInt()
'''
,
code: '''
Number = {}
Number.parseInt(foo)
'''
options: ['always']
,
code: '''
Number = {}
Number.parseInt(foo, 10)
'''
options: ['as-needed']
]
invalid: [
code: 'parseInt()'
options: ['as-needed']
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'parseInt()'
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'parseInt "10"'
errors: [
message: 'Missing radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", null)'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", undefined)'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt "10", true'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", "foo")'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", "123")'
errors: [
message: 'Invalid radix parameter.'
type: 'CallExpression'
]
,
code: 'Number.parseInt()'
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'Number.parseInt()'
options: ['as-needed']
errors: [
message: 'Missing parameters.'
type: 'CallExpression'
]
,
code: 'Number.parseInt("10")'
errors: [
message: 'Missing radix parameter.'
type: 'CallExpression'
]
,
code: 'parseInt("10", 10)'
options: ['as-needed']
errors: [
message: 'Redundant radix parameter.'
type: 'CallExpression'
]
]
|
[
{
"context": "inspire\n#\n# Known Issues:\n# none\n#\n# Author:\n# Riley Mills\n\nUtil = require \"util\"\n\nmodule.exports = (robot) ",
"end": 168,
"score": 0.9997950792312622,
"start": 157,
"tag": "NAME",
"value": "Riley Mills"
}
] | Hubot/inspirobot.coffee | FaytLeingod007/JabbR | 3 | # Description:
# Inspirational
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# inspire
#
# Known Issues:
# none
#
# Author:
# Riley Mills
Util = require "util"
module.exports = (robot) =>
robot.respond /inspire/i, (msg) ->
msg.http('http://inspirobot.me/api?generate=true')
.get() (err, res, body) ->
if err
msg.send "Encountered an error :( #{err}"
return
else
msg.send(body)
| 127004 | # Description:
# Inspirational
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# inspire
#
# Known Issues:
# none
#
# Author:
# <NAME>
Util = require "util"
module.exports = (robot) =>
robot.respond /inspire/i, (msg) ->
msg.http('http://inspirobot.me/api?generate=true')
.get() (err, res, body) ->
if err
msg.send "Encountered an error :( #{err}"
return
else
msg.send(body)
| true | # Description:
# Inspirational
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# inspire
#
# Known Issues:
# none
#
# Author:
# PI:NAME:<NAME>END_PI
Util = require "util"
module.exports = (robot) =>
robot.respond /inspire/i, (msg) ->
msg.http('http://inspirobot.me/api?generate=true')
.get() (err, res, body) ->
if err
msg.send "Encountered an error :( #{err}"
return
else
msg.send(body)
|
[
{
"context": "tokenArray.length - 1 ] then tokenArray.push ''\n\t# Amy -> [ a, m, i ] -> [ '', a, m, i, '' ]\n\n\tfor i in ",
"end": 1600,
"score": 0.9808356761932373,
"start": 1597,
"tag": "NAME",
"value": "Amy"
}
] | digester.flavored.ru.coffee | joedski/word-generator-sketching | 2 | ###
Digester: Flavored Rule Generator
####
exports.digest = ( corpusText ) -> # :Array<Rule>
digestWords breakIntoWords corpusText
breakIntoWords = ( corpusText ) -> # :Array<String>
# Unfortunately, since orthography is arbitrary, there's no real uniform way to do this,
# although with options this could cover the majority of orthographies.
breakAtWordBoundaries normalizeText corpusText
digestWords = ( wordArray ) -> # :Array<Rule>
ruleArrayArray = (digestSingleWord word for word in wordArray when word)
ruleArray = []
(ruleArray = ruleArray.concat nextRuleArray for nextRuleArray in ruleArrayArray)
ruleArray
normalizeText = ( corpusText ) -> corpusText.toLowerCase()#.replace( /[^,. '"!a-z]/g, '' )
breakAtWordBoundaries = ( text ) -> text.split /[,. '"!]+/
digestSingleWord = ( word ) -> # :Rule
# English isn't quite this simple since y is sometimes a vowel and sometimes a consonant,
# but we'll roll with this for now.
tokenizer = /[яоэиюыуьаёйе]+|[^яоэиюыуьаёйе]+/g # must be /g to avoid infinite loop.
vowelTokenTest = /^[яоэиюыуьаёйе]+$/
nonvowelTokenTest = /^[^яоэиюыуьаёйе]+$/
isMedialToken = ( token ) -> !token or nonvowelTokenTest.test token
match = null
tokenArray = (match[ 0 ] while match = tokenizer.exec word)
# rules always have consonants in the medial position.
# A rule which starts a word with vowels is considered to have a null medial position.
if ! isMedialToken tokenArray[ 0 ] then tokenArray.unshift ''
# As is a rule which ends a word with vowels.
if ! isMedialToken tokenArray[ tokenArray.length - 1 ] then tokenArray.push ''
# Amy -> [ a, m, i ] -> [ '', a, m, i, '' ]
for i in [ 0 ... tokenArray.length ] when i % 2 == 0
new Rule( tokenArray[ i - 1 ] or '', tokenArray[ i ], tokenArray[ i + 1 ] or '' )
class Rule
@fromJSON = ( jsonObject ) ->
new Rule jsonObject.initial, jsonObject.medial, jsonObject.final
wordInitial: false
wordFinal: false
initial: ''
medial: ''
final: ''
constructor: ( @initial, @medial, @final ) ->
@wordInitial = true if not @initial
@wordFinal = true if not @final
canFollow: ( beforeRule ) -> beforeRule.final == @initial
canLead: ( afterRule ) -> afterRule.initial == @final
# We can always omit @initial because,
# when @wordInitial is true then @initial is '',
# and when @wordInitial is not ture, then @initial == previous Rule's @final.
toString: -> "#{ @medial }#{ @final }"
toJSON: ->
initial: @initial
medial: @medial
final: @final
exports.Rule = Rule
| 23470 | ###
Digester: Flavored Rule Generator
####
exports.digest = ( corpusText ) -> # :Array<Rule>
digestWords breakIntoWords corpusText
breakIntoWords = ( corpusText ) -> # :Array<String>
# Unfortunately, since orthography is arbitrary, there's no real uniform way to do this,
# although with options this could cover the majority of orthographies.
breakAtWordBoundaries normalizeText corpusText
digestWords = ( wordArray ) -> # :Array<Rule>
ruleArrayArray = (digestSingleWord word for word in wordArray when word)
ruleArray = []
(ruleArray = ruleArray.concat nextRuleArray for nextRuleArray in ruleArrayArray)
ruleArray
normalizeText = ( corpusText ) -> corpusText.toLowerCase()#.replace( /[^,. '"!a-z]/g, '' )
breakAtWordBoundaries = ( text ) -> text.split /[,. '"!]+/
digestSingleWord = ( word ) -> # :Rule
# English isn't quite this simple since y is sometimes a vowel and sometimes a consonant,
# but we'll roll with this for now.
tokenizer = /[яоэиюыуьаёйе]+|[^яоэиюыуьаёйе]+/g # must be /g to avoid infinite loop.
vowelTokenTest = /^[яоэиюыуьаёйе]+$/
nonvowelTokenTest = /^[^яоэиюыуьаёйе]+$/
isMedialToken = ( token ) -> !token or nonvowelTokenTest.test token
match = null
tokenArray = (match[ 0 ] while match = tokenizer.exec word)
# rules always have consonants in the medial position.
# A rule which starts a word with vowels is considered to have a null medial position.
if ! isMedialToken tokenArray[ 0 ] then tokenArray.unshift ''
# As is a rule which ends a word with vowels.
if ! isMedialToken tokenArray[ tokenArray.length - 1 ] then tokenArray.push ''
# <NAME> -> [ a, m, i ] -> [ '', a, m, i, '' ]
for i in [ 0 ... tokenArray.length ] when i % 2 == 0
new Rule( tokenArray[ i - 1 ] or '', tokenArray[ i ], tokenArray[ i + 1 ] or '' )
class Rule
@fromJSON = ( jsonObject ) ->
new Rule jsonObject.initial, jsonObject.medial, jsonObject.final
wordInitial: false
wordFinal: false
initial: ''
medial: ''
final: ''
constructor: ( @initial, @medial, @final ) ->
@wordInitial = true if not @initial
@wordFinal = true if not @final
canFollow: ( beforeRule ) -> beforeRule.final == @initial
canLead: ( afterRule ) -> afterRule.initial == @final
# We can always omit @initial because,
# when @wordInitial is true then @initial is '',
# and when @wordInitial is not ture, then @initial == previous Rule's @final.
toString: -> "#{ @medial }#{ @final }"
toJSON: ->
initial: @initial
medial: @medial
final: @final
exports.Rule = Rule
| true | ###
Digester: Flavored Rule Generator
####
exports.digest = ( corpusText ) -> # :Array<Rule>
digestWords breakIntoWords corpusText
breakIntoWords = ( corpusText ) -> # :Array<String>
# Unfortunately, since orthography is arbitrary, there's no real uniform way to do this,
# although with options this could cover the majority of orthographies.
breakAtWordBoundaries normalizeText corpusText
digestWords = ( wordArray ) -> # :Array<Rule>
ruleArrayArray = (digestSingleWord word for word in wordArray when word)
ruleArray = []
(ruleArray = ruleArray.concat nextRuleArray for nextRuleArray in ruleArrayArray)
ruleArray
normalizeText = ( corpusText ) -> corpusText.toLowerCase()#.replace( /[^,. '"!a-z]/g, '' )
breakAtWordBoundaries = ( text ) -> text.split /[,. '"!]+/
digestSingleWord = ( word ) -> # :Rule
# English isn't quite this simple since y is sometimes a vowel and sometimes a consonant,
# but we'll roll with this for now.
tokenizer = /[яоэиюыуьаёйе]+|[^яоэиюыуьаёйе]+/g # must be /g to avoid infinite loop.
vowelTokenTest = /^[яоэиюыуьаёйе]+$/
nonvowelTokenTest = /^[^яоэиюыуьаёйе]+$/
isMedialToken = ( token ) -> !token or nonvowelTokenTest.test token
match = null
tokenArray = (match[ 0 ] while match = tokenizer.exec word)
# rules always have consonants in the medial position.
# A rule which starts a word with vowels is considered to have a null medial position.
if ! isMedialToken tokenArray[ 0 ] then tokenArray.unshift ''
# As is a rule which ends a word with vowels.
if ! isMedialToken tokenArray[ tokenArray.length - 1 ] then tokenArray.push ''
# PI:NAME:<NAME>END_PI -> [ a, m, i ] -> [ '', a, m, i, '' ]
for i in [ 0 ... tokenArray.length ] when i % 2 == 0
new Rule( tokenArray[ i - 1 ] or '', tokenArray[ i ], tokenArray[ i + 1 ] or '' )
class Rule
@fromJSON = ( jsonObject ) ->
new Rule jsonObject.initial, jsonObject.medial, jsonObject.final
wordInitial: false
wordFinal: false
initial: ''
medial: ''
final: ''
constructor: ( @initial, @medial, @final ) ->
@wordInitial = true if not @initial
@wordFinal = true if not @final
canFollow: ( beforeRule ) -> beforeRule.final == @initial
canLead: ( afterRule ) -> afterRule.initial == @final
# We can always omit @initial because,
# when @wordInitial is true then @initial is '',
# and when @wordInitial is not ture, then @initial == previous Rule's @final.
toString: -> "#{ @medial }#{ @final }"
toJSON: ->
initial: @initial
medial: @medial
final: @final
exports.Rule = Rule
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.