entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "@fileoverview Tests for no-bitwise rule.\n# @author Nicholas C. Zakas\n###\n\n'use strict'\n\n#-----------------------------",
"end": 75,
"score": 0.999800443649292,
"start": 58,
"tag": "NAME",
"value": "Nicholas C. Zakas"
}
] | src/tests/rules/no-bitwise.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-bitwise rule.
# @author Nicholas C. Zakas
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-bitwise'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-bitwise', rule,
valid: [
'a + b'
'!a'
'a += b'
,
code: '~[1, 2, 3].indexOf(1)', options: [allow: ['~']]
,
code: '~1<<2 is -8', options: [allow: ['~', '<<']]
,
code: '~1<<2 == -8', options: [allow: ['~', '<<']]
,
code: 'a|0', options: [int32Hint: yes]
,
code: 'a|0', options: [allow: ['|'], int32Hint: no]
]
invalid: [
code: 'a ^ b'
errors: [
messageId: 'unexpected', data: {operator: '^'}, type: 'BinaryExpression'
]
,
code: 'a | b'
errors: [
messageId: 'unexpected', data: {operator: '|'}, type: 'BinaryExpression'
]
,
code: 'a & b'
errors: [
messageId: 'unexpected', data: {operator: '&'}, type: 'BinaryExpression'
]
,
code: 'a << b'
errors: [
messageId: 'unexpected', data: {operator: '<<'}, type: 'BinaryExpression'
]
,
code: 'a >> b'
errors: [
messageId: 'unexpected', data: {operator: '>>'}, type: 'BinaryExpression'
]
,
code: 'a >>> b'
errors: [
messageId: 'unexpected', data: {operator: '>>>'}, type: 'BinaryExpression'
]
,
code: '~a'
errors: [
messageId: 'unexpected', data: {operator: '~'}, type: 'UnaryExpression'
]
,
code: 'a ^= b'
errors: [
messageId: 'unexpected'
data: operator: '^='
type: 'AssignmentExpression'
]
,
code: 'a |= b'
errors: [
messageId: 'unexpected'
data: operator: '|='
type: 'AssignmentExpression'
]
,
code: 'a &= b'
errors: [
messageId: 'unexpected'
data: operator: '&='
type: 'AssignmentExpression'
]
,
code: 'a <<= b'
errors: [
messageId: 'unexpected'
data: operator: '<<='
type: 'AssignmentExpression'
]
,
code: 'a >>= b'
errors: [
messageId: 'unexpected'
data: operator: '>>='
type: 'AssignmentExpression'
]
,
code: 'a >>>= b'
errors: [
messageId: 'unexpected'
data: operator: '>>>='
type: 'AssignmentExpression'
]
]
| 149587 | ###*
# @fileoverview Tests for no-bitwise rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-bitwise'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-bitwise', rule,
valid: [
'a + b'
'!a'
'a += b'
,
code: '~[1, 2, 3].indexOf(1)', options: [allow: ['~']]
,
code: '~1<<2 is -8', options: [allow: ['~', '<<']]
,
code: '~1<<2 == -8', options: [allow: ['~', '<<']]
,
code: 'a|0', options: [int32Hint: yes]
,
code: 'a|0', options: [allow: ['|'], int32Hint: no]
]
invalid: [
code: 'a ^ b'
errors: [
messageId: 'unexpected', data: {operator: '^'}, type: 'BinaryExpression'
]
,
code: 'a | b'
errors: [
messageId: 'unexpected', data: {operator: '|'}, type: 'BinaryExpression'
]
,
code: 'a & b'
errors: [
messageId: 'unexpected', data: {operator: '&'}, type: 'BinaryExpression'
]
,
code: 'a << b'
errors: [
messageId: 'unexpected', data: {operator: '<<'}, type: 'BinaryExpression'
]
,
code: 'a >> b'
errors: [
messageId: 'unexpected', data: {operator: '>>'}, type: 'BinaryExpression'
]
,
code: 'a >>> b'
errors: [
messageId: 'unexpected', data: {operator: '>>>'}, type: 'BinaryExpression'
]
,
code: '~a'
errors: [
messageId: 'unexpected', data: {operator: '~'}, type: 'UnaryExpression'
]
,
code: 'a ^= b'
errors: [
messageId: 'unexpected'
data: operator: '^='
type: 'AssignmentExpression'
]
,
code: 'a |= b'
errors: [
messageId: 'unexpected'
data: operator: '|='
type: 'AssignmentExpression'
]
,
code: 'a &= b'
errors: [
messageId: 'unexpected'
data: operator: '&='
type: 'AssignmentExpression'
]
,
code: 'a <<= b'
errors: [
messageId: 'unexpected'
data: operator: '<<='
type: 'AssignmentExpression'
]
,
code: 'a >>= b'
errors: [
messageId: 'unexpected'
data: operator: '>>='
type: 'AssignmentExpression'
]
,
code: 'a >>>= b'
errors: [
messageId: 'unexpected'
data: operator: '>>>='
type: 'AssignmentExpression'
]
]
| true | ###*
# @fileoverview Tests for no-bitwise rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-bitwise'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-bitwise', rule,
valid: [
'a + b'
'!a'
'a += b'
,
code: '~[1, 2, 3].indexOf(1)', options: [allow: ['~']]
,
code: '~1<<2 is -8', options: [allow: ['~', '<<']]
,
code: '~1<<2 == -8', options: [allow: ['~', '<<']]
,
code: 'a|0', options: [int32Hint: yes]
,
code: 'a|0', options: [allow: ['|'], int32Hint: no]
]
invalid: [
code: 'a ^ b'
errors: [
messageId: 'unexpected', data: {operator: '^'}, type: 'BinaryExpression'
]
,
code: 'a | b'
errors: [
messageId: 'unexpected', data: {operator: '|'}, type: 'BinaryExpression'
]
,
code: 'a & b'
errors: [
messageId: 'unexpected', data: {operator: '&'}, type: 'BinaryExpression'
]
,
code: 'a << b'
errors: [
messageId: 'unexpected', data: {operator: '<<'}, type: 'BinaryExpression'
]
,
code: 'a >> b'
errors: [
messageId: 'unexpected', data: {operator: '>>'}, type: 'BinaryExpression'
]
,
code: 'a >>> b'
errors: [
messageId: 'unexpected', data: {operator: '>>>'}, type: 'BinaryExpression'
]
,
code: '~a'
errors: [
messageId: 'unexpected', data: {operator: '~'}, type: 'UnaryExpression'
]
,
code: 'a ^= b'
errors: [
messageId: 'unexpected'
data: operator: '^='
type: 'AssignmentExpression'
]
,
code: 'a |= b'
errors: [
messageId: 'unexpected'
data: operator: '|='
type: 'AssignmentExpression'
]
,
code: 'a &= b'
errors: [
messageId: 'unexpected'
data: operator: '&='
type: 'AssignmentExpression'
]
,
code: 'a <<= b'
errors: [
messageId: 'unexpected'
data: operator: '<<='
type: 'AssignmentExpression'
]
,
code: 'a >>= b'
errors: [
messageId: 'unexpected'
data: operator: '>>='
type: 'AssignmentExpression'
]
,
code: 'a >>>= b'
errors: [
messageId: 'unexpected'
data: operator: '>>>='
type: 'AssignmentExpression'
]
]
|
[
{
"context": "e + '\\n'\n Email.send\n from: requestor || \"spastai@gmail.com\",\n to: getUserEmail(tripOwner),\n subjec",
"end": 443,
"score": 0.9999254941940308,
"start": 426,
"tag": "EMAIL",
"value": "spastai@gmail.com"
}
] | src/packages/carpool-notifications/server/NotificationService.coffee | ArnoldasSid/vilnius-carpool | 11 | Meteor.startup ()->
Push.Configure Meteor.settings.push
class @NotificationService
notifyRequestRide: (trip)->
user = Meteor.user();
requestor = getUserEmail(user);
tripOwner = Meteor.users.findOne(trip.owner);
emailText = 'User ' + requestor + ' wants to join the trip\n' + trip.fromStreet + ' ' + trip.fromHouse + '-' + trip.toStreet + ' ' + trip.toHouse + '\n'
Email.send
from: requestor || "spastai@gmail.com",
to: getUserEmail(tripOwner),
subject: "Asking to join the trip",
text: emailText
notify: (reason, text, userId, context)->
da ["notifications"], "Notify #{userId}: #{text}"
last = NotificationHistory.findOne({}, sort: addedAt: -1)
badge = 1
if last != null
badge = last?.badge + 1
NotificationHistory.insert {
badge: badge
addedAt: new Date
context: context
userId: userId
reason: reason
}, (error, result) ->
Push.send
from: 'push'
title: 'Carpool'
text: text
badge: badge
payload:
historyId: result
context: context
reason: reason
query: userId: userId
notifyAboutTrip: (reason, userId, trip, context)->
text = "Trip #{reason}: #{trip.fromAddress}-#{trip.toAddress}"
da ["notifications"], "Notify #{userId}: #{text}"
last = NotificationHistory.findOne({}, sort: addedAt: -1)
badge = 1
if last != null
badge = last?.badge + 1
NotificationHistory.insert {
badge: badge
addedAt: new Date
trip: trip._id
context: context
userId: userId
reason: reason
}, (error, result) ->
Push.send
from: 'push'
title: 'Carpool'
text: text
badge: badge
payload:
title: "Trip #{reason}"
trip: trip._id
historyId: result
context: context
reason: reason
query: userId: userId
removeTripNotifications: (tripId)->
NotificationHistory.remove({trip: tripId})
| 89485 | Meteor.startup ()->
Push.Configure Meteor.settings.push
class @NotificationService
notifyRequestRide: (trip)->
user = Meteor.user();
requestor = getUserEmail(user);
tripOwner = Meteor.users.findOne(trip.owner);
emailText = 'User ' + requestor + ' wants to join the trip\n' + trip.fromStreet + ' ' + trip.fromHouse + '-' + trip.toStreet + ' ' + trip.toHouse + '\n'
Email.send
from: requestor || "<EMAIL>",
to: getUserEmail(tripOwner),
subject: "Asking to join the trip",
text: emailText
notify: (reason, text, userId, context)->
da ["notifications"], "Notify #{userId}: #{text}"
last = NotificationHistory.findOne({}, sort: addedAt: -1)
badge = 1
if last != null
badge = last?.badge + 1
NotificationHistory.insert {
badge: badge
addedAt: new Date
context: context
userId: userId
reason: reason
}, (error, result) ->
Push.send
from: 'push'
title: 'Carpool'
text: text
badge: badge
payload:
historyId: result
context: context
reason: reason
query: userId: userId
notifyAboutTrip: (reason, userId, trip, context)->
text = "Trip #{reason}: #{trip.fromAddress}-#{trip.toAddress}"
da ["notifications"], "Notify #{userId}: #{text}"
last = NotificationHistory.findOne({}, sort: addedAt: -1)
badge = 1
if last != null
badge = last?.badge + 1
NotificationHistory.insert {
badge: badge
addedAt: new Date
trip: trip._id
context: context
userId: userId
reason: reason
}, (error, result) ->
Push.send
from: 'push'
title: 'Carpool'
text: text
badge: badge
payload:
title: "Trip #{reason}"
trip: trip._id
historyId: result
context: context
reason: reason
query: userId: userId
removeTripNotifications: (tripId)->
NotificationHistory.remove({trip: tripId})
| true | Meteor.startup ()->
Push.Configure Meteor.settings.push
class @NotificationService
notifyRequestRide: (trip)->
user = Meteor.user();
requestor = getUserEmail(user);
tripOwner = Meteor.users.findOne(trip.owner);
emailText = 'User ' + requestor + ' wants to join the trip\n' + trip.fromStreet + ' ' + trip.fromHouse + '-' + trip.toStreet + ' ' + trip.toHouse + '\n'
Email.send
from: requestor || "PI:EMAIL:<EMAIL>END_PI",
to: getUserEmail(tripOwner),
subject: "Asking to join the trip",
text: emailText
notify: (reason, text, userId, context)->
da ["notifications"], "Notify #{userId}: #{text}"
last = NotificationHistory.findOne({}, sort: addedAt: -1)
badge = 1
if last != null
badge = last?.badge + 1
NotificationHistory.insert {
badge: badge
addedAt: new Date
context: context
userId: userId
reason: reason
}, (error, result) ->
Push.send
from: 'push'
title: 'Carpool'
text: text
badge: badge
payload:
historyId: result
context: context
reason: reason
query: userId: userId
notifyAboutTrip: (reason, userId, trip, context)->
text = "Trip #{reason}: #{trip.fromAddress}-#{trip.toAddress}"
da ["notifications"], "Notify #{userId}: #{text}"
last = NotificationHistory.findOne({}, sort: addedAt: -1)
badge = 1
if last != null
badge = last?.badge + 1
NotificationHistory.insert {
badge: badge
addedAt: new Date
trip: trip._id
context: context
userId: userId
reason: reason
}, (error, result) ->
Push.send
from: 'push'
title: 'Carpool'
text: text
badge: badge
payload:
title: "Trip #{reason}"
trip: trip._id
historyId: result
context: context
reason: reason
query: userId: userId
removeTripNotifications: (tripId)->
NotificationHistory.remove({trip: tripId})
|
[
{
"context": "orts = {\n port : 3000\n mongodb : \"mongodb://128.199.100.77:27017/crawl\"\n google:\n api_key2 : \"AIza",
"end": 74,
"score": 0.9989476799964905,
"start": 60,
"tag": "IP_ADDRESS",
"value": "128.199.100.77"
},
{
"context": "0.77:27017/crawl\"\n googl... | config.coffee | weicong96/smcrawl | 0 | module.exports = {
port : 3000
mongodb : "mongodb://128.199.100.77:27017/crawl"
google:
api_key2 : "AIzaSyDC3_nggRbuvTB6vxYvkS-jKp9MoHZwUT8"
api_key : "AIzaSyC3WqpktiXkPHRcTgWTL0TvEKdOZgqE8HU"
distance : 750
query_interval : 1000 * 60 * 60
query_limit : 1000
instagram :
client_id : "cda2be39cc134a989167b91c21c8fe08"
client_secret : "b216c8d9b5334a369197665f39290ffb"
distance : 5000
query_interval : 1000 * 60 * 60
query_limit : 550
} | 99044 | module.exports = {
port : 3000
mongodb : "mongodb://172.16.58.3:27017/crawl"
google:
api_key2 : "<KEY>"
api_key : "<KEY>"
distance : 750
query_interval : 1000 * 60 * 60
query_limit : 1000
instagram :
client_id : "cda2be39cc134a989167b91c21c8fe08"
client_secret : "<KEY>"
distance : 5000
query_interval : 1000 * 60 * 60
query_limit : 550
} | true | module.exports = {
port : 3000
mongodb : "mongodb://PI:IP_ADDRESS:172.16.58.3END_PI:27017/crawl"
google:
api_key2 : "PI:KEY:<KEY>END_PI"
api_key : "PI:KEY:<KEY>END_PI"
distance : 750
query_interval : 1000 * 60 * 60
query_limit : 1000
instagram :
client_id : "cda2be39cc134a989167b91c21c8fe08"
client_secret : "PI:KEY:<KEY>END_PI"
distance : 5000
query_interval : 1000 * 60 * 60
query_limit : 550
} |
[
{
"context": " username = req.body.username\n password = req.body.password\n\n if not username? or username is \"\" or no",
"end": 1020,
"score": 0.9972081184387207,
"start": 1003,
"tag": "PASSWORD",
"value": "req.body.password"
}
] | server/routes.coffee | dmahlow/system | 0 | # SERVER ROUTES
# --------------------------------------------------------------------------
# Define server routes.
module.exports = (app) ->
# Require Expresser.
expresser = require "expresser"
settings = expresser.settings
# Required modules.
database = require "./database.coffee"
fs = require "fs"
manager = require "./manager.coffee"
security = require "./security.coffee"
sync = require "./sync.coffee"
# Define the package.json.
packageJson = require "./../package.json"
# When was the package.json last modified?
lastModified = null
# MAIN AND ADMIN ROUTES
# ----------------------------------------------------------------------
# The login page and form.
getLogin = (req, res) ->
options = getResponseOptions req
# Render the index page.
res.render "login", options
# The login validation via post.
postLogin = (req, res) ->
username = req.body.username
password = req.body.password
if not username? or username is "" or not password? or password is ""
res.redirect "/login"
else
security.validateUser username, password, (err, result) ->
if err?
res.send "Error: #{JSON.stringify(err)}"
else
res.send "Login validated! #{JSON.stringify(result)}"
# The main index page.
getIndex = (req, res) ->
if not req.user?
res.redirect "/login"
return
options = getResponseOptions req
# Render the index page.
res.render "index", options
# The main index page. Only users with the "admin" role will be able to
# access this page.
getAdmin = (req, res) ->
if not req.user?
res.redirect "/login"
return
options = getResponseOptions req
# Make sure user has admin role.
if options.roles.admin isnt true
res.redirect "/401"
return
# Render the admin page.
res.render "admin", options
# Run the system upgrader.
runUpgrade = (req, res) ->
files = fs.readdirSync "./upgrade/"
for f in files
if f.indexOf(".coffee") > 0
require "../upgrade/" + f
res.send "UPGRADED!!!"
# ENTITY ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Entity Definitions](entityDefinition.html).
getEntityDefinition = (req, res) ->
database.getEntityDefinition getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Entity GET", err
# Add or update an [Entity Definition](entityDefinition.html).
# This will also restart the entity timers on the server [manager](manager.html).
postEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity POST"
return
database.setEntityDefinition getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
manager.initEntityTimers()
res.send minifyJson result
else
sendErrorResponse res, "Entity POST", err
# Patch only the specified properties of an [Entity Definition](entityDefinition.html).
patchEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity PATCH"
return
database.setEntityDefinition getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
manager.initEntityTimers()
res.send minifyJson result
else
sendErrorResponse res, "Entity PATCH", err
# Delete an [Entity Definition](entityDefinition.html).
# This will also restart the entity timers on the server [manager](manager.html).
deleteEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity DELETE"
return
database.deleteEntityDefinition getIdFromRequest(req), (err, result) ->
if not err?
manager.initEntityTimers()
res.send ""
else
sendErrorResponse res, "Entity DELETE", err
# Get the data for the specified [Entity Definition](entityDefinition.html).
# This effectively returns the [Entity Objects Collection](entityObject.html)
# related to the definition.
getEntityObject = (req, res) ->
friendlyId = getIdFromRequest(req)
database.getEntityDefinition {friendlyId: friendlyId}, (err, result) ->
if result? and not err?
filename = "entity.#{friendlyId}.json"
# Results is an array! If it has no models, then the
# specified `friendlyId` wasn't found in the database.
if result.length < 1
sendErrorResponse res, "EntityObject GET - could not find entity definition ID " + friendlyId
return
result = result[0]
# Got the entity definition, now download from its SourceUrl.
sync.download result.sourceUrl, app.downloadsDir + filename, (errorMessage, localFile) ->
if errorMessage?
sendErrorResponse res, "EntityObject GET - download failed: " + localFile, errorMessage
else
fs.readFile localFile, (fileError, fileData) ->
if fileError?
sendErrorResponse res, "EntityObject GET - downloaded, but read failed: " + localFile, fileError
else
data = fileData.toString()
result.data = database.cleanObjectForInsertion data
database.setEntityDefinition result, {patch: true}
res.send data
# If we can't find the matching entity definition, return an error.
else
sendErrorResponse res, "Entity Data GET", err
# AUDIT DATA ROUTES
# ----------------------------------------------------------------------
# Get all [AuditData](auditData.html).
getAuditData = (req, res) ->
database.getAuditData getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Data GET", err
# Add or update an [AuditData](auditData.html).
postAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data POST"
return
database.setAuditData getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
manager.initAuditDataTimers()
res.send minifyJson result
else
sendErrorResponse res, "Audit Data POST", err
# Patch only the specified properties of an [AuditData](auditData.html).
patchAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data PATCH"
return
database.setAuditData getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
manager.initAuditDataTimers()
res.send minifyJson result
else
sendErrorResponse res, "Audit Data PATCH", err
# Delete an [AuditData](auditData.html).
deleteAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data DELETE"
return
database.deleteAuditData getIdFromRequest(req), (err, result) ->
if not err?
manager.initAuditDataTimers()
res.send ""
else
sendErrorResponse res, "Audit Data DELETE", err
# AUDIT EVENT ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Audit Events](auditEvent.html).
getAuditEvent = (req, res) ->
database.getAuditEvent getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event GET", err
# Add or update an [AuditEvent](auditEvent.html).
postAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event POST"
return
database.setAuditEvent getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event POST", err
# Patch only the specified properties of an [AuditEvent](auditEvent.html).
patchAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event PATCH"
return
database.setAuditEvent getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event PATCH", err
# Delete an [AuditEvent](auditEvent.html).
deleteAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event DELETE"
return
database.deleteAuditEvent getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "Audit Event DELETE", err
# VARIABLE ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Variables](variable.html).
getVariable = (req, res) ->
database.getVariable getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Variable GET", err
# Add or update an [Variable](variable.html).
postVariable = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.variables
sendForbiddenResponse res, "Variable POST"
return
database.setVariable getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Variable POST", err
# Patch only the specified properties of a [Variable](variable.html).
patchVariable = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.variables
sendForbiddenResponse res, "Variable PATCH"
return
database.setVariable getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Variable PATCH", err
# Delete a [Variable](variable.html).
deleteVariable = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.variables
sendForbiddenResponse res, "Variable DELETE"
return
database.deleteVariable getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "Variable DELETE", err
# MAP ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Maps](map.html).
getMap = (req, res) ->
database.getMap getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Map GET", err
# Add or update a [Map](map.html).
postMap = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.mapcreate and not roles.mapedit
sendForbiddenResponse res, "Map POST"
return
# Get map from request body.
map = getDocumentFromBody req
# Check if map is read only.
if map.isReadOnly
sendForbiddenResponse res, "Map POST (read-only)"
return
# If map is new, set the `createdByUserId` to the current logged user's ID.
if not map.id? or map.id is ""
map.createdByUserId = req.user.id
# Make sure creation date is set.
if not map.dateCreated? or map.dateCreated is ""
map.dateCreated = new Date()
database.setMap map, null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Map POST", err
# Patch only the specified properties of a [Map](map.html).
patchMap = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.mapedit
sendForbiddenResponse res, "Map PATCH"
return
database.setMap getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Map PATCH", err
# Delete a [Map](map.html).
deleteMap = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.mapedit
sendForbiddenResponse res, "Map DELETE"
return
database.deleteMap getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "Map DELETE", err
# MAP THUMBS
# ----------------------------------------------------------------------
# Generates a thumbnail of the specified [Map](map.html), by passing
# its ID and SVG representation.
postMapThumb = (req, res) ->
svg = req.body.svg
svgPath = settings.path.imagesDir + "mapthumbs/" + req.params["id"] + ".svg"
fs.writeFile svgPath, svg, (err) ->
if err?
sendErrorResponse res, "Map Thumbnail POST", err
else
expresser.imaging.toPng svgPath, {size: settings.images.mapThumbSize}, (err2, result) ->
if err2?
sendErrorResponse res, "Map Thumbnail POST", err2
else
res.send result
# USER ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Users](user.html).
getUser = (req, res) ->
if not req.user?
sendForbiddenResponse res, "User GET"
return
roles = getUserRoles req
# Check if should get the logged user's details.
id = getIdFromRequest(req)
id = req.user.id if id is "logged"
# Return guest user if logged as "guest" and guest access is enabled on settings.
if settings.security.guestEnabled and req.user.id is "guest"
res.send minifyJson security.guestUser
else
database.getUser id, (err, result) ->
if result? and not err?
# Check user permissions.
if not roles.admin and result.id isnt req.user.id
sendForbiddenResponse res, "User GET"
return
# Make sure password fields are removed.
delete result["passwordHash"]
delete result["password"]
res.send minifyJson result
else
sendErrorResponse res, "User GET", err
# Add or update a [Users](user.html).
postUser = (req, res) ->
roles = getUserRoles req
user = getDocumentFromBody req
# Check user permissions.
if not roles.admin and user.id isnt req.user.id
sendForbiddenResponse res, "User POST"
return
# Make sure password hash is set and remove clear text password.
if user.password?
user["passwordHash"] = security.getPasswordHash user.username, user.password
delete user["password"]
database.setUser user, null, (err, result) ->
if result? and not err?
# Make sure password fields are removed.
delete result["passwordHash"]
delete result["password"]
res.send minifyJson result
else
sendErrorResponse res, "User POST", err
# Patch only the specified properties of a [Users](user.html).
patchUser = (req, res) ->
roles = getUserRoles req
user = getDocumentFromBody req
# Check user permissions.
if not roles.admin and user.id isnt req.user.id
sendForbiddenResponse res, "User PATCH"
return
# Make sure user password hash is set.
user = getDocumentFromBody req
if user.password?
user["passwordHash"] = security.getPasswordHash user.username, user.password
delete user["password"]
database.setUser user, {patch: true}, (err, result) ->
if result? and not err?
# Make sure password fields are removed.
delete result["passwordHash"]
delete result["password"]
res.send minifyJson result
else
sendErrorResponse res, "User PATCH", err
# Delete a [Users](user.html).
deleteUser = (req, res) ->
roles = getUserRoles req
# Check user permissions.
if not roles.admin
sendForbiddenResponse res, "User DELETE"
return
database.deleteUser getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "User DELETE", err
# PROXY DOWNLOAD
# ----------------------------------------------------------------------
# Download an external file and serve it to the client, thus acting like a "proxy".
# The local filename is provided after the /downloader/ on the url, and the
# download URL is provided with the post parameter "url".
downloader = (req, res) ->
remoteUrl = req.body.url
filename = req.params["filename"]
sync.download remoteUrl, app.downloadsDir + filename, (errorMessage, localFile) ->
if errorMessage?
sendErrorResponse res, "Download failed: " + localFile, errorMessage
else
fs.readFile localFile, (err, data) ->
if err?
sendErrorResponse res, "Downloaded, read failed: " + localFile, err
else
res.send data.toString()
# STATUS ROUTES
# ----------------------------------------------------------------------
# Get the system status page.
getDocs = (req, res) ->
res.redirect "/docs/README.html"
# Get the system status page.
getStatus = (req, res) ->
res.json { status: "ok" }
# Error 401 (not authorized) page.
get401 = (req, res) ->
res.status 401
res.render "status401", title: settings.general.appTitle,
# Error 404 (not found) page.
get404 = (req, res) ->
res.status 404
res.render "status404", title: settings.general.appTitle,
# HELPER METHODS
# ----------------------------------------------------------------------
# Minify the passed JSON value. Please note that the result will be minified
# ONLY if the `Web.minifyJsonResponse` setting is set to true.
minifyJson = (source) ->
if settings.web.minifyJsonResponse
return expresser.utils.minifyJson source
else
return source
# Return the ID from the request. Give preference to the ID parameter
# on the body first, and then to the parameter passed on the URL path.
getIdFromRequest = (req) ->
if req.body?.id?
return req.body.id
else
return req.params.id
# Return the document from the request body.
# Make sure the document ID is set by checking its body and
# if necessary appending from the request parameters.
getDocumentFromBody = (req) ->
obj = req.body
obj.id = req.params.id if not obj.id?
return obj
# Get default app and server variables to be sent with responses.
getResponseOptions = (req) ->
os = require "os"
moment = require "moment"
host = req.headers["host"]
# Check the last modified date.
lastModified = fs.statSync("./package.json").mtime if not lastModified?
# Set render options.
options =
title: settings.general.appTitle,
version: packageJson.version,
lastModified: moment(lastModified).format("YYYY-MM-DD hh:mm"),
serverUptime: moment.duration(os.uptime(), "s").humanize(),
serverHostname: os.hostname(),
serverPort: settings.web.port,
serverOS: os.type() + " " + os.release(),
serverCpuLoad: os.loadavg()[0].toFixed(2),
serverRamLoad: (os.freemem() / os.totalmem() * 100).toFixed(2),
roles: getUserRoles req
return options
# Return an object with the user roles, based on the authenticated user's
# roles array, mapped as role_name: true. Returns an empty object when no
# user is attached to the request.
getUserRoles = (req) =>
    result = {}
    if req.user?
        result[roleName] = true for roleName in req.user.roles
    result
# When the server can't return a valid result, log the failure and send
# an error response with HTTP status code 500.
sendErrorResponse = (res, method, message) ->
    errorText = "Error: #{method} - #{message}"
    expresser.logger.error "HTTP 500", method, message
    res.statusCode = 500
    res.send errorText
# When the user is not authorized to request a resource, log it and send
# a 403 response with an "access denied" message.
sendForbiddenResponse = (res, method) ->
    deniedText = "Access denied for #{method}."
    expresser.logger.error "HTTP 403", method
    res.statusCode = 403
    res.send deniedText
# SET MAIN AND ADMIN ROUTES
# ----------------------------------------------------------------------

# Set authentication options: session-based Passport auth, using LDAP when
# enabled on settings, falling back to HTTP basic otherwise.
passportOptions = {session: true}
passportStrategy = if expresser.settings.passport.ldap.enabled then "ldapauth" else "basic"

# The login page.
app.get "/login", getLogin
# The login page post validation.
app.post "/login", postLogin

# Main index (authenticated when Passport is enabled).
if expresser.settings.passport.enabled
    app.get "/", security.passport.authenticate(passportStrategy, passportOptions), getIndex
else
    app.get "/", getIndex

# Admin area (authenticated when Passport is enabled).
if expresser.settings.passport.enabled
    app.get "/admin", security.passport.authenticate(passportStrategy, passportOptions), getAdmin
else
    app.get "/admin", getAdmin

# Upgrader page.
app.get "/upgrade", runUpgrade

# SET DATA AND SPECIAL ROUTES
# ----------------------------------------------------------------------

# Entity definition routes.
app.get "/json/entitydefinition", getEntityDefinition
app.get "/json/entitydefinition/:id", getEntityDefinition
app.post "/json/entitydefinition", postEntityDefinition
app.put "/json/entitydefinition/:id", postEntityDefinition
app.patch "/json/entitydefinition/:id", patchEntityDefinition
app.delete "/json/entitydefinition/:id", deleteEntityDefinition

# Entity data (objects) routes.
app.get "/json/entityobject/:id", getEntityObject

# Audit Data routes.
app.get "/json/auditdata", getAuditData
app.get "/json/auditdata/:id", getAuditData
app.post "/json/auditdata", postAuditData
app.put "/json/auditdata/:id", postAuditData
app.patch "/json/auditdata/:id", patchAuditData
app.delete "/json/auditdata/:id", deleteAuditData

# Audit Event routes.
app.get "/json/auditevent", getAuditEvent
app.get "/json/auditevent/:id", getAuditEvent
app.post "/json/auditevent", postAuditEvent
app.put "/json/auditevent/:id", postAuditEvent
app.patch "/json/auditevent/:id", patchAuditEvent
app.delete "/json/auditevent/:id", deleteAuditEvent

# Variable routes.
app.get "/json/variable", getVariable
app.get "/json/variable/:id", getVariable
app.post "/json/variable", postVariable
app.put "/json/variable/:id", postVariable
app.patch "/json/variable/:id", patchVariable
app.delete "/json/variable/:id", deleteVariable

# Map routes.
app.get "/json/map", getMap
app.get "/json/map/:id", getMap
app.post "/json/map", postMap
app.put "/json/map/:id", postMap
app.patch "/json/map/:id", patchMap
app.delete "/json/map/:id", deleteMap

# Map thumbnails.
app.post "/images/mapthumbs/:id", postMapThumb

# User routes.
app.get "/json/user", getUser
app.get "/json/user/:id", getUser
app.post "/json/user", postUser
app.put "/json/user/:id", postUser
app.patch "/json/user/:id", patchUser
app.delete "/json/user/:id", deleteUser

# External downloader.
app.post "/downloader/:filename", downloader

# SET DOCS AND STATUS ROUTES
# ----------------------------------------------------------------------
# Error and status routes.
app.get "/docs", getDocs
app.get "/status", getStatus
app.get "/401", get401
app.get "/404", get404

# SERVER ROUTES
# --------------------------------------------------------------------------
# Define server routes.
module.exports = (app) ->
# Require Expresser.
expresser = require "expresser"
settings = expresser.settings
# Required modules.
database = require "./database.coffee"
fs = require "fs"
manager = require "./manager.coffee"
security = require "./security.coffee"
sync = require "./sync.coffee"
# Define the package.json.
packageJson = require "./../package.json"
# When was the package.json last modified?
lastModified = null
# MAIN AND ADMIN ROUTES
# ----------------------------------------------------------------------
# The login page and form.
getLogin = (req, res) ->
options = getResponseOptions req
# Render the index page.
res.render "login", options
# The login validation via post.
# Reads username and password from the request body (the password line had
# been replaced by a redaction placeholder, which is not valid code; restored
# to mirror the username line), redirects back to /login when either is
# missing or empty, otherwise validates the pair against the security module.
postLogin = (req, res) ->
    username = req.body.username
    password = req.body.password
    if not username? or username is "" or not password? or password is ""
        res.redirect "/login"
    else
        security.validateUser username, password, (err, result) ->
            if err?
                res.send "Error: #{JSON.stringify(err)}"
            else
                res.send "Login validated! #{JSON.stringify(result)}"
# The main index page.
getIndex = (req, res) ->
if not req.user?
res.redirect "/login"
return
options = getResponseOptions req
# Render the index page.
res.render "index", options
# The main index page. Only users with the "admin" role will be able to
# access this page.
getAdmin = (req, res) ->
if not req.user?
res.redirect "/login"
return
options = getResponseOptions req
# Make sure user has admin role.
if options.roles.admin isnt true
res.redirect "/401"
return
# Render the admin page.
res.render "admin", options
# Run the system upgrader.
runUpgrade = (req, res) ->
files = fs.readdirSync "./upgrade/"
for f in files
if f.indexOf(".coffee") > 0
require "../upgrade/" + f
res.send "UPGRADED!!!"
# ENTITY ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Entity Definitions](entityDefinition.html).
getEntityDefinition = (req, res) ->
database.getEntityDefinition getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Entity GET", err
# Add or update an [Entity Definition](entityDefinition.html).
# This will also restart the entity timers on the server [manager](manager.html).
postEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity POST"
return
database.setEntityDefinition getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
manager.initEntityTimers()
res.send minifyJson result
else
sendErrorResponse res, "Entity POST", err
# Patch only the specified properties of an [Entity Definition](entityDefinition.html).
patchEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity PATCH"
return
database.setEntityDefinition getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
manager.initEntityTimers()
res.send minifyJson result
else
sendErrorResponse res, "Entity PATCH", err
# Delete an [Entity Definition](entityDefinition.html).
# This will also restart the entity timers on the server [manager](manager.html).
deleteEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity DELETE"
return
database.deleteEntityDefinition getIdFromRequest(req), (err, result) ->
if not err?
manager.initEntityTimers()
res.send ""
else
sendErrorResponse res, "Entity DELETE", err
# Get the data for the specified [Entity Definition](entityDefinition.html).
# This effectively returns the [Entity Objects Collection](entityObject.html)
# related to the definition: looks the definition up by its friendly ID,
# downloads its `sourceUrl` to the app downloads dir, caches the cleaned data
# back onto the definition (fire-and-forget patch) and sends the raw data.
getEntityObject = (req, res) ->
    friendlyId = getIdFromRequest req
    database.getEntityDefinition {friendlyId: friendlyId}, (err, result) ->
        if result? and not err?
            filename = "entity.#{friendlyId}.json"
            # Result is an array! If it has no models, then the
            # specified `friendlyId` wasn't found in the database.
            if result.length < 1
                # Pass method and message separately; the original stuffed the
                # detail into the method argument and left message undefined.
                sendErrorResponse res, "EntityObject GET", "could not find entity definition ID #{friendlyId}"
                return
            result = result[0]
            # Got the entity definition, now download from its sourceUrl.
            sync.download result.sourceUrl, app.downloadsDir + filename, (errorMessage, localFile) ->
                if errorMessage?
                    sendErrorResponse res, "EntityObject GET - download failed: " + localFile, errorMessage
                else
                    fs.readFile localFile, (fileError, fileData) ->
                        if fileError?
                            sendErrorResponse res, "EntityObject GET - downloaded, but read failed: " + localFile, fileError
                        else
                            data = fileData.toString()
                            result.data = database.cleanObjectForInsertion data
                            # Best-effort cache update; response is sent regardless.
                            database.setEntityDefinition result, {patch: true}
                            res.send data
        # If we can't find the matching entity definition, return an error.
        else
            sendErrorResponse res, "Entity Data GET", err
# AUDIT DATA ROUTES
# ----------------------------------------------------------------------
# Get all [AuditData](auditData.html).
getAuditData = (req, res) ->
database.getAuditData getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Data GET", err
# Add or update an [AuditData](auditData.html).
postAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data POST"
return
database.setAuditData getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
manager.initAuditDataTimers()
res.send minifyJson result
else
sendErrorResponse res, "Audit Data POST", err
# Patch only the specified properties of an [AuditData](auditData.html).
patchAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data PATCH"
return
database.setAuditData getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
manager.initAuditDataTimers()
res.send minifyJson result
else
sendErrorResponse res, "Audit Data PATCH", err
# Delete an [AuditData](auditData.html).
deleteAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data DELETE"
return
database.deleteAuditData getIdFromRequest(req), (err, result) ->
if not err?
manager.initAuditDataTimers()
res.send ""
else
sendErrorResponse res, "Audit Data DELETE", err
# AUDIT EVENT ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Audit Events](auditEvent.html).
getAuditEvent = (req, res) ->
database.getAuditEvent getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event GET", err
# Add or update an [AuditEvent](auditEvent.html).
postAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event POST"
return
database.setAuditEvent getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event POST", err
# Patch only the specified properties of an [AuditEvent](auditEvent.html).
patchAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event PATCH"
return
database.setAuditEvent getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event PATCH", err
# Delete an [AuditEvent](auditEvent.html).
deleteAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event DELETE"
return
database.deleteAuditEvent getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "Audit Event DELETE", err
# VARIABLE ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Variables](variable.html).
getVariable = (req, res) ->
database.getVariable getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Variable GET", err
# Add or update an [Variable](variable.html).
postVariable = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.variables
sendForbiddenResponse res, "Variable POST"
return
database.setVariable getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Variable POST", err
# Patch only the specified properties of a [Variable](variable.html).
patchVariable = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.variables
sendForbiddenResponse res, "Variable PATCH"
return
database.setVariable getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Variable PATCH", err
# Delete a [Variable](variable.html).
deleteVariable = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.variables
sendForbiddenResponse res, "Variable DELETE"
return
database.deleteVariable getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "Variable DELETE", err
# MAP ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Maps](map.html).
getMap = (req, res) ->
database.getMap getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Map GET", err
# Add or update a [Map](map.html).
postMap = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.mapcreate and not roles.mapedit
sendForbiddenResponse res, "Map POST"
return
# Get map from request body.
map = getDocumentFromBody req
# Check if map is read only.
if map.isReadOnly
sendForbiddenResponse res, "Map POST (read-only)"
return
# If map is new, set the `createdByUserId` to the current logged user's ID.
if not map.id? or map.id is ""
map.createdByUserId = req.user.id
# Make sure creation date is set.
if not map.dateCreated? or map.dateCreated is ""
map.dateCreated = new Date()
database.setMap map, null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Map POST", err
# Patch only the specified properties of a [Map](map.html).
patchMap = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.mapedit
sendForbiddenResponse res, "Map PATCH"
return
database.setMap getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Map PATCH", err
# Delete a [Map](map.html).
deleteMap = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.mapedit
sendForbiddenResponse res, "Map DELETE"
return
database.deleteMap getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "Map DELETE", err
# MAP THUMBS
# ----------------------------------------------------------------------
# Generates a thumbnail of the specified [Map](map.html), by passing
# its ID and SVG representation.
postMapThumb = (req, res) ->
svg = req.body.svg
svgPath = settings.path.imagesDir + "mapthumbs/" + req.params["id"] + ".svg"
fs.writeFile svgPath, svg, (err) ->
if err?
sendErrorResponse res, "Map Thumbnail POST", err
else
expresser.imaging.toPng svgPath, {size: settings.images.mapThumbSize}, (err2, result) ->
if err2?
sendErrorResponse res, "Map Thumbnail POST", err2
else
res.send result
# USER ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Users](user.html).
getUser = (req, res) ->
if not req.user?
sendForbiddenResponse res, "User GET"
return
roles = getUserRoles req
# Check if should get the logged user's details.
id = getIdFromRequest(req)
id = req.user.id if id is "logged"
# Return guest user if logged as "guest" and guest access is enabled on settings.
if settings.security.guestEnabled and req.user.id is "guest"
res.send minifyJson security.guestUser
else
database.getUser id, (err, result) ->
if result? and not err?
# Check user permissions.
if not roles.admin and result.id isnt req.user.id
sendForbiddenResponse res, "User GET"
return
# Make sure password fields are removed.
delete result["passwordHash"]
delete result["password"]
res.send minifyJson result
else
sendErrorResponse res, "User GET", err
# Add or update a [Users](user.html).
postUser = (req, res) ->
roles = getUserRoles req
user = getDocumentFromBody req
# Check user permissions.
if not roles.admin and user.id isnt req.user.id
sendForbiddenResponse res, "User POST"
return
# Make sure password hash is set and remove clear text password.
if user.password?
user["passwordHash"] = security.getPasswordHash user.username, user.password
delete user["password"]
database.setUser user, null, (err, result) ->
if result? and not err?
# Make sure password fields are removed.
delete result["passwordHash"]
delete result["password"]
res.send minifyJson result
else
sendErrorResponse res, "User POST", err
# Patch only the specified properties of a [Users](user.html).
# Non-admins may only patch their own user. If a clear-text password is
# supplied, it is hashed into `passwordHash` and removed before persisting.
# Password fields are stripped from the result before it is sent back.
# The redundant second `getDocumentFromBody` call was removed — the document
# was already fetched (and ID-filled) before the permission check.
patchUser = (req, res) ->
    roles = getUserRoles req
    user = getDocumentFromBody req
    # Check user permissions.
    if not roles.admin and user.id isnt req.user.id
        sendForbiddenResponse res, "User PATCH"
        return
    # Make sure password hash is set and remove the clear text password.
    if user.password?
        user["passwordHash"] = security.getPasswordHash user.username, user.password
        delete user["password"]
    database.setUser user, {patch: true}, (err, result) ->
        if result? and not err?
            # Make sure password fields are removed.
            delete result["passwordHash"]
            delete result["password"]
            res.send minifyJson result
        else
            sendErrorResponse res, "User PATCH", err
# Delete a [Users](user.html).
deleteUser = (req, res) ->
roles = getUserRoles req
# Check user permissions.
if not roles.admin
sendForbiddenResponse res, "User DELETE"
return
database.deleteUser getIdFromRequest(req), (err, result) ->
if not err?
res.send ""
else
sendErrorResponse res, "User DELETE", err
# PROXY DOWNLOAD
# ----------------------------------------------------------------------
# Download an external file and serve it to the client, thus acting like a "proxy".
# The local filename is provided after the /downloader/ on the url, and the
# download URL is provided with the post parameter "url".
downloader = (req, res) ->
remoteUrl = req.body.url
filename = req.params["filename"]
sync.download remoteUrl, app.downloadsDir + filename, (errorMessage, localFile) ->
if errorMessage?
sendErrorResponse res, "Download failed: " + localFile, errorMessage
else
fs.readFile localFile, (err, data) ->
if err?
sendErrorResponse res, "Downloaded, read failed: " + localFile, err
else
res.send data.toString()
# STATUS ROUTES
# ----------------------------------------------------------------------
# Get the system status page.
getDocs = (req, res) ->
res.redirect "/docs/README.html"
# Get the system status page.
getStatus = (req, res) ->
res.json { status: "ok" }
# Error 401 (not authorized) page.
# Renders the "status401" view with the app title; the dangling comma after
# the options object was removed — it made the compiler continue the
# `res.render` argument list into the following statement.
get401 = (req, res) ->
    res.status 401
    res.render "status401", title: settings.general.appTitle
# Error 404 (not found) page.
# Renders the "status404" view with the app title; the dangling comma after
# the options object was removed — it made the compiler continue the
# `res.render` argument list into the following statement.
get404 = (req, res) ->
    res.status 404
    res.render "status404", title: settings.general.appTitle
# HELPER METHODS
# ----------------------------------------------------------------------
# Minify the passed JSON value. Please note that the result will be minified
# ONLY if the `Web.minifyJsonResponse` setting is set to true.
minifyJson = (source) ->
if settings.web.minifyJsonResponse
return expresser.utils.minifyJson source
else
return source
# Return the ID from the request. Give preference to the ID parameter
# on the body first, and then to the parameter passed on the URL path.
getIdFromRequest = (req) ->
if req.body?.id?
return req.body.id
else
return req.params.id
# Return the document from the request body.
# Make sure the document ID is set by checking its body and
# if necessary appending from the request parameters.
getDocumentFromBody = (req) ->
obj = req.body
obj.id = req.params.id if not obj.id?
return obj
# Get default app and server variables to be sent with responses.
# Returns the render options object: app title/version, package.json last
# modified date, and a snapshot of server stats (uptime, hostname, port, OS,
# CPU and RAM load) plus the requesting user's roles.
# The unused `host` local (read from req.headers but never referenced) was removed.
getResponseOptions = (req) ->
    os = require "os"
    moment = require "moment"
    # Cache the package.json modified date in the enclosing scope on first use.
    lastModified = fs.statSync("./package.json").mtime if not lastModified?
    # Set render options.
    options =
        title: settings.general.appTitle
        version: packageJson.version
        lastModified: moment(lastModified).format("YYYY-MM-DD hh:mm")
        serverUptime: moment.duration(os.uptime(), "s").humanize()
        serverHostname: os.hostname()
        serverPort: settings.web.port
        serverOS: os.type() + " " + os.release()
        serverCpuLoad: os.loadavg()[0].toFixed(2)
        serverRamLoad: (os.freemem() / os.totalmem() * 100).toFixed(2)
        roles: getUserRoles req
    return options
# Return an object with the user roles, based on the authenticated user's roles array.
# Please note that the "admin" role will be returned always for the online demo.
getUserRoles = (req) =>
roles = {}
return roles if not req.user?
# Set roles object using role_name: true.
for r in req.user.roles
roles[r] = true
return roles
# When the server can't return a valid result,
# send an error response with status code 500.
sendErrorResponse = (res, method, message) ->
expresser.logger.error "HTTP 500", method, message
res.statusCode = 500
res.send "Error: #{method} - #{message}"
# When user is not authorized to request a resource, send an 403 error
# with an "access denied" message.
sendForbiddenResponse = (res, method) ->
expresser.logger.error "HTTP 403", method
res.statusCode = 403
res.send "Access denied for #{method}."
# SET MAIN AND ADMIN ROUTES
# ----------------------------------------------------------------------
# Set authentication options.
passportOptions = {session: true}
passportStrategy = if expresser.settings.passport.ldap.enabled then "ldapauth" else "basic"
# The login page.
app.get "/login", getLogin
# The login page post validation.
app.post "/login", postLogin
# Main index.
if expresser.settings.passport.enabled
app.get "/", security.passport.authenticate(passportStrategy, passportOptions), getIndex
else
app.get "/", getIndex
# Admin area.
if expresser.settings.passport.enabled
app.get "/admin", security.passport.authenticate(passportStrategy, passportOptions), getAdmin
else
app.get "/admin", getAdmin
# Upgrader page.
app.get "/upgrade", runUpgrade
# SET DATA AND SPECIAL ROUTES
# ----------------------------------------------------------------------
# Entity definition routes.
app.get "/json/entitydefinition", getEntityDefinition
app.get "/json/entitydefinition/:id", getEntityDefinition
app.post "/json/entitydefinition", postEntityDefinition
app.put "/json/entitydefinition/:id", postEntityDefinition
app.patch "/json/entitydefinition/:id", patchEntityDefinition
app.delete "/json/entitydefinition/:id", deleteEntityDefinition
# Entity data (objects) routes.
app.get "/json/entityobject/:id", getEntityObject
# Audit Data routes.
app.get "/json/auditdata", getAuditData
app.get "/json/auditdata/:id", getAuditData
app.post "/json/auditdata", postAuditData
app.put "/json/auditdata/:id", postAuditData
app.patch "/json/auditdata/:id", patchAuditData
app.delete "/json/auditdata/:id", deleteAuditData
# Audit Event routes.
app.get "/json/auditevent", getAuditEvent
app.get "/json/auditevent/:id", getAuditEvent
app.post "/json/auditevent", postAuditEvent
app.put "/json/auditevent/:id", postAuditEvent
app.patch "/json/auditevent/:id", patchAuditEvent
app.delete "/json/auditevent/:id", deleteAuditEvent
# Variable routes.
app.get "/json/variable", getVariable
app.get "/json/variable/:id", getVariable
app.post "/json/variable", postVariable
app.put "/json/variable/:id", postVariable
app.patch "/json/variable/:id", patchVariable
app.delete "/json/variable/:id", deleteVariable
# Map routes.
app.get "/json/map", getMap
app.get "/json/map/:id", getMap
app.post "/json/map", postMap
app.put "/json/map/:id", postMap
app.patch "/json/map/:id", patchMap
app.delete "/json/map/:id", deleteMap
# Map thumbnails.
app.post "/images/mapthumbs/:id", postMapThumb
# User routes.
app.get "/json/user", getUser
app.get "/json/user/:id", getUser
app.post "/json/user", postUser
app.put "/json/user/:id", postUser
app.patch "/json/user/:id", patchUser
app.delete "/json/user/:id", deleteUser
# External downloader.
app.post "/downloader/:filename", downloader
# SET DOCS AND STATUS ROUTES
# ----------------------------------------------------------------------
# Error and status routes.
app.get "/docs", getDocs
app.get "/status", getStatus
app.get "/401", get401
app.get "/404", get404

# SERVER ROUTES
# --------------------------------------------------------------------------
# Define server routes.
module.exports = (app) ->
# Require Expresser.
expresser = require "expresser"
settings = expresser.settings
# Required modules.
database = require "./database.coffee"
fs = require "fs"
manager = require "./manager.coffee"
security = require "./security.coffee"
sync = require "./sync.coffee"
# Define the package.json.
packageJson = require "./../package.json"
# When was the package.json last modified?
lastModified = null
# MAIN AND ADMIN ROUTES
# ----------------------------------------------------------------------
# The login page and form.
getLogin = (req, res) ->
options = getResponseOptions req
# Render the index page.
res.render "login", options
# The login validation via post.
# Reads username and password from the request body (the password line had
# been replaced by a redaction token, which is not valid code; restored to
# mirror the username line), redirects back to /login when either is missing
# or empty, otherwise validates the pair against the security module.
postLogin = (req, res) ->
    username = req.body.username
    password = req.body.password
    if not username? or username is "" or not password? or password is ""
        res.redirect "/login"
    else
        security.validateUser username, password, (err, result) ->
            if err?
                res.send "Error: #{JSON.stringify(err)}"
            else
                res.send "Login validated! #{JSON.stringify(result)}"
# The main index page.
getIndex = (req, res) ->
if not req.user?
res.redirect "/login"
return
options = getResponseOptions req
# Render the index page.
res.render "index", options
# The main index page. Only users with the "admin" role will be able to
# access this page.
getAdmin = (req, res) ->
if not req.user?
res.redirect "/login"
return
options = getResponseOptions req
# Make sure user has admin role.
if options.roles.admin isnt true
res.redirect "/401"
return
# Render the admin page.
res.render "admin", options
# Run the system upgrader.
runUpgrade = (req, res) ->
files = fs.readdirSync "./upgrade/"
for f in files
if f.indexOf(".coffee") > 0
require "../upgrade/" + f
res.send "UPGRADED!!!"
# ENTITY ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Entity Definitions](entityDefinition.html).
getEntityDefinition = (req, res) ->
database.getEntityDefinition getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Entity GET", err
# Add or update an [Entity Definition](entityDefinition.html).
# This will also restart the entity timers on the server [manager](manager.html).
postEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity POST"
return
database.setEntityDefinition getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
manager.initEntityTimers()
res.send minifyJson result
else
sendErrorResponse res, "Entity POST", err
# Patch only the specified properties of an [Entity Definition](entityDefinition.html).
patchEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity PATCH"
return
database.setEntityDefinition getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
manager.initEntityTimers()
res.send minifyJson result
else
sendErrorResponse res, "Entity PATCH", err
# Delete an [Entity Definition](entityDefinition.html).
# This will also restart the entity timers on the server [manager](manager.html).
deleteEntityDefinition = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.entities
sendForbiddenResponse res, "Entity DELETE"
return
database.deleteEntityDefinition getIdFromRequest(req), (err, result) ->
if not err?
manager.initEntityTimers()
res.send ""
else
sendErrorResponse res, "Entity DELETE", err
# Get the data for the specified [Entity Definition](entityDefinition.html).
# This effectively returns the [Entity Objects Collection](entityObject.html)
# related to the definition.
getEntityObject = (req, res) ->
friendlyId = getIdFromRequest(req)
database.getEntityDefinition {friendlyId: friendlyId}, (err, result) ->
if result? and not err?
filename = "entity.#{friendlyId}.json"
# Results is an array! If it has no models, then the
# specified `friendlyId` wasn't found in the database.
if result.length < 1
sendErrorResponse res, "EntityObject GET - could not find entity definition ID " + friendlyId
return
result = result[0]
# Got the entity definition, now download from its SourceUrl.
sync.download result.sourceUrl, app.downloadsDir + filename, (errorMessage, localFile) ->
if errorMessage?
sendErrorResponse res, "EntityObject GET - download failed: " + localFile, errorMessage
else
fs.readFile localFile, (fileError, fileData) ->
if fileError?
sendErrorResponse res, "EntityObject GET - downloaded, but read failed: " + localFile, fileError
else
data = fileData.toString()
result.data = database.cleanObjectForInsertion data
database.setEntityDefinition result, {patch: true}
res.send data
# If we can't find the matching entity definition, return an error.
else
sendErrorResponse res, "Entity Data GET", err
# AUDIT DATA ROUTES
# ----------------------------------------------------------------------
# Get all [AuditData](auditData.html).
getAuditData = (req, res) ->
database.getAuditData getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Data GET", err
# Add or update an [AuditData](auditData.html).
postAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data POST"
return
database.setAuditData getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
manager.initAuditDataTimers()
res.send minifyJson result
else
sendErrorResponse res, "Audit Data POST", err
# Patch only the specified properties of an [AuditData](auditData.html).
patchAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data PATCH"
return
database.setAuditData getDocumentFromBody(req), {patch: true}, (err, result) ->
if result? and not err?
manager.initAuditDataTimers()
res.send minifyJson result
else
sendErrorResponse res, "Audit Data PATCH", err
# Delete an [AuditData](auditData.html).
deleteAuditData = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditdata
sendForbiddenResponse res, "Audit Data DELETE"
return
database.deleteAuditData getIdFromRequest(req), (err, result) ->
if not err?
manager.initAuditDataTimers()
res.send ""
else
sendErrorResponse res, "Audit Data DELETE", err
# AUDIT EVENT ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Audit Events](auditEvent.html).
getAuditEvent = (req, res) ->
database.getAuditEvent getIdFromRequest(req), (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event GET", err
# Add or update an [AuditEvent](auditEvent.html).
postAuditEvent = (req, res) ->
roles = getUserRoles req
if not roles.admin and not roles.auditevents
sendForbiddenResponse res, "Audit Event POST"
return
database.setAuditEvent getDocumentFromBody(req), null, (err, result) ->
if result? and not err?
res.send minifyJson result
else
sendErrorResponse res, "Audit Event POST", err
# Patch only the specified properties of an [AuditEvent](auditEvent.html).
# Requires the "admin" or "auditevents" role; responds 403 otherwise.
patchAuditEvent = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.auditevents
        sendForbiddenResponse res, "Audit Event PATCH"
        return
    # {patch: true} merges only the supplied properties into the stored document.
    database.setAuditEvent getDocumentFromBody(req), {patch: true}, (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Audit Event PATCH", err
# Delete an [AuditEvent](auditEvent.html).
# Requires the "admin" or "auditevents" role; responds 403 otherwise.
deleteAuditEvent = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.auditevents
        sendForbiddenResponse res, "Audit Event DELETE"
        return
    database.deleteAuditEvent getIdFromRequest(req), (err, result) ->
        if not err?
            # Deletion has no body to return; send an empty 200 response.
            res.send ""
        else
            sendErrorResponse res, "Audit Event DELETE", err
# VARIABLE ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Variables](variable.html).
# NOTE(review): no role check on reads here — confirm this is intentional.
getVariable = (req, res) ->
    database.getVariable getIdFromRequest(req), (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Variable GET", err
# Add or update a [Variable](variable.html).
# Requires the "admin" or "variables" role; responds 403 otherwise.
postVariable = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.variables
        sendForbiddenResponse res, "Variable POST"
        return
    # Passing null options means a full set (create or replace), not a patch.
    database.setVariable getDocumentFromBody(req), null, (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Variable POST", err
# Patch only the specified properties of a [Variable](variable.html).
# Requires the "admin" or "variables" role; responds 403 otherwise.
patchVariable = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.variables
        sendForbiddenResponse res, "Variable PATCH"
        return
    # {patch: true} merges only the supplied properties into the stored document.
    database.setVariable getDocumentFromBody(req), {patch: true}, (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Variable PATCH", err
# Delete a [Variable](variable.html).
# Restricted to admins and holders of the "variables" role.
deleteVariable = (req, res) ->
    roles = getUserRoles req
    unless roles.admin or roles.variables
        sendForbiddenResponse res, "Variable DELETE"
        return
    targetId = getIdFromRequest req
    database.deleteVariable targetId, (err, result) ->
        if err?
            sendErrorResponse res, "Variable DELETE", err
        else
            res.send ""
# MAP ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Maps](map.html).
# NOTE(review): no role check on reads here — confirm this is intentional.
getMap = (req, res) ->
    database.getMap getIdFromRequest(req), (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Map GET", err
# Add or update a [Map](map.html).
# Requires the "admin", "mapcreate" or "mapedit" role; responds 403 otherwise.
# Read-only maps are rejected, and new maps get creator / creation-date defaults.
postMap = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.mapcreate and not roles.mapedit
        sendForbiddenResponse res, "Map POST"
        return
    # Get map from request body.
    map = getDocumentFromBody req
    # Check if map is read only.
    if map.isReadOnly
        sendForbiddenResponse res, "Map POST (read-only)"
        return
    # If map is new, set the `createdByUserId` to the current logged user's ID.
    if not map.id? or map.id is ""
        map.createdByUserId = req.user.id
    # Make sure creation date is set.
    if not map.dateCreated? or map.dateCreated is ""
        map.dateCreated = new Date()
    database.setMap map, null, (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Map POST", err
# Patch only the specified properties of a [Map](map.html).
# Requires the "admin" or "mapedit" role; responds 403 otherwise.
# NOTE(review): unlike postMap, this path does not reject read-only maps — confirm.
patchMap = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.mapedit
        sendForbiddenResponse res, "Map PATCH"
        return
    # {patch: true} merges only the supplied properties into the stored document.
    database.setMap getDocumentFromBody(req), {patch: true}, (err, result) ->
        if result? and not err?
            res.send minifyJson result
        else
            sendErrorResponse res, "Map PATCH", err
# Delete a [Map](map.html).
# Requires the "admin" or "mapedit" role; responds 403 otherwise.
deleteMap = (req, res) ->
    roles = getUserRoles req
    if not roles.admin and not roles.mapedit
        sendForbiddenResponse res, "Map DELETE"
        return
    database.deleteMap getIdFromRequest(req), (err, result) ->
        if not err?
            # Deletion has no body to return; send an empty 200 response.
            res.send ""
        else
            sendErrorResponse res, "Map DELETE", err
# MAP THUMBS
# ----------------------------------------------------------------------
# Generates a thumbnail of the specified [Map](map.html), by passing
# its ID and SVG representation. The SVG is written to disk first and
# then rasterized to PNG via expresser's imaging helper.
# NOTE(review): no role check here, unlike other POST routes — confirm intent.
# SECURITY(review): `req.params["id"]` is interpolated straight into a file
# path; a crafted ID could escape the mapthumbs directory — validate/sanitize.
postMapThumb = (req, res) ->
    svg = req.body.svg
    svgPath = settings.path.imagesDir + "mapthumbs/" + req.params["id"] + ".svg"
    fs.writeFile svgPath, svg, (err) ->
        if err?
            sendErrorResponse res, "Map Thumbnail POST", err
        else
            # Convert the saved SVG to a PNG thumbnail of the configured size.
            expresser.imaging.toPng svgPath, {size: settings.images.mapThumbSize}, (err2, result) ->
                if err2?
                    sendErrorResponse res, "Map Thumbnail POST", err2
                else
                    res.send result
# USER ROUTES
# ----------------------------------------------------------------------
# Get a single or a collection of [Users](user.html).
# Non-admins may only fetch their own record; password fields are always
# stripped from the response.
getUser = (req, res) ->
    if not req.user?
        sendForbiddenResponse res, "User GET"
        return
    roles = getUserRoles req
    # Check if should get the logged user's details.
    id = getIdFromRequest(req)
    id = req.user.id if id is "logged"
    # Return guest user if logged as "guest" and guest access is enabled on settings.
    if settings.security.guestEnabled and req.user.id is "guest"
        res.send minifyJson security.guestUser
    else
        database.getUser id, (err, result) ->
            if result? and not err?
                # Check user permissions: only admins may view other users.
                if not roles.admin and result.id isnt req.user.id
                    sendForbiddenResponse res, "User GET"
                    return
                # Make sure password fields are removed.
                delete result["passwordHash"]
                delete result["password"]
                res.send minifyJson result
            else
                sendErrorResponse res, "User GET", err
# Add or update a [Users](user.html).
# Non-admins may only update their own account. Clear-text passwords are
# replaced with a hash before persisting, and password fields are stripped
# from the response.
postUser = (req, res) ->
    roles = getUserRoles req
    user = getDocumentFromBody req
    # Check user permissions.
    if not roles.admin and user.id isnt req.user.id
        sendForbiddenResponse res, "User POST"
        return
    # Make sure password hash is set and remove clear text password.
    if user.password?
        user["passwordHash"] = security.getPasswordHash user.username, user.password
        delete user["password"]
    database.setUser user, null, (err, result) ->
        if result? and not err?
            # Make sure password fields are removed.
            delete result["passwordHash"]
            delete result["password"]
            res.send minifyJson result
        else
            sendErrorResponse res, "User POST", err
# Patch only the specified properties of a [Users](user.html).
# Non-admins may only patch their own account. Clear-text passwords are
# replaced with a hash before persisting, and password fields are stripped
# from the response.
patchUser = (req, res) ->
    roles = getUserRoles req
    user = getDocumentFromBody req
    # Check user permissions.
    if not roles.admin and user.id isnt req.user.id
        sendForbiddenResponse res, "User PATCH"
        return
    # Make sure user password hash is set and remove clear text password.
    # (Previously the body was parsed a second time here; the document from
    # the single parse above is reused instead.)
    if user.password?
        user["passwordHash"] = security.getPasswordHash user.username, user.password
        delete user["password"]
    database.setUser user, {patch: true}, (err, result) ->
        if result? and not err?
            # Make sure password fields are removed.
            delete result["passwordHash"]
            delete result["password"]
            res.send minifyJson result
        else
            sendErrorResponse res, "User PATCH", err
# Delete a [Users](user.html).
# Only administrators may remove user accounts.
deleteUser = (req, res) ->
    roles = getUserRoles req
    unless roles.admin
        sendForbiddenResponse res, "User DELETE"
        return
    targetId = getIdFromRequest req
    database.deleteUser targetId, (err, result) ->
        if err?
            sendErrorResponse res, "User DELETE", err
        else
            res.send ""
# PROXY DOWNLOAD
# ----------------------------------------------------------------------
# Download an external file and serve it to the client, thus acting like a "proxy".
# The local filename is provided after the /downloader/ on the url, and the
# download URL is provided with the post parameter "url".
# SECURITY(review): `filename` comes straight from the URL and is concatenated
# into a filesystem path — a crafted value could escape downloadsDir; sanitize.
# NOTE(review): the failure message interpolates `localFile`, which may be
# undefined when the download fails — consider reporting `remoteUrl` instead.
downloader = (req, res) ->
    remoteUrl = req.body.url
    filename = req.params["filename"]
    sync.download remoteUrl, app.downloadsDir + filename, (errorMessage, localFile) ->
        if errorMessage?
            sendErrorResponse res, "Download failed: " + localFile, errorMessage
        else
            # Read the freshly downloaded file back and return it as text.
            fs.readFile localFile, (err, data) ->
                if err?
                    sendErrorResponse res, "Downloaded, read failed: " + localFile, err
                else
                    res.send data.toString()
# STATUS ROUTES
# ----------------------------------------------------------------------
# Redirect to the generated documentation index page.
getDocs = (req, res) ->
    res.redirect "/docs/README.html"
# Get the system status page (simple JSON health check).
getStatus = (req, res) ->
    res.json { status: "ok" }
# Error 401 (not authorized) page.
# Renders the status401 view with the app title. (A stray trailing comma
# after the implicit options object was removed; it passed nothing extra.)
get401 = (req, res) ->
    res.status 401
    res.render "status401", title: settings.general.appTitle
# Error 404 (not found) page.
get404 = (req, res) ->
    res.status 404
    res.render "status404", title: settings.general.appTitle
# HELPER METHODS
# ----------------------------------------------------------------------
# Minify the passed JSON value. Please note that the result will be minified
# ONLY if the `Web.minifyJsonResponse` setting is set to true; otherwise the
# value is returned untouched.
minifyJson = (source) ->
    return source unless settings.web.minifyJsonResponse
    expresser.utils.minifyJson source
# Return the ID from the request. Give preference to the ID parameter
# on the body first, and then to the parameter passed on the URL path.
getIdFromRequest = (req) ->
    # `?` falls through to the path parameter when the body has no id.
    req.body?.id ? req.params.id
# Return the document from the request body.
# Make sure the document ID is set by checking its body and
# if necessary appending from the request parameters.
# Note: mutates and returns req.body itself, as before.
getDocumentFromBody = (req) ->
    doc = req.body
    doc.id ?= req.params.id
    doc
# Get default app and server variables to be sent with responses.
# Returns title/version plus live server stats (uptime, hostname, CPU, RAM).
getResponseOptions = (req) ->
    os = require "os"
    moment = require "moment"
    # NOTE(review): `host` is assigned but never used below — confirm and drop.
    host = req.headers["host"]
    # Check the last modified date.
    # NOTE(review): `lastModified` appears to be a fresh local each call, so this
    # guard is always true and the stat runs every time — confirm scoping intent.
    lastModified = fs.statSync("./package.json").mtime if not lastModified?
    # Set render options.
    options =
        title: settings.general.appTitle,
        version: packageJson.version,
        lastModified: moment(lastModified).format("YYYY-MM-DD hh:mm"),
        serverUptime: moment.duration(os.uptime(), "s").humanize(),
        serverHostname: os.hostname(),
        serverPort: settings.web.port,
        serverOS: os.type() + " " + os.release(),
        serverCpuLoad: os.loadavg()[0].toFixed(2),
        serverRamLoad: (os.freemem() / os.totalmem() * 100).toFixed(2),
        roles: getUserRoles req
    return options
# Return an object with the user roles, based on the authenticated user's roles array.
# Each role name becomes a `roleName: true` key; an empty object means no user.
getUserRoles = (req) =>
    return {} unless req.user?
    result = {}
    result[role] = true for role in req.user.roles
    result
# When the server can't return a valid result,
# send an error response with status code 500.
# `method` identifies the failing route for the log and the client message.
sendErrorResponse = (res, method, message) ->
    expresser.logger.error "HTTP 500", method, message
    res.statusCode = 500
    res.send "Error: #{method} - #{message}"
# When user is not authorized to request a resource, send an 403 error
# with an "access denied" message.
sendForbiddenResponse = (res, method) ->
    expresser.logger.error "HTTP 403", method
    res.statusCode = 403
    res.send "Access denied for #{method}."
# SET MAIN AND ADMIN ROUTES
# ----------------------------------------------------------------------
# Set authentication options. Strategy is LDAP when enabled, else HTTP basic.
passportOptions = {session: true}
passportStrategy = if expresser.settings.passport.ldap.enabled then "ldapauth" else "basic"
# The login page.
app.get "/login", getLogin
# The login page post validation.
app.post "/login", postLogin
# Main index. Wrapped in passport auth only when passport is enabled.
if expresser.settings.passport.enabled
    app.get "/", security.passport.authenticate(passportStrategy, passportOptions), getIndex
else
    app.get "/", getIndex
# Admin area.
if expresser.settings.passport.enabled
    app.get "/admin", security.passport.authenticate(passportStrategy, passportOptions), getAdmin
else
    app.get "/admin", getAdmin
# Upgrader page.
app.get "/upgrade", runUpgrade
# SET DATA AND SPECIAL ROUTES
# ----------------------------------------------------------------------
# Convention: GET reads, POST/PUT upsert, PATCH partial update, DELETE removes.
# Entity definition routes.
app.get "/json/entitydefinition", getEntityDefinition
app.get "/json/entitydefinition/:id", getEntityDefinition
app.post "/json/entitydefinition", postEntityDefinition
app.put "/json/entitydefinition/:id", postEntityDefinition
app.patch "/json/entitydefinition/:id", patchEntityDefinition
app.delete "/json/entitydefinition/:id", deleteEntityDefinition
# Entity data (objects) routes.
app.get "/json/entityobject/:id", getEntityObject
# Audit Data routes.
app.get "/json/auditdata", getAuditData
app.get "/json/auditdata/:id", getAuditData
app.post "/json/auditdata", postAuditData
app.put "/json/auditdata/:id", postAuditData
app.patch "/json/auditdata/:id", patchAuditData
app.delete "/json/auditdata/:id", deleteAuditData
# Audit Event routes.
app.get "/json/auditevent", getAuditEvent
app.get "/json/auditevent/:id", getAuditEvent
app.post "/json/auditevent", postAuditEvent
app.put "/json/auditevent/:id", postAuditEvent
app.patch "/json/auditevent/:id", patchAuditEvent
app.delete "/json/auditevent/:id", deleteAuditEvent
# Variable routes.
app.get "/json/variable", getVariable
app.get "/json/variable/:id", getVariable
app.post "/json/variable", postVariable
app.put "/json/variable/:id", postVariable
app.patch "/json/variable/:id", patchVariable
app.delete "/json/variable/:id", deleteVariable
# Map routes.
app.get "/json/map", getMap
app.get "/json/map/:id", getMap
app.post "/json/map", postMap
app.put "/json/map/:id", postMap
app.patch "/json/map/:id", patchMap
app.delete "/json/map/:id", deleteMap
# Map thumbnails.
app.post "/images/mapthumbs/:id", postMapThumb
# User routes.
app.get "/json/user", getUser
app.get "/json/user/:id", getUser
app.post "/json/user", postUser
app.put "/json/user/:id", postUser
app.patch "/json/user/:id", patchUser
app.delete "/json/user/:id", deleteUser
# External downloader.
app.post "/downloader/:filename", downloader
# SET DOCS AND STATUS ROUTES
# ----------------------------------------------------------------------
# Error and status routes.
app.get "/docs", getDocs
app.get "/status", getStatus
app.get "/401", get401
app.get "/404", get404
[
{
"context": "length + ' users'\n Email.send\n from: 'info@codermania.com'\n bcc: userEmails\n subject: \"Study ",
"end": 1680,
"score": 0.9999317526817322,
"start": 1661,
"tag": "EMAIL",
"value": "info@codermania.com"
}
] | server/lib/StudyGroup.coffee | Elfoslav/codermania | 56 | class @StudyGroup
@getUnreadMessagesCount: (data) ->
check data,
userId: String
studyGroupId: String
return StudyGroupMessages.find
studyGroupId: data.studyGroupId
isReadBy: $nin: [ data.userId ]
.count()
@getUserStudyGroups: (userId) ->
check userId, String
return StudyGroups.find
userIds: $in: [ userId ]
#Send e-mail notifications for all users
@sendUnreadMessagesEmailNotifications: (frequency) ->
#weekly frequency is default, user settings does not need to exist
if frequency is 'weekly'
query =
$or: [
{ 'settings.emailNotifications': frequency }
{ 'settings.emailNotifications': $exists: 0 }
]
else
query = { 'settings.emailNotifications.studyGroupNotifications': frequency }
users = Meteor.users.find query,
fields:
_id: 1
'settings.emailNotifications': 1
emails: 1
userEmails = []
Logger.log 'sending study group notifications, found ' + users.count() + ' users'
users.forEach (user) ->
userStudyGroups = StudyGroup.getUserStudyGroups user._id
totalUnreadMessagesCount = 0
userStudyGroups.forEach (group) ->
totalUnreadMessagesCount += StudyGroup.getUnreadMessagesCount
userId: user._id
studyGroupId: group._id
if totalUnreadMessagesCount > 0 and
user.settings?.emailNotifications?.studyGroupNotifications != 'turn-off'
userEmails.push user.emails?[0]?.address
if userEmails.length > 0
Logger.log 'sending study group notifications to ' + userEmails.length + ' users'
Email.send
from: 'info@codermania.com'
bcc: userEmails
subject: "Study group notifications"
html: """
Hi coder,<br/><br/>
you have unread messages in your study groups. Read them on:
<a href=\"http://www.codermania.com/study-groups\">http://www.codermania.com/study-groups</a>
<br/><br/>
#{App.getEmailFooter({ showUnsubscribe: true })}
"""
| 773 | class @StudyGroup
@getUnreadMessagesCount: (data) ->
check data,
userId: String
studyGroupId: String
return StudyGroupMessages.find
studyGroupId: data.studyGroupId
isReadBy: $nin: [ data.userId ]
.count()
@getUserStudyGroups: (userId) ->
check userId, String
return StudyGroups.find
userIds: $in: [ userId ]
#Send e-mail notifications for all users
@sendUnreadMessagesEmailNotifications: (frequency) ->
#weekly frequency is default, user settings does not need to exist
if frequency is 'weekly'
query =
$or: [
{ 'settings.emailNotifications': frequency }
{ 'settings.emailNotifications': $exists: 0 }
]
else
query = { 'settings.emailNotifications.studyGroupNotifications': frequency }
users = Meteor.users.find query,
fields:
_id: 1
'settings.emailNotifications': 1
emails: 1
userEmails = []
Logger.log 'sending study group notifications, found ' + users.count() + ' users'
users.forEach (user) ->
userStudyGroups = StudyGroup.getUserStudyGroups user._id
totalUnreadMessagesCount = 0
userStudyGroups.forEach (group) ->
totalUnreadMessagesCount += StudyGroup.getUnreadMessagesCount
userId: user._id
studyGroupId: group._id
if totalUnreadMessagesCount > 0 and
user.settings?.emailNotifications?.studyGroupNotifications != 'turn-off'
userEmails.push user.emails?[0]?.address
if userEmails.length > 0
Logger.log 'sending study group notifications to ' + userEmails.length + ' users'
Email.send
from: '<EMAIL>'
bcc: userEmails
subject: "Study group notifications"
html: """
Hi coder,<br/><br/>
you have unread messages in your study groups. Read them on:
<a href=\"http://www.codermania.com/study-groups\">http://www.codermania.com/study-groups</a>
<br/><br/>
#{App.getEmailFooter({ showUnsubscribe: true })}
"""
| true | class @StudyGroup
@getUnreadMessagesCount: (data) ->
check data,
userId: String
studyGroupId: String
return StudyGroupMessages.find
studyGroupId: data.studyGroupId
isReadBy: $nin: [ data.userId ]
.count()
@getUserStudyGroups: (userId) ->
check userId, String
return StudyGroups.find
userIds: $in: [ userId ]
#Send e-mail notifications for all users
@sendUnreadMessagesEmailNotifications: (frequency) ->
#weekly frequency is default, user settings does not need to exist
if frequency is 'weekly'
query =
$or: [
{ 'settings.emailNotifications': frequency }
{ 'settings.emailNotifications': $exists: 0 }
]
else
query = { 'settings.emailNotifications.studyGroupNotifications': frequency }
users = Meteor.users.find query,
fields:
_id: 1
'settings.emailNotifications': 1
emails: 1
userEmails = []
Logger.log 'sending study group notifications, found ' + users.count() + ' users'
users.forEach (user) ->
userStudyGroups = StudyGroup.getUserStudyGroups user._id
totalUnreadMessagesCount = 0
userStudyGroups.forEach (group) ->
totalUnreadMessagesCount += StudyGroup.getUnreadMessagesCount
userId: user._id
studyGroupId: group._id
if totalUnreadMessagesCount > 0 and
user.settings?.emailNotifications?.studyGroupNotifications != 'turn-off'
userEmails.push user.emails?[0]?.address
if userEmails.length > 0
Logger.log 'sending study group notifications to ' + userEmails.length + ' users'
Email.send
from: 'PI:EMAIL:<EMAIL>END_PI'
bcc: userEmails
subject: "Study group notifications"
html: """
Hi coder,<br/><br/>
you have unread messages in your study groups. Read them on:
<a href=\"http://www.codermania.com/study-groups\">http://www.codermania.com/study-groups</a>
<br/><br/>
#{App.getEmailFooter({ showUnsubscribe: true })}
"""
|
[
{
"context": "esponder - touch response recieve class\n# Coded by Hajime Oh-yake 2013.03.25\n#*************************************",
"end": 113,
"score": 0.9998931884765625,
"start": 99,
"tag": "NAME",
"value": "Hajime Oh-yake"
}
] | JSKit/01_JSResponder.coffee | digitarhythm/codeJS | 0 | #*****************************************
# JSResponder - touch response recieve class
# Coded by Hajime Oh-yake 2013.03.25
#*****************************************
class JSResponder extends JSObject
constructor: ->
super()
@_event = null
@_touches = false
didBrowserResize:->
for o in @_objlist
o.didBrowserResize()
locationView:->
pt = new JSPoint()
pt.x = @_event.offsetX
pt.y = @_event.offsetY
return pt
| 33994 | #*****************************************
# JSResponder - touch response recieve class
# Coded by <NAME> 2013.03.25
#*****************************************
class JSResponder extends JSObject
constructor: ->
super()
@_event = null
@_touches = false
didBrowserResize:->
for o in @_objlist
o.didBrowserResize()
locationView:->
pt = new JSPoint()
pt.x = @_event.offsetX
pt.y = @_event.offsetY
return pt
| true | #*****************************************
# JSResponder - touch response recieve class
# Coded by PI:NAME:<NAME>END_PI 2013.03.25
#*****************************************
class JSResponder extends JSObject
constructor: ->
super()
@_event = null
@_touches = false
didBrowserResize:->
for o in @_objlist
o.didBrowserResize()
locationView:->
pt = new JSPoint()
pt.x = @_event.offsetX
pt.y = @_event.offsetY
return pt
|
[
{
"context": "###\n * https://github.com/jkuetemeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg K",
"end": 37,
"score": 0.9987826347351074,
"start": 26,
"tag": "USERNAME",
"value": "jkuetemeier"
},
{
"context": "temeier/gulp-tasks-common\n *\n * Copyright (c) 2014 Jörg Kütemei... | test/tools/common.coffee | kuetemeier/gulp-tasks-common | 0 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 Jörg Kütemeier
* Licensed under the MIT license.
###
# mockup of common for testing
common = {
config: {
taskEnabled : {
enabled: true
}
taskDisabled : {
enabled: false
}
taskOwnTask : {
enabled: true
}
taskOwnTaskDisabled : {
enabled: true
}
}
tasks: {
taskEnabled : {
fn: -> "taskEnabled"
}
taskDisabled : {
fn: -> "taskDisabled"
}
taskOwnTask : {
fn: -> "taskOwnTask"
task: (gulp, config) ->
gulp.task "taskOwnTask", -> "taskOwnTask"
}
taskOwnTaskDisabled : {
fn: -> "taskOwnTask"
task: (gulp, config) ->
gulp.task "taskOwnTask", -> "taskOwnTask"
}
}
}
common.register_tasks = require('../../lib/register_tasks')(common)
module.exports = common
| 177389 | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
# mockup of common for testing
common = {
config: {
taskEnabled : {
enabled: true
}
taskDisabled : {
enabled: false
}
taskOwnTask : {
enabled: true
}
taskOwnTaskDisabled : {
enabled: true
}
}
tasks: {
taskEnabled : {
fn: -> "taskEnabled"
}
taskDisabled : {
fn: -> "taskDisabled"
}
taskOwnTask : {
fn: -> "taskOwnTask"
task: (gulp, config) ->
gulp.task "taskOwnTask", -> "taskOwnTask"
}
taskOwnTaskDisabled : {
fn: -> "taskOwnTask"
task: (gulp, config) ->
gulp.task "taskOwnTask", -> "taskOwnTask"
}
}
}
common.register_tasks = require('../../lib/register_tasks')(common)
module.exports = common
| true | ###
* https://github.com/jkuetemeier/gulp-tasks-common
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
# mockup of common for testing
common = {
config: {
taskEnabled : {
enabled: true
}
taskDisabled : {
enabled: false
}
taskOwnTask : {
enabled: true
}
taskOwnTaskDisabled : {
enabled: true
}
}
tasks: {
taskEnabled : {
fn: -> "taskEnabled"
}
taskDisabled : {
fn: -> "taskDisabled"
}
taskOwnTask : {
fn: -> "taskOwnTask"
task: (gulp, config) ->
gulp.task "taskOwnTask", -> "taskOwnTask"
}
taskOwnTaskDisabled : {
fn: -> "taskOwnTask"
task: (gulp, config) ->
gulp.task "taskOwnTask", -> "taskOwnTask"
}
}
}
common.register_tasks = require('../../lib/register_tasks')(common)
module.exports = common
|
[
{
"context": "dPlugins = []\n\n books = [\n Book.create(name: \"andy\", isSelected: false, year: 2012)\n Book.create(",
"end": 824,
"score": 0.99983811378479,
"start": 820,
"tag": "NAME",
"value": "andy"
},
{
"context": "lected: false, year: 2012)\n Book.create(name: \"tom\... | test/integration/observer_test.coffee | kumavis/ArrayPipeline | 0 | require '../test_helper.coffee'
###
# Test Classes
###
firedPlugins = []
pipeline = {}
Book = Em.Object.extend
isSelected: false
name: null
year: null
Pipe1 = Em.PipePlugin.extend
observes: ['isSelected']
process: (inputArr) ->
firedPlugins.pushObject 'pipe1'
return inputArr
Pipe2 = Em.PipePlugin.extend
observes: ['isSelected', 'name']
process: (inputArr) ->
firedPlugins.pushObject 'pipe2'
return inputArr
Pipe3 = Em.PipePlugin.extend
observes: ['isSelected', 'name', 'year']
process: (inputArr) ->
firedPlugins.pushObject 'pipe3'
return inputArr
Pipe4 = Em.PipePlugin.extend
observes: ['controller.selectedBook']
process: (inputArr) ->
firedPlugins.pushObject 'pipe4'
return inputArr
beforeEach ->
firedPlugins = []
books = [
Book.create(name: "andy", isSelected: false, year: 2012)
Book.create(name: "tom", isSelected: true, year: 2013)
Book.create(name: "huda", isSelected: false, year: 2011)
Book.create(name: "dgeb", isSelected: true, year: 2010)
Book.create(name: "trek", isSelected: true, year: 2010)
Book.create(name: "ebryn", isSelected: false, year: 2011)
Book.create(name: "luke", isSelected: true, year: 2010)
Book.create(name: "paul", isSelected: true, year: 2010)
Book.create(name: "alex", isSelected: false, year: 2010)
Book.create(name: "joey", isSelected: true, year: 2011)
]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: books
plugins: [Pipe1, Pipe2, Pipe3]
###
# Tests
###
describe 'Observers:', ->
describe 'PipePlugin', ->
it 'registers observers for each property in "observes" if it is the firstResponder', ->
# Our fired list should start at 0
firedPlugins.get('length').should.equal(0)
# After getting results, our fired list should be at 3
pipeline.get('results')
firedPlugins.get('length').should.equal(3)
# When we change the name, pipe2 and pipe3 should run
books = pipeline.get('content')
books.get('firstObject').set('name', 'Mooooo')
firedPlugins.toArray().should.deep.equal(['pipe1', 'pipe2', 'pipe3', 'pipe2', 'pipe3'])
# When we change the year, only pipe3 should run
books.get('firstObject').set('year', 1999)
firedPlugins.toArray().should.deep.equal(['pipe1', 'pipe2', 'pipe3', 'pipe2', 'pipe3', 'pipe3'])
it 'registers observers destined for our controller', ->
# setup
books = [Book.create(name: "andy", isSelected: false, year: 2012)]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: books
plugins: [Pipe4]
selectedBook: null
# test initial state
firedPlugins.get('length').should.equal(0)
# set our book on our controller
book = books.get('firstObject')
pipeline.set('selectedBook', book)
# pipe plugin should have refired
firedPlugins.toArray().should.deep.equal(['pipe4'])
it 'is fired when arrayContent is added', ->
pipeline.get('results')
# Our fired list should start at 3
firedPlugins.get('length').should.equal(3)
newBook = Book.create(name:'Andy', isSelected: false, year: 2014)
pipeline.get('content').pushObject(newBook)
# Our fired list should be at 6
firedPlugins.get('length').should.equal(6)
describe 'ArrayPipeline', ->
it 'updates the results set when you change the backing array content', ->
arrayOne = [Book.create(name:'foo')]
arrayTwo = [Book.create(name:'bar')]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: arrayOne
plugins: []
pipeline.get('results').should.deep.equal(arrayOne)
pipeline.set('content', arrayTwo)
pipeline.get('results').should.deep.equal(arrayTwo)
it 'unregisters observers from the previous backing array content when changed', ->
arrayOne = [Book.create(name:'foo')]
arrayTwo = [Book.create(name:'foo')]
Plugin = Em.PipePlugin.extend
observes: ['name']
process: (inputArr) -> return inputArr
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: arrayOne
plugins: [Plugin]
Ember.observersFor(arrayOne.get('firstObject'), 'name').length.should.equal(1)
pipeline.set('content', arrayTwo)
Ember.observersFor(arrayOne.get('firstObject'), 'name').length.should.equal(0)
| 79672 | require '../test_helper.coffee'
###
# Test Classes
###
firedPlugins = []
pipeline = {}
Book = Em.Object.extend
isSelected: false
name: null
year: null
Pipe1 = Em.PipePlugin.extend
observes: ['isSelected']
process: (inputArr) ->
firedPlugins.pushObject 'pipe1'
return inputArr
Pipe2 = Em.PipePlugin.extend
observes: ['isSelected', 'name']
process: (inputArr) ->
firedPlugins.pushObject 'pipe2'
return inputArr
Pipe3 = Em.PipePlugin.extend
observes: ['isSelected', 'name', 'year']
process: (inputArr) ->
firedPlugins.pushObject 'pipe3'
return inputArr
Pipe4 = Em.PipePlugin.extend
observes: ['controller.selectedBook']
process: (inputArr) ->
firedPlugins.pushObject 'pipe4'
return inputArr
beforeEach ->
firedPlugins = []
books = [
Book.create(name: "<NAME>", isSelected: false, year: 2012)
Book.create(name: "<NAME>", isSelected: true, year: 2013)
Book.create(name: "<NAME>", isSelected: false, year: 2011)
Book.create(name: "<NAME>", isSelected: true, year: 2010)
Book.create(name: "<NAME>", isSelected: true, year: 2010)
Book.create(name: "<NAME>", isSelected: false, year: 2011)
Book.create(name: "<NAME>", isSelected: true, year: 2010)
Book.create(name: "<NAME>", isSelected: true, year: 2010)
Book.create(name: "<NAME>", isSelected: false, year: 2010)
Book.create(name: "<NAME>", isSelected: true, year: 2011)
]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: books
plugins: [Pipe1, Pipe2, Pipe3]
###
# Tests
###
describe 'Observers:', ->
describe 'PipePlugin', ->
it 'registers observers for each property in "observes" if it is the firstResponder', ->
# Our fired list should start at 0
firedPlugins.get('length').should.equal(0)
# After getting results, our fired list should be at 3
pipeline.get('results')
firedPlugins.get('length').should.equal(3)
# When we change the name, pipe2 and pipe3 should run
books = pipeline.get('content')
books.get('firstObject').set('name', 'Moooo<NAME>')
firedPlugins.toArray().should.deep.equal(['pipe1', 'pipe2', 'pipe3', 'pipe2', 'pipe3'])
# When we change the year, only pipe3 should run
books.get('firstObject').set('year', 1999)
firedPlugins.toArray().should.deep.equal(['pipe1', 'pipe2', 'pipe3', 'pipe2', 'pipe3', 'pipe3'])
it 'registers observers destined for our controller', ->
# setup
books = [Book.create(name: "<NAME>", isSelected: false, year: 2012)]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: books
plugins: [Pipe4]
selectedBook: null
# test initial state
firedPlugins.get('length').should.equal(0)
# set our book on our controller
book = books.get('firstObject')
pipeline.set('selectedBook', book)
# pipe plugin should have refired
firedPlugins.toArray().should.deep.equal(['pipe4'])
it 'is fired when arrayContent is added', ->
pipeline.get('results')
# Our fired list should start at 3
firedPlugins.get('length').should.equal(3)
newBook = Book.create(name:'<NAME>', isSelected: false, year: 2014)
pipeline.get('content').pushObject(newBook)
# Our fired list should be at 6
firedPlugins.get('length').should.equal(6)
describe 'ArrayPipeline', ->
it 'updates the results set when you change the backing array content', ->
arrayOne = [Book.create(name:'foo')]
arrayTwo = [Book.create(name:'bar')]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: arrayOne
plugins: []
pipeline.get('results').should.deep.equal(arrayOne)
pipeline.set('content', arrayTwo)
pipeline.get('results').should.deep.equal(arrayTwo)
it 'unregisters observers from the previous backing array content when changed', ->
arrayOne = [Book.create(name:'foo')]
arrayTwo = [Book.create(name:'foo')]
Plugin = Em.PipePlugin.extend
observes: ['name']
process: (inputArr) -> return inputArr
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: arrayOne
plugins: [Plugin]
Ember.observersFor(arrayOne.get('firstObject'), 'name').length.should.equal(1)
pipeline.set('content', arrayTwo)
Ember.observersFor(arrayOne.get('firstObject'), 'name').length.should.equal(0)
| true | require '../test_helper.coffee'
###
# Test Classes
###
firedPlugins = []
pipeline = {}
Book = Em.Object.extend
isSelected: false
name: null
year: null
Pipe1 = Em.PipePlugin.extend
observes: ['isSelected']
process: (inputArr) ->
firedPlugins.pushObject 'pipe1'
return inputArr
Pipe2 = Em.PipePlugin.extend
observes: ['isSelected', 'name']
process: (inputArr) ->
firedPlugins.pushObject 'pipe2'
return inputArr
Pipe3 = Em.PipePlugin.extend
observes: ['isSelected', 'name', 'year']
process: (inputArr) ->
firedPlugins.pushObject 'pipe3'
return inputArr
Pipe4 = Em.PipePlugin.extend
observes: ['controller.selectedBook']
process: (inputArr) ->
firedPlugins.pushObject 'pipe4'
return inputArr
beforeEach ->
firedPlugins = []
books = [
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: false, year: 2012)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: true, year: 2013)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: false, year: 2011)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: true, year: 2010)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: true, year: 2010)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: false, year: 2011)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: true, year: 2010)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: true, year: 2010)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: false, year: 2010)
Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: true, year: 2011)
]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: books
plugins: [Pipe1, Pipe2, Pipe3]
###
# Tests
###
describe 'Observers:', ->
describe 'PipePlugin', ->
it 'registers observers for each property in "observes" if it is the firstResponder', ->
# Our fired list should start at 0
firedPlugins.get('length').should.equal(0)
# After getting results, our fired list should be at 3
pipeline.get('results')
firedPlugins.get('length').should.equal(3)
# When we change the name, pipe2 and pipe3 should run
books = pipeline.get('content')
books.get('firstObject').set('name', 'MooooPI:NAME:<NAME>END_PI')
firedPlugins.toArray().should.deep.equal(['pipe1', 'pipe2', 'pipe3', 'pipe2', 'pipe3'])
# When we change the year, only pipe3 should run
books.get('firstObject').set('year', 1999)
firedPlugins.toArray().should.deep.equal(['pipe1', 'pipe2', 'pipe3', 'pipe2', 'pipe3', 'pipe3'])
it 'registers observers destined for our controller', ->
# setup
books = [Book.create(name: "PI:NAME:<NAME>END_PI", isSelected: false, year: 2012)]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: books
plugins: [Pipe4]
selectedBook: null
# test initial state
firedPlugins.get('length').should.equal(0)
# set our book on our controller
book = books.get('firstObject')
pipeline.set('selectedBook', book)
# pipe plugin should have refired
firedPlugins.toArray().should.deep.equal(['pipe4'])
it 'is fired when arrayContent is added', ->
pipeline.get('results')
# Our fired list should start at 3
firedPlugins.get('length').should.equal(3)
newBook = Book.create(name:'PI:NAME:<NAME>END_PI', isSelected: false, year: 2014)
pipeline.get('content').pushObject(newBook)
# Our fired list should be at 6
firedPlugins.get('length').should.equal(6)
describe 'ArrayPipeline', ->
it 'updates the results set when you change the backing array content', ->
arrayOne = [Book.create(name:'foo')]
arrayTwo = [Book.create(name:'bar')]
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: arrayOne
plugins: []
pipeline.get('results').should.deep.equal(arrayOne)
pipeline.set('content', arrayTwo)
pipeline.get('results').should.deep.equal(arrayTwo)
it 'unregisters observers from the previous backing array content when changed', ->
arrayOne = [Book.create(name:'foo')]
arrayTwo = [Book.create(name:'foo')]
Plugin = Em.PipePlugin.extend
observes: ['name']
process: (inputArr) -> return inputArr
pipeline = Em.ArrayProxy.createWithMixins Em.ArrayPipelineMixin,
content: arrayOne
plugins: [Plugin]
Ember.observersFor(arrayOne.get('firstObject'), 'name').length.should.equal(1)
pipeline.set('content', arrayTwo)
Ember.observersFor(arrayOne.get('firstObject'), 'name').length.should.equal(0)
|
[
{
"context": "into the database\nuser.save \"user1\", \"password\", \"user1@gmail.com\", (err) -> \n\tthrow err if err\n\tconsole.log \"user ",
"end": 781,
"score": 0.9997501373291016,
"start": 766,
"tag": "EMAIL",
"value": "user1@gmail.com"
},
{
"context": "into the database\nuser.s... | src/populateDB.coffee | steven9neuf/ast_project | 0 | # require all the modules needed
express = require 'express'
bodyparser = require 'body-parser'
morgan = require 'morgan'
session = require 'express-session'
SessionStore = require('level-session-store')(session)
level = require 'level'
levelws = require 'level-ws'
db = levelws level "#{__dirname}/../db"
metrics = require('./metrics')(db)
user = require('./user')(db)
#####################################################################
# Populate the database
#####################################################################
#####################################################################
# USERS
#####################################################################
# Create and save the user 1 into the database
user.save "user1", "password", "user1@gmail.com", (err) ->
throw err if err
console.log "user 1 added"
# Create and save the user 2 into the database
user.save "user2", "password", "user2@gmail.com", (err) ->
throw err if err
console.log "user 2 added"
#####################################################################
# METRICS
#####################################################################
# Save metrics for user 1 into the database
metrics.save "user1", [
{timestamp:(new Date '2013-11-04 14:00 UTC').getTime(), value:12}
,
{timestamp:(new Date '2015-12-18 14:00 UTC').getTime(), value:13}
], (err) ->
throw err if err
console.log "metrics population for user 1 terminated"
# Save metrics for user 2 into the database
metrics.save "user2", [
{timestamp:(new Date '2011-10-14 15:00 UTC').getTime(), value:8}
,
{timestamp:(new Date '2017-05-28 11:00 UTC').getTime(), value:5}
,
{timestamp:(new Date '2015-05-28 11:00 UTC').getTime(), value:9}
,
{timestamp:(new Date '2016-05-28 11:00 UTC').getTime(), value:1}
,
{timestamp:(new Date '2009-05-28 11:00 UTC').getTime(), value:15}
,
{timestamp:(new Date '2015-05-28 11:00 UTC').getTime(), value:7}
,
{timestamp:(new Date '2011-05-28 11:00 UTC').getTime(), value:13}
,
{timestamp:(new Date '2009-05-28 11:00 UTC').getTime(), value:4}
,
{timestamp:(new Date '2012-05-28 11:00 UTC').getTime(), value:10}
], (err) ->
throw err if err
console.log "metrics population for user 2 terminated"
| 97855 | # require all the modules needed
express = require 'express'
bodyparser = require 'body-parser'
morgan = require 'morgan'
session = require 'express-session'
SessionStore = require('level-session-store')(session)
level = require 'level'
levelws = require 'level-ws'
db = levelws level "#{__dirname}/../db"
metrics = require('./metrics')(db)
user = require('./user')(db)
#####################################################################
# Populate the database
#####################################################################
#####################################################################
# USERS
#####################################################################
# Create and save the user 1 into the database
user.save "user1", "password", "<EMAIL>", (err) ->
throw err if err
console.log "user 1 added"
# Create and save the user 2 into the database
user.save "user2", "password", "<EMAIL>", (err) ->
throw err if err
console.log "user 2 added"
#####################################################################
# METRICS
#####################################################################
# Save metrics for user 1 into the database
metrics.save "user1", [
{timestamp:(new Date '2013-11-04 14:00 UTC').getTime(), value:12}
,
{timestamp:(new Date '2015-12-18 14:00 UTC').getTime(), value:13}
], (err) ->
throw err if err
console.log "metrics population for user 1 terminated"
# Save metrics for user 2 into the database
metrics.save "user2", [
{timestamp:(new Date '2011-10-14 15:00 UTC').getTime(), value:8}
,
{timestamp:(new Date '2017-05-28 11:00 UTC').getTime(), value:5}
,
{timestamp:(new Date '2015-05-28 11:00 UTC').getTime(), value:9}
,
{timestamp:(new Date '2016-05-28 11:00 UTC').getTime(), value:1}
,
{timestamp:(new Date '2009-05-28 11:00 UTC').getTime(), value:15}
,
{timestamp:(new Date '2015-05-28 11:00 UTC').getTime(), value:7}
,
{timestamp:(new Date '2011-05-28 11:00 UTC').getTime(), value:13}
,
{timestamp:(new Date '2009-05-28 11:00 UTC').getTime(), value:4}
,
{timestamp:(new Date '2012-05-28 11:00 UTC').getTime(), value:10}
], (err) ->
throw err if err
console.log "metrics population for user 2 terminated"
| true | # require all the modules needed
express = require 'express'
bodyparser = require 'body-parser'
morgan = require 'morgan'
session = require 'express-session'
SessionStore = require('level-session-store')(session)
level = require 'level'
levelws = require 'level-ws'
db = levelws level "#{__dirname}/../db"
metrics = require('./metrics')(db)
user = require('./user')(db)
#####################################################################
# Populate the database
#####################################################################
#####################################################################
# USERS
#####################################################################
# Create and save the user 1 into the database
user.save "user1", "password", "PI:EMAIL:<EMAIL>END_PI", (err) ->
throw err if err
console.log "user 1 added"
# Create and save the user 2 into the database
user.save "user2", "password", "PI:EMAIL:<EMAIL>END_PI", (err) ->
throw err if err
console.log "user 2 added"
#####################################################################
# METRICS
#####################################################################
# Save metrics for user 1 into the database
metrics.save "user1", [
{timestamp:(new Date '2013-11-04 14:00 UTC').getTime(), value:12}
,
{timestamp:(new Date '2015-12-18 14:00 UTC').getTime(), value:13}
], (err) ->
throw err if err
console.log "metrics population for user 1 terminated"
# Save metrics for user 2 into the database
metrics.save "user2", [
{timestamp:(new Date '2011-10-14 15:00 UTC').getTime(), value:8}
,
{timestamp:(new Date '2017-05-28 11:00 UTC').getTime(), value:5}
,
{timestamp:(new Date '2015-05-28 11:00 UTC').getTime(), value:9}
,
{timestamp:(new Date '2016-05-28 11:00 UTC').getTime(), value:1}
,
{timestamp:(new Date '2009-05-28 11:00 UTC').getTime(), value:15}
,
{timestamp:(new Date '2015-05-28 11:00 UTC').getTime(), value:7}
,
{timestamp:(new Date '2011-05-28 11:00 UTC').getTime(), value:13}
,
{timestamp:(new Date '2009-05-28 11:00 UTC').getTime(), value:4}
,
{timestamp:(new Date '2012-05-28 11:00 UTC').getTime(), value:10}
], (err) ->
throw err if err
console.log "metrics population for user 2 terminated"
|
[
{
"context": "128182440000\n8;list;[1,2,3]\n9;object;\"{\"\"name\"\":\"\"Egon\"\"}\"\n10;complex;\"[{\"\"name\"\":\"\"Valentina\"\"},{\"\"name",
"end": 504,
"score": 0.9992433786392212,
"start": 500,
"tag": "NAME",
"value": "Egon"
},
{
"context": "ct;\"{\"\"name\"\":\"\"Egon\"\"}\"\n... | src/type/csv.coffee | alinex/node-formatter | 0 | ###
CSV
=======================================================
The CSV format should only be used with table like data which is in the form of
a list of lists. See the [table](http://alinex.github.io/node-table) package to
transform and work with such data.
Autodetection is not possible here.
Common file extension `csv`.
``` csv
num;type;object
1;null;
2;undefined;
3;boolean;1
4;number;5.6
5;text;Hello
6;quotes;"Give me a ""hand up"""
7;date;128182440000
8;list;[1,2,3]
9;object;"{""name"":""Egon""}"
10;complex;"[{""name"":""Valentina""},{""name"":""Nadine""},{""name"":""Sven""}]"
```
While some types are fully supported: string, number
Others are partly supported and won't be automatically detectable:
- boolean as integer
- date as unix time integer
- null, undefined and empty strings are stored the same way and wil be red as null
And lastly complex sub objects will be stored as JSON text and be automatically
parsed on read again.
__Format Options:__
- `columns` - `Array` List of fields, applied when transform returns an object,
order matters, columns are auto discovered in the first record
- `delimiter` - `String` Set the field delimiter (default: ';')
- `escape` - `String` Set the escape character (Default: '"')
- `quote` - `String` Optionnal character surrounding a field, one character only (Default: '"')
- `quoted` - `Boolean` quote all the non-empty fields even if not required (default: false)
- `quotedEmpty` - `Boolean` quote empty fields? (default: false)
- `quotedString` - `Boolean` quote all fields of type string even if not required (default: false)
__Parse Options:__
- `delimiter` - `String` Set the field delimiter (default: ';')
- `quote` - `String` Optionnal character surrounding a field, one character only (Default: '"')
- `escape` - `String` Set the escape character (Default: '"')
- `comment` - `String` Treat all the characters after this one as a comment, default to ''
(disabled).
###
# Node Modules
# -------------------------------------------------
parser = null # load on demand
stringifyer = null # load on demand
# Implementation
# -------------------------------------------------
# @param {Object} obj to be formatted
# @param {Object} [options] format options like described above
# @param {Function(Error, String)} cb callback will be called with result
exports.stringify = (obj, options, cb) ->
obj = obj.data if obj.data
stringifyer ?= require 'csv-stringify'
stringifyer obj,
columns: options?.columns
delimiter: options?.delimiter ? ';'
quote: options?.quote ? '"'
escape: options?.escape ? '"'
header: true
quoted: options?.quoted ? false
quotedEmpty: options?.quotedEmpty ? false
quotedString: options?.quotedString ? false
, cb
# @param {String} text to be parsed
# @param {Object} [options] parse options like described above
# @param {Function(Error, Object)} cb callback will be called with result
exports.parse = (text, options, cb) ->
parser ?= require 'csv-parse'
parser text,
delimiter: options?.delimiter ? ';'
quote: options?.quote ? '"'
escape: options?.escape ? '"'
comment: options?.comment ? ''
auto_parse: true
auto_parse_date: true
, (err, obj) ->
return cb err if err
# optimize objects from json
cb null, obj.map (line) ->
line.map (field) ->
return null if field is ''
return field unless typeof field is 'string' and field[0] in ['[', '{']
try
result = JSON.parse field
catch
return field
result
| 107657 | ###
CSV
=======================================================
The CSV format should only be used with table like data which is in the form of
a list of lists. See the [table](http://alinex.github.io/node-table) package to
transform and work with such data.
Autodetection is not possible here.
Common file extension `csv`.
``` csv
num;type;object
1;null;
2;undefined;
3;boolean;1
4;number;5.6
5;text;Hello
6;quotes;"Give me a ""hand up"""
7;date;128182440000
8;list;[1,2,3]
9;object;"{""name"":""<NAME>""}"
10;complex;"[{""name"":""<NAME>""},{""name"":""<NAME>""},{""name"":""<NAME>""}]"
```
While some types are fully supported: string, number
Others are partly supported and won't be automatically detectable:
- boolean as integer
- date as unix time integer
- null, undefined and empty strings are stored the same way and wil be red as null
And lastly complex sub objects will be stored as JSON text and be automatically
parsed on read again.
__Format Options:__
- `columns` - `Array` List of fields, applied when transform returns an object,
order matters, columns are auto discovered in the first record
- `delimiter` - `String` Set the field delimiter (default: ';')
- `escape` - `String` Set the escape character (Default: '"')
- `quote` - `String` Optionnal character surrounding a field, one character only (Default: '"')
- `quoted` - `Boolean` quote all the non-empty fields even if not required (default: false)
- `quotedEmpty` - `Boolean` quote empty fields? (default: false)
- `quotedString` - `Boolean` quote all fields of type string even if not required (default: false)
__Parse Options:__
- `delimiter` - `String` Set the field delimiter (default: ';')
- `quote` - `String` Optionnal character surrounding a field, one character only (Default: '"')
- `escape` - `String` Set the escape character (Default: '"')
- `comment` - `String` Treat all the characters after this one as a comment, default to ''
(disabled).
###
# Node Modules
# -------------------------------------------------
parser = null # load on demand
stringifyer = null # load on demand
# Implementation
# -------------------------------------------------
# @param {Object} obj to be formatted
# @param {Object} [options] format options like described above
# @param {Function(Error, String)} cb callback will be called with result
exports.stringify = (obj, options, cb) ->
obj = obj.data if obj.data
stringifyer ?= require 'csv-stringify'
stringifyer obj,
columns: options?.columns
delimiter: options?.delimiter ? ';'
quote: options?.quote ? '"'
escape: options?.escape ? '"'
header: true
quoted: options?.quoted ? false
quotedEmpty: options?.quotedEmpty ? false
quotedString: options?.quotedString ? false
, cb
# @param {String} text to be parsed
# @param {Object} [options] parse options like described above
# @param {Function(Error, Object)} cb callback will be called with result
exports.parse = (text, options, cb) ->
parser ?= require 'csv-parse'
parser text,
delimiter: options?.delimiter ? ';'
quote: options?.quote ? '"'
escape: options?.escape ? '"'
comment: options?.comment ? ''
auto_parse: true
auto_parse_date: true
, (err, obj) ->
return cb err if err
# optimize objects from json
cb null, obj.map (line) ->
line.map (field) ->
return null if field is ''
return field unless typeof field is 'string' and field[0] in ['[', '{']
try
result = JSON.parse field
catch
return field
result
| true | ###
CSV
=======================================================
The CSV format should only be used with table like data which is in the form of
a list of lists. See the [table](http://alinex.github.io/node-table) package to
transform and work with such data.
Autodetection is not possible here.
Common file extension `csv`.
``` csv
num;type;object
1;null;
2;undefined;
3;boolean;1
4;number;5.6
5;text;Hello
6;quotes;"Give me a ""hand up"""
7;date;128182440000
8;list;[1,2,3]
9;object;"{""name"":""PI:NAME:<NAME>END_PI""}"
10;complex;"[{""name"":""PI:NAME:<NAME>END_PI""},{""name"":""PI:NAME:<NAME>END_PI""},{""name"":""PI:NAME:<NAME>END_PI""}]"
```
While some types are fully supported: string, number
Others are partly supported and won't be automatically detectable:
- boolean as integer
- date as unix time integer
- null, undefined and empty strings are stored the same way and wil be red as null
And lastly complex sub objects will be stored as JSON text and be automatically
parsed on read again.
__Format Options:__
- `columns` - `Array` List of fields, applied when transform returns an object,
order matters, columns are auto discovered in the first record
- `delimiter` - `String` Set the field delimiter (default: ';')
- `escape` - `String` Set the escape character (Default: '"')
- `quote` - `String` Optionnal character surrounding a field, one character only (Default: '"')
- `quoted` - `Boolean` quote all the non-empty fields even if not required (default: false)
- `quotedEmpty` - `Boolean` quote empty fields? (default: false)
- `quotedString` - `Boolean` quote all fields of type string even if not required (default: false)
__Parse Options:__
- `delimiter` - `String` Set the field delimiter (default: ';')
- `quote` - `String` Optionnal character surrounding a field, one character only (Default: '"')
- `escape` - `String` Set the escape character (Default: '"')
- `comment` - `String` Treat all the characters after this one as a comment, default to ''
(disabled).
###
# Node Modules
# -------------------------------------------------
parser = null # load on demand
stringifyer = null # load on demand
# Implementation
# -------------------------------------------------
# @param {Object} obj to be formatted
# @param {Object} [options] format options like described above
# @param {Function(Error, String)} cb callback will be called with result
exports.stringify = (obj, options, cb) ->
obj = obj.data if obj.data
stringifyer ?= require 'csv-stringify'
stringifyer obj,
columns: options?.columns
delimiter: options?.delimiter ? ';'
quote: options?.quote ? '"'
escape: options?.escape ? '"'
header: true
quoted: options?.quoted ? false
quotedEmpty: options?.quotedEmpty ? false
quotedString: options?.quotedString ? false
, cb
# @param {String} text to be parsed
# @param {Object} [options] parse options like described above
# @param {Function(Error, Object)} cb callback will be called with result
exports.parse = (text, options, cb) ->
parser ?= require 'csv-parse'
parser text,
delimiter: options?.delimiter ? ';'
quote: options?.quote ? '"'
escape: options?.escape ? '"'
comment: options?.comment ? ''
auto_parse: true
auto_parse_date: true
, (err, obj) ->
return cb err if err
# optimize objects from json
cb null, obj.map (line) ->
line.map (field) ->
return null if field is ''
return field unless typeof field is 'string' and field[0] in ['[', '{']
try
result = JSON.parse field
catch
return field
result
|
[
{
"context": "không đúng ')\n paramsPass=\n password : $scope.changePass.old\n new_password : $scope.changePass.new\n ",
"end": 5642,
"score": 0.9971591234207153,
"start": 5621,
"tag": "PASSWORD",
"value": "$scope.changePass.old"
},
{
"context": "ord : $scope.ch... | app/core/controllers/profile.coffee | xitrumuit1991/ls-yuptv | 0 | "use strict"
route = ($stateProvider, GlobalConfig)->
$stateProvider
.state "base.profile",
url : "profile"
views :
"main@" :
templateUrl : "/templates/profile/view.html"
controller : "ProfileCtrl"
route.$inject = ['$stateProvider', 'GlobalConfig']
ctrl = ($rootScope,
$scope, $timeout, $location,
$window, $state, $stateParams, ApiService, $http,
GlobalConfig, $interval, UtilityService, Upload) ->
if !$rootScope.user or !localStorage.user or !localStorage.token
return $state.go 'base',{},{reload : true}
$scope.birthDate =
dt : if $rootScope.user then new Date($rootScope.user.birthday) else (new Date())
opened : false
min : new Date()
$scope.openDate = () ->
$timeout () ->
$scope.birthDate.opened = true;
$scope.changePass =
old : ''
new : ''
renew : ''
$scope.tabActive = 'user-information'
$scope.savedVideo =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.following =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.follower =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.getSavedVideo = ()->
return UtilityService.notifyError('Không thể lấy danh sách video') if !$rootScope.user or !$rootScope.user.Room
params =
page : $scope.savedVideo.page
limit : $scope.savedVideo.limit
roomId: $rootScope.user.Room.id
ApiService.getSavedVideo(params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách video') if error
if result and result.error
return UtilityService.notifyError(result.message)
$scope.savedVideo.items = result.videos
$scope.savedVideo.total_page = result.attr.total_page
$scope.savedVideo.total_item = result.attr.total_item
)
$scope.tabVideoPageChange = ()->
$scope.getSavedVideo()
$scope.cancelFollowing = (item, index)->
return unless item
ApiService.unFollowIdol {roomId:item.id},(error, result)->
return if error
return UtilityService.notifyError(result.message) if result and result.error
UtilityService.notifySuccess(result.message)
# $scope.following.items[index].isFollow = false
$scope.following.items.splice(index,1)
$scope.getFollowing = ()->
return UtilityService.notifyError('Không thể lấy danh sách đang theo dõi ') if !$rootScope.user
params =
page : $scope.following.page
limit : $scope.following.limit
userId: $rootScope.user.id
ApiService.getUserFollowing params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách đang theo dõi') if error
return UtilityService.notifyError(result.message) if result and result.error
$scope.following.items = result.rooms
$scope.following.total_page = result.attr.total_page
$scope.following.total_item = result.attr.total_item
console.log 'getFollowing result', $scope.following
$scope.tabFollowingPageChange = ()->
console.log '$scope.following.page',$scope.following.page
$scope.getFollowing()
$scope.getFollower = ()->
return UtilityService.notifyError('Không thể lấy danh sách người theo dõi ') if !$rootScope.user
params =
page : $scope.follower.page
limit : $scope.follower.limit
userId: $rootScope.user.id
ApiService.getUserFollower params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách người theo dõi') if error
if result and result.error
return UtilityService.notifyError(result.message)
$scope.follower.items = result.rooms
$scope.follower.total_page = result.attr.total_page
$scope.follower.total_item = result.attr.total_item
$scope.tabFollowerPageChange = ()->
$scope.getFollower()
$scope.changeTab = (tab)->
$scope.tabActive = tab
$scope.uploadNewAvatar = (file, errFiles)->
console.log 'fileSelect=',file
console.log 'errFiles=',errFiles
console.log 'errFiles[0]=',errFiles[0]
if errFiles and errFiles[0]
return UtilityService.notifyError( "ERROR: #{errFiles[0].$error} #{errFiles[0].$errorParam}" )
if file
file.upload = Upload.upload({
url : GlobalConfig.API_URL + 'user/avatar'
data : {avatar : file}
method : 'PUT'
})
file.upload.then ((response) ->
console.log 'response',response
if response and response.status == 200
UtilityService.notifySuccess('Thay đổi ảnh đại diện thành công')
ApiService.getProfile {},(error, result)->
return if error
UtilityService.setUserProfile(result) if result
return
return
), ((response) ->
console.log 'response',response
UtilityService.notifyError("#{response.status} : #{response.statusText}" )
return
), (evt) ->
console.log 'evt.loaded',evt.loaded
console.log 'evt.total',evt.total
# file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total))
return
return
$scope.updateProfile = ()->
if $scope.changePass.old
console.log 'change pass'
if !$scope.changePass.new or !$scope.changePass.renew
return UtilityService.notifyError('Vui lòng nhập mật khẩu mới ')
return UtilityService.notifyError('Mật khẩu ít nhất 6 kí tự') if $scope.changePass.new.length < 6
if $scope.changePass.new != $scope.changePass.renew
return UtilityService.notifyError('Mật khẩu xác nhận không đúng ')
paramsPass=
password : $scope.changePass.old
new_password : $scope.changePass.new
flush_token:false
ApiService.changePassword(paramsPass,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
$rootScope.user.birthday = $scope.birthDate.dt
console.log 'param update profile=', $rootScope.user
ApiService.updateUserProfile($rootScope.user,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
UtilityService.notifySuccess( 'Cập nhập tài khoản thành công')
UtilityService.setUserProfile(result)
)
)
return
return UtilityService.notifyError( 'Vui lòng nhập mật khẩu hiện tại') if $scope.changePass.new or $scope.changePass.renew
$rootScope.user.birthday = $scope.birthDate.dt
ApiService.updateUserProfile($rootScope.user,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
UtilityService.notifySuccess( 'Cập nhập tài khoản thành công')
UtilityService.setUserProfile(result)
)
#call api here
$scope.getSavedVideo()
$scope.getFollowing()
$scope.getFollower()
console.log '$rootScope.loginBy',$rootScope.loginBy
ctrl.$inject = [
'$rootScope', '$scope', '$timeout', '$location',
'$window', '$state', '$stateParams', 'ApiService', '$http',
'GlobalConfig', '$interval', 'UtilityService' , 'Upload'
]
angular
.module("app")
.config route
.controller "ProfileCtrl", ctrl
| 175035 | "use strict"
route = ($stateProvider, GlobalConfig)->
$stateProvider
.state "base.profile",
url : "profile"
views :
"main@" :
templateUrl : "/templates/profile/view.html"
controller : "ProfileCtrl"
route.$inject = ['$stateProvider', 'GlobalConfig']
ctrl = ($rootScope,
$scope, $timeout, $location,
$window, $state, $stateParams, ApiService, $http,
GlobalConfig, $interval, UtilityService, Upload) ->
if !$rootScope.user or !localStorage.user or !localStorage.token
return $state.go 'base',{},{reload : true}
$scope.birthDate =
dt : if $rootScope.user then new Date($rootScope.user.birthday) else (new Date())
opened : false
min : new Date()
$scope.openDate = () ->
$timeout () ->
$scope.birthDate.opened = true;
$scope.changePass =
old : ''
new : ''
renew : ''
$scope.tabActive = 'user-information'
$scope.savedVideo =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.following =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.follower =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.getSavedVideo = ()->
return UtilityService.notifyError('Không thể lấy danh sách video') if !$rootScope.user or !$rootScope.user.Room
params =
page : $scope.savedVideo.page
limit : $scope.savedVideo.limit
roomId: $rootScope.user.Room.id
ApiService.getSavedVideo(params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách video') if error
if result and result.error
return UtilityService.notifyError(result.message)
$scope.savedVideo.items = result.videos
$scope.savedVideo.total_page = result.attr.total_page
$scope.savedVideo.total_item = result.attr.total_item
)
$scope.tabVideoPageChange = ()->
$scope.getSavedVideo()
$scope.cancelFollowing = (item, index)->
return unless item
ApiService.unFollowIdol {roomId:item.id},(error, result)->
return if error
return UtilityService.notifyError(result.message) if result and result.error
UtilityService.notifySuccess(result.message)
# $scope.following.items[index].isFollow = false
$scope.following.items.splice(index,1)
$scope.getFollowing = ()->
return UtilityService.notifyError('Không thể lấy danh sách đang theo dõi ') if !$rootScope.user
params =
page : $scope.following.page
limit : $scope.following.limit
userId: $rootScope.user.id
ApiService.getUserFollowing params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách đang theo dõi') if error
return UtilityService.notifyError(result.message) if result and result.error
$scope.following.items = result.rooms
$scope.following.total_page = result.attr.total_page
$scope.following.total_item = result.attr.total_item
console.log 'getFollowing result', $scope.following
$scope.tabFollowingPageChange = ()->
console.log '$scope.following.page',$scope.following.page
$scope.getFollowing()
$scope.getFollower = ()->
return UtilityService.notifyError('Không thể lấy danh sách người theo dõi ') if !$rootScope.user
params =
page : $scope.follower.page
limit : $scope.follower.limit
userId: $rootScope.user.id
ApiService.getUserFollower params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách người theo dõi') if error
if result and result.error
return UtilityService.notifyError(result.message)
$scope.follower.items = result.rooms
$scope.follower.total_page = result.attr.total_page
$scope.follower.total_item = result.attr.total_item
$scope.tabFollowerPageChange = ()->
$scope.getFollower()
$scope.changeTab = (tab)->
$scope.tabActive = tab
$scope.uploadNewAvatar = (file, errFiles)->
console.log 'fileSelect=',file
console.log 'errFiles=',errFiles
console.log 'errFiles[0]=',errFiles[0]
if errFiles and errFiles[0]
return UtilityService.notifyError( "ERROR: #{errFiles[0].$error} #{errFiles[0].$errorParam}" )
if file
file.upload = Upload.upload({
url : GlobalConfig.API_URL + 'user/avatar'
data : {avatar : file}
method : 'PUT'
})
file.upload.then ((response) ->
console.log 'response',response
if response and response.status == 200
UtilityService.notifySuccess('Thay đổi ảnh đại diện thành công')
ApiService.getProfile {},(error, result)->
return if error
UtilityService.setUserProfile(result) if result
return
return
), ((response) ->
console.log 'response',response
UtilityService.notifyError("#{response.status} : #{response.statusText}" )
return
), (evt) ->
console.log 'evt.loaded',evt.loaded
console.log 'evt.total',evt.total
# file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total))
return
return
$scope.updateProfile = ()->
if $scope.changePass.old
console.log 'change pass'
if !$scope.changePass.new or !$scope.changePass.renew
return UtilityService.notifyError('Vui lòng nhập mật khẩu mới ')
return UtilityService.notifyError('Mật khẩu ít nhất 6 kí tự') if $scope.changePass.new.length < 6
if $scope.changePass.new != $scope.changePass.renew
return UtilityService.notifyError('Mật khẩu xác nhận không đúng ')
paramsPass=
password : <PASSWORD>
new_password : <PASSWORD>
flush_token:false
ApiService.changePassword(paramsPass,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
$rootScope.user.birthday = $scope.birthDate.dt
console.log 'param update profile=', $rootScope.user
ApiService.updateUserProfile($rootScope.user,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
UtilityService.notifySuccess( 'Cập nhập tài khoản thành công')
UtilityService.setUserProfile(result)
)
)
return
return UtilityService.notifyError( 'Vui lòng nhập mật khẩu hiện tại') if $scope.changePass.new or $scope.changePass.renew
$rootScope.user.birthday = $scope.birthDate.dt
ApiService.updateUserProfile($rootScope.user,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
UtilityService.notifySuccess( 'Cập nhập tài khoản thành công')
UtilityService.setUserProfile(result)
)
#call api here
$scope.getSavedVideo()
$scope.getFollowing()
$scope.getFollower()
console.log '$rootScope.loginBy',$rootScope.loginBy
ctrl.$inject = [
'$rootScope', '$scope', '$timeout', '$location',
'$window', '$state', '$stateParams', 'ApiService', '$http',
'GlobalConfig', '$interval', 'UtilityService' , 'Upload'
]
angular
.module("app")
.config route
.controller "ProfileCtrl", ctrl
| true | "use strict"
route = ($stateProvider, GlobalConfig)->
$stateProvider
.state "base.profile",
url : "profile"
views :
"main@" :
templateUrl : "/templates/profile/view.html"
controller : "ProfileCtrl"
route.$inject = ['$stateProvider', 'GlobalConfig']
ctrl = ($rootScope,
$scope, $timeout, $location,
$window, $state, $stateParams, ApiService, $http,
GlobalConfig, $interval, UtilityService, Upload) ->
if !$rootScope.user or !localStorage.user or !localStorage.token
return $state.go 'base',{},{reload : true}
$scope.birthDate =
dt : if $rootScope.user then new Date($rootScope.user.birthday) else (new Date())
opened : false
min : new Date()
$scope.openDate = () ->
$timeout () ->
$scope.birthDate.opened = true;
$scope.changePass =
old : ''
new : ''
renew : ''
$scope.tabActive = 'user-information'
$scope.savedVideo =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.following =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.follower =
page : 0
limit : 12
items : []
total_page : 0
total_item:0
$scope.getSavedVideo = ()->
return UtilityService.notifyError('Không thể lấy danh sách video') if !$rootScope.user or !$rootScope.user.Room
params =
page : $scope.savedVideo.page
limit : $scope.savedVideo.limit
roomId: $rootScope.user.Room.id
ApiService.getSavedVideo(params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách video') if error
if result and result.error
return UtilityService.notifyError(result.message)
$scope.savedVideo.items = result.videos
$scope.savedVideo.total_page = result.attr.total_page
$scope.savedVideo.total_item = result.attr.total_item
)
$scope.tabVideoPageChange = ()->
$scope.getSavedVideo()
$scope.cancelFollowing = (item, index)->
return unless item
ApiService.unFollowIdol {roomId:item.id},(error, result)->
return if error
return UtilityService.notifyError(result.message) if result and result.error
UtilityService.notifySuccess(result.message)
# $scope.following.items[index].isFollow = false
$scope.following.items.splice(index,1)
$scope.getFollowing = ()->
return UtilityService.notifyError('Không thể lấy danh sách đang theo dõi ') if !$rootScope.user
params =
page : $scope.following.page
limit : $scope.following.limit
userId: $rootScope.user.id
ApiService.getUserFollowing params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách đang theo dõi') if error
return UtilityService.notifyError(result.message) if result and result.error
$scope.following.items = result.rooms
$scope.following.total_page = result.attr.total_page
$scope.following.total_item = result.attr.total_item
console.log 'getFollowing result', $scope.following
$scope.tabFollowingPageChange = ()->
console.log '$scope.following.page',$scope.following.page
$scope.getFollowing()
$scope.getFollower = ()->
return UtilityService.notifyError('Không thể lấy danh sách người theo dõi ') if !$rootScope.user
params =
page : $scope.follower.page
limit : $scope.follower.limit
userId: $rootScope.user.id
ApiService.getUserFollower params,(error, result)->
return UtilityService.notifyError('Không thể lấy danh sách người theo dõi') if error
if result and result.error
return UtilityService.notifyError(result.message)
$scope.follower.items = result.rooms
$scope.follower.total_page = result.attr.total_page
$scope.follower.total_item = result.attr.total_item
$scope.tabFollowerPageChange = ()->
$scope.getFollower()
$scope.changeTab = (tab)->
$scope.tabActive = tab
$scope.uploadNewAvatar = (file, errFiles)->
console.log 'fileSelect=',file
console.log 'errFiles=',errFiles
console.log 'errFiles[0]=',errFiles[0]
if errFiles and errFiles[0]
return UtilityService.notifyError( "ERROR: #{errFiles[0].$error} #{errFiles[0].$errorParam}" )
if file
file.upload = Upload.upload({
url : GlobalConfig.API_URL + 'user/avatar'
data : {avatar : file}
method : 'PUT'
})
file.upload.then ((response) ->
console.log 'response',response
if response and response.status == 200
UtilityService.notifySuccess('Thay đổi ảnh đại diện thành công')
ApiService.getProfile {},(error, result)->
return if error
UtilityService.setUserProfile(result) if result
return
return
), ((response) ->
console.log 'response',response
UtilityService.notifyError("#{response.status} : #{response.statusText}" )
return
), (evt) ->
console.log 'evt.loaded',evt.loaded
console.log 'evt.total',evt.total
# file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total))
return
return
$scope.updateProfile = ()->
if $scope.changePass.old
console.log 'change pass'
if !$scope.changePass.new or !$scope.changePass.renew
return UtilityService.notifyError('Vui lòng nhập mật khẩu mới ')
return UtilityService.notifyError('Mật khẩu ít nhất 6 kí tự') if $scope.changePass.new.length < 6
if $scope.changePass.new != $scope.changePass.renew
return UtilityService.notifyError('Mật khẩu xác nhận không đúng ')
paramsPass=
password : PI:PASSWORD:<PASSWORD>END_PI
new_password : PI:PASSWORD:<PASSWORD>END_PI
flush_token:false
ApiService.changePassword(paramsPass,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
$rootScope.user.birthday = $scope.birthDate.dt
console.log 'param update profile=', $rootScope.user
ApiService.updateUserProfile($rootScope.user,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
UtilityService.notifySuccess( 'Cập nhập tài khoản thành công')
UtilityService.setUserProfile(result)
)
)
return
return UtilityService.notifyError( 'Vui lòng nhập mật khẩu hiện tại') if $scope.changePass.new or $scope.changePass.renew
$rootScope.user.birthday = $scope.birthDate.dt
ApiService.updateUserProfile($rootScope.user,(error, result)->
if error
return UtilityService.notifyError(JSON.stringify(error))
if result and result.error
return UtilityService.notifyError( result.message )
UtilityService.notifySuccess( 'Cập nhập tài khoản thành công')
UtilityService.setUserProfile(result)
)
#call api here
$scope.getSavedVideo()
$scope.getFollowing()
$scope.getFollower()
console.log '$rootScope.loginBy',$rootScope.loginBy
ctrl.$inject = [
'$rootScope', '$scope', '$timeout', '$location',
'$window', '$state', '$stateParams', 'ApiService', '$http',
'GlobalConfig', '$interval', 'UtilityService' , 'Upload'
]
angular
.module("app")
.config route
.controller "ProfileCtrl", ctrl
|
[
{
"context": "d on jQuery Cookie plugin\n # Copyright (c) 2010 Klaus Hartl (stilbuero.de)\n # Dual licensed under the MIT ",
"end": 329,
"score": 0.9998756051063538,
"start": 318,
"tag": "NAME",
"value": "Klaus Hartl"
}
] | bootstrap-tour.coffee | cloudify/bootstrap-tour | 1 | # bootstrap-tour.js v0.0.1
# Copyright 2012 Gild, Inc.
#
# Free to use under the MIT license.
# http://www.opensource.org/licenses/mit-license.php
# References jQuery
$ = jQuery
# Adds plugin object to jQuery
$.fn.extend {}=
featureTour: (options) ->
# based on jQuery Cookie plugin
# Copyright (c) 2010 Klaus Hartl (stilbuero.de)
# Dual licensed under the MIT and GPL licenses:
# http://www.opensource.org/licenses/mit-license.php
# http://www.gnu.org/licenses/gpl.html
cookie = (key, value, options) ->
if arguments.length > 1 and String(value) isnt "[object Object]"
options = jQuery.extend({}, options)
options.expires = -1 unless value?
if typeof options.expires is "number"
days = options.expires
t = options.expires = new Date()
t.setDate t.getDate() + days
value = String(value)
return (document.cookie = [ encodeURIComponent(key), "=", (if options.raw then value else encodeURIComponent(value)), (if options.expires then "; expires=" + options.expires.toUTCString() else ""), (if options.path then "; path=" + options.path else ""), (if options.domain then "; domain=" + options.domain else ""), (if options.secure then "; secure" else "") ].join(""))
options = value or {}
result = undefined
decode = (if options.raw then (s) ->
s
else decodeURIComponent)
return (if (result = new RegExp("(?:^|; )" + encodeURIComponent(key) + "=([^;]*)").exec(document.cookie)) then decode(result[1]) else null)
# Default settings
settings =
tipContent: '#featureTourTipContent' # What is the ID of the <ol> you put the content in
cookieMonster: false # true or false to control whether cookies are used
cookieName: 'bootstrapFeatureTour' # Name the cookie you'll use
cookieDomain: false # Will this cookie be attached to a domain, ie. '.mydomain.com'
debug: false
# Merge default settings with options.
settings = $.extend settings, options
# Simple logger.
log = (msg) ->
console?.log msg if settings.debug
return @each () ->
return if settings.cookieMonster && cookie(settings.cookieName)?
$tipContent = $(settings.tipContent).first()
return unless $tipContent?
$tips = $tipContent.find('li')
$tips.each (idx) ->
$li = $(@)
tip_data = $li.data()
return unless (target = tip_data['target'])?
return unless ($target = $(target).first())?
$target.popover
trigger: 'manual'
title: if tip_data['title']? then "#{tip_data['title']} <a class=\"tour-tip-close close\" data-touridx=\"#{idx + 1}\">×</a>" else null
content: "<p>#{$li.html()}</p><p style=\"text-align: right\"><a href=\"#\" class=\"tour-tip-next btn btn-success\" data-touridx=\"#{idx + 1}\">#{if (idx + 1) < $tips.length then 'Next <i class="icon-chevron-right icon-white"></i>' else '<i class="icon-ok icon-white"></i> Done'}</a></p>"
placement: tip_data['placement'] || 'right'
# save the target element in the tip node
$li.data('target', $target)
# show the first tip
$target.popover('show') if idx == 0
# handle the close button
$('a.tour-tip-close').live 'click', ->
$(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx')})").data('target').popover('hide')
# handle the next and done buttons
$('a.tour-tip-next').live 'click', ->
$(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx')})").data('target').popover('hide')
next_tip = $(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx') + 1})")?.data('target')
if next_tip?
next_tip.popover('show')
else
# last tip
cookie(settings.cookieName, 'ridden', { expires: 365, domain: settings.cookieDomain }) if settings.cookieMonster
| 44072 | # bootstrap-tour.js v0.0.1
# Copyright 2012 Gild, Inc.
#
# Free to use under the MIT license.
# http://www.opensource.org/licenses/mit-license.php
# References jQuery
$ = jQuery
# Adds plugin object to jQuery
$.fn.extend {}=
featureTour: (options) ->
# based on jQuery Cookie plugin
# Copyright (c) 2010 <NAME> (stilbuero.de)
# Dual licensed under the MIT and GPL licenses:
# http://www.opensource.org/licenses/mit-license.php
# http://www.gnu.org/licenses/gpl.html
cookie = (key, value, options) ->
if arguments.length > 1 and String(value) isnt "[object Object]"
options = jQuery.extend({}, options)
options.expires = -1 unless value?
if typeof options.expires is "number"
days = options.expires
t = options.expires = new Date()
t.setDate t.getDate() + days
value = String(value)
return (document.cookie = [ encodeURIComponent(key), "=", (if options.raw then value else encodeURIComponent(value)), (if options.expires then "; expires=" + options.expires.toUTCString() else ""), (if options.path then "; path=" + options.path else ""), (if options.domain then "; domain=" + options.domain else ""), (if options.secure then "; secure" else "") ].join(""))
options = value or {}
result = undefined
decode = (if options.raw then (s) ->
s
else decodeURIComponent)
return (if (result = new RegExp("(?:^|; )" + encodeURIComponent(key) + "=([^;]*)").exec(document.cookie)) then decode(result[1]) else null)
# Default settings
settings =
tipContent: '#featureTourTipContent' # What is the ID of the <ol> you put the content in
cookieMonster: false # true or false to control whether cookies are used
cookieName: 'bootstrapFeatureTour' # Name the cookie you'll use
cookieDomain: false # Will this cookie be attached to a domain, ie. '.mydomain.com'
debug: false
# Merge default settings with options.
settings = $.extend settings, options
# Simple logger.
log = (msg) ->
console?.log msg if settings.debug
return @each () ->
return if settings.cookieMonster && cookie(settings.cookieName)?
$tipContent = $(settings.tipContent).first()
return unless $tipContent?
$tips = $tipContent.find('li')
$tips.each (idx) ->
$li = $(@)
tip_data = $li.data()
return unless (target = tip_data['target'])?
return unless ($target = $(target).first())?
$target.popover
trigger: 'manual'
title: if tip_data['title']? then "#{tip_data['title']} <a class=\"tour-tip-close close\" data-touridx=\"#{idx + 1}\">×</a>" else null
content: "<p>#{$li.html()}</p><p style=\"text-align: right\"><a href=\"#\" class=\"tour-tip-next btn btn-success\" data-touridx=\"#{idx + 1}\">#{if (idx + 1) < $tips.length then 'Next <i class="icon-chevron-right icon-white"></i>' else '<i class="icon-ok icon-white"></i> Done'}</a></p>"
placement: tip_data['placement'] || 'right'
# save the target element in the tip node
$li.data('target', $target)
# show the first tip
$target.popover('show') if idx == 0
# handle the close button
$('a.tour-tip-close').live 'click', ->
$(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx')})").data('target').popover('hide')
# handle the next and done buttons
$('a.tour-tip-next').live 'click', ->
$(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx')})").data('target').popover('hide')
next_tip = $(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx') + 1})")?.data('target')
if next_tip?
next_tip.popover('show')
else
# last tip
cookie(settings.cookieName, 'ridden', { expires: 365, domain: settings.cookieDomain }) if settings.cookieMonster
| true | # bootstrap-tour.js v0.0.1
# Copyright 2012 Gild, Inc.
#
# Free to use under the MIT license.
# http://www.opensource.org/licenses/mit-license.php
# References jQuery
$ = jQuery
# Adds plugin object to jQuery
$.fn.extend {}=
featureTour: (options) ->
# based on jQuery Cookie plugin
# Copyright (c) 2010 PI:NAME:<NAME>END_PI (stilbuero.de)
# Dual licensed under the MIT and GPL licenses:
# http://www.opensource.org/licenses/mit-license.php
# http://www.gnu.org/licenses/gpl.html
cookie = (key, value, options) ->
if arguments.length > 1 and String(value) isnt "[object Object]"
options = jQuery.extend({}, options)
options.expires = -1 unless value?
if typeof options.expires is "number"
days = options.expires
t = options.expires = new Date()
t.setDate t.getDate() + days
value = String(value)
return (document.cookie = [ encodeURIComponent(key), "=", (if options.raw then value else encodeURIComponent(value)), (if options.expires then "; expires=" + options.expires.toUTCString() else ""), (if options.path then "; path=" + options.path else ""), (if options.domain then "; domain=" + options.domain else ""), (if options.secure then "; secure" else "") ].join(""))
options = value or {}
result = undefined
decode = (if options.raw then (s) ->
s
else decodeURIComponent)
return (if (result = new RegExp("(?:^|; )" + encodeURIComponent(key) + "=([^;]*)").exec(document.cookie)) then decode(result[1]) else null)
# Default settings
settings =
tipContent: '#featureTourTipContent' # What is the ID of the <ol> you put the content in
cookieMonster: false # true or false to control whether cookies are used
cookieName: 'bootstrapFeatureTour' # Name the cookie you'll use
cookieDomain: false # Will this cookie be attached to a domain, ie. '.mydomain.com'
debug: false
# Merge default settings with options.
settings = $.extend settings, options
# Simple logger.
log = (msg) ->
console?.log msg if settings.debug
return @each () ->
return if settings.cookieMonster && cookie(settings.cookieName)?
$tipContent = $(settings.tipContent).first()
return unless $tipContent?
$tips = $tipContent.find('li')
$tips.each (idx) ->
$li = $(@)
tip_data = $li.data()
return unless (target = tip_data['target'])?
return unless ($target = $(target).first())?
$target.popover
trigger: 'manual'
title: if tip_data['title']? then "#{tip_data['title']} <a class=\"tour-tip-close close\" data-touridx=\"#{idx + 1}\">×</a>" else null
content: "<p>#{$li.html()}</p><p style=\"text-align: right\"><a href=\"#\" class=\"tour-tip-next btn btn-success\" data-touridx=\"#{idx + 1}\">#{if (idx + 1) < $tips.length then 'Next <i class="icon-chevron-right icon-white"></i>' else '<i class="icon-ok icon-white"></i> Done'}</a></p>"
placement: tip_data['placement'] || 'right'
# save the target element in the tip node
$li.data('target', $target)
# show the first tip
$target.popover('show') if idx == 0
# handle the close button
$('a.tour-tip-close').live 'click', ->
$(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx')})").data('target').popover('hide')
# handle the next and done buttons
$('a.tour-tip-next').live 'click', ->
$(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx')})").data('target').popover('hide')
next_tip = $(settings.tipContent).first().find("li:nth-child(#{$(@).data('touridx') + 1})")?.data('target')
if next_tip?
next_tip.popover('show')
else
# last tip
cookie(settings.cookieName, 'ridden', { expires: 365, domain: settings.cookieDomain }) if settings.cookieMonster
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.999546468257904,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/pummel/test-postmortem-jsstack.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
os = require("os")
util = require("util")
unless os.type() is "SunOS"
console.error "Skipping because postmortem debugging not available."
process.exit 0
#
# * Some functions to create a recognizable stack.
#
frames = [
"stalloogle"
"bagnoogle"
"doogle"
]
expected = undefined
stalloogle = (str) ->
expected = str
os.loadavg()
return
bagnoogle = (arg0, arg1) ->
stalloogle arg0 + " is " + arg1 + " except that it is read-only"
return
done = false
doogle = ->
setTimeout doogle, 10 unless done
bagnoogle "The bfs command", "(almost) like ed(1)"
return
spawn = require("child_process").spawn
prefix = "/var/tmp/node"
corefile = prefix + "." + process.pid
args = [corefile]
if process.env.MDB_LIBRARY_PATH and process.env.MDB_LIBRARY_PATH isnt ""
args = args.concat([
"-L"
process.env.MDB_LIBRARY_PATH
])
#
# * We're going to use DTrace to stop us, gcore us, and set us running again
# * when we call getloadavg() -- with the implicit assumption that our
# * deepest function is the only caller of os.loadavg().
#
dtrace = spawn("dtrace", [
"-qwn"
"syscall::getloadavg:entry/pid == " + process.pid + "/{stop(); system(\"gcore -o " + prefix + " %d\", pid); system(\"prun %d\", pid); exit(0); }"
])
output = ""
unlinkSync = require("fs").unlinkSync
dtrace.stderr.on "data", (data) ->
console.log "dtrace: " + data
return
dtrace.on "exit", (code) ->
unless code is 0
console.error "dtrace exited with code " + code
process.exit code
done = true
#
# * We have our core file. Now we need to fire up mdb to analyze it...
#
mdb = spawn("mdb", args,
stdio: "pipe"
)
mod = util.format("::load %s\n", path.join(__dirname, "..", "..", "out", "Release", "mdb_v8.so"))
mdb.on "exit", (code) ->
retained = "; core retained as " + corefile
unless code is 0
console.error "mdb exited with code " + code + retained
process.exit code
sentinel = "<anonymous> (as "
arg1 = " arg1: "
lines = output.split("\n")
matched = 0
straddr = `undefined`
i = 0
while i < lines.length
line = lines[i]
straddr = line.substr(arg1.length).split(" ")[0] if matched is 1 and line.indexOf(arg1) is 0
continue if line.indexOf(sentinel) is -1 or frames.length is 0
frame = line.substr(line.indexOf(sentinel) + sentinel.length)
top = frames.shift()
assert.equal frame.indexOf(top), 0, "unexpected frame where " + top + " was expected" + retained
matched++
i++
assert.equal frames.length, 0, "did not find expected frame " + frames[0] + retained
assert.notEqual straddr, `undefined`, "did not find arg1 for top frame" + retained
#
# * Now we're going to take one more swing at the core file to print out
# * the argument string that we found.
#
output = ""
mdb = spawn("mdb", args,
stdio: "pipe"
)
mdb.on "exit", (code) ->
unless code is 0
console.error "mdb (second) exited with code " + code + retained
process.exit code
assert.notEqual output.indexOf(expected), -1, "did not find arg1 (" + straddr + ") to contain expected string" + retained
unlinkSync corefile
process.exit 0
return
mdb.stdout.on "data", (data) ->
output += data
return
mdb.stderr.on "data", (data) ->
console.log "mdb (second) stderr: " + data
return
mdb.stdin.write mod
mdb.stdin.write straddr + "::v8str\n"
mdb.stdin.end()
return
mdb.stdout.on "data", (data) ->
output += data
return
mdb.stderr.on "data", (data) ->
console.log "mdb stderr: " + data
return
mdb.stdin.write mod
mdb.stdin.write "::jsstack -v\n"
mdb.stdin.end()
return
setTimeout doogle, 10
| 109679 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
os = require("os")
util = require("util")
unless os.type() is "SunOS"
console.error "Skipping because postmortem debugging not available."
process.exit 0
#
# * Some functions to create a recognizable stack.
#
frames = [
"stalloogle"
"bagnoogle"
"doogle"
]
expected = undefined
stalloogle = (str) ->
expected = str
os.loadavg()
return
bagnoogle = (arg0, arg1) ->
stalloogle arg0 + " is " + arg1 + " except that it is read-only"
return
done = false
doogle = ->
setTimeout doogle, 10 unless done
bagnoogle "The bfs command", "(almost) like ed(1)"
return
spawn = require("child_process").spawn
prefix = "/var/tmp/node"
corefile = prefix + "." + process.pid
args = [corefile]
if process.env.MDB_LIBRARY_PATH and process.env.MDB_LIBRARY_PATH isnt ""
args = args.concat([
"-L"
process.env.MDB_LIBRARY_PATH
])
#
# * We're going to use DTrace to stop us, gcore us, and set us running again
# * when we call getloadavg() -- with the implicit assumption that our
# * deepest function is the only caller of os.loadavg().
#
dtrace = spawn("dtrace", [
"-qwn"
"syscall::getloadavg:entry/pid == " + process.pid + "/{stop(); system(\"gcore -o " + prefix + " %d\", pid); system(\"prun %d\", pid); exit(0); }"
])
output = ""
unlinkSync = require("fs").unlinkSync
dtrace.stderr.on "data", (data) ->
console.log "dtrace: " + data
return
dtrace.on "exit", (code) ->
unless code is 0
console.error "dtrace exited with code " + code
process.exit code
done = true
#
# * We have our core file. Now we need to fire up mdb to analyze it...
#
mdb = spawn("mdb", args,
stdio: "pipe"
)
mod = util.format("::load %s\n", path.join(__dirname, "..", "..", "out", "Release", "mdb_v8.so"))
mdb.on "exit", (code) ->
retained = "; core retained as " + corefile
unless code is 0
console.error "mdb exited with code " + code + retained
process.exit code
sentinel = "<anonymous> (as "
arg1 = " arg1: "
lines = output.split("\n")
matched = 0
straddr = `undefined`
i = 0
while i < lines.length
line = lines[i]
straddr = line.substr(arg1.length).split(" ")[0] if matched is 1 and line.indexOf(arg1) is 0
continue if line.indexOf(sentinel) is -1 or frames.length is 0
frame = line.substr(line.indexOf(sentinel) + sentinel.length)
top = frames.shift()
assert.equal frame.indexOf(top), 0, "unexpected frame where " + top + " was expected" + retained
matched++
i++
assert.equal frames.length, 0, "did not find expected frame " + frames[0] + retained
assert.notEqual straddr, `undefined`, "did not find arg1 for top frame" + retained
#
# * Now we're going to take one more swing at the core file to print out
# * the argument string that we found.
#
output = ""
mdb = spawn("mdb", args,
stdio: "pipe"
)
mdb.on "exit", (code) ->
unless code is 0
console.error "mdb (second) exited with code " + code + retained
process.exit code
assert.notEqual output.indexOf(expected), -1, "did not find arg1 (" + straddr + ") to contain expected string" + retained
unlinkSync corefile
process.exit 0
return
mdb.stdout.on "data", (data) ->
output += data
return
mdb.stderr.on "data", (data) ->
console.log "mdb (second) stderr: " + data
return
mdb.stdin.write mod
mdb.stdin.write straddr + "::v8str\n"
mdb.stdin.end()
return
mdb.stdout.on "data", (data) ->
output += data
return
mdb.stderr.on "data", (data) ->
console.log "mdb stderr: " + data
return
mdb.stdin.write mod
mdb.stdin.write "::jsstack -v\n"
mdb.stdin.end()
return
setTimeout doogle, 10
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
os = require("os")
util = require("util")
unless os.type() is "SunOS"
console.error "Skipping because postmortem debugging not available."
process.exit 0
#
# * Some functions to create a recognizable stack.
#
frames = [
"stalloogle"
"bagnoogle"
"doogle"
]
expected = undefined
stalloogle = (str) ->
expected = str
os.loadavg()
return
bagnoogle = (arg0, arg1) ->
stalloogle arg0 + " is " + arg1 + " except that it is read-only"
return
done = false
doogle = ->
setTimeout doogle, 10 unless done
bagnoogle "The bfs command", "(almost) like ed(1)"
return
spawn = require("child_process").spawn
prefix = "/var/tmp/node"
corefile = prefix + "." + process.pid
args = [corefile]
if process.env.MDB_LIBRARY_PATH and process.env.MDB_LIBRARY_PATH isnt ""
args = args.concat([
"-L"
process.env.MDB_LIBRARY_PATH
])
#
# * We're going to use DTrace to stop us, gcore us, and set us running again
# * when we call getloadavg() -- with the implicit assumption that our
# * deepest function is the only caller of os.loadavg().
#
dtrace = spawn("dtrace", [
"-qwn"
"syscall::getloadavg:entry/pid == " + process.pid + "/{stop(); system(\"gcore -o " + prefix + " %d\", pid); system(\"prun %d\", pid); exit(0); }"
])
output = ""
unlinkSync = require("fs").unlinkSync
dtrace.stderr.on "data", (data) ->
console.log "dtrace: " + data
return
dtrace.on "exit", (code) ->
unless code is 0
console.error "dtrace exited with code " + code
process.exit code
done = true
#
# * We have our core file. Now we need to fire up mdb to analyze it...
#
mdb = spawn("mdb", args,
stdio: "pipe"
)
mod = util.format("::load %s\n", path.join(__dirname, "..", "..", "out", "Release", "mdb_v8.so"))
mdb.on "exit", (code) ->
retained = "; core retained as " + corefile
unless code is 0
console.error "mdb exited with code " + code + retained
process.exit code
sentinel = "<anonymous> (as "
arg1 = " arg1: "
lines = output.split("\n")
matched = 0
straddr = `undefined`
i = 0
while i < lines.length
line = lines[i]
straddr = line.substr(arg1.length).split(" ")[0] if matched is 1 and line.indexOf(arg1) is 0
continue if line.indexOf(sentinel) is -1 or frames.length is 0
frame = line.substr(line.indexOf(sentinel) + sentinel.length)
top = frames.shift()
assert.equal frame.indexOf(top), 0, "unexpected frame where " + top + " was expected" + retained
matched++
i++
assert.equal frames.length, 0, "did not find expected frame " + frames[0] + retained
assert.notEqual straddr, `undefined`, "did not find arg1 for top frame" + retained
#
# * Now we're going to take one more swing at the core file to print out
# * the argument string that we found.
#
output = ""
mdb = spawn("mdb", args,
stdio: "pipe"
)
mdb.on "exit", (code) ->
unless code is 0
console.error "mdb (second) exited with code " + code + retained
process.exit code
assert.notEqual output.indexOf(expected), -1, "did not find arg1 (" + straddr + ") to contain expected string" + retained
unlinkSync corefile
process.exit 0
return
mdb.stdout.on "data", (data) ->
output += data
return
mdb.stderr.on "data", (data) ->
console.log "mdb (second) stderr: " + data
return
mdb.stdin.write mod
mdb.stdin.write straddr + "::v8str\n"
mdb.stdin.end()
return
mdb.stdout.on "data", (data) ->
output += data
return
mdb.stderr.on "data", (data) ->
console.log "mdb stderr: " + data
return
mdb.stdin.write mod
mdb.stdin.write "::jsstack -v\n"
mdb.stdin.end()
return
setTimeout doogle, 10
|
[
{
"context": "r l’état de cette patinoire:\"\n request_email: 'Courriel'\n request_phone: 'Téléphone'\n or_call: 'ou ",
"end": 3193,
"score": 0.9997659921646118,
"start": 3185,
"tag": "NAME",
"value": "Courriel"
},
{
"context": "to ignore query string.\n# @see https://github... | app/assets/javascripts/map.js.coffee | mudar/patinermontreal.ca | 3 | # @see hasClass
$.fn.hasAttr = (attr) ->
_.any this, (el) ->
typeof $(el).attr(attr) isnt 'undefined'
# @see toggleClass
$.fn.toggleAttr = (attr, state) ->
isBoolean = typeof state is 'boolean'
this.each ->
self = $ this
state = not self.hasAttr(attr) unless isBoolean
if state
self.attr attr, attr
else
self.removeAttr attr
# Simple i18n "framework".
I18n =
en:
locale: 'en'
other_locale: 'fr'
# Date
abbr_month_names: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
time_format: '%{b} %{e}, %{l}%{P}'
# Popup
accuracy: 'You are within %{radius} meters of this point'
condition: 'In %{condition} condition'
unknown_condition: 'Ice condition not available'
call_to_action: "You can contribute by asking the city to publish this rink’s conditions:"
request_email: 'Email'
request_phone: 'Phone'
or_call: 'or call'
add_favorite: 'Add to favorites'
remove_favorite: 'Remove from favorites'
explanation: 'Going skating? Let your friends know:'
# Social
tweet: "I’m going"
tweet_text_PSE: "I’m going to play hockey at %{park}"
tweet_text_PPL: "I’m going skating at %{park}"
tweet_text_PP: "I’m going skating at %{park}"
# Rink kinds
_PSE: 'Team sports'
_PPL: 'Free skating'
_PP: 'Landscaped'
# Rink descriptions
'Aire de patinage libre': 'Free skating area'
'Grande patinoire avec bandes': 'Big rink with boards'
'Patinoire avec bandes': 'Rink with boards'
'Patinoire de patin libre': 'Free skating rink'
'Patinoire décorative': 'Decorative rink'
'Patinoire entretenue par les citoyens': 'Rink maintained by citizens'
'Patinoire réfrigérée': 'Refrigerated rink'
'Patinoire réfrigérée Bleu-Blanc-Bouge': 'Refrigerated rink Bleu-Blanc-Bouge'
'Petite patinoire avec bandes': 'Small rink with boards'
# Interface statuses
open: 'Open'
closed: 'Closed'
cleared: 'Cleared'
flooded: 'Flooded'
resurfaced: 'Resurfaced'
Excellente: 'excellent'
Bonne: 'good'
Mauvaise: 'bad'
# URLs
rinks: 'rinks'
rinks_url: 'rinks/%{id}-%{slug}'
favorites_url: 'favorites'
# Translate rink kinds and statuses.
'sports-dequipe': 'team-sports'
'patin-libre': 'free-skating'
'paysagee': 'landscaped'
'ouvert': 'open'
'deblaye': 'cleared'
'arrose': 'flooded'
'resurface': 'resurfaced'
'favories': 'favorites'
# Translate from rink kind to path component.
PSE: 'team-sports'
PPL: 'free-skating'
PP: 'landscaped'
C: 'landscaped'
# smartbanner
download: 'Download'
fr:
locale: 'fr'
other_locale: 'en'
# Date
abbr_month_names: ['jan.', 'fév.', 'mar.', 'avr.', 'mai', 'juin', 'juil.', 'août', 'sept.', 'oct.', 'nov.', 'déc.']
time_format: '%{b} %{e} à %{H}h'
# Popup
accuracy: 'Vous êtes à moins de %{radius} mètres de ce point'
condition: 'En %{condition} condition'
unknown_condition: 'État de la patinoire non disponible'
call_to_action: "Vous pouvez contribuer en demandant à la ville de publier l’état de cette patinoire:"
request_email: 'Courriel'
request_phone: 'Téléphone'
or_call: 'ou appelez le'
add_favorite: 'Ajouter aux favories'
remove_favorite: 'Supprimer des favories'
explanation: 'Vous allez patiner? Informez vos amis:'
# Social
tweet: "J’y vais"
tweet_text_PSE: 'Je vais jouer au hockey à %{park}'
tweet_text_PPL: 'Je vais patiner à %{park}'
tweet_text_PP: 'Je vais patiner à %{park}'
# Rink kinds
_PSE: "Sports d’équipe"
_PPL: 'Patin libre'
_PP: 'Paysagée'
# Interface statuses
open: 'Ouverte'
closed: 'Fermée'
cleared: 'Déblayée'
flooded: 'Arrosée'
resurfaced: 'Resurfacée'
Excellente: 'excellente'
Bonne: 'bonne'
Mauvaise: 'mauvaise'
# URLs
rinks: 'patinoires'
rinks_url: 'patinoires/%{id}-%{slug}'
favorites_url: 'favories'
# Translate from rink kind to path component.
PSE: 'sports-dequipe'
PPL: 'patin-libre'
PP: 'paysagee'
C: 'paysagee'
# smartbanner
download: 'Télécharger'
window.t = (string, args = {}) ->
current_locale = args.locale or locale
string = I18n[current_locale][string] or string
string = string.replace ///%\{#{key}\}///g, value for key, value of args
string
window.format_date = (string) ->
date = new Date Date.parse(string)
hour = date.getHours()
args =
b: t('abbr_month_names')[date.getMonth()]
e: date.getDate()
H: hour
l: if hour > 12 then hour - 12 else (if hour is 0 then 12 else hour)
P: if hour > 11 then 'pm' else 'am'
t('time_format', args)
# Monkey-patch Backbone to be trailing-slash agnostic and to ignore query string.
# @see https://github.com/documentcloud/backbone/issues/520
((_getFragment) ->
Backbone.History.prototype.getFragment = ->
_getFragment.apply(this, arguments).replace(/\/$/, '').replace(/\?.*/, '')
) Backbone.History.prototype.getFragment
other_locale = t 'other_locale'
other_domain = $('#language a').attr('href').match(/^http(s?):\/\/[^\/]+\//)[0].replace t('locale'), other_locale
# Update the language switch link after each navigation event.
((_navigate) ->
Backbone.History.prototype.navigate = ->
_navigate.apply this, arguments
$('#language a').attr 'href', _.reduce ['about', 'contact', 'donate', 'api', 'rinks', 'favorites', 'sports-dequipe', 'patin-libre', 'paysagee', 'ouvert', 'deblaye', 'arrose', 'resurface'], (string,component) ->
string.replace t(component), t(component, locale: other_locale)
, other_domain + Backbone.history.getFragment()
) Backbone.History.prototype.navigate
$ ->
window.debug = env is 'development'
$('.control').tooltip()
$.smartbanner
title: "Patiner Montréal"
authors: {'android': 'Android' , 'ios': 'iPhone'}
price: null
appStoreLanguage: t('locale')
icons: {'android': '/assets/app-icon-android.png', 'ios': '/assets/app-icon-ios.png'}
iOSUniversalApp: false
button: t('download')
appendToSelector: 'header'
# Toggle social sidebar
$(window).on 'load', (e) ->
$('#share-toggle').fadeIn();
$('#share-toggle').on 'click', (e) ->
e.preventDefault();
$('#social .navbar').slideToggle( 'fast' )
return
# Create map.
Map = new L.Map 'map',
center: new L.LatLng(45.53, -73.63)
zoom: 13
minZoom: 11
maxZoom: 18
maxBounds: L.latLngBounds(L.latLng(45.170459, -74.447699), L.latLng(46.035873, -73.147435))
tonerUrl = "https://stamen-tiles.a.ssl.fastly.net/toner-lite/{Z}/{X}/{Y}.png";
tilesUrl = tonerUrl.replace(/({[A-Z]})/g, (s) -> s.toLowerCase());
basemap = new L.tileLayer(tilesUrl, {
subdomains: ['','a.','b.','c.','d.'],
type: 'png',
attribution: 'Map tiles by <a href="https://stamen.com">Stamen Design</a>, under <a href="https://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. Data by <a href="https://openstreetmap.org">OpenStreetMap</a>, under <a href="https://creativecommons.org/licenses/by-sa/3.0">CC BY SA</a>.'
}).addTo(Map);
# Define models.
Rink = Backbone.Model.extend
# @note +defaults+ doesn't have access to model attributes or collection.
initialize: (attributes) ->
# Handing "C" is unnecessarily hard.
@set(genre: 'PP') if 'C' is @get 'genre'
@set url: t('rinks_url', id: @get('id'), slug: @get('slug'))
# Set the favorite based on local storage.
Backbone.sync 'read', @,
success: (response) =>
@set favorite: response.favorite
error: (message) =>
# Do nothing.
defaults:
favorite: false
visible: false
# Sets the rink as visible.
show: ->
@set visible: true
# Sets the rink as hidden.
hide: ->
@set visible: false
# Toggles the rink's favorite status.
toggle: ->
@save favorite: not @get 'favorite'
# Define collections.
RinkSet = Backbone.Collection.extend
model: Rink
localStorage: new Store 'rinks'
# Sets only matching rinks as visible.
showIfMatching: (kinds, statuses) ->
@each (rink) ->
rink.set visible: (rink.get('genre') in kinds and _.all statuses, (status) -> rink.get status)
@trigger 'changeAll', kinds, statuses
# Sets only favorite rinks as visible.
showIfFavorite: ->
@each (rink) ->
rink.set visible: rink.get 'favorite'
@trigger 'changeAll'
# @return array all visible rinks
visible: ->
@filter (rink) ->
rink.get 'visible'
# @return array all favorite rinks
favorites: ->
@filter (rink) ->
rink.get 'favorite'
# @expects a RinkSet collection
MarkersView = Backbone.View.extend
initialize: ->
@collection.each (model) ->
model.view = new MarkerView model: model
# @expects a Rink model
MarkerView = Backbone.View.extend
template: _.template $('#popup-template').html()
# @see L.Marker.bindPopup
initialize: ->
offset = new L.Point 0, -10
state = if @model.get 'ouvert'
'on'
else if @model.get 'condition' # rinks with conditions receive updates
'off'
else
'na'
icon = L.Icon.extend
options:
iconUrl: "/assets/#{@model.get 'genre'}_#{state}.png"
iconRetinaUrl: "/assets/#{@model.get 'genre'}_#{state}_2x.png"
shadowUrl: "/assets/#{@model.get 'genre'}_shadow.png"
iconSize: new L.Point 28, 28
shadowSize: new L.Point 34, 26
iconAnchor: new L.Point 15, 27
shadowAnchor: [13, 22]
popupAnchor: offset
# "new L.Icon.extend({})" raises "TypeError: object is not a function"
@marker = new L.Marker new L.LatLng(@model.get('lat'), @model.get('lng')), icon: new icon
@marker._popup = new L.Popup offset: offset, autoPan: true, autoPanPaddingTopLeft: [50,100], autoPanPaddingBottomRight: [70,40], closeButton: false, @marker
@marker._popup.setContent @template @model.toJSON()
@marker._popup._initLayout()
# @see delegateEvents
$(@marker._popup._contentNode).delegate '.favorite', 'click.delegateEvents' + @cid, _.bind ->
@model.toggle()
, @
@marker.on 'click', ->
Options.save(beforePopup: @currentUrl()) unless @rinkUrl()
Backbone.history.navigate @model.get('url'), true
, @
@model.bind 'change:favorite', ->
@marker._popup.setContent @template @model.toJSON()
twttr.widgets.load() if twttr.widgets
, @
@model.bind 'change:visible', @render, @
render: ->
if @model.get 'visible'
Map.addLayer @marker
else
Map.removeLayer @marker
@
# Opens the marker's popup.
openPopup: ->
# Prevent restoration of last known state if opening another popup.
Options.save openingPopup: true
@marker.openPopup()
Options.save openingPopup: false
# Refresh Twitter button.
twttr.widgets.load() if twttr.widgets
# Pan to popup.
$('#social .navbar').slideUp()
# Don't navigate to the last known state if opening another popup.
Map.on 'popupclose', (event) ->
unless Options.get 'openingPopup'
Backbone.history.navigate Options.get('beforePopup'), true
# A view for the primary buttons.
# @expects a RinkSet collection
ControlsView = Backbone.View.extend
initialize: ->
_.each ['PP', 'PPL', 'PSE'], (id) =>
new ControlView collection: @collection, el: "##{id}", type: 'kinds'
_.each ['ouvert', 'deblaye', 'arrose', 'resurface'], (id) =>
new ControlView collection: @collection, el: "##{id}", type: 'statuses'
new ControlView collection: @collection, el: '#favories'
# A view for a single button.
# @expects a RinkSet collection
ControlView = Backbone.View.extend
initialize: (attributes) ->
@id = $(@el).attr 'id'
@type = attributes.type
@collection.bind 'changeAll', @render, @
events:
click: 'toggle'
render: (kinds, statuses) ->
if @type?
# Don't change state of controls if showing "my favorites".
unless @favoritesUrl()
state = @id in kinds or @id in statuses
@$('.icon').toggleClass 'active', state
else
@$('.icon').toggleClass 'active', @favoritesUrl()
@
toggle: (state) ->
# This creates an extra history entry if switching from an open popup to
# "my favorites", but it's simplest.
Map.closePopup()
if @type?
[kinds, statuses] = if @filterUrl() then @fromUrl @currentUrl() else @fromUI()
if @type is 'kinds'
filters = kinds
else
filters = statuses
if @id in filters
filters = _.without filters, @id
else
filters.push @id
if @type is 'kinds'
kinds = filters
else
statuses = filters
Backbone.history.navigate @toUrl(kinds, statuses), true
else
if @$('.icon').hasClass 'active'
Backbone.history.navigate Options.get('beforeFavorites'), true
else
unless @favoritesUrl()
Options.save beforeFavorites: @currentUrl()
Backbone.history.navigate t('favorites_url'), true
# Define routes.
# @expects a RinkSet collection
Router = Backbone.Router.extend
initialize: (attributes) ->
@collection = attributes.collection
# Maps path components to actions.
routes:
'': 'default'
'favorites': 'favorites'
'favories': 'favorites'
'f': 'filter'
'f/*filters': 'filter'
'rinks/:id': 'show'
'patinoires/:id': 'show'
# Performs the "favorites" action.
favorites: ->
@collection.showIfFavorite()
# Performs the "filter" action.
# @param string splat a URL path
filter: (splat) ->
@collection.showIfMatching @fromUrl(splat)...
# Performs the "show" action.
# @param string id a rink ID
show: (id) ->
# Remove the slug from the ID.
rink = @collection.get id.match(/^\d+/)[0]
# If rink is not visible, display all rinks first.
unless rink.get 'visible'
@collection.showIfMatching @fromUrl(@rootUrl())...
rink.view.openPopup()
default: ->
# If no route, display all rinks.
@navigate @rootUrl(), true
# Helpers to mix-in to views and routers.
window.Helpers =
# Maps path components to rink kinds.
kinds:
'team-sports': 'PSE'
'sports-dequipe': 'PSE'
'free-skating': 'PPL'
'patin-libre': 'PPL'
'landscaped': 'PP'
'paysagee': 'PP'
# Maps path components to rink statuses.
statuses:
'open': 'ouvert'
'ouvert': 'ouvert'
'cleared': 'deblaye'
'deblaye': 'deblaye'
'flooded': 'arrose'
'arrose': 'arrose'
'resurfaced': 'resurface'
'resurface': 'resurface'
numberToPhone: (number, options = {}) ->
number = number.replace(/([0-9]{3})([0-9]{3})([0-9]{4})/, '($1) $2-$3')
if options.extension
number += ' x' + options.extension
number
# @return string the current URL
currentUrl: ->
Backbone.history.getFragment()
# @return string the root URL
rootUrl: ->
@toUrl ['PP', 'PPL', 'PSE'], []
# @return boolean whether the current URL is a filter URL
filterUrl: ->
@currentUrl().indexOf('f/') >= 0
# @return boolean whether the current URL is a rink URL
rinkUrl: ->
@currentUrl().indexOf(t 'rinks') >= 0
# @return boolean whether the current URL is the favorites URL
favoritesUrl: ->
@currentUrl() is t 'favorites_url'
# Returns a filter URL based on the UI's state.
# @return array a two-value array where the first value is an array of rink
# kinds and the second value is an array of rink statuses
fromUI: ->
kinds = _.filter ['PP', 'PPL', 'PSE'], (filter) ->
$("##{filter} .icon").hasClass 'active'
statuses = _.filter ['ouvert', 'deblaye', 'arrose', 'resurface'], (filter) ->
$("##{filter} .icon").hasClass 'active'
[kinds, statuses]
# @param string splat a URL path
# @return array a two-value array where the first value is an array of rink
# kinds and the second value is an array of rink statuses
fromUrl: (splat) ->
kinds = []
statuses = []
if splat?
for part in splat.split('/')
if part of @kinds
kinds.push @kinds[part]
else if part of @statuses
statuses.push @statuses[part]
else if part is 'f'
# Do nothing.
else
console.log "Unknown filter: #{part}" if window.debug
[kinds, statuses]
# Performs the inverse of +fromUrl+.
# @param array kinds rink kinds
# @param array statuses rink statuses
# @return string a URL path
toUrl: (kinds, statuses) ->
'f/' + _.uniq(_.map(kinds.sort().concat(statuses.sort()), (filter) -> t filter)).join '/'
body: (arrondissement) ->
string = if arrondissement.name then "Attn: #{arrondissement.name}\r\n\r\n" else ''
string += "Serait-il possible de publier l'état de vos patinoires extérieures comme le font plusieurs arrondissements à la Ville de Montréal ? Voir: https://ville.montreal.qc.ca/portal/page?_pageid=5798,94909650&_dad=portal&_schema=PORTAL\r\n\r\nMerci."
encodeURIComponent string
# Set up options singleton.
Singleton = Backbone.Model.extend
localStorage: new Store 'options'
Options = new Singleton
id: 1
beforeFavorites: Helpers.rootUrl()
beforePopup: Helpers.rootUrl()
openingPopup: false
# Add helper functions to views and routers.
_.each [MarkersView, MarkerView, ControlsView, ControlView, Router], (klass) ->
_.extend klass.prototype, Helpers
# Seed collection.
window.Rinks = new RinkSet
Rinks.reset json
# Instantiate routes.
Routes = new Router
collection: Rinks
# Instantiate views.
markers = new MarkersView
el: '#map' # to avoid creating an element
collection: Rinks
controls = new ControlsView
el: '#controls' # to avoid creating an element
collection: Rinks
# Route the initial URL.
#window.location.replace window.location.pathname
Backbone.history.start pushState: true
# https://support.cloudmade.com/answers/general
Map.on 'locationfound', (event) ->
radius = event.accuracy / 2
if radius < 1000
locationIcon = L.Icon.extend
options:
iconUrl: "/assets/marker-icon.png"
iconRetinaUrl: "/assets/marker-icon-2x.png"
shadowUrl: "/assets/marker-shadow.png"
iconSize: [25, 41]
shadowSize: [33, 31]
iconAnchor: [12, 41]
shadowAnchor: [10, 31]
popupAnchor: [0, -46]
marker = new L.Marker event.latlng, icon: new locationIcon
Map.addLayer marker
marker.bindPopup t 'accuracy', radius: radius
Map.addLayer new L.Circle event.latlng, radius
Map.on 'locationerror', (event) ->
console.log event.message if window.debug
# If a popup is open, don't set the view to the marker.
if Helpers.rinkUrl()
Map.locate()
else
Map.locate( setView: true, zoom: 13 )
# Backbone doesn't attach the "events" option directly to the view, even
# though it makes sense given that views needn't necessarily hardcode CSS
# selectors (and maybe shouldn't).
# @see https://github.com/documentcloud/backbone/issues/656
| 131963 | # @see hasClass
$.fn.hasAttr = (attr) ->
_.any this, (el) ->
typeof $(el).attr(attr) isnt 'undefined'
# @see toggleClass
$.fn.toggleAttr = (attr, state) ->
isBoolean = typeof state is 'boolean'
this.each ->
self = $ this
state = not self.hasAttr(attr) unless isBoolean
if state
self.attr attr, attr
else
self.removeAttr attr
# Simple i18n "framework".
I18n =
en:
locale: 'en'
other_locale: 'fr'
# Date
abbr_month_names: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
time_format: '%{b} %{e}, %{l}%{P}'
# Popup
accuracy: 'You are within %{radius} meters of this point'
condition: 'In %{condition} condition'
unknown_condition: 'Ice condition not available'
call_to_action: "You can contribute by asking the city to publish this rink’s conditions:"
request_email: 'Email'
request_phone: 'Phone'
or_call: 'or call'
add_favorite: 'Add to favorites'
remove_favorite: 'Remove from favorites'
explanation: 'Going skating? Let your friends know:'
# Social
tweet: "I’m going"
tweet_text_PSE: "I’m going to play hockey at %{park}"
tweet_text_PPL: "I’m going skating at %{park}"
tweet_text_PP: "I’m going skating at %{park}"
# Rink kinds
_PSE: 'Team sports'
_PPL: 'Free skating'
_PP: 'Landscaped'
# Rink descriptions
'Aire de patinage libre': 'Free skating area'
'Grande patinoire avec bandes': 'Big rink with boards'
'Patinoire avec bandes': 'Rink with boards'
'Patinoire de patin libre': 'Free skating rink'
'Patinoire décorative': 'Decorative rink'
'Patinoire entretenue par les citoyens': 'Rink maintained by citizens'
'Patinoire réfrigérée': 'Refrigerated rink'
'Patinoire réfrigérée Bleu-Blanc-Bouge': 'Refrigerated rink Bleu-Blanc-Bouge'
'Petite patinoire avec bandes': 'Small rink with boards'
# Interface statuses
open: 'Open'
closed: 'Closed'
cleared: 'Cleared'
flooded: 'Flooded'
resurfaced: 'Resurfaced'
Excellente: 'excellent'
Bonne: 'good'
Mauvaise: 'bad'
# URLs
rinks: 'rinks'
rinks_url: 'rinks/%{id}-%{slug}'
favorites_url: 'favorites'
# Translate rink kinds and statuses.
'sports-dequipe': 'team-sports'
'patin-libre': 'free-skating'
'paysagee': 'landscaped'
'ouvert': 'open'
'deblaye': 'cleared'
'arrose': 'flooded'
'resurface': 'resurfaced'
'favories': 'favorites'
# Translate from rink kind to path component.
PSE: 'team-sports'
PPL: 'free-skating'
PP: 'landscaped'
C: 'landscaped'
# smartbanner
download: 'Download'
fr:
locale: 'fr'
other_locale: 'en'
# Date
abbr_month_names: ['jan.', 'fév.', 'mar.', 'avr.', 'mai', 'juin', 'juil.', 'août', 'sept.', 'oct.', 'nov.', 'déc.']
time_format: '%{b} %{e} à %{H}h'
# Popup
accuracy: 'Vous êtes à moins de %{radius} mètres de ce point'
condition: 'En %{condition} condition'
unknown_condition: 'État de la patinoire non disponible'
call_to_action: "Vous pouvez contribuer en demandant à la ville de publier l’état de cette patinoire:"
request_email: '<NAME>'
request_phone: 'Téléphone'
or_call: 'ou appelez le'
add_favorite: 'Ajouter aux favories'
remove_favorite: 'Supprimer des favories'
explanation: 'Vous allez patiner? Informez vos amis:'
# Social
tweet: "J’y vais"
tweet_text_PSE: 'Je vais jouer au hockey à %{park}'
tweet_text_PPL: 'Je vais patiner à %{park}'
tweet_text_PP: 'Je vais patiner à %{park}'
# Rink kinds
_PSE: "Sports d’équipe"
_PPL: 'Patin libre'
_PP: 'Paysagée'
# Interface statuses
open: 'Ouverte'
closed: 'Fermée'
cleared: 'Déblayée'
flooded: 'Arrosée'
resurfaced: 'Resurfacée'
Excellente: 'excellente'
Bonne: 'bonne'
Mauvaise: 'mauvaise'
# URLs
rinks: 'patinoires'
rinks_url: 'patinoires/%{id}-%{slug}'
favorites_url: 'favories'
# Translate from rink kind to path component.
PSE: 'sports-dequipe'
PPL: 'patin-libre'
PP: 'paysagee'
C: 'paysagee'
# smartbanner
download: 'Télécharger'
window.t = (string, args = {}) ->
current_locale = args.locale or locale
string = I18n[current_locale][string] or string
string = string.replace ///%\{#{key}\}///g, value for key, value of args
string
window.format_date = (string) ->
date = new Date Date.parse(string)
hour = date.getHours()
args =
b: t('abbr_month_names')[date.getMonth()]
e: date.getDate()
H: hour
l: if hour > 12 then hour - 12 else (if hour is 0 then 12 else hour)
P: if hour > 11 then 'pm' else 'am'
t('time_format', args)
# Monkey-patch Backbone to be trailing-slash agnostic and to ignore query string.
# @see https://github.com/documentcloud/backbone/issues/520
((_getFragment) ->
Backbone.History.prototype.getFragment = ->
_getFragment.apply(this, arguments).replace(/\/$/, '').replace(/\?.*/, '')
) Backbone.History.prototype.getFragment
other_locale = t 'other_locale'
other_domain = $('#language a').attr('href').match(/^http(s?):\/\/[^\/]+\//)[0].replace t('locale'), other_locale
# Update the language switch link after each navigation event.
((_navigate) ->
Backbone.History.prototype.navigate = ->
_navigate.apply this, arguments
$('#language a').attr 'href', _.reduce ['about', 'contact', 'donate', 'api', 'rinks', 'favorites', 'sports-dequipe', 'patin-libre', 'paysagee', 'ouvert', 'deblaye', 'arrose', 'resurface'], (string,component) ->
string.replace t(component), t(component, locale: other_locale)
, other_domain + Backbone.history.getFragment()
) Backbone.History.prototype.navigate
$ ->
window.debug = env is 'development'
$('.control').tooltip()
$.smartbanner
title: "<NAME>"
authors: {'android': 'Android' , 'ios': 'iPhone'}
price: null
appStoreLanguage: t('locale')
icons: {'android': '/assets/app-icon-android.png', 'ios': '/assets/app-icon-ios.png'}
iOSUniversalApp: false
button: t('download')
appendToSelector: 'header'
# Toggle social sidebar
$(window).on 'load', (e) ->
$('#share-toggle').fadeIn();
$('#share-toggle').on 'click', (e) ->
e.preventDefault();
$('#social .navbar').slideToggle( 'fast' )
return
# Create map.
Map = new L.Map 'map',
center: new L.LatLng(45.53, -73.63)
zoom: 13
minZoom: 11
maxZoom: 18
maxBounds: L.latLngBounds(L.latLng(45.170459, -74.447699), L.latLng(46.035873, -73.147435))
tonerUrl = "https://stamen-tiles.a.ssl.fastly.net/toner-lite/{Z}/{X}/{Y}.png";
tilesUrl = tonerUrl.replace(/({[A-Z]})/g, (s) -> s.toLowerCase());
basemap = new L.tileLayer(tilesUrl, {
subdomains: ['','a.','b.','c.','d.'],
type: 'png',
attribution: 'Map tiles by <a href="https://stamen.com">Stamen Design</a>, under <a href="https://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. Data by <a href="https://openstreetmap.org">OpenStreetMap</a>, under <a href="https://creativecommons.org/licenses/by-sa/3.0">CC BY SA</a>.'
}).addTo(Map);
# Define models.
Rink = Backbone.Model.extend
# @note +defaults+ doesn't have access to model attributes or collection.
initialize: (attributes) ->
# Handing "C" is unnecessarily hard.
@set(genre: 'PP') if 'C' is @get 'genre'
@set url: t('rinks_url', id: @get('id'), slug: @get('slug'))
# Set the favorite based on local storage.
Backbone.sync 'read', @,
success: (response) =>
@set favorite: response.favorite
error: (message) =>
# Do nothing.
defaults:
favorite: false
visible: false
# Sets the rink as visible.
show: ->
@set visible: true
# Sets the rink as hidden.
hide: ->
@set visible: false
# Toggles the rink's favorite status.
toggle: ->
@save favorite: not @get 'favorite'
# Define collections.
RinkSet = Backbone.Collection.extend
model: Rink
localStorage: new Store 'rinks'
# Sets only matching rinks as visible.
showIfMatching: (kinds, statuses) ->
@each (rink) ->
rink.set visible: (rink.get('genre') in kinds and _.all statuses, (status) -> rink.get status)
@trigger 'changeAll', kinds, statuses
# Sets only favorite rinks as visible.
showIfFavorite: ->
@each (rink) ->
rink.set visible: rink.get 'favorite'
@trigger 'changeAll'
# @return array all visible rinks
visible: ->
@filter (rink) ->
rink.get 'visible'
# @return array all favorite rinks
favorites: ->
@filter (rink) ->
rink.get 'favorite'
# @expects a RinkSet collection
MarkersView = Backbone.View.extend
initialize: ->
@collection.each (model) ->
model.view = new MarkerView model: model
# @expects a Rink model
MarkerView = Backbone.View.extend
template: _.template $('#popup-template').html()
# @see L.Marker.bindPopup
initialize: ->
offset = new L.Point 0, -10
state = if @model.get 'ouvert'
'on'
else if @model.get 'condition' # rinks with conditions receive updates
'off'
else
'na'
icon = L.Icon.extend
options:
iconUrl: "/assets/#{@model.get 'genre'}_#{state}.png"
iconRetinaUrl: "/assets/#{@model.get 'genre'}_#{state}_2x.png"
shadowUrl: "/assets/#{@model.get 'genre'}_shadow.png"
iconSize: new L.Point 28, 28
shadowSize: new L.Point 34, 26
iconAnchor: new L.Point 15, 27
shadowAnchor: [13, 22]
popupAnchor: offset
# "new L.Icon.extend({})" raises "TypeError: object is not a function"
@marker = new L.Marker new L.LatLng(@model.get('lat'), @model.get('lng')), icon: new icon
@marker._popup = new L.Popup offset: offset, autoPan: true, autoPanPaddingTopLeft: [50,100], autoPanPaddingBottomRight: [70,40], closeButton: false, @marker
@marker._popup.setContent @template @model.toJSON()
@marker._popup._initLayout()
# @see delegateEvents
$(@marker._popup._contentNode).delegate '.favorite', 'click.delegateEvents' + @cid, _.bind ->
@model.toggle()
, @
@marker.on 'click', ->
Options.save(beforePopup: @currentUrl()) unless @rinkUrl()
Backbone.history.navigate @model.get('url'), true
, @
@model.bind 'change:favorite', ->
@marker._popup.setContent @template @model.toJSON()
twttr.widgets.load() if twttr.widgets
, @
@model.bind 'change:visible', @render, @
render: ->
if @model.get 'visible'
Map.addLayer @marker
else
Map.removeLayer @marker
@
# Opens the marker's popup.
openPopup: ->
# Prevent restoration of last known state if opening another popup.
Options.save openingPopup: true
@marker.openPopup()
Options.save openingPopup: false
# Refresh Twitter button.
twttr.widgets.load() if twttr.widgets
# Pan to popup.
$('#social .navbar').slideUp()
# Don't navigate to the last known state if opening another popup.
Map.on 'popupclose', (event) ->
unless Options.get 'openingPopup'
Backbone.history.navigate Options.get('beforePopup'), true
# A view for the primary buttons.
# @expects a RinkSet collection
ControlsView = Backbone.View.extend
initialize: ->
_.each ['PP', 'PPL', 'PSE'], (id) =>
new ControlView collection: @collection, el: "##{id}", type: 'kinds'
_.each ['ouvert', 'deblaye', 'arrose', 'resurface'], (id) =>
new ControlView collection: @collection, el: "##{id}", type: 'statuses'
new ControlView collection: @collection, el: '#favories'
# A view for a single button.
# @expects a RinkSet collection
ControlView = Backbone.View.extend
initialize: (attributes) ->
@id = $(@el).attr 'id'
@type = attributes.type
@collection.bind 'changeAll', @render, @
events:
click: 'toggle'
render: (kinds, statuses) ->
if @type?
# Don't change state of controls if showing "my favorites".
unless @favoritesUrl()
state = @id in kinds or @id in statuses
@$('.icon').toggleClass 'active', state
else
@$('.icon').toggleClass 'active', @favoritesUrl()
@
toggle: (state) ->
# This creates an extra history entry if switching from an open popup to
# "my favorites", but it's simplest.
Map.closePopup()
if @type?
[kinds, statuses] = if @filterUrl() then @fromUrl @currentUrl() else @fromUI()
if @type is 'kinds'
filters = kinds
else
filters = statuses
if @id in filters
filters = _.without filters, @id
else
filters.push @id
if @type is 'kinds'
kinds = filters
else
statuses = filters
Backbone.history.navigate @toUrl(kinds, statuses), true
else
if @$('.icon').hasClass 'active'
Backbone.history.navigate Options.get('beforeFavorites'), true
else
unless @favoritesUrl()
Options.save beforeFavorites: @currentUrl()
Backbone.history.navigate t('favorites_url'), true
# Define routes.
# @expects a RinkSet collection
Router = Backbone.Router.extend
initialize: (attributes) ->
@collection = attributes.collection
# Maps path components to actions.
routes:
'': 'default'
'favorites': 'favorites'
'favories': 'favorites'
'f': 'filter'
'f/*filters': 'filter'
'rinks/:id': 'show'
'patinoires/:id': 'show'
# Performs the "favorites" action.
favorites: ->
@collection.showIfFavorite()
# Performs the "filter" action.
# @param string splat a URL path
filter: (splat) ->
@collection.showIfMatching @fromUrl(splat)...
# Performs the "show" action.
# @param string id a rink ID
show: (id) ->
# Remove the slug from the ID.
rink = @collection.get id.match(/^\d+/)[0]
# If rink is not visible, display all rinks first.
unless rink.get 'visible'
@collection.showIfMatching @fromUrl(@rootUrl())...
rink.view.openPopup()
default: ->
# If no route, display all rinks.
@navigate @rootUrl(), true
# Helpers to mix-in to views and routers.
window.Helpers =
# Maps path components to rink kinds.
kinds:
'team-sports': 'PSE'
'sports-dequipe': 'PSE'
'free-skating': 'PPL'
'patin-libre': 'PPL'
'landscaped': 'PP'
'paysagee': 'PP'
# Maps path components to rink statuses.
statuses:
'open': 'ouvert'
'ouvert': 'ouvert'
'cleared': 'deblaye'
'deblaye': 'deblaye'
'flooded': 'arrose'
'arrose': 'arrose'
'resurfaced': 'resurface'
'resurface': 'resurface'
numberToPhone: (number, options = {}) ->
number = number.replace(/([0-9]{3})([0-9]{3})([0-9]{4})/, '($1) $2-$3')
if options.extension
number += ' x' + options.extension
number
# @return string the current URL
currentUrl: ->
Backbone.history.getFragment()
# @return string the root URL
rootUrl: ->
@toUrl ['PP', 'PPL', 'PSE'], []
# @return boolean whether the current URL is a filter URL
filterUrl: ->
@currentUrl().indexOf('f/') >= 0
# @return boolean whether the current URL is a rink URL
rinkUrl: ->
@currentUrl().indexOf(t 'rinks') >= 0
# @return boolean whether the current URL is the favorites URL
favoritesUrl: ->
@currentUrl() is t 'favorites_url'
# Returns a filter URL based on the UI's state.
# @return array a two-value array where the first value is an array of rink
# kinds and the second value is an array of rink statuses
fromUI: ->
kinds = _.filter ['PP', 'PPL', 'PSE'], (filter) ->
$("##{filter} .icon").hasClass 'active'
statuses = _.filter ['ouvert', 'deblaye', 'arrose', 'resurface'], (filter) ->
$("##{filter} .icon").hasClass 'active'
[kinds, statuses]
# @param string splat a URL path
# @return array a two-value array where the first value is an array of rink
# kinds and the second value is an array of rink statuses
fromUrl: (splat) ->
kinds = []
statuses = []
if splat?
for part in splat.split('/')
if part of @kinds
kinds.push @kinds[part]
else if part of @statuses
statuses.push @statuses[part]
else if part is 'f'
# Do nothing.
else
console.log "Unknown filter: #{part}" if window.debug
[kinds, statuses]
# Performs the inverse of +fromUrl+.
# @param array kinds rink kinds
# @param array statuses rink statuses
# @return string a URL path
toUrl: (kinds, statuses) ->
'f/' + _.uniq(_.map(kinds.sort().concat(statuses.sort()), (filter) -> t filter)).join '/'
body: (arrondissement) ->
string = if arrondissement.name then "Attn: #{arrondissement.name}\r\n\r\n" else ''
string += "Serait-il possible de publier l'état de vos patinoires extérieures comme le font plusieurs arrondissements à la Ville de Montréal ? Voir: https://ville.montreal.qc.ca/portal/page?_pageid=5798,94909650&_dad=portal&_schema=PORTAL\r\n\r\nMerci."
encodeURIComponent string
# Set up options singleton.
Singleton = Backbone.Model.extend
localStorage: new Store 'options'
Options = new Singleton
id: 1
beforeFavorites: Helpers.rootUrl()
beforePopup: Helpers.rootUrl()
openingPopup: false
# Add helper functions to views and routers.
_.each [MarkersView, MarkerView, ControlsView, ControlView, Router], (klass) ->
_.extend klass.prototype, Helpers
# Seed collection.
window.Rinks = new RinkSet
Rinks.reset json
# Instantiate routes.
Routes = new Router
collection: Rinks
# Instantiate views.
markers = new MarkersView
el: '#map' # to avoid creating an element
collection: Rinks
controls = new ControlsView
el: '#controls' # to avoid creating an element
collection: Rinks
# Route the initial URL.
#window.location.replace window.location.pathname
Backbone.history.start pushState: true
# https://support.cloudmade.com/answers/general
Map.on 'locationfound', (event) ->
radius = event.accuracy / 2
if radius < 1000
locationIcon = L.Icon.extend
options:
iconUrl: "/assets/marker-icon.png"
iconRetinaUrl: "/assets/marker-icon-2x.png"
shadowUrl: "/assets/marker-shadow.png"
iconSize: [25, 41]
shadowSize: [33, 31]
iconAnchor: [12, 41]
shadowAnchor: [10, 31]
popupAnchor: [0, -46]
marker = new L.Marker event.latlng, icon: new locationIcon
Map.addLayer marker
marker.bindPopup t 'accuracy', radius: radius
Map.addLayer new L.Circle event.latlng, radius
Map.on 'locationerror', (event) ->
console.log event.message if window.debug
# If a popup is open, don't set the view to the marker.
if Helpers.rinkUrl()
Map.locate()
else
Map.locate( setView: true, zoom: 13 )
# Backbone doesn't attach the "events" option directly to the view, even
# though it makes sense given that views needn't necessarily hardcode CSS
# selectors (and maybe shouldn't).
# @see https://github.com/documentcloud/backbone/issues/656
| true | # @see hasClass
$.fn.hasAttr = (attr) ->
_.any this, (el) ->
typeof $(el).attr(attr) isnt 'undefined'
# @see toggleClass
$.fn.toggleAttr = (attr, state) ->
isBoolean = typeof state is 'boolean'
this.each ->
self = $ this
state = not self.hasAttr(attr) unless isBoolean
if state
self.attr attr, attr
else
self.removeAttr attr
# Simple i18n "framework".
I18n =
en:
locale: 'en'
other_locale: 'fr'
# Date
abbr_month_names: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
time_format: '%{b} %{e}, %{l}%{P}'
# Popup
accuracy: 'You are within %{radius} meters of this point'
condition: 'In %{condition} condition'
unknown_condition: 'Ice condition not available'
call_to_action: "You can contribute by asking the city to publish this rink’s conditions:"
request_email: 'Email'
request_phone: 'Phone'
or_call: 'or call'
add_favorite: 'Add to favorites'
remove_favorite: 'Remove from favorites'
explanation: 'Going skating? Let your friends know:'
# Social
tweet: "I’m going"
tweet_text_PSE: "I’m going to play hockey at %{park}"
tweet_text_PPL: "I’m going skating at %{park}"
tweet_text_PP: "I’m going skating at %{park}"
# Rink kinds
_PSE: 'Team sports'
_PPL: 'Free skating'
_PP: 'Landscaped'
# Rink descriptions
'Aire de patinage libre': 'Free skating area'
'Grande patinoire avec bandes': 'Big rink with boards'
'Patinoire avec bandes': 'Rink with boards'
'Patinoire de patin libre': 'Free skating rink'
'Patinoire décorative': 'Decorative rink'
'Patinoire entretenue par les citoyens': 'Rink maintained by citizens'
'Patinoire réfrigérée': 'Refrigerated rink'
'Patinoire réfrigérée Bleu-Blanc-Bouge': 'Refrigerated rink Bleu-Blanc-Bouge'
'Petite patinoire avec bandes': 'Small rink with boards'
# Interface statuses
open: 'Open'
closed: 'Closed'
cleared: 'Cleared'
flooded: 'Flooded'
resurfaced: 'Resurfaced'
Excellente: 'excellent'
Bonne: 'good'
Mauvaise: 'bad'
# URLs
rinks: 'rinks'
rinks_url: 'rinks/%{id}-%{slug}'
favorites_url: 'favorites'
# Translate rink kinds and statuses.
'sports-dequipe': 'team-sports'
'patin-libre': 'free-skating'
'paysagee': 'landscaped'
'ouvert': 'open'
'deblaye': 'cleared'
'arrose': 'flooded'
'resurface': 'resurfaced'
'favories': 'favorites'
# Translate from rink kind to path component.
PSE: 'team-sports'
PPL: 'free-skating'
PP: 'landscaped'
C: 'landscaped'
# smartbanner
download: 'Download'
fr:
locale: 'fr'
other_locale: 'en'
# Date
abbr_month_names: ['jan.', 'fév.', 'mar.', 'avr.', 'mai', 'juin', 'juil.', 'août', 'sept.', 'oct.', 'nov.', 'déc.']
time_format: '%{b} %{e} à %{H}h'
# Popup
accuracy: 'Vous êtes à moins de %{radius} mètres de ce point'
condition: 'En %{condition} condition'
unknown_condition: 'État de la patinoire non disponible'
call_to_action: "Vous pouvez contribuer en demandant à la ville de publier l’état de cette patinoire:"
request_email: 'PI:NAME:<NAME>END_PI'
request_phone: 'Téléphone'
or_call: 'ou appelez le'
add_favorite: 'Ajouter aux favories'
remove_favorite: 'Supprimer des favories'
explanation: 'Vous allez patiner? Informez vos amis:'
# Social
tweet: "J’y vais"
tweet_text_PSE: 'Je vais jouer au hockey à %{park}'
tweet_text_PPL: 'Je vais patiner à %{park}'
tweet_text_PP: 'Je vais patiner à %{park}'
# Rink kinds
_PSE: "Sports d’équipe"
_PPL: 'Patin libre'
_PP: 'Paysagée'
# Interface statuses
open: 'Ouverte'
closed: 'Fermée'
cleared: 'Déblayée'
flooded: 'Arrosée'
resurfaced: 'Resurfacée'
Excellente: 'excellente'
Bonne: 'bonne'
Mauvaise: 'mauvaise'
# URLs
rinks: 'patinoires'
rinks_url: 'patinoires/%{id}-%{slug}'
favorites_url: 'favories'
# Translate from rink kind to path component.
PSE: 'sports-dequipe'
PPL: 'patin-libre'
PP: 'paysagee'
C: 'paysagee'
# smartbanner
download: 'Télécharger'
window.t = (string, args = {}) ->
current_locale = args.locale or locale
string = I18n[current_locale][string] or string
string = string.replace ///%\{#{key}\}///g, value for key, value of args
string
window.format_date = (string) ->
date = new Date Date.parse(string)
hour = date.getHours()
args =
b: t('abbr_month_names')[date.getMonth()]
e: date.getDate()
H: hour
l: if hour > 12 then hour - 12 else (if hour is 0 then 12 else hour)
P: if hour > 11 then 'pm' else 'am'
t('time_format', args)
# Monkey-patch Backbone to be trailing-slash agnostic and to ignore query string.
# @see https://github.com/documentcloud/backbone/issues/520
((_getFragment) ->
Backbone.History.prototype.getFragment = ->
_getFragment.apply(this, arguments).replace(/\/$/, '').replace(/\?.*/, '')
) Backbone.History.prototype.getFragment
other_locale = t 'other_locale'
other_domain = $('#language a').attr('href').match(/^http(s?):\/\/[^\/]+\//)[0].replace t('locale'), other_locale
# Update the language switch link after each navigation event.
((_navigate) ->
Backbone.History.prototype.navigate = ->
_navigate.apply this, arguments
$('#language a').attr 'href', _.reduce ['about', 'contact', 'donate', 'api', 'rinks', 'favorites', 'sports-dequipe', 'patin-libre', 'paysagee', 'ouvert', 'deblaye', 'arrose', 'resurface'], (string,component) ->
string.replace t(component), t(component, locale: other_locale)
, other_domain + Backbone.history.getFragment()
) Backbone.History.prototype.navigate
$ ->
window.debug = env is 'development'
$('.control').tooltip()
$.smartbanner
title: "PI:NAME:<NAME>END_PI"
authors: {'android': 'Android' , 'ios': 'iPhone'}
price: null
appStoreLanguage: t('locale')
icons: {'android': '/assets/app-icon-android.png', 'ios': '/assets/app-icon-ios.png'}
iOSUniversalApp: false
button: t('download')
appendToSelector: 'header'
# Toggle social sidebar
$(window).on 'load', (e) ->
$('#share-toggle').fadeIn();
$('#share-toggle').on 'click', (e) ->
e.preventDefault();
$('#social .navbar').slideToggle( 'fast' )
return
# Create map.
Map = new L.Map 'map',
center: new L.LatLng(45.53, -73.63)
zoom: 13
minZoom: 11
maxZoom: 18
maxBounds: L.latLngBounds(L.latLng(45.170459, -74.447699), L.latLng(46.035873, -73.147435))
tonerUrl = "https://stamen-tiles.a.ssl.fastly.net/toner-lite/{Z}/{X}/{Y}.png";
tilesUrl = tonerUrl.replace(/({[A-Z]})/g, (s) -> s.toLowerCase());
basemap = new L.tileLayer(tilesUrl, {
subdomains: ['','a.','b.','c.','d.'],
type: 'png',
attribution: 'Map tiles by <a href="https://stamen.com">Stamen Design</a>, under <a href="https://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. Data by <a href="https://openstreetmap.org">OpenStreetMap</a>, under <a href="https://creativecommons.org/licenses/by-sa/3.0">CC BY SA</a>.'
}).addTo(Map);
# Define models.
Rink = Backbone.Model.extend
# @note +defaults+ doesn't have access to model attributes or collection.
initialize: (attributes) ->
# Handing "C" is unnecessarily hard.
@set(genre: 'PP') if 'C' is @get 'genre'
@set url: t('rinks_url', id: @get('id'), slug: @get('slug'))
# Set the favorite based on local storage.
Backbone.sync 'read', @,
success: (response) =>
@set favorite: response.favorite
error: (message) =>
# Do nothing.
defaults:
favorite: false
visible: false
# Sets the rink as visible.
show: ->
@set visible: true
# Sets the rink as hidden.
hide: ->
@set visible: false
# Toggles the rink's favorite status.
toggle: ->
@save favorite: not @get 'favorite'
# Define collections.
RinkSet = Backbone.Collection.extend
model: Rink
localStorage: new Store 'rinks'
# Sets only matching rinks as visible.
showIfMatching: (kinds, statuses) ->
@each (rink) ->
rink.set visible: (rink.get('genre') in kinds and _.all statuses, (status) -> rink.get status)
@trigger 'changeAll', kinds, statuses
# Sets only favorite rinks as visible.
showIfFavorite: ->
@each (rink) ->
rink.set visible: rink.get 'favorite'
@trigger 'changeAll'
# @return array all visible rinks
visible: ->
@filter (rink) ->
rink.get 'visible'
# @return array all favorite rinks
favorites: ->
@filter (rink) ->
rink.get 'favorite'
# @expects a RinkSet collection
MarkersView = Backbone.View.extend
initialize: ->
@collection.each (model) ->
model.view = new MarkerView model: model
# @expects a Rink model
MarkerView = Backbone.View.extend
template: _.template $('#popup-template').html()
# @see L.Marker.bindPopup
initialize: ->
offset = new L.Point 0, -10
state = if @model.get 'ouvert'
'on'
else if @model.get 'condition' # rinks with conditions receive updates
'off'
else
'na'
icon = L.Icon.extend
options:
iconUrl: "/assets/#{@model.get 'genre'}_#{state}.png"
iconRetinaUrl: "/assets/#{@model.get 'genre'}_#{state}_2x.png"
shadowUrl: "/assets/#{@model.get 'genre'}_shadow.png"
iconSize: new L.Point 28, 28
shadowSize: new L.Point 34, 26
iconAnchor: new L.Point 15, 27
shadowAnchor: [13, 22]
popupAnchor: offset
# "new L.Icon.extend({})" raises "TypeError: object is not a function"
@marker = new L.Marker new L.LatLng(@model.get('lat'), @model.get('lng')), icon: new icon
@marker._popup = new L.Popup offset: offset, autoPan: true, autoPanPaddingTopLeft: [50,100], autoPanPaddingBottomRight: [70,40], closeButton: false, @marker
@marker._popup.setContent @template @model.toJSON()
@marker._popup._initLayout()
# @see delegateEvents
$(@marker._popup._contentNode).delegate '.favorite', 'click.delegateEvents' + @cid, _.bind ->
@model.toggle()
, @
@marker.on 'click', ->
Options.save(beforePopup: @currentUrl()) unless @rinkUrl()
Backbone.history.navigate @model.get('url'), true
, @
@model.bind 'change:favorite', ->
@marker._popup.setContent @template @model.toJSON()
twttr.widgets.load() if twttr.widgets
, @
@model.bind 'change:visible', @render, @
render: ->
if @model.get 'visible'
Map.addLayer @marker
else
Map.removeLayer @marker
@
# Opens the marker's popup.
openPopup: ->
# Prevent restoration of last known state if opening another popup.
Options.save openingPopup: true
@marker.openPopup()
Options.save openingPopup: false
# Refresh Twitter button.
twttr.widgets.load() if twttr.widgets
# Pan to popup.
$('#social .navbar').slideUp()
# Don't navigate to the last known state if opening another popup.
Map.on 'popupclose', (event) ->
unless Options.get 'openingPopup'
Backbone.history.navigate Options.get('beforePopup'), true
# A view for the primary buttons.
# @expects a RinkSet collection
ControlsView = Backbone.View.extend
initialize: ->
_.each ['PP', 'PPL', 'PSE'], (id) =>
new ControlView collection: @collection, el: "##{id}", type: 'kinds'
_.each ['ouvert', 'deblaye', 'arrose', 'resurface'], (id) =>
new ControlView collection: @collection, el: "##{id}", type: 'statuses'
new ControlView collection: @collection, el: '#favories'
# A view for a single button.
# @expects a RinkSet collection
ControlView = Backbone.View.extend
initialize: (attributes) ->
@id = $(@el).attr 'id'
@type = attributes.type
@collection.bind 'changeAll', @render, @
events:
click: 'toggle'
render: (kinds, statuses) ->
if @type?
# Don't change state of controls if showing "my favorites".
unless @favoritesUrl()
state = @id in kinds or @id in statuses
@$('.icon').toggleClass 'active', state
else
@$('.icon').toggleClass 'active', @favoritesUrl()
@
toggle: (state) ->
# This creates an extra history entry if switching from an open popup to
# "my favorites", but it's simplest.
Map.closePopup()
if @type?
[kinds, statuses] = if @filterUrl() then @fromUrl @currentUrl() else @fromUI()
if @type is 'kinds'
filters = kinds
else
filters = statuses
if @id in filters
filters = _.without filters, @id
else
filters.push @id
if @type is 'kinds'
kinds = filters
else
statuses = filters
Backbone.history.navigate @toUrl(kinds, statuses), true
else
if @$('.icon').hasClass 'active'
Backbone.history.navigate Options.get('beforeFavorites'), true
else
unless @favoritesUrl()
Options.save beforeFavorites: @currentUrl()
Backbone.history.navigate t('favorites_url'), true
# Define routes.
# @expects a RinkSet collection
Router = Backbone.Router.extend
initialize: (attributes) ->
@collection = attributes.collection
# Maps path components to actions.
routes:
'': 'default'
'favorites': 'favorites'
'favories': 'favorites'
'f': 'filter'
'f/*filters': 'filter'
'rinks/:id': 'show'
'patinoires/:id': 'show'
# Performs the "favorites" action.
favorites: ->
@collection.showIfFavorite()
# Performs the "filter" action.
# @param string splat a URL path
filter: (splat) ->
@collection.showIfMatching @fromUrl(splat)...
# Performs the "show" action.
# @param string id a rink ID
show: (id) ->
# Remove the slug from the ID.
rink = @collection.get id.match(/^\d+/)[0]
# If rink is not visible, display all rinks first.
unless rink.get 'visible'
@collection.showIfMatching @fromUrl(@rootUrl())...
rink.view.openPopup()
default: ->
# If no route, display all rinks.
@navigate @rootUrl(), true
# Helpers to mix-in to views and routers.
window.Helpers =
# Maps path components to rink kinds.
kinds:
'team-sports': 'PSE'
'sports-dequipe': 'PSE'
'free-skating': 'PPL'
'patin-libre': 'PPL'
'landscaped': 'PP'
'paysagee': 'PP'
# Maps path components to rink statuses.
statuses:
'open': 'ouvert'
'ouvert': 'ouvert'
'cleared': 'deblaye'
'deblaye': 'deblaye'
'flooded': 'arrose'
'arrose': 'arrose'
'resurfaced': 'resurface'
'resurface': 'resurface'
numberToPhone: (number, options = {}) ->
number = number.replace(/([0-9]{3})([0-9]{3})([0-9]{4})/, '($1) $2-$3')
if options.extension
number += ' x' + options.extension
number
# @return string the current URL
currentUrl: ->
Backbone.history.getFragment()
# @return string the root URL
rootUrl: ->
@toUrl ['PP', 'PPL', 'PSE'], []
# @return boolean whether the current URL is a filter URL
filterUrl: ->
@currentUrl().indexOf('f/') >= 0
# @return boolean whether the current URL is a rink URL
rinkUrl: ->
@currentUrl().indexOf(t 'rinks') >= 0
# @return boolean whether the current URL is the favorites URL
favoritesUrl: ->
@currentUrl() is t 'favorites_url'
# Returns a filter URL based on the UI's state.
# @return array a two-value array where the first value is an array of rink
# kinds and the second value is an array of rink statuses
fromUI: ->
kinds = _.filter ['PP', 'PPL', 'PSE'], (filter) ->
$("##{filter} .icon").hasClass 'active'
statuses = _.filter ['ouvert', 'deblaye', 'arrose', 'resurface'], (filter) ->
$("##{filter} .icon").hasClass 'active'
[kinds, statuses]
# @param string splat a URL path
# @return array a two-value array where the first value is an array of rink
# kinds and the second value is an array of rink statuses
fromUrl: (splat) ->
kinds = []
statuses = []
if splat?
for part in splat.split('/')
if part of @kinds
kinds.push @kinds[part]
else if part of @statuses
statuses.push @statuses[part]
else if part is 'f'
# Do nothing.
else
console.log "Unknown filter: #{part}" if window.debug
[kinds, statuses]
# Performs the inverse of +fromUrl+.
# @param array kinds rink kinds
# @param array statuses rink statuses
# @return string a URL path
toUrl: (kinds, statuses) ->
'f/' + _.uniq(_.map(kinds.sort().concat(statuses.sort()), (filter) -> t filter)).join '/'
body: (arrondissement) ->
string = if arrondissement.name then "Attn: #{arrondissement.name}\r\n\r\n" else ''
string += "Serait-il possible de publier l'état de vos patinoires extérieures comme le font plusieurs arrondissements à la Ville de Montréal ? Voir: https://ville.montreal.qc.ca/portal/page?_pageid=5798,94909650&_dad=portal&_schema=PORTAL\r\n\r\nMerci."
encodeURIComponent string
# Set up options singleton.
Singleton = Backbone.Model.extend
localStorage: new Store 'options'
Options = new Singleton
id: 1
beforeFavorites: Helpers.rootUrl()
beforePopup: Helpers.rootUrl()
openingPopup: false
# Add helper functions to views and routers.
_.each [MarkersView, MarkerView, ControlsView, ControlView, Router], (klass) ->
_.extend klass.prototype, Helpers
# Seed collection.
window.Rinks = new RinkSet
Rinks.reset json
# Instantiate routes.
Routes = new Router
collection: Rinks
# Instantiate views.
markers = new MarkersView
el: '#map' # to avoid creating an element
collection: Rinks
controls = new ControlsView
el: '#controls' # to avoid creating an element
collection: Rinks
# Route the initial URL.
#window.location.replace window.location.pathname
Backbone.history.start pushState: true
# https://support.cloudmade.com/answers/general
Map.on 'locationfound', (event) ->
radius = event.accuracy / 2
if radius < 1000
locationIcon = L.Icon.extend
options:
iconUrl: "/assets/marker-icon.png"
iconRetinaUrl: "/assets/marker-icon-2x.png"
shadowUrl: "/assets/marker-shadow.png"
iconSize: [25, 41]
shadowSize: [33, 31]
iconAnchor: [12, 41]
shadowAnchor: [10, 31]
popupAnchor: [0, -46]
marker = new L.Marker event.latlng, icon: new locationIcon
Map.addLayer marker
marker.bindPopup t 'accuracy', radius: radius
Map.addLayer new L.Circle event.latlng, radius
Map.on 'locationerror', (event) ->
console.log event.message if window.debug
# If a popup is open, don't set the view to the marker.
if Helpers.rinkUrl()
Map.locate()
else
Map.locate( setView: true, zoom: 13 )
# Backbone doesn't attach the "events" option directly to the view, even
# though it makes sense given that views needn't necessarily hardcode CSS
# selectors (and maybe shouldn't).
# @see https://github.com/documentcloud/backbone/issues/656
|
[
{
"context": "llainsDict =\n 'jk': 'The Joker'\n 'hq': 'Harley Quinn'\n 'en': 'Edward Nigma'\n\n for guid, name o",
"end": 382,
"score": 0.9998699426651001,
"start": 370,
"tag": "NAME",
"value": "Harley Quinn"
},
{
"context": "he Joker'\n 'hq': 'Harley Quinn'\n... | tests/WheatonDeckTest.coffee | lessthanthree/wheaton | 1 | deckClassPath = '../src/vendor/wheaton/Deck'
cardClassPath = '../src/vendor/wheaton/Card'
jest.dontMock deckClassPath
jest.dontMock cardClassPath
describe 'Wheaton/Objects/Deck', ->
deck = undefined
beforeEach ->
Deck = require deckClassPath
Card = require cardClassPath
deck = new Deck
gothamVillainsDict =
'jk': 'The Joker'
'hq': 'Harley Quinn'
'en': 'Edward Nigma'
for guid, name of gothamVillainsDict
deck.set guid, new Card
guid: guid
name: name
it 'can draw a card', ->
lastCardInDeck = deck.last
drawnDeckCard = deck.draw()
expect lastCardInDeck
.toEqual drawnDeckCard
| 121168 | deckClassPath = '../src/vendor/wheaton/Deck'
cardClassPath = '../src/vendor/wheaton/Card'
jest.dontMock deckClassPath
jest.dontMock cardClassPath
describe 'Wheaton/Objects/Deck', ->
deck = undefined
beforeEach ->
Deck = require deckClassPath
Card = require cardClassPath
deck = new Deck
gothamVillainsDict =
'jk': 'The Joker'
'hq': '<NAME>'
'en': '<NAME>'
for guid, name of gothamVillainsDict
deck.set guid, new Card
guid: guid
name: name
it 'can draw a card', ->
lastCardInDeck = deck.last
drawnDeckCard = deck.draw()
expect lastCardInDeck
.toEqual drawnDeckCard
| true | deckClassPath = '../src/vendor/wheaton/Deck'
cardClassPath = '../src/vendor/wheaton/Card'
jest.dontMock deckClassPath
jest.dontMock cardClassPath
describe 'Wheaton/Objects/Deck', ->
deck = undefined
beforeEach ->
Deck = require deckClassPath
Card = require cardClassPath
deck = new Deck
gothamVillainsDict =
'jk': 'The Joker'
'hq': 'PI:NAME:<NAME>END_PI'
'en': 'PI:NAME:<NAME>END_PI'
for guid, name of gothamVillainsDict
deck.set guid, new Card
guid: guid
name: name
it 'can draw a card', ->
lastCardInDeck = deck.last
drawnDeckCard = deck.draw()
expect lastCardInDeck
.toEqual drawnDeckCard
|
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.9998928904533386,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhaki... | src/config/sequelize_config.coffee | AbdelhakimRafik/Project | 1 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date June 2021
###
path = require 'path'
config = require './index'
module.exports =
development:
username: config.db.username
password: config.db.password
database: config.db.dbName
host: config.db.host
port: config.db.port
dialect: config.db.dialect
dialectOptions:
bigNumberStrings: true
# Use a different storage type
migrationStorage: 'json',
# Use a different file name
migrationStoragePath: path.join __dirname, '../database/sequelize-meta.json',
production:
username: config.db.username
password: config.db.password
database: config.db.dbName
host: config.db.host
port: config.db.port
dialect: config.db.dialect
dialectOptions:
bigNumberStrings: true | 39806 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date June 2021
###
path = require 'path'
config = require './index'
module.exports =
development:
username: config.db.username
password: <PASSWORD>
database: config.db.dbName
host: config.db.host
port: config.db.port
dialect: config.db.dialect
dialectOptions:
bigNumberStrings: true
# Use a different storage type
migrationStorage: 'json',
# Use a different file name
migrationStoragePath: path.join __dirname, '../database/sequelize-meta.json',
production:
username: config.db.username
password: <PASSWORD>
database: config.db.dbName
host: config.db.host
port: config.db.port
dialect: config.db.dialect
dialectOptions:
bigNumberStrings: true | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date June 2021
###
path = require 'path'
config = require './index'
module.exports =
development:
username: config.db.username
password: PI:PASSWORD:<PASSWORD>END_PI
database: config.db.dbName
host: config.db.host
port: config.db.port
dialect: config.db.dialect
dialectOptions:
bigNumberStrings: true
# Use a different storage type
migrationStorage: 'json',
# Use a different file name
migrationStoragePath: path.join __dirname, '../database/sequelize-meta.json',
production:
username: config.db.username
password: PI:PASSWORD:<PASSWORD>END_PI
database: config.db.dbName
host: config.db.host
port: config.db.port
dialect: config.db.dialect
dialectOptions:
bigNumberStrings: true |
[
{
"context": "= data.split \":\"\n result = \n username: username\n password: password\n email: email\n ",
"end": 334,
"score": 0.9893836975097656,
"start": 326,
"tag": "USERNAME",
"value": "username"
},
{
"context": "lt = \n username: username\n ... | src/back/user.coffee | ThomasCharuel/ece_ast_project | 0 | module.exports = (db) ->
# get (username, callback)
# Get user informations
# - callback: the callback function, callback(err, data)
get: (username, callback) ->
db.get "user:#{username}", (err, data) ->
return callback err if err
[ password, email ] = data.split ":"
result =
username: username
password: password
email: email
callback null, result
# save (username, password, email, callback)
# Save user
# - username: user name
# - password: user password
# - email: user mail
# - callback: the callback function
save: (username, password, email, callback) ->
ws = db.createWriteStream()
ws.on 'error', (err) -> callback err
ws.on 'close', callback
ws.write
key: "user:#{username}"
value: "#{password}:#{email}"
ws.end()
# remove (username, callback)
# Delete given user
# - username: the user id
# - callback: the callback function
remove: (username, callback) ->
ws = db.createWriteStream
type: 'del'
ws.on 'error', (err) -> callback err
ws.on 'close', callback
ws.write
key: "user:#{username}"
ws.end()
metrics = require('./metrics')(db)
metrics.deleteByUsername username, (err) ->
callback err if err | 137480 | module.exports = (db) ->
# get (username, callback)
# Get user informations
# - callback: the callback function, callback(err, data)
get: (username, callback) ->
db.get "user:#{username}", (err, data) ->
return callback err if err
[ password, email ] = data.split ":"
result =
username: username
password: <PASSWORD>
email: email
callback null, result
# save (username, password, email, callback)
# Save user
# - username: user name
# - password: <PASSWORD>
# - email: user mail
# - callback: the callback function
save: (username, password, email, callback) ->
ws = db.createWriteStream()
ws.on 'error', (err) -> callback err
ws.on 'close', callback
ws.write
key: "user:#{username}"
value: "#{password}:#{email}"
ws.end()
# remove (username, callback)
# Delete given user
# - username: the user id
# - callback: the callback function
remove: (username, callback) ->
ws = db.createWriteStream
type: 'del'
ws.on 'error', (err) -> callback err
ws.on 'close', callback
ws.write
key: "user:#{username}"
ws.end()
metrics = require('./metrics')(db)
metrics.deleteByUsername username, (err) ->
callback err if err | true | module.exports = (db) ->
# get (username, callback)
# Get user informations
# - callback: the callback function, callback(err, data)
get: (username, callback) ->
db.get "user:#{username}", (err, data) ->
return callback err if err
[ password, email ] = data.split ":"
result =
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
email: email
callback null, result
# save (username, password, email, callback)
# Save user
# - username: user name
# - password: PI:PASSWORD:<PASSWORD>END_PI
# - email: user mail
# - callback: the callback function
save: (username, password, email, callback) ->
ws = db.createWriteStream()
ws.on 'error', (err) -> callback err
ws.on 'close', callback
ws.write
key: "user:#{username}"
value: "#{password}:#{email}"
ws.end()
# remove (username, callback)
# Delete given user
# - username: the user id
# - callback: the callback function
remove: (username, callback) ->
ws = db.createWriteStream
type: 'del'
ws.on 'error', (err) -> callback err
ws.on 'close', callback
ws.write
key: "user:#{username}"
ws.end()
metrics = require('./metrics')(db)
metrics.deleteByUsername username, (err) ->
callback err if err |
[
{
"context": "rom a YAML file.\n@module joukou-api/config\n@author Isaac Johnston <isaac.johnston@joukou.com>\n###\n\nfs = require( ",
"end": 691,
"score": 0.9998815655708313,
"start": 677,
"tag": "NAME",
"value": "Isaac Johnston"
},
{
"context": "@module joukou-api/config\n@author... | src/config.coffee | joukou/joukou-api | 0 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
Simple module to load deployment configuration from a YAML file.
@module joukou-api/config
@author Isaac Johnston <isaac.johnston@joukou.com>
###
fs = require( 'fs' )
path = require( 'path' )
yaml = require( 'js-yaml' )
log = require( './log/LoggerFactory' ).getLogger( name: 'server' )
try
module.exports = yaml.safeLoad(
fs.readFileSync( process.env.JOUKOU_CONFIG, encoding: 'utf8' )
)
catch e
log.warn( 'unable to load ' + process.env.JOUKOU_CONFIG )
module.exports = {} | 121998 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
Simple module to load deployment configuration from a YAML file.
@module joukou-api/config
@author <NAME> <<EMAIL>>
###
fs = require( 'fs' )
path = require( 'path' )
yaml = require( 'js-yaml' )
log = require( './log/LoggerFactory' ).getLogger( name: 'server' )
try
module.exports = yaml.safeLoad(
fs.readFileSync( process.env.JOUKOU_CONFIG, encoding: 'utf8' )
)
catch e
log.warn( 'unable to load ' + process.env.JOUKOU_CONFIG )
module.exports = {} | true | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
Simple module to load deployment configuration from a YAML file.
@module joukou-api/config
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
fs = require( 'fs' )
path = require( 'path' )
yaml = require( 'js-yaml' )
log = require( './log/LoggerFactory' ).getLogger( name: 'server' )
try
module.exports = yaml.safeLoad(
fs.readFileSync( process.env.JOUKOU_CONFIG, encoding: 'utf8' )
)
catch e
log.warn( 'unable to load ' + process.env.JOUKOU_CONFIG )
module.exports = {} |
[
{
"context": "ime Completions converted from https://github.com/Southclaw/pawn-sublime-language\n# Converter created by Rena",
"end": 120,
"score": 0.6926596164703369,
"start": 111,
"tag": "USERNAME",
"value": "Southclaw"
},
{
"context": "hclaw/pawn-sublime-language\n# Converter crea... | snippets/SIF.InventoryKeys.pwn.cson | Wuzi/language-pawn | 4 | # SIF.InventoryKeys.pwn snippets for Atom converted from Sublime Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by Renato "Hii" Garcia
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'OnPlayerAddToInventory':
'prefix': 'OnPlayerAddToInventory'
'body': 'OnPlayerAddToInventory(${1:playerid}, ${2:itemid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'OnPlayerAddedToInventory':
'prefix': 'OnPlayerAddedToInventory'
'body': 'OnPlayerAddedToInventory(${1:playerid}, ${2:itemid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
| 52128 | # SIF.InventoryKeys.pwn snippets for Atom converted from Sublime Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by <NAME> "<NAME>" <NAME>
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'OnPlayerAddToInventory':
'prefix': 'OnPlayerAddToInventory'
'body': 'OnPlayerAddToInventory(${1:playerid}, ${2:itemid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'OnPlayerAddedToInventory':
'prefix': 'OnPlayerAddedToInventory'
'body': 'OnPlayerAddedToInventory(${1:playerid}, ${2:itemid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
| true | # SIF.InventoryKeys.pwn snippets for Atom converted from Sublime Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by PI:NAME:<NAME>END_PI "PI:NAME:<NAME>END_PI" PI:NAME:<NAME>END_PI
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'OnPlayerAddToInventory':
'prefix': 'OnPlayerAddToInventory'
'body': 'OnPlayerAddToInventory(${1:playerid}, ${2:itemid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'OnPlayerAddedToInventory':
'prefix': 'OnPlayerAddedToInventory'
'body': 'OnPlayerAddedToInventory(${1:playerid}, ${2:itemid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
|
[
{
"context": "igger, \"logout\")\n\n channelFor: (key) ->\n key = \"#{key}-chan\"\n return service if (service = @get key)?\n ",
"end": 1087,
"score": 0.9179954528808594,
"start": 1075,
"tag": "KEY",
"value": "\"#{key}-chan"
}
] | addon/services/autox-session-context.coffee | foxnewsnetwork/autox | 0 | `import Ember from 'ember'`
`import _x from 'autox/utils/xdash'`
`import _ from 'lodash/lodash'`
{RSVP, Service, Evented, isBlank, inject, computed, run, String: {singularize}} = Ember
{alias} = computed
{apply} = _x.computed
{chain, bind} = _
NullModelError = (key) -> """
You called AutoxSessionContextService.connect with argument '#{key}',
but is currently null on the session model.
"""
AutoxSessionContextService = Service.extend Evented,
store: inject.service("store")
session: inject.service("session")
authData: alias "session.data.authenticated"
loggedIn: alias "session.isAuthenticated"
model: computed "session.isAuthenticated",
get: ->
store = @get "store"
if @get "session.isAuthenticated"
store.peekRecord "session", @get("authData.data.id")
else
store.createRecord "session"
instanceInit: ->
session = @get("session")
session.on "authenticationSucceeded", run.bind(@, @trigger, "login")
session.on "invalidationSucceeded", run.bind(@, @trigger, "logout")
channelFor: (key) ->
key = "#{key}-chan"
return service if (service = @get key)?
@[key] ?= inject.service(key)
@get key
fetchChannelable: (key) ->
return @get("model")?.get key
connect: (key) ->
@fetchChannelable(key)
.then (model) =>
throw NullModelError(key) if isBlank model
@channelFor(key)
.connect model
disconnect: (key) ->
@fetchChannelable(key)
.then =>
@channelFor(key)
.disconnect()
cast: (params, model) ->
store = @get "store"
sessionClass = store.modelFor "session"
sessionClass.eachAttribute (name) ->
if (value = Ember.get(params, name))?
model.set name, value
sessionClass.eachRelatedType (name) ->
if (value = Ember.get(params, name))?
model.set name, value
model
login: (params={}) ->
store = @get "store"
session = @get("session")
chain @get("model")
.thru bind(@cast, @, params)
.thru bind(session.authenticate, session, "authenticator:autox")
.value()
logout: ->
@get("session").invalidate()
update: (params={}) ->
model = @get "model"
@cast(params, model)
model
.save()
.then (model) =>
@trigger "change", model
model
.catch (error) =>
console.log error
throw error
`export default AutoxSessionContextService`
| 203981 | `import Ember from 'ember'`
`import _x from 'autox/utils/xdash'`
`import _ from 'lodash/lodash'`
{RSVP, Service, Evented, isBlank, inject, computed, run, String: {singularize}} = Ember
{alias} = computed
{apply} = _x.computed
{chain, bind} = _
NullModelError = (key) -> """
You called AutoxSessionContextService.connect with argument '#{key}',
but is currently null on the session model.
"""
AutoxSessionContextService = Service.extend Evented,
store: inject.service("store")
session: inject.service("session")
authData: alias "session.data.authenticated"
loggedIn: alias "session.isAuthenticated"
model: computed "session.isAuthenticated",
get: ->
store = @get "store"
if @get "session.isAuthenticated"
store.peekRecord "session", @get("authData.data.id")
else
store.createRecord "session"
instanceInit: ->
session = @get("session")
session.on "authenticationSucceeded", run.bind(@, @trigger, "login")
session.on "invalidationSucceeded", run.bind(@, @trigger, "logout")
channelFor: (key) ->
key = <KEY>"
return service if (service = @get key)?
@[key] ?= inject.service(key)
@get key
fetchChannelable: (key) ->
return @get("model")?.get key
connect: (key) ->
@fetchChannelable(key)
.then (model) =>
throw NullModelError(key) if isBlank model
@channelFor(key)
.connect model
disconnect: (key) ->
@fetchChannelable(key)
.then =>
@channelFor(key)
.disconnect()
cast: (params, model) ->
store = @get "store"
sessionClass = store.modelFor "session"
sessionClass.eachAttribute (name) ->
if (value = Ember.get(params, name))?
model.set name, value
sessionClass.eachRelatedType (name) ->
if (value = Ember.get(params, name))?
model.set name, value
model
login: (params={}) ->
store = @get "store"
session = @get("session")
chain @get("model")
.thru bind(@cast, @, params)
.thru bind(session.authenticate, session, "authenticator:autox")
.value()
logout: ->
@get("session").invalidate()
update: (params={}) ->
model = @get "model"
@cast(params, model)
model
.save()
.then (model) =>
@trigger "change", model
model
.catch (error) =>
console.log error
throw error
`export default AutoxSessionContextService`
| true | `import Ember from 'ember'`
`import _x from 'autox/utils/xdash'`
`import _ from 'lodash/lodash'`
{RSVP, Service, Evented, isBlank, inject, computed, run, String: {singularize}} = Ember
{alias} = computed
{apply} = _x.computed
{chain, bind} = _
NullModelError = (key) -> """
You called AutoxSessionContextService.connect with argument '#{key}',
but is currently null on the session model.
"""
AutoxSessionContextService = Service.extend Evented,
store: inject.service("store")
session: inject.service("session")
authData: alias "session.data.authenticated"
loggedIn: alias "session.isAuthenticated"
model: computed "session.isAuthenticated",
get: ->
store = @get "store"
if @get "session.isAuthenticated"
store.peekRecord "session", @get("authData.data.id")
else
store.createRecord "session"
instanceInit: ->
session = @get("session")
session.on "authenticationSucceeded", run.bind(@, @trigger, "login")
session.on "invalidationSucceeded", run.bind(@, @trigger, "logout")
channelFor: (key) ->
key = PI:KEY:<KEY>END_PI"
return service if (service = @get key)?
@[key] ?= inject.service(key)
@get key
fetchChannelable: (key) ->
return @get("model")?.get key
connect: (key) ->
@fetchChannelable(key)
.then (model) =>
throw NullModelError(key) if isBlank model
@channelFor(key)
.connect model
disconnect: (key) ->
@fetchChannelable(key)
.then =>
@channelFor(key)
.disconnect()
cast: (params, model) ->
store = @get "store"
sessionClass = store.modelFor "session"
sessionClass.eachAttribute (name) ->
if (value = Ember.get(params, name))?
model.set name, value
sessionClass.eachRelatedType (name) ->
if (value = Ember.get(params, name))?
model.set name, value
model
login: (params={}) ->
store = @get "store"
session = @get("session")
chain @get("model")
.thru bind(@cast, @, params)
.thru bind(session.authenticate, session, "authenticator:autox")
.value()
logout: ->
@get("session").invalidate()
update: (params={}) ->
model = @get "model"
@cast(params, model)
model
.save()
.then (model) =>
@trigger "change", model
model
.catch (error) =>
console.log error
throw error
`export default AutoxSessionContextService`
|
[
{
"context": " = index: 0\n\n request: (url, cb) ->\n key = \"_\" + global.jsonpCallbacks.index++\n sep = \"?\"\n ",
"end": 186,
"score": 0.750205934047699,
"start": 181,
"tag": "KEY",
"value": "\"_\" +"
}
] | src/scripts/util/jsonp.coffee | georgeOsdDev/booklog | 1 | "use strict"
define ->
class JSONP
constructor: (context)->
@s = {}
global = context
global.jsonpCallbacks = index: 0
request: (url, cb) ->
key = "_" + global.jsonpCallbacks.index++
sep = "?"
@s[key] = document.createElement("script")
@s[key].type = "text/javascript"
if url.indexOf("?") > 0 then sep = '&'
@s[key].src = url + sep + "callback=" + "jsonpCallback" + key
self = @
global["jsonpCallback" + key] = (json) ->
self.s[key].parentNode.removeChild self.s[key]
delete global["jsonpCallback" + key]
cb json
document.getElementsByTagName("head")[0].appendChild @s[key]
| 17182 | "use strict"
define ->
class JSONP
constructor: (context)->
@s = {}
global = context
global.jsonpCallbacks = index: 0
request: (url, cb) ->
key = <KEY> global.jsonpCallbacks.index++
sep = "?"
@s[key] = document.createElement("script")
@s[key].type = "text/javascript"
if url.indexOf("?") > 0 then sep = '&'
@s[key].src = url + sep + "callback=" + "jsonpCallback" + key
self = @
global["jsonpCallback" + key] = (json) ->
self.s[key].parentNode.removeChild self.s[key]
delete global["jsonpCallback" + key]
cb json
document.getElementsByTagName("head")[0].appendChild @s[key]
| true | "use strict"
define ->
class JSONP
constructor: (context)->
@s = {}
global = context
global.jsonpCallbacks = index: 0
request: (url, cb) ->
key = PI:KEY:<KEY>END_PI global.jsonpCallbacks.index++
sep = "?"
@s[key] = document.createElement("script")
@s[key].type = "text/javascript"
if url.indexOf("?") > 0 then sep = '&'
@s[key].src = url + sep + "callback=" + "jsonpCallback" + key
self = @
global["jsonpCallback" + key] = (json) ->
self.s[key].parentNode.removeChild self.s[key]
delete global["jsonpCallback" + key]
cb json
document.getElementsByTagName("head")[0].appendChild @s[key]
|
[
{
"context": "ch'], ['cent', 'centy', 'centów']],\n 'BAM': [['marka','marki','marek'], ['fenig','fenigi','fenigów']],",
"end": 2554,
"score": 0.9952043294906616,
"start": 2549,
"tag": "NAME",
"value": "marka"
},
{
"context": "cent', 'centy', 'centów']],\n 'BAM': [['marka','mark... | src/app/components/number2words/Number2Words.coffee | machomic/fakturownik-front | 1 | class Number2Words
minus: 'minus'
exponent:
0: ['','','']
3: ['tysiąc','tysiące','tysięcy']
6: ['milion','miliony','milionów']
9: ['miliard','miliardy','miliardów']
12: ['bilion','biliony','bilionów']
15: ['biliard','biliardy','biliardów']
18: ['trylion','tryliony','trylionów']
21: ['tryliard','tryliardy','tryliardów']
24: ['kwadrylion','kwadryliony','kwadrylionów']
27: ['kwadryliard','kwadryliardy','kwadryliardów']
30: ['kwintylion','kwintyliony','kwintylionów']
33: ['kwintyliiard','kwintyliardy','kwintyliardów']
36: ['sekstylion','sekstyliony','sekstylionów']
39: ['sekstyliard','sekstyliardy','sekstyliardów']
42: ['septylion','septyliony','septylionów']
45: ['septyliard','septyliardy','septyliardów']
48: ['oktylion','oktyliony','oktylionów']
51: ['oktyliard','oktyliardy','oktyliardów']
54: ['nonylion','nonyliony','nonylionów']
57: ['nonyliard','nonyliardy','nonyliardów']
60: ['decylion','decyliony','decylionów']
63: ['decyliard','decyliardy','decyliardów']
100: ['centylion','centyliony','centylionów']
103: ['centyliard','centyliardy','centyliardów']
120: ['wicylion','wicylion','wicylion']
123: ['wicyliard','wicyliardy','wicyliardów']
180: ['trycylion','trycylion','trycylion']
183: ['trycyliard','trycyliardy','trycyliardów']
240: ['kwadragilion','kwadragilion','kwadragilion']
243: ['kwadragiliard','kwadragiliardy','kwadragiliardów']
300: ['kwinkwagilion','kwinkwagilion','kwinkwagilion']
303: ['kwinkwagiliard','kwinkwagiliardy','kwinkwagiliardów']
360: ['seskwilion','seskwilion','seskwilion']
363: ['seskwiliard','seskwiliardy','seskwiliardów']
420: ['septagilion','septagilion','septagilion']
423: ['septagiliard','septagiliardy','septagiliardów']
480: ['oktogilion','oktogilion','oktogilion']
483: ['oktogiliard','oktogiliardy','oktogiliardów']
540: ['nonagilion','nonagilion','nonagilion']
543: ['nonagiliard','nonagiliardy','nonagiliardów']
600: ['centylion','centyliony','centylionów']
603: ['centyliard','centyliardy','centyliardów']
6000018: ['milinilitrylion','milinilitryliony','milinilitrylionów']
digits: ['zero', 'jeden', 'dwa', 'trzy', 'cztery',
'pięć', 'sześć', 'siedem', 'osiem', 'dziewięć']
sep: ' '
currencyNames:
'ALL': [['lek','leki','leków'], ['quindarka','quindarki','quindarek']],
'AUD': [['dolar australijski', 'dolary australijskie', 'dolarów australijskich'], ['cent', 'centy', 'centów']],
'BAM': [['marka','marki','marek'], ['fenig','fenigi','fenigów']],
'BGN': [['lew','lewy','lew'], ['stotinka','stotinki','stotinek']],
'BRL': [['real','reale','realów'], ['centavos','centavos','centavos']],
'BYR': [['rubel','ruble','rubli'], ['kopiejka','kopiejki','kopiejek']],
'CAD': [['dolar kanadyjski', 'dolary kanadyjskie', 'dolarów kanadyjskich'], ['cent', 'centy', 'centów']],
'CHF': [['frank szwajcarski','franki szwajcarskie','franków szwajcarskich'], ['rapp','rappy','rappów']],
'CYP': [['funt cypryjski','funty cypryjskie','funtów cypryjskich'], ['cent', 'centy', 'centów']],
'CZK': [['korona czeska','korony czeskie','koron czeskich'], ['halerz','halerze','halerzy']],
'DKK': [['korona duńska','korony duńskie','koron duńskich'], ['ore','ore','ore']],
'EEK': [['korona estońska','korony estońskie','koron estońskich'], ['senti','senti','senti']],
'EUR': [['euro', 'euro', 'euro'], ['eurocent', 'eurocenty', 'eurocentów']],
'GBP': [['funt szterling','funty szterlingi','funtów szterlingów'], ['pens','pensy','pensów']],
'HKD': [['dolar Hongkongu','dolary Hongkongu','dolarów Hongkongu'], ['cent', 'centy', 'centów']],
'HRK': [['kuna','kuny','kun'], ['lipa','lipy','lip']],
'HUF': [['forint','forinty','forintów'], ['filler','fillery','fillerów']],
'ILS': [['nowy szekel','nowe szekele','nowych szekeli'], ['agora','agory','agorot']],
'ISK': [['korona islandzka','korony islandzkie','koron islandzkich'], ['aurar','aurar','aurar']],
'JPY': [['jen','jeny','jenów'], ['sen','seny','senów']],
'LTL': [['lit','lity','litów'], ['cent', 'centy', 'centów']],
'LVL': [['łat','łaty','łatów'], ['sentim','sentimy','sentimów']],
'MKD': [['denar','denary','denarów'], ['deni','deni','deni']],
'MTL': [['lira maltańska','liry maltańskie','lir maltańskich'], ['centym','centymy','centymów']],
'NOK': [['korona norweska','korony norweskie','koron norweskich'], ['oere','oere','oere']],
'PLN': [['złoty', 'złote', 'złotych'], ['grosz', 'grosze', 'groszy']],
'ROL': [['lej','leje','lei'], ['bani','bani','bani']],
'RUB': [['rubel','ruble','rubli'], ['kopiejka','kopiejki','kopiejek']],
'SEK': [['korona szwedzka','korony szwedzkie','koron szweckich'], ['oere','oere','oere']],
'SIT': [['tolar','tolary','tolarów'], ['stotinia','stotinie','stotini']],
'SKK': [['korona słowacka','korony słowackie','koron słowackich'], ['halerz','halerze','halerzy']],
'TRL': [['lira turecka','liry tureckie','lir tureckich'], ['kurusza','kurysze','kuruszy']],
'UAH': [['hrywna','hrywna','hrywna'], ['cent', 'centy', 'centów']],
'USD': [['dolar','dolary','dolarów'], ['cent', 'centy', 'centów']],
'YUM': [['dinar','dinary','dinarów'], ['para','para','para']],
'ZAR': [['rand','randy','randów'], ['cent', 'centy', 'centów']]
defaultCurrency: 'PLN'
constructor: () ->
toWords: (num, power = 0, powsuffix = '') ->
ret = ''
num = new String(num)
# add a @minus sign
if num.substr(0,1) == '-'
ret = "#{@sep}#{@minus}"
num = num.substr(1)
# // strip excessive zero signs and spaces
num = num.trim()
num = num.replace(/^0+/, '')
if num.length > 3
p = curp = maxp = num.length-1
while p > 0
# check for highest power
if @exponent.hasOwnProperty(p)
# send substr from curp to p
snum = num.substr(maxp - curp, curp - p + 1)
snum = snum.replace(/^0+/, '')
if (snum != '')
cursuffix = @exponent[power][@exponent[power].length-1]
if (powsuffix != '')
cursuffix = cursuffix + @sep + powsuffix
ret = ret + @toWords(snum, p, cursuffix)
curp = p - 1
p--
num = num.substr(maxp - curp, curp - p + 1)
if num == 0
ret
else if num == 0 || num == ''
return "#{@sep}#{@digits[0]}"
h = t = d = 0
# coffescript cant handle switch without breaks
`
switch (num.length) {
case 3:
h = parseInt(num.substr(-3, 1));
case 2:
t = parseInt(num.substr(-2, 1));
case 1:
d = parseInt(num.substr( -1, 1));
break;
case 0:
return;
}`
switch h
when 9
ret = ret + @sep + 'dziewięćset'
when 8
ret = ret + @sep + 'osiemset'
when 7
ret = ret + @sep + 'siedemset'
when 6
ret = ret + @sep + 'sześćset'
when 5
ret = ret + @sep + 'pięćset'
when 4
ret = ret + @sep + 'czterysta'
when 3
ret = ret + @sep + 'trzysta'
when 2
ret = ret + @sep + 'dwieście'
when 1
ret = ret + @sep + 'sto'
switch t
when 9, 8, 7, 6, 5
ret = ret + @sep + @digits[t] + 'dziesiąt'
when 4
ret = ret + @sep + 'czterdzieści'
when 3
ret = ret + @sep + 'trzydzieści'
when 2
ret = ret + @sep + 'dwadzieścia'
when 1
switch d
when 0
ret = ret + @sep + 'dziesięć'
when 1
ret = ret + @sep + 'jedenaście'
when 2, 3, 7, 8
ret = ret + @sep + @digits[d] + 'naście'
when 4
ret = ret + @sep + 'czternaście'
when 5
ret = ret + @sep + 'piętnaście'
when 6
ret = ret + @sep + 'szesnaście'
when 9
ret = ret + @sep + 'dziewiętnaście'
if t != 1 && d > 0
ret = ret + @sep + @digits[d]
if t == 1
d = 0
if ( h + t ) > 0 && d == 1
d = 0
if power > 0
if @exponent.hasOwnProperty(power)
lev = @exponent[power]
if !lev || !Array.isArray lev
return null
switch d
when 1
suf = lev[0]
when 2, 3, 4
suf = lev[1]
when 0, 5, 6, 7, 8, 9
suf = lev[2]
ret = ret + @sep + suf
if powsuffix != ''
ret = ret + @sep + powsuffix
return ret
toCurrencyWords: ($number, $currency = "pln", $convertFraction = true) ->
$number = Math.round($number * 100) / 100
numberParts = new String($number).split(".")
$decimal = numberParts[0] || "0"
$fraction = numberParts[1] || null
@_toCurrencyWords($decimal, $fraction, $currency, $convertFraction)
_toCurrencyWords: ($decimal, $fraction = null, currency = "pln", convertFraction = true) ->
currency = currency.toUpperCase()
if !@currencyNames.hasOwnProperty(currency)
currency = @defaultCurrency
currNames = @currencyNames[currency]
$decimal = "0" if !$decimal
ret = @toWords($decimal)
ret = ret.trim() if ret
lev = @getNumLevel($decimal)
ret = ret + @sep + currNames[0][lev]
if $fraction
if convertFraction
ret = ret + @sep + @toWords($fraction)
else
ret = ret + @sep + $fraction
lev = @getNumLevel($fraction)
ret = ret + @sep + currNames[1][lev]
return ret
getNumLevel: (num) ->
return 0 unless num
if num.length > 3
num = num.substr(-3);
num = parseInt(num)
$h = $t = $d = $lev = 0;
`switch (num.length) {
case 3:
$h = parseInt(num.substr( -3, 1));
case 2:
$t = parseInt(num.substr(-2, 1));
case 1:
$d = parseInt(num.substr(-1, 1));
break;
case 0:
return 0;
}`
if $t == 1
$d = 0;
if (( $h + $t ) > 0 && $d == 1)
$d = 0;
switch $d
when 1
$lev = 0;
when 2,3,4
$lev = 1;
else
$lev = 2;
$lev
| 107781 | class Number2Words
minus: 'minus'
exponent:
0: ['','','']
3: ['tysiąc','tysiące','tysięcy']
6: ['milion','miliony','milionów']
9: ['miliard','miliardy','miliardów']
12: ['bilion','biliony','bilionów']
15: ['biliard','biliardy','biliardów']
18: ['trylion','tryliony','trylionów']
21: ['tryliard','tryliardy','tryliardów']
24: ['kwadrylion','kwadryliony','kwadrylionów']
27: ['kwadryliard','kwadryliardy','kwadryliardów']
30: ['kwintylion','kwintyliony','kwintylionów']
33: ['kwintyliiard','kwintyliardy','kwintyliardów']
36: ['sekstylion','sekstyliony','sekstylionów']
39: ['sekstyliard','sekstyliardy','sekstyliardów']
42: ['septylion','septyliony','septylionów']
45: ['septyliard','septyliardy','septyliardów']
48: ['oktylion','oktyliony','oktylionów']
51: ['oktyliard','oktyliardy','oktyliardów']
54: ['nonylion','nonyliony','nonylionów']
57: ['nonyliard','nonyliardy','nonyliardów']
60: ['decylion','decyliony','decylionów']
63: ['decyliard','decyliardy','decyliardów']
100: ['centylion','centyliony','centylionów']
103: ['centyliard','centyliardy','centyliardów']
120: ['wicylion','wicylion','wicylion']
123: ['wicyliard','wicyliardy','wicyliardów']
180: ['trycylion','trycylion','trycylion']
183: ['trycyliard','trycyliardy','trycyliardów']
240: ['kwadragilion','kwadragilion','kwadragilion']
243: ['kwadragiliard','kwadragiliardy','kwadragiliardów']
300: ['kwinkwagilion','kwinkwagilion','kwinkwagilion']
303: ['kwinkwagiliard','kwinkwagiliardy','kwinkwagiliardów']
360: ['seskwilion','seskwilion','seskwilion']
363: ['seskwiliard','seskwiliardy','seskwiliardów']
420: ['septagilion','septagilion','septagilion']
423: ['septagiliard','septagiliardy','septagiliardów']
480: ['oktogilion','oktogilion','oktogilion']
483: ['oktogiliard','oktogiliardy','oktogiliardów']
540: ['nonagilion','nonagilion','nonagilion']
543: ['nonagiliard','nonagiliardy','nonagiliardów']
600: ['centylion','centyliony','centylionów']
603: ['centyliard','centyliardy','centyliardów']
6000018: ['milinilitrylion','milinilitryliony','milinilitrylionów']
digits: ['zero', 'jeden', 'dwa', 'trzy', 'cztery',
'pięć', 'sześć', 'siedem', 'osiem', 'dziewięć']
sep: ' '
currencyNames:
'ALL': [['lek','leki','leków'], ['quindarka','quindarki','quindarek']],
'AUD': [['dolar australijski', 'dolary australijskie', 'dolarów australijskich'], ['cent', 'centy', 'centów']],
'BAM': [['<NAME>','<NAME>','<NAME>'], ['fenig','fenigi','fenigów']],
'BGN': [['lew','lewy','lew'], ['stotinka','stotinki','stotinek']],
'BRL': [['real','reale','realów'], ['centavos','centavos','centavos']],
'BYR': [['rubel','ruble','rubli'], ['kopiejka','kopiejki','kopiejek']],
'CAD': [['dolar kanadyjski', 'dolary kanadyjskie', 'dolarów kanadyjskich'], ['cent', 'centy', 'centów']],
'CHF': [['<NAME>','<NAME>i <NAME>ie','franków szwajcarskich'], ['rapp','rappy','rappów']],
'CYP': [['funt cypryjski','funty cypryjskie','funtów cypryjskich'], ['cent', 'centy', 'centów']],
'CZK': [['korona czeska','korony czeskie','koron czeskich'], ['halerz','halerze','halerzy']],
'DKK': [['korona duńska','korony duńskie','koron duńskich'], ['ore','ore','ore']],
'EEK': [['korona estońska','korony estońskie','koron estońskich'], ['senti','senti','senti']],
'EUR': [['euro', 'euro', 'euro'], ['eurocent', 'eurocenty', 'eurocentów']],
'GBP': [['funt szterling','funty szterlingi','funtów szterlingów'], ['pens','pensy','pensów']],
'HKD': [['dolar Hongkongu','dolary Hongkongu','dolarów Hongkongu'], ['cent', 'centy', 'centów']],
'HRK': [['kuna','kuny','kun'], ['lipa','lipy','lip']],
'HUF': [['forint','forinty','forintów'], ['filler','fillery','fillerów']],
'ILS': [['nowy szekel','nowe szekele','nowych szekeli'], ['agora','agory','agorot']],
'ISK': [['korona islandzka','korony islandzkie','koron islandzkich'], ['aurar','aurar','aurar']],
'JPY': [['jen','jeny','jenów'], ['sen','seny','senów']],
'LTL': [['lit','lity','litów'], ['cent', 'centy', 'centów']],
'LVL': [['łat','łaty','łatów'], ['sentim','sentimy','sentimów']],
'MKD': [['denar','denary','denarów'], ['deni','deni','deni']],
'MTL': [['lira maltańska','liry maltańskie','lir maltańskich'], ['centym','centymy','centymów']],
'NOK': [['korona norweska','korony norweskie','koron norweskich'], ['oere','oere','oere']],
'PLN': [['złoty', 'złote', 'złotych'], ['grosz', 'grosze', 'groszy']],
'ROL': [['lej','leje','lei'], ['bani','bani','bani']],
'RUB': [['rubel','ruble','rubli'], ['kopiejka','kopiejki','kopiejek']],
'SEK': [['korona szwedzka','korony szwedzkie','koron szweckich'], ['oere','oere','oere']],
'SIT': [['tolar','tolary','tolarów'], ['stotinia','stotinie','stotini']],
'SKK': [['korona słowacka','korony słowackie','koron słowackich'], ['halerz','halerze','halerzy']],
'TRL': [['lira turecka','liry tureckie','lir tureckich'], ['kurusza','kurysze','kuruszy']],
'UAH': [['hrywna','hrywna','hrywna'], ['cent', 'centy', 'centów']],
'USD': [['dolar','dolary','dolarów'], ['cent', 'centy', 'centów']],
'YUM': [['dinar','dinary','dinarów'], ['para','para','para']],
'ZAR': [['rand','randy','randów'], ['cent', 'centy', 'centów']]
defaultCurrency: 'PLN'
constructor: () ->
toWords: (num, power = 0, powsuffix = '') ->
ret = ''
num = new String(num)
# add a @minus sign
if num.substr(0,1) == '-'
ret = "#{@sep}#{@minus}"
num = num.substr(1)
# // strip excessive zero signs and spaces
num = num.trim()
num = num.replace(/^0+/, '')
if num.length > 3
p = curp = maxp = num.length-1
while p > 0
# check for highest power
if @exponent.hasOwnProperty(p)
# send substr from curp to p
snum = num.substr(maxp - curp, curp - p + 1)
snum = snum.replace(/^0+/, '')
if (snum != '')
cursuffix = @exponent[power][@exponent[power].length-1]
if (powsuffix != '')
cursuffix = cursuffix + @sep + powsuffix
ret = ret + @toWords(snum, p, cursuffix)
curp = p - 1
p--
num = num.substr(maxp - curp, curp - p + 1)
if num == 0
ret
else if num == 0 || num == ''
return "#{@sep}#{@digits[0]}"
h = t = d = 0
# coffescript cant handle switch without breaks
`
switch (num.length) {
case 3:
h = parseInt(num.substr(-3, 1));
case 2:
t = parseInt(num.substr(-2, 1));
case 1:
d = parseInt(num.substr( -1, 1));
break;
case 0:
return;
}`
switch h
when 9
ret = ret + @sep + 'dziewięćset'
when 8
ret = ret + @sep + 'osiemset'
when 7
ret = ret + @sep + 'siedemset'
when 6
ret = ret + @sep + 'sześćset'
when 5
ret = ret + @sep + 'pięćset'
when 4
ret = ret + @sep + 'czterysta'
when 3
ret = ret + @sep + 'trzysta'
when 2
ret = ret + @sep + 'dwieście'
when 1
ret = ret + @sep + 'sto'
switch t
when 9, 8, 7, 6, 5
ret = ret + @sep + @digits[t] + 'dziesiąt'
when 4
ret = ret + @sep + 'czterdzieści'
when 3
ret = ret + @sep + 'trzydzieści'
when 2
ret = ret + @sep + 'dwadzieścia'
when 1
switch d
when 0
ret = ret + @sep + 'dziesięć'
when 1
ret = ret + @sep + 'jedenaście'
when 2, 3, 7, 8
ret = ret + @sep + @digits[d] + 'naście'
when 4
ret = ret + @sep + 'czternaście'
when 5
ret = ret + @sep + 'piętnaście'
when 6
ret = ret + @sep + 'szesnaście'
when 9
ret = ret + @sep + 'dziewiętnaście'
if t != 1 && d > 0
ret = ret + @sep + @digits[d]
if t == 1
d = 0
if ( h + t ) > 0 && d == 1
d = 0
if power > 0
if @exponent.hasOwnProperty(power)
lev = @exponent[power]
if !lev || !Array.isArray lev
return null
switch d
when 1
suf = lev[0]
when 2, 3, 4
suf = lev[1]
when 0, 5, 6, 7, 8, 9
suf = lev[2]
ret = ret + @sep + suf
if powsuffix != ''
ret = ret + @sep + powsuffix
return ret
toCurrencyWords: ($number, $currency = "pln", $convertFraction = true) ->
$number = Math.round($number * 100) / 100
numberParts = new String($number).split(".")
$decimal = numberParts[0] || "0"
$fraction = numberParts[1] || null
@_toCurrencyWords($decimal, $fraction, $currency, $convertFraction)
_toCurrencyWords: ($decimal, $fraction = null, currency = "pln", convertFraction = true) ->
currency = currency.toUpperCase()
if !@currencyNames.hasOwnProperty(currency)
currency = @defaultCurrency
currNames = @currencyNames[currency]
$decimal = "0" if !$decimal
ret = @toWords($decimal)
ret = ret.trim() if ret
lev = @getNumLevel($decimal)
ret = ret + @sep + currNames[0][lev]
if $fraction
if convertFraction
ret = ret + @sep + @toWords($fraction)
else
ret = ret + @sep + $fraction
lev = @getNumLevel($fraction)
ret = ret + @sep + currNames[1][lev]
return ret
getNumLevel: (num) ->
return 0 unless num
if num.length > 3
num = num.substr(-3);
num = parseInt(num)
$h = $t = $d = $lev = 0;
`switch (num.length) {
case 3:
$h = parseInt(num.substr( -3, 1));
case 2:
$t = parseInt(num.substr(-2, 1));
case 1:
$d = parseInt(num.substr(-1, 1));
break;
case 0:
return 0;
}`
if $t == 1
$d = 0;
if (( $h + $t ) > 0 && $d == 1)
$d = 0;
switch $d
when 1
$lev = 0;
when 2,3,4
$lev = 1;
else
$lev = 2;
$lev
| true | class Number2Words
minus: 'minus'
exponent:
0: ['','','']
3: ['tysiąc','tysiące','tysięcy']
6: ['milion','miliony','milionów']
9: ['miliard','miliardy','miliardów']
12: ['bilion','biliony','bilionów']
15: ['biliard','biliardy','biliardów']
18: ['trylion','tryliony','trylionów']
21: ['tryliard','tryliardy','tryliardów']
24: ['kwadrylion','kwadryliony','kwadrylionów']
27: ['kwadryliard','kwadryliardy','kwadryliardów']
30: ['kwintylion','kwintyliony','kwintylionów']
33: ['kwintyliiard','kwintyliardy','kwintyliardów']
36: ['sekstylion','sekstyliony','sekstylionów']
39: ['sekstyliard','sekstyliardy','sekstyliardów']
42: ['septylion','septyliony','septylionów']
45: ['septyliard','septyliardy','septyliardów']
48: ['oktylion','oktyliony','oktylionów']
51: ['oktyliard','oktyliardy','oktyliardów']
54: ['nonylion','nonyliony','nonylionów']
57: ['nonyliard','nonyliardy','nonyliardów']
60: ['decylion','decyliony','decylionów']
63: ['decyliard','decyliardy','decyliardów']
100: ['centylion','centyliony','centylionów']
103: ['centyliard','centyliardy','centyliardów']
120: ['wicylion','wicylion','wicylion']
123: ['wicyliard','wicyliardy','wicyliardów']
180: ['trycylion','trycylion','trycylion']
183: ['trycyliard','trycyliardy','trycyliardów']
240: ['kwadragilion','kwadragilion','kwadragilion']
243: ['kwadragiliard','kwadragiliardy','kwadragiliardów']
300: ['kwinkwagilion','kwinkwagilion','kwinkwagilion']
303: ['kwinkwagiliard','kwinkwagiliardy','kwinkwagiliardów']
360: ['seskwilion','seskwilion','seskwilion']
363: ['seskwiliard','seskwiliardy','seskwiliardów']
420: ['septagilion','septagilion','septagilion']
423: ['septagiliard','septagiliardy','septagiliardów']
480: ['oktogilion','oktogilion','oktogilion']
483: ['oktogiliard','oktogiliardy','oktogiliardów']
540: ['nonagilion','nonagilion','nonagilion']
543: ['nonagiliard','nonagiliardy','nonagiliardów']
600: ['centylion','centyliony','centylionów']
603: ['centyliard','centyliardy','centyliardów']
6000018: ['milinilitrylion','milinilitryliony','milinilitrylionów']
digits: ['zero', 'jeden', 'dwa', 'trzy', 'cztery',
'pięć', 'sześć', 'siedem', 'osiem', 'dziewięć']
sep: ' '
currencyNames:
'ALL': [['lek','leki','leków'], ['quindarka','quindarki','quindarek']],
'AUD': [['dolar australijski', 'dolary australijskie', 'dolarów australijskich'], ['cent', 'centy', 'centów']],
'BAM': [['PI:NAME:<NAME>END_PI','PI:NAME:<NAME>END_PI','PI:NAME:<NAME>END_PI'], ['fenig','fenigi','fenigów']],
'BGN': [['lew','lewy','lew'], ['stotinka','stotinki','stotinek']],
'BRL': [['real','reale','realów'], ['centavos','centavos','centavos']],
'BYR': [['rubel','ruble','rubli'], ['kopiejka','kopiejki','kopiejek']],
'CAD': [['dolar kanadyjski', 'dolary kanadyjskie', 'dolarów kanadyjskich'], ['cent', 'centy', 'centów']],
'CHF': [['PI:NAME:<NAME>END_PI','PI:NAME:<NAME>END_PIi PI:NAME:<NAME>END_PIie','franków szwajcarskich'], ['rapp','rappy','rappów']],
'CYP': [['funt cypryjski','funty cypryjskie','funtów cypryjskich'], ['cent', 'centy', 'centów']],
'CZK': [['korona czeska','korony czeskie','koron czeskich'], ['halerz','halerze','halerzy']],
'DKK': [['korona duńska','korony duńskie','koron duńskich'], ['ore','ore','ore']],
'EEK': [['korona estońska','korony estońskie','koron estońskich'], ['senti','senti','senti']],
'EUR': [['euro', 'euro', 'euro'], ['eurocent', 'eurocenty', 'eurocentów']],
'GBP': [['funt szterling','funty szterlingi','funtów szterlingów'], ['pens','pensy','pensów']],
'HKD': [['dolar Hongkongu','dolary Hongkongu','dolarów Hongkongu'], ['cent', 'centy', 'centów']],
'HRK': [['kuna','kuny','kun'], ['lipa','lipy','lip']],
'HUF': [['forint','forinty','forintów'], ['filler','fillery','fillerów']],
'ILS': [['nowy szekel','nowe szekele','nowych szekeli'], ['agora','agory','agorot']],
'ISK': [['korona islandzka','korony islandzkie','koron islandzkich'], ['aurar','aurar','aurar']],
'JPY': [['jen','jeny','jenów'], ['sen','seny','senów']],
'LTL': [['lit','lity','litów'], ['cent', 'centy', 'centów']],
'LVL': [['łat','łaty','łatów'], ['sentim','sentimy','sentimów']],
'MKD': [['denar','denary','denarów'], ['deni','deni','deni']],
'MTL': [['lira maltańska','liry maltańskie','lir maltańskich'], ['centym','centymy','centymów']],
'NOK': [['korona norweska','korony norweskie','koron norweskich'], ['oere','oere','oere']],
'PLN': [['złoty', 'złote', 'złotych'], ['grosz', 'grosze', 'groszy']],
'ROL': [['lej','leje','lei'], ['bani','bani','bani']],
'RUB': [['rubel','ruble','rubli'], ['kopiejka','kopiejki','kopiejek']],
'SEK': [['korona szwedzka','korony szwedzkie','koron szweckich'], ['oere','oere','oere']],
'SIT': [['tolar','tolary','tolarów'], ['stotinia','stotinie','stotini']],
'SKK': [['korona słowacka','korony słowackie','koron słowackich'], ['halerz','halerze','halerzy']],
'TRL': [['lira turecka','liry tureckie','lir tureckich'], ['kurusza','kurysze','kuruszy']],
'UAH': [['hrywna','hrywna','hrywna'], ['cent', 'centy', 'centów']],
'USD': [['dolar','dolary','dolarów'], ['cent', 'centy', 'centów']],
'YUM': [['dinar','dinary','dinarów'], ['para','para','para']],
'ZAR': [['rand','randy','randów'], ['cent', 'centy', 'centów']]
defaultCurrency: 'PLN'
constructor: () ->
toWords: (num, power = 0, powsuffix = '') ->
ret = ''
num = new String(num)
# add a @minus sign
if num.substr(0,1) == '-'
ret = "#{@sep}#{@minus}"
num = num.substr(1)
# // strip excessive zero signs and spaces
num = num.trim()
num = num.replace(/^0+/, '')
if num.length > 3
p = curp = maxp = num.length-1
while p > 0
# check for highest power
if @exponent.hasOwnProperty(p)
# send substr from curp to p
snum = num.substr(maxp - curp, curp - p + 1)
snum = snum.replace(/^0+/, '')
if (snum != '')
cursuffix = @exponent[power][@exponent[power].length-1]
if (powsuffix != '')
cursuffix = cursuffix + @sep + powsuffix
ret = ret + @toWords(snum, p, cursuffix)
curp = p - 1
p--
num = num.substr(maxp - curp, curp - p + 1)
if num == 0
ret
else if num == 0 || num == ''
return "#{@sep}#{@digits[0]}"
h = t = d = 0
# coffescript cant handle switch without breaks
`
switch (num.length) {
case 3:
h = parseInt(num.substr(-3, 1));
case 2:
t = parseInt(num.substr(-2, 1));
case 1:
d = parseInt(num.substr( -1, 1));
break;
case 0:
return;
}`
switch h
when 9
ret = ret + @sep + 'dziewięćset'
when 8
ret = ret + @sep + 'osiemset'
when 7
ret = ret + @sep + 'siedemset'
when 6
ret = ret + @sep + 'sześćset'
when 5
ret = ret + @sep + 'pięćset'
when 4
ret = ret + @sep + 'czterysta'
when 3
ret = ret + @sep + 'trzysta'
when 2
ret = ret + @sep + 'dwieście'
when 1
ret = ret + @sep + 'sto'
switch t
when 9, 8, 7, 6, 5
ret = ret + @sep + @digits[t] + 'dziesiąt'
when 4
ret = ret + @sep + 'czterdzieści'
when 3
ret = ret + @sep + 'trzydzieści'
when 2
ret = ret + @sep + 'dwadzieścia'
when 1
switch d
when 0
ret = ret + @sep + 'dziesięć'
when 1
ret = ret + @sep + 'jedenaście'
when 2, 3, 7, 8
ret = ret + @sep + @digits[d] + 'naście'
when 4
ret = ret + @sep + 'czternaście'
when 5
ret = ret + @sep + 'piętnaście'
when 6
ret = ret + @sep + 'szesnaście'
when 9
ret = ret + @sep + 'dziewiętnaście'
if t != 1 && d > 0
ret = ret + @sep + @digits[d]
if t == 1
d = 0
if ( h + t ) > 0 && d == 1
d = 0
if power > 0
if @exponent.hasOwnProperty(power)
lev = @exponent[power]
if !lev || !Array.isArray lev
return null
switch d
when 1
suf = lev[0]
when 2, 3, 4
suf = lev[1]
when 0, 5, 6, 7, 8, 9
suf = lev[2]
ret = ret + @sep + suf
if powsuffix != ''
ret = ret + @sep + powsuffix
return ret
toCurrencyWords: ($number, $currency = "pln", $convertFraction = true) ->
$number = Math.round($number * 100) / 100
numberParts = new String($number).split(".")
$decimal = numberParts[0] || "0"
$fraction = numberParts[1] || null
@_toCurrencyWords($decimal, $fraction, $currency, $convertFraction)
_toCurrencyWords: ($decimal, $fraction = null, currency = "pln", convertFraction = true) ->
currency = currency.toUpperCase()
if !@currencyNames.hasOwnProperty(currency)
currency = @defaultCurrency
currNames = @currencyNames[currency]
$decimal = "0" if !$decimal
ret = @toWords($decimal)
ret = ret.trim() if ret
lev = @getNumLevel($decimal)
ret = ret + @sep + currNames[0][lev]
if $fraction
if convertFraction
ret = ret + @sep + @toWords($fraction)
else
ret = ret + @sep + $fraction
lev = @getNumLevel($fraction)
ret = ret + @sep + currNames[1][lev]
return ret
getNumLevel: (num) ->
return 0 unless num
if num.length > 3
num = num.substr(-3);
num = parseInt(num)
$h = $t = $d = $lev = 0;
`switch (num.length) {
case 3:
$h = parseInt(num.substr( -3, 1));
case 2:
$t = parseInt(num.substr(-2, 1));
case 1:
$d = parseInt(num.substr(-1, 1));
break;
case 0:
return 0;
}`
if $t == 1
$d = 0;
if (( $h + $t ) > 0 && $d == 1)
$d = 0;
switch $d
when 1
$lev = 0;
when 2,3,4
$lev = 1;
else
$lev = 2;
$lev
|
[
{
"context": "derationComment {...@props}\n key={\"moderation-#{moderation.id}\"}\n moderation={moder",
"end": 2556,
"score": 0.8476731777191162,
"start": 2543,
"tag": "KEY",
"value": "moderation-#{"
},
{
"context": "{...@props}\n key={\"m... | app/talk/moderations.cjsx | alexbfree/Panoptes-Front-End | 0 | React = require 'react'
talkClient = require 'panoptes-client/lib/talk-client'
auth = require 'panoptes-client/lib/auth'
Paginator = require './lib/paginator'
Loading = require '../components/loading-indicator'
page_size = require('./config').moderationsPageSize
{History} = require 'react-router'
updateQueryParams = require './lib/update-query-params'
ModerationComment = require './moderation/comment'
module.exports = React.createClass
displayName: 'TalkModerations'
mixins: [History]
getInitialState: ->
moderations: []
moderationsMeta: {}
user: null
loading: true
getDefaultProps: ->
foo: 'bar'
location:
query:
page: 1
state: 'opened'
componentDidMount: ->
@props.location.query.state or= 'opened'
@setModerations()
componentWillReceiveProps: (nextProps) ->
@setModerations nextProps.location.query
setModerations: ({page, state} = { }) ->
page or= @props.location.query.page
state or= @props.location.query.state
section = @props.section
@setState loading: true
auth.checkCurrent().then (user) =>
talkClient.type('moderations').get({section, state, page, page_size})
.then (moderations) =>
moderationsMeta = moderations[0]?.getMeta()
@setState {user, moderations, moderationsMeta, loading: false}
.catch (e) =>
@setState {loading: false}
throw new Error(e)
updateModeration: (moderation, action, message) ->
user_id = @state.user.id
moderation.update(actions: [{user_id, action, message}]).save().then =>
@setModerations()
filterByAction: (action) ->
updateQueryParams @history, state: action
nameOf: (action) ->
switch action
when 'closed' then 'deleted'
when 'all' then 'all reports'
else
action
render: ->
return <p>You must be logged in to view this page</p> unless @props.user
state = @props.location.query.state
<div className="talk moderations">
<section>
{['all', 'opened', 'ignored', 'closed'].map (action) =>
<button
key={action}
onClick={=> @filterByAction(action)}
className={if state is action then 'active' else ''}>
{@nameOf(action)}
</button>}
</section>
<div>
{if @state.loading
<Loading />
else if @state.moderations.length > 0
<div>
{for moderation in @state.moderations
<ModerationComment {...@props}
key={"moderation-#{moderation.id}"}
moderation={moderation}
updateModeration={@updateModeration} />}
<Paginator
page={+@state.moderationsMeta.page}
pageCount={+@state.moderationsMeta.page_count} />
</div>
else
<p>There are not currently any {@nameOf(state) unless state is 'all'} moderations.</p>}
</div>
</div>
| 147791 | React = require 'react'
talkClient = require 'panoptes-client/lib/talk-client'
auth = require 'panoptes-client/lib/auth'
Paginator = require './lib/paginator'
Loading = require '../components/loading-indicator'
page_size = require('./config').moderationsPageSize
{History} = require 'react-router'
updateQueryParams = require './lib/update-query-params'
ModerationComment = require './moderation/comment'
module.exports = React.createClass
displayName: 'TalkModerations'
mixins: [History]
getInitialState: ->
moderations: []
moderationsMeta: {}
user: null
loading: true
getDefaultProps: ->
foo: 'bar'
location:
query:
page: 1
state: 'opened'
componentDidMount: ->
@props.location.query.state or= 'opened'
@setModerations()
componentWillReceiveProps: (nextProps) ->
@setModerations nextProps.location.query
setModerations: ({page, state} = { }) ->
page or= @props.location.query.page
state or= @props.location.query.state
section = @props.section
@setState loading: true
auth.checkCurrent().then (user) =>
talkClient.type('moderations').get({section, state, page, page_size})
.then (moderations) =>
moderationsMeta = moderations[0]?.getMeta()
@setState {user, moderations, moderationsMeta, loading: false}
.catch (e) =>
@setState {loading: false}
throw new Error(e)
updateModeration: (moderation, action, message) ->
user_id = @state.user.id
moderation.update(actions: [{user_id, action, message}]).save().then =>
@setModerations()
filterByAction: (action) ->
updateQueryParams @history, state: action
nameOf: (action) ->
switch action
when 'closed' then 'deleted'
when 'all' then 'all reports'
else
action
render: ->
return <p>You must be logged in to view this page</p> unless @props.user
state = @props.location.query.state
<div className="talk moderations">
<section>
{['all', 'opened', 'ignored', 'closed'].map (action) =>
<button
key={action}
onClick={=> @filterByAction(action)}
className={if state is action then 'active' else ''}>
{@nameOf(action)}
</button>}
</section>
<div>
{if @state.loading
<Loading />
else if @state.moderations.length > 0
<div>
{for moderation in @state.moderations
<ModerationComment {...@props}
key={"<KEY>mod<KEY>"}
moderation={moderation}
updateModeration={@updateModeration} />}
<Paginator
page={+@state.moderationsMeta.page}
pageCount={+@state.moderationsMeta.page_count} />
</div>
else
<p>There are not currently any {@nameOf(state) unless state is 'all'} moderations.</p>}
</div>
</div>
| true | React = require 'react'
talkClient = require 'panoptes-client/lib/talk-client'
auth = require 'panoptes-client/lib/auth'
Paginator = require './lib/paginator'
Loading = require '../components/loading-indicator'
page_size = require('./config').moderationsPageSize
{History} = require 'react-router'
updateQueryParams = require './lib/update-query-params'
ModerationComment = require './moderation/comment'
module.exports = React.createClass
displayName: 'TalkModerations'
mixins: [History]
getInitialState: ->
moderations: []
moderationsMeta: {}
user: null
loading: true
getDefaultProps: ->
foo: 'bar'
location:
query:
page: 1
state: 'opened'
componentDidMount: ->
@props.location.query.state or= 'opened'
@setModerations()
componentWillReceiveProps: (nextProps) ->
@setModerations nextProps.location.query
setModerations: ({page, state} = { }) ->
page or= @props.location.query.page
state or= @props.location.query.state
section = @props.section
@setState loading: true
auth.checkCurrent().then (user) =>
talkClient.type('moderations').get({section, state, page, page_size})
.then (moderations) =>
moderationsMeta = moderations[0]?.getMeta()
@setState {user, moderations, moderationsMeta, loading: false}
.catch (e) =>
@setState {loading: false}
throw new Error(e)
updateModeration: (moderation, action, message) ->
user_id = @state.user.id
moderation.update(actions: [{user_id, action, message}]).save().then =>
@setModerations()
filterByAction: (action) ->
updateQueryParams @history, state: action
nameOf: (action) ->
switch action
when 'closed' then 'deleted'
when 'all' then 'all reports'
else
action
render: ->
return <p>You must be logged in to view this page</p> unless @props.user
state = @props.location.query.state
<div className="talk moderations">
<section>
{['all', 'opened', 'ignored', 'closed'].map (action) =>
<button
key={action}
onClick={=> @filterByAction(action)}
className={if state is action then 'active' else ''}>
{@nameOf(action)}
</button>}
</section>
<div>
{if @state.loading
<Loading />
else if @state.moderations.length > 0
<div>
{for moderation in @state.moderations
<ModerationComment {...@props}
key={"PI:KEY:<KEY>END_PImodPI:KEY:<KEY>END_PI"}
moderation={moderation}
updateModeration={@updateModeration} />}
<Paginator
page={+@state.moderationsMeta.page}
pageCount={+@state.moderationsMeta.page_count} />
</div>
else
<p>There are not currently any {@nameOf(state) unless state is 'all'} moderations.</p>}
</div>
</div>
|
[
{
"context": "yes\n sendVerificationEmail: no\n confirmPassword: yes\n negativeValidation: yes\n positiveValidation: y",
"end": 163,
"score": 0.929425060749054,
"start": 160,
"tag": "PASSWORD",
"value": "yes"
}
] | both/router/config.coffee | bkuri/scatpod | 0 | AccountsTemplates.configure
showForgotPasswordLink: yes
overrideLoginErrors: yes
enablePasswordChange: yes
sendVerificationEmail: no
confirmPassword: yes
negativeValidation: yes
positiveValidation: yes
negativeFeedback: no
positiveFeedback: no
AccountsTemplates.configureRoute 'changePwd'
AccountsTemplates.configureRoute 'enrollAccount'
AccountsTemplates.configureRoute 'forgotPwd'
AccountsTemplates.configureRoute 'resetPwd'
AccountsTemplates.configureRoute 'signIn'
AccountsTemplates.configureRoute 'signUp'
AccountsTemplates.configureRoute 'verifyEmail'
Router.configure
layoutTemplate: 'layout'
loadingTemplate: 'loading'
notFoundTemplate: 'not_found'
Router.plugin 'ensureSignedIn', only: ['search', 'settings']
T9n.setLanguage 'en'
| 38284 | AccountsTemplates.configure
showForgotPasswordLink: yes
overrideLoginErrors: yes
enablePasswordChange: yes
sendVerificationEmail: no
confirmPassword: <PASSWORD>
negativeValidation: yes
positiveValidation: yes
negativeFeedback: no
positiveFeedback: no
AccountsTemplates.configureRoute 'changePwd'
AccountsTemplates.configureRoute 'enrollAccount'
AccountsTemplates.configureRoute 'forgotPwd'
AccountsTemplates.configureRoute 'resetPwd'
AccountsTemplates.configureRoute 'signIn'
AccountsTemplates.configureRoute 'signUp'
AccountsTemplates.configureRoute 'verifyEmail'
Router.configure
layoutTemplate: 'layout'
loadingTemplate: 'loading'
notFoundTemplate: 'not_found'
Router.plugin 'ensureSignedIn', only: ['search', 'settings']
T9n.setLanguage 'en'
| true | AccountsTemplates.configure
showForgotPasswordLink: yes
overrideLoginErrors: yes
enablePasswordChange: yes
sendVerificationEmail: no
confirmPassword: PI:PASSWORD:<PASSWORD>END_PI
negativeValidation: yes
positiveValidation: yes
negativeFeedback: no
positiveFeedback: no
AccountsTemplates.configureRoute 'changePwd'
AccountsTemplates.configureRoute 'enrollAccount'
AccountsTemplates.configureRoute 'forgotPwd'
AccountsTemplates.configureRoute 'resetPwd'
AccountsTemplates.configureRoute 'signIn'
AccountsTemplates.configureRoute 'signUp'
AccountsTemplates.configureRoute 'verifyEmail'
Router.configure
layoutTemplate: 'layout'
loadingTemplate: 'loading'
notFoundTemplate: 'not_found'
Router.plugin 'ensureSignedIn', only: ['search', 'settings']
T9n.setLanguage 'en'
|
[
{
"context": "= parent || false\n @meta = ''\n @name = 'New Factor'\n @pairwise = [[]]\n @pairwise[0][0] = 1",
"end": 181,
"score": 0.9853496551513672,
"start": 171,
"tag": "NAME",
"value": "New Factor"
},
{
"context": "getTree()) for child in @childs\n \n ... | app/scripts/models/factor.model.coffee | sposmen/Analytic-Hierarchy-Process | 0 | angular.module('app.models', [])
.factory 'Factor', () ->
class Factor
constructor: (parent)->
@parent = parent || false
@meta = ''
@name = 'New Factor'
@pairwise = [[]]
@pairwise[0][0] = 1
@pairwisefractions = [[]]
@columnSum = []
@columnSumUnit = []
@rowSum = []
@pair_wise_options = [[[]]]
@pair_wise_options[0][0][0] = 1
@pair_wise_options_fractions = [[[]]]
@columnSumOptions = [[]]
@columnSumUnitOptions = [[]]
@rowSumOptions = [[]]
@optionsScore = []
@childs = []
getTree:->
childs = []
childs.push(child.getTree()) for child in @childs
name: @name
meta: @meta
pairwise:@pairwise
pair_wise_options:@pair_wise_options
childs:childs
setTree:(data)->
@name = data.name
@meta= data.meta
@pairwise= data.pairwise
@pairwisefractions= angular.copy data.pairwise
@pair_wise_options=data.pair_wise_options
@pair_wise_options_fractions = angular.copy data.pair_wise_options
for child in data.childs
i = @addChild() - 1
@childs[i].setTree(child)
getChilds:->
@childs
getChild:(index)->
@childs[index] || false
addChild:()->
@childs.push(new Factor(@))
removeChild:(index)->
@pairwise.splice(index, 1)
@pairwisefractions.splice(index, 1)
@pair_wise_options.splice(index, 1)
@pair_wise_options_fractions.splice(index, 1)
@childs.splice(index, 1)
hasChilds:->
!!@childs.length
hasParent:->
!!@parent
getOptionsScore:(index)->
result = 0
notChilded = true
if @hasChilds()
for child in @childs
if child.hasChilds()
notChilded = false
result += child.getOptionsScore(index)
if notChilded
result = if @optionsScore[index] != undefined and not isNaN(@optionsScore[index].score) then @optionsScore[index].score else 0
if !!@parent and
@parent.rowSum[@parent.childs.indexOf(@)] != undefined and
not isNaN(@parent.rowSum[@parent.childs.indexOf(@)]) and
@parent.rowSum[@parent.childs.indexOf(@)] != 0
result = result*@parent.rowSum[@parent.childs.indexOf(@)]
result
| 221199 | angular.module('app.models', [])
.factory 'Factor', () ->
class Factor
constructor: (parent)->
@parent = parent || false
@meta = ''
@name = '<NAME>'
@pairwise = [[]]
@pairwise[0][0] = 1
@pairwisefractions = [[]]
@columnSum = []
@columnSumUnit = []
@rowSum = []
@pair_wise_options = [[[]]]
@pair_wise_options[0][0][0] = 1
@pair_wise_options_fractions = [[[]]]
@columnSumOptions = [[]]
@columnSumUnitOptions = [[]]
@rowSumOptions = [[]]
@optionsScore = []
@childs = []
getTree:->
childs = []
childs.push(child.getTree()) for child in @childs
name: @name
meta: @meta
pairwise:@pairwise
pair_wise_options:@pair_wise_options
childs:childs
setTree:(data)->
@name = data.name
@meta= data.meta
@pairwise= data.pairwise
@pairwisefractions= angular.copy data.pairwise
@pair_wise_options=data.pair_wise_options
@pair_wise_options_fractions = angular.copy data.pair_wise_options
for child in data.childs
i = @addChild() - 1
@childs[i].setTree(child)
getChilds:->
@childs
getChild:(index)->
@childs[index] || false
addChild:()->
@childs.push(new Factor(@))
removeChild:(index)->
@pairwise.splice(index, 1)
@pairwisefractions.splice(index, 1)
@pair_wise_options.splice(index, 1)
@pair_wise_options_fractions.splice(index, 1)
@childs.splice(index, 1)
hasChilds:->
!!@childs.length
hasParent:->
!!@parent
getOptionsScore:(index)->
result = 0
notChilded = true
if @hasChilds()
for child in @childs
if child.hasChilds()
notChilded = false
result += child.getOptionsScore(index)
if notChilded
result = if @optionsScore[index] != undefined and not isNaN(@optionsScore[index].score) then @optionsScore[index].score else 0
if !!@parent and
@parent.rowSum[@parent.childs.indexOf(@)] != undefined and
not isNaN(@parent.rowSum[@parent.childs.indexOf(@)]) and
@parent.rowSum[@parent.childs.indexOf(@)] != 0
result = result*@parent.rowSum[@parent.childs.indexOf(@)]
result
| true | angular.module('app.models', [])
.factory 'Factor', () ->
class Factor
constructor: (parent)->
@parent = parent || false
@meta = ''
@name = 'PI:NAME:<NAME>END_PI'
@pairwise = [[]]
@pairwise[0][0] = 1
@pairwisefractions = [[]]
@columnSum = []
@columnSumUnit = []
@rowSum = []
@pair_wise_options = [[[]]]
@pair_wise_options[0][0][0] = 1
@pair_wise_options_fractions = [[[]]]
@columnSumOptions = [[]]
@columnSumUnitOptions = [[]]
@rowSumOptions = [[]]
@optionsScore = []
@childs = []
getTree:->
childs = []
childs.push(child.getTree()) for child in @childs
name: @name
meta: @meta
pairwise:@pairwise
pair_wise_options:@pair_wise_options
childs:childs
setTree:(data)->
@name = data.name
@meta= data.meta
@pairwise= data.pairwise
@pairwisefractions= angular.copy data.pairwise
@pair_wise_options=data.pair_wise_options
@pair_wise_options_fractions = angular.copy data.pair_wise_options
for child in data.childs
i = @addChild() - 1
@childs[i].setTree(child)
getChilds:->
@childs
getChild:(index)->
@childs[index] || false
addChild:()->
@childs.push(new Factor(@))
removeChild:(index)->
@pairwise.splice(index, 1)
@pairwisefractions.splice(index, 1)
@pair_wise_options.splice(index, 1)
@pair_wise_options_fractions.splice(index, 1)
@childs.splice(index, 1)
hasChilds:->
!!@childs.length
hasParent:->
!!@parent
getOptionsScore:(index)->
result = 0
notChilded = true
if @hasChilds()
for child in @childs
if child.hasChilds()
notChilded = false
result += child.getOptionsScore(index)
if notChilded
result = if @optionsScore[index] != undefined and not isNaN(@optionsScore[index].score) then @optionsScore[index].score else 0
if !!@parent and
@parent.rowSum[@parent.childs.indexOf(@)] != undefined and
not isNaN(@parent.rowSum[@parent.childs.indexOf(@)]) and
@parent.rowSum[@parent.childs.indexOf(@)] != 0
result = result*@parent.rowSum[@parent.childs.indexOf(@)]
result
|
[
{
"context": " @reconnect('Reloading flow list')\n if (@myId(message.user) && message.event in ['backend.user.",
"end": 4368,
"score": 0.9860572814941406,
"start": 4362,
"tag": "USERNAME",
"value": "(@myId"
},
{
"context": ".debug 'Received message', message\n\n auth... | src/flowdock.coffee | 65Mustang289/hubot-flowdock | 53 | flowdock = require 'flowdock'
try
{Adapter,TextMessage} = require 'hubot'
catch
prequire = require 'parent-require'
{Adapter, TextMessage} = prequire 'hubot'
class Flowdock extends Adapter
constructor: ->
super
@ignores = []
# Make sure hubot does not see commands posted using only a flow token (eg. no authenticated user)
if process.env.HUBOT_FLOWDOCK_ALLOW_ANONYMOUS_COMMANDS != '1'
@ignores.push('0')
# Make it possible to ignore users
if process.env.HUBOT_FLOWDOCK_IGNORED_USERS?
@ignores.push(id) for id in process.env.HUBOT_FLOWDOCK_IGNORED_USERS.split(',')
@robot.logger.info "Ignoring all messages from user ids #{@ignores.join(', ')}" if @ignores.length > 0
send: (envelope, strings...) ->
return if strings.length == 0
self = @
str = strings.shift()
if str.length > 8096
str = "** End of Message Truncated **\n" + str
str = str[0...8096]
metadata = envelope.metadata || envelope.message?.metadata || {}
flow = metadata.room || envelope.room
thread_id = metadata.thread_id
message_id = metadata.message_id
user = envelope.user
forceNewMessage = envelope.newMessage == true
sendRest = ->
self.send(envelope, strings...)
if user?
if flow?
if thread_id and not forceNewMessage
# respond to a thread
@bot.threadMessage flow, thread_id, str, [], sendRest
else if message_id and not forceNewMessage
# respond via comment if we have a parent message
@bot.comment flow, message_id, str, [], sendRest
else
@bot.message flow, str, [], sendRest
else if user.id
# If replying as private message, strip the preceding user tag
str = str.replace(new RegExp("^@#{user.name}: ", "i"), '')
@bot.privateMessage user.id, str, [], sendRest
else if flow
# support wider range of flow identifiers than just id for robot.messageRoom
flow = @findFlow(flow)
@bot.message flow, str, [], sendRest
reply: (envelope, strings...) ->
user = @userFromParams(envelope)
@send envelope, strings.map((str) -> "@#{user.name}: #{str}")...
userFromParams: (params) ->
# hubot < 2.4.2: params = user
# hubot >= 2.4.2: params = {user: user, ...}
if params.user then params.user else params
findFlow: (identifier) ->
return flow.id for flow in @flows when identifier == flow.id
return flow.id for flow in @flows when identifier == @flowPath(flow)
return flow.id for flow in @flows when identifier.toLowerCase() == flow.name.toLowerCase()
identifier
flowPath: (flow) ->
flow.organization.parameterized_name + '/' + flow.parameterized_name
flowFromParams: (params) ->
return flow for flow in @flows when params.room == flow.id
joinedFlows: ->
@flows.filter (f) -> f.joined && f.open
userFromId: (id, data) ->
# hubot < 2.5.0: @userForId
# hubot >=2.5.0: @robot.brain.userForId
@robot.brain?.userForId?(id, data) || @userForId(id, data)
changeUserNick: (id, newNick) ->
if id of @robot.brain.data.users
@robot.brain.data.users[id].name = newNick
needsReconnect: (message) ->
(@myId(message.content) && message.event == 'backend.user.block') ||
(@myId(message.user) && message.event in ['backend.user.join', 'flow-add', 'flow-remove'])
myId: (id) ->
String(id) == String(@bot.userId)
reconnect: (reason) ->
@robot.logger.info("Reconnecting: #{reason}")
@stream.end()
@stream.removeAllListeners()
@fetchFlowsAndConnect()
connect: ->
ids = (flow.id for flow in @joinedFlows())
@robot.logger.info('Flowdock: connecting')
@stream = @bot.stream(ids, active: 'idle', user: 1)
@stream.on 'connected', =>
@robot.logger.info('Flowdock: connected and streaming')
@robot.logger.info('Flowdock: listening to flows:', (flow.name for flow in @joinedFlows()).join(', '))
@stream.on 'clientError', (error) => @robot.logger.error('Flowdock: client error:', error)
@stream.on 'disconnected', => @robot.logger.info('Flowdock: disconnected')
@stream.on 'reconnecting', => @robot.logger.info('Flowdock: reconnecting')
@stream.on 'message', (message) =>
return if !message.content? || !message.event?
if @needsReconnect(message)
@reconnect('Reloading flow list')
if (@myId(message.user) && message.event in ['backend.user.join', 'flow-add'])
@robot.emit "flow-add", { id: message.content.id, name: message.content.name }
if message.event == 'user-edit' || message.event == 'backend.user.join'
@changeUserNick(message.content.user.id, message.content.user.nick)
return unless message.event in ['message', 'comment']
return if !message.id?
return if @myId(message.user)
return if String(message.user) in @ignores
@robot.logger.debug 'Received message', message
author = @userFromId(message.user)
thread_id = message.thread_id
messageId = if thread_id?
undefined
else if message.event == 'message'
message.id
else
# For comments the parent message id is embedded in an 'influx' tag
if message.tags
influxTag = do ->
for tag in message.tags
return tag if /^influx:/.test tag
(influxTag.split ':', 2)[1] if influxTag
msg = if message.event == 'comment' then message.content.text else message.content
# Reformat leading @mention name to be like "name: message" which is
# what hubot expects. Add bot name with private messages if not already given.
botPrefix = "#{@robot.name}: "
regex = new RegExp("^@#{@bot.userName}(,|\\b)", "i")
hubotMsg = msg.replace(regex, botPrefix)
if !message.flow && !hubotMsg.match(new RegExp("^#{@robot.name}", "i"))
hubotMsg = botPrefix + hubotMsg
author.room = message.flow # Many scripts expect author.room to be available
author.flow = message.flow # For backward compatibility
metadata =
room: message.flow
metadata['thread_id'] = thread_id if thread_id?
metadata['message_id'] = messageId if messageId?
messageObj = new TextMessage(author, hubotMsg, message.id, metadata)
# Support metadata even if hubot does not currently do that
messageObj.metadata = metadata if !messageObj.metadata?
@receive messageObj
run: ->
@apiToken = process.env.HUBOT_FLOWDOCK_API_TOKEN
@loginEmail = process.env.HUBOT_FLOWDOCK_LOGIN_EMAIL
@loginPassword = process.env.HUBOT_FLOWDOCK_LOGIN_PASSWORD
if @apiToken?
@bot = new flowdock.Session(@apiToken)
else if @loginEmail? && @loginPassword?
@bot = new flowdock.Session(@loginEmail, @loginPassword)
else
throw new Error("ERROR: No credentials given: Supply either environment variable HUBOT_FLOWDOCK_API_TOKEN or both HUBOT_FLOWDOCK_LOGIN_EMAIL and HUBOT_FLOWDOCK_LOGIN_PASSWORD")
@bot.on "error", (e) =>
@robot.logger.error("Unexpected error in Flowdock client: #{e}")
@emit e
@fetchFlowsAndConnect()
@emit 'connected'
fetchFlowsAndConnect: ->
@bot.flows (err, flows, res) =>
return if err?
@bot.userId = res.headers['flowdock-user']
@flows = flows
@robot.logger.info("Found #{@flows.length} flows, and I have joined #{@joinedFlows().length} of them.")
for flow in flows
for user in flow.users
if user.in_flow
data =
id: user.id
name: user.nick
savedUser = @userFromId user.id, data
if savedUser.name != data.name
@changeUserNick(savedUser.id, data.name)
if String(user.id) == String(@bot.userId)
@bot.userName = user.nick
@robot.logger.info("Connecting to Flowdock as user #{@bot.userName} (id #{@bot.userId}).")
if @flows.length == 0 || !@flows.some((flow) -> flow.open)
@robot.logger.warning(
"Your bot is not part of any flows and probably won't do much. " +
"Join some flows manually or add the bot to some flows and reconnect.")
if @bot.userName? && @robot.name.toLowerCase() != @bot.userName.toLowerCase()
@robot.logger.warning(
"You have configured this bot to use the wrong name (#{@robot.name}). Flowdock API says " +
"my name is #{@bot.userName}. You will run into problems if you don't fix this!")
@connect()
exports.use = (robot) ->
new Flowdock robot
| 108228 | flowdock = require 'flowdock'
try
{Adapter,TextMessage} = require 'hubot'
catch
prequire = require 'parent-require'
{Adapter, TextMessage} = prequire 'hubot'
class Flowdock extends Adapter
constructor: ->
super
@ignores = []
# Make sure hubot does not see commands posted using only a flow token (eg. no authenticated user)
if process.env.HUBOT_FLOWDOCK_ALLOW_ANONYMOUS_COMMANDS != '1'
@ignores.push('0')
# Make it possible to ignore users
if process.env.HUBOT_FLOWDOCK_IGNORED_USERS?
@ignores.push(id) for id in process.env.HUBOT_FLOWDOCK_IGNORED_USERS.split(',')
@robot.logger.info "Ignoring all messages from user ids #{@ignores.join(', ')}" if @ignores.length > 0
send: (envelope, strings...) ->
return if strings.length == 0
self = @
str = strings.shift()
if str.length > 8096
str = "** End of Message Truncated **\n" + str
str = str[0...8096]
metadata = envelope.metadata || envelope.message?.metadata || {}
flow = metadata.room || envelope.room
thread_id = metadata.thread_id
message_id = metadata.message_id
user = envelope.user
forceNewMessage = envelope.newMessage == true
sendRest = ->
self.send(envelope, strings...)
if user?
if flow?
if thread_id and not forceNewMessage
# respond to a thread
@bot.threadMessage flow, thread_id, str, [], sendRest
else if message_id and not forceNewMessage
# respond via comment if we have a parent message
@bot.comment flow, message_id, str, [], sendRest
else
@bot.message flow, str, [], sendRest
else if user.id
# If replying as private message, strip the preceding user tag
str = str.replace(new RegExp("^@#{user.name}: ", "i"), '')
@bot.privateMessage user.id, str, [], sendRest
else if flow
# support wider range of flow identifiers than just id for robot.messageRoom
flow = @findFlow(flow)
@bot.message flow, str, [], sendRest
reply: (envelope, strings...) ->
user = @userFromParams(envelope)
@send envelope, strings.map((str) -> "@#{user.name}: #{str}")...
userFromParams: (params) ->
# hubot < 2.4.2: params = user
# hubot >= 2.4.2: params = {user: user, ...}
if params.user then params.user else params
findFlow: (identifier) ->
return flow.id for flow in @flows when identifier == flow.id
return flow.id for flow in @flows when identifier == @flowPath(flow)
return flow.id for flow in @flows when identifier.toLowerCase() == flow.name.toLowerCase()
identifier
flowPath: (flow) ->
flow.organization.parameterized_name + '/' + flow.parameterized_name
flowFromParams: (params) ->
return flow for flow in @flows when params.room == flow.id
joinedFlows: ->
@flows.filter (f) -> f.joined && f.open
userFromId: (id, data) ->
# hubot < 2.5.0: @userForId
# hubot >=2.5.0: @robot.brain.userForId
@robot.brain?.userForId?(id, data) || @userForId(id, data)
changeUserNick: (id, newNick) ->
if id of @robot.brain.data.users
@robot.brain.data.users[id].name = newNick
needsReconnect: (message) ->
(@myId(message.content) && message.event == 'backend.user.block') ||
(@myId(message.user) && message.event in ['backend.user.join', 'flow-add', 'flow-remove'])
myId: (id) ->
String(id) == String(@bot.userId)
reconnect: (reason) ->
@robot.logger.info("Reconnecting: #{reason}")
@stream.end()
@stream.removeAllListeners()
@fetchFlowsAndConnect()
connect: ->
ids = (flow.id for flow in @joinedFlows())
@robot.logger.info('Flowdock: connecting')
@stream = @bot.stream(ids, active: 'idle', user: 1)
@stream.on 'connected', =>
@robot.logger.info('Flowdock: connected and streaming')
@robot.logger.info('Flowdock: listening to flows:', (flow.name for flow in @joinedFlows()).join(', '))
@stream.on 'clientError', (error) => @robot.logger.error('Flowdock: client error:', error)
@stream.on 'disconnected', => @robot.logger.info('Flowdock: disconnected')
@stream.on 'reconnecting', => @robot.logger.info('Flowdock: reconnecting')
@stream.on 'message', (message) =>
return if !message.content? || !message.event?
if @needsReconnect(message)
@reconnect('Reloading flow list')
if (@myId(message.user) && message.event in ['backend.user.join', 'flow-add'])
@robot.emit "flow-add", { id: message.content.id, name: message.content.name }
if message.event == 'user-edit' || message.event == 'backend.user.join'
@changeUserNick(message.content.user.id, message.content.user.nick)
return unless message.event in ['message', 'comment']
return if !message.id?
return if @myId(message.user)
return if String(message.user) in @ignores
@robot.logger.debug 'Received message', message
author = @userFromId(message.user)
thread_id = message.thread_id
messageId = if thread_id?
undefined
else if message.event == 'message'
message.id
else
# For comments the parent message id is embedded in an 'influx' tag
if message.tags
influxTag = do ->
for tag in message.tags
return tag if /^influx:/.test tag
(influxTag.split ':', 2)[1] if influxTag
msg = if message.event == 'comment' then message.content.text else message.content
# Reformat leading @mention name to be like "name: message" which is
# what hubot expects. Add bot name with private messages if not already given.
botPrefix = "#{@robot.name}: "
regex = new RegExp("^@#{@bot.userName}(,|\\b)", "i")
hubotMsg = msg.replace(regex, botPrefix)
if !message.flow && !hubotMsg.match(new RegExp("^#{@robot.name}", "i"))
hubotMsg = botPrefix + hubotMsg
author.room = message.flow # Many scripts expect author.room to be available
author.flow = message.flow # For backward compatibility
metadata =
room: message.flow
metadata['thread_id'] = thread_id if thread_id?
metadata['message_id'] = messageId if messageId?
messageObj = new TextMessage(author, hubotMsg, message.id, metadata)
# Support metadata even if hubot does not currently do that
messageObj.metadata = metadata if !messageObj.metadata?
@receive messageObj
run: ->
@apiToken = process.env.HUBOT_FLOWDOCK_API_TOKEN
@loginEmail = process.env.HUBOT_FLOWDOCK_LOGIN_EMAIL
@loginPassword = <PASSWORD>.<PASSWORD>.<PASSWORD>
if @apiToken?
@bot = new flowdock.Session(@apiToken)
else if @loginEmail? && @loginPassword?
@bot = new flowdock.Session(@loginEmail, @loginPassword)
else
throw new Error("ERROR: No credentials given: Supply either environment variable HUBOT_FLOWDOCK_API_TOKEN or both HUBOT_FLOWDOCK_LOGIN_EMAIL and HUBOT_FLOWDOCK_LOGIN_PASSWORD")
@bot.on "error", (e) =>
@robot.logger.error("Unexpected error in Flowdock client: #{e}")
@emit e
@fetchFlowsAndConnect()
@emit 'connected'
fetchFlowsAndConnect: ->
@bot.flows (err, flows, res) =>
return if err?
@bot.userId = res.headers['flowdock-user']
@flows = flows
@robot.logger.info("Found #{@flows.length} flows, and I have joined #{@joinedFlows().length} of them.")
for flow in flows
for user in flow.users
if user.in_flow
data =
id: user.id
name: user.nick
savedUser = @userFromId user.id, data
if savedUser.name != data.name
@changeUserNick(savedUser.id, data.name)
if String(user.id) == String(@bot.userId)
@bot.userName = user.nick
@robot.logger.info("Connecting to Flowdock as user #{@bot.userName} (id #{@bot.userId}).")
if @flows.length == 0 || !@flows.some((flow) -> flow.open)
@robot.logger.warning(
"Your bot is not part of any flows and probably won't do much. " +
"Join some flows manually or add the bot to some flows and reconnect.")
if @bot.userName? && @robot.name.toLowerCase() != @bot.userName.toLowerCase()
@robot.logger.warning(
"You have configured this bot to use the wrong name (#{@robot.name}). Flowdock API says " +
"my name is #{@bot.userName}. You will run into problems if you don't fix this!")
@connect()
exports.use = (robot) ->
new Flowdock robot
| true | flowdock = require 'flowdock'
try
{Adapter,TextMessage} = require 'hubot'
catch
prequire = require 'parent-require'
{Adapter, TextMessage} = prequire 'hubot'
class Flowdock extends Adapter
constructor: ->
super
@ignores = []
# Make sure hubot does not see commands posted using only a flow token (eg. no authenticated user)
if process.env.HUBOT_FLOWDOCK_ALLOW_ANONYMOUS_COMMANDS != '1'
@ignores.push('0')
# Make it possible to ignore users
if process.env.HUBOT_FLOWDOCK_IGNORED_USERS?
@ignores.push(id) for id in process.env.HUBOT_FLOWDOCK_IGNORED_USERS.split(',')
@robot.logger.info "Ignoring all messages from user ids #{@ignores.join(', ')}" if @ignores.length > 0
send: (envelope, strings...) ->
return if strings.length == 0
self = @
str = strings.shift()
if str.length > 8096
str = "** End of Message Truncated **\n" + str
str = str[0...8096]
metadata = envelope.metadata || envelope.message?.metadata || {}
flow = metadata.room || envelope.room
thread_id = metadata.thread_id
message_id = metadata.message_id
user = envelope.user
forceNewMessage = envelope.newMessage == true
sendRest = ->
self.send(envelope, strings...)
if user?
if flow?
if thread_id and not forceNewMessage
# respond to a thread
@bot.threadMessage flow, thread_id, str, [], sendRest
else if message_id and not forceNewMessage
# respond via comment if we have a parent message
@bot.comment flow, message_id, str, [], sendRest
else
@bot.message flow, str, [], sendRest
else if user.id
# If replying as private message, strip the preceding user tag
str = str.replace(new RegExp("^@#{user.name}: ", "i"), '')
@bot.privateMessage user.id, str, [], sendRest
else if flow
# support wider range of flow identifiers than just id for robot.messageRoom
flow = @findFlow(flow)
@bot.message flow, str, [], sendRest
reply: (envelope, strings...) ->
user = @userFromParams(envelope)
@send envelope, strings.map((str) -> "@#{user.name}: #{str}")...
userFromParams: (params) ->
# hubot < 2.4.2: params = user
# hubot >= 2.4.2: params = {user: user, ...}
if params.user then params.user else params
findFlow: (identifier) ->
return flow.id for flow in @flows when identifier == flow.id
return flow.id for flow in @flows when identifier == @flowPath(flow)
return flow.id for flow in @flows when identifier.toLowerCase() == flow.name.toLowerCase()
identifier
flowPath: (flow) ->
flow.organization.parameterized_name + '/' + flow.parameterized_name
flowFromParams: (params) ->
return flow for flow in @flows when params.room == flow.id
joinedFlows: ->
@flows.filter (f) -> f.joined && f.open
userFromId: (id, data) ->
# hubot < 2.5.0: @userForId
# hubot >=2.5.0: @robot.brain.userForId
@robot.brain?.userForId?(id, data) || @userForId(id, data)
changeUserNick: (id, newNick) ->
if id of @robot.brain.data.users
@robot.brain.data.users[id].name = newNick
needsReconnect: (message) ->
(@myId(message.content) && message.event == 'backend.user.block') ||
(@myId(message.user) && message.event in ['backend.user.join', 'flow-add', 'flow-remove'])
myId: (id) ->
String(id) == String(@bot.userId)
reconnect: (reason) ->
@robot.logger.info("Reconnecting: #{reason}")
@stream.end()
@stream.removeAllListeners()
@fetchFlowsAndConnect()
connect: ->
ids = (flow.id for flow in @joinedFlows())
@robot.logger.info('Flowdock: connecting')
@stream = @bot.stream(ids, active: 'idle', user: 1)
@stream.on 'connected', =>
@robot.logger.info('Flowdock: connected and streaming')
@robot.logger.info('Flowdock: listening to flows:', (flow.name for flow in @joinedFlows()).join(', '))
@stream.on 'clientError', (error) => @robot.logger.error('Flowdock: client error:', error)
@stream.on 'disconnected', => @robot.logger.info('Flowdock: disconnected')
@stream.on 'reconnecting', => @robot.logger.info('Flowdock: reconnecting')
@stream.on 'message', (message) =>
return if !message.content? || !message.event?
if @needsReconnect(message)
@reconnect('Reloading flow list')
if (@myId(message.user) && message.event in ['backend.user.join', 'flow-add'])
@robot.emit "flow-add", { id: message.content.id, name: message.content.name }
if message.event == 'user-edit' || message.event == 'backend.user.join'
@changeUserNick(message.content.user.id, message.content.user.nick)
return unless message.event in ['message', 'comment']
return if !message.id?
return if @myId(message.user)
return if String(message.user) in @ignores
@robot.logger.debug 'Received message', message
author = @userFromId(message.user)
thread_id = message.thread_id
messageId = if thread_id?
undefined
else if message.event == 'message'
message.id
else
# For comments the parent message id is embedded in an 'influx' tag
if message.tags
influxTag = do ->
for tag in message.tags
return tag if /^influx:/.test tag
(influxTag.split ':', 2)[1] if influxTag
msg = if message.event == 'comment' then message.content.text else message.content
# Reformat leading @mention name to be like "name: message" which is
# what hubot expects. Add bot name with private messages if not already given.
botPrefix = "#{@robot.name}: "
regex = new RegExp("^@#{@bot.userName}(,|\\b)", "i")
hubotMsg = msg.replace(regex, botPrefix)
if !message.flow && !hubotMsg.match(new RegExp("^#{@robot.name}", "i"))
hubotMsg = botPrefix + hubotMsg
author.room = message.flow # Many scripts expect author.room to be available
author.flow = message.flow # For backward compatibility
metadata =
room: message.flow
metadata['thread_id'] = thread_id if thread_id?
metadata['message_id'] = messageId if messageId?
messageObj = new TextMessage(author, hubotMsg, message.id, metadata)
# Support metadata even if hubot does not currently do that
messageObj.metadata = metadata if !messageObj.metadata?
@receive messageObj
run: ->
@apiToken = process.env.HUBOT_FLOWDOCK_API_TOKEN
@loginEmail = process.env.HUBOT_FLOWDOCK_LOGIN_EMAIL
@loginPassword = PI:PASSWORD:<PASSWORD>END_PI.PI:PASSWORD:<PASSWORD>END_PI.PI:PASSWORD:<PASSWORD>END_PI
if @apiToken?
@bot = new flowdock.Session(@apiToken)
else if @loginEmail? && @loginPassword?
@bot = new flowdock.Session(@loginEmail, @loginPassword)
else
throw new Error("ERROR: No credentials given: Supply either environment variable HUBOT_FLOWDOCK_API_TOKEN or both HUBOT_FLOWDOCK_LOGIN_EMAIL and HUBOT_FLOWDOCK_LOGIN_PASSWORD")
@bot.on "error", (e) =>
@robot.logger.error("Unexpected error in Flowdock client: #{e}")
@emit e
@fetchFlowsAndConnect()
@emit 'connected'
fetchFlowsAndConnect: ->
@bot.flows (err, flows, res) =>
return if err?
@bot.userId = res.headers['flowdock-user']
@flows = flows
@robot.logger.info("Found #{@flows.length} flows, and I have joined #{@joinedFlows().length} of them.")
for flow in flows
for user in flow.users
if user.in_flow
data =
id: user.id
name: user.nick
savedUser = @userFromId user.id, data
if savedUser.name != data.name
@changeUserNick(savedUser.id, data.name)
if String(user.id) == String(@bot.userId)
@bot.userName = user.nick
@robot.logger.info("Connecting to Flowdock as user #{@bot.userName} (id #{@bot.userId}).")
if @flows.length == 0 || !@flows.some((flow) -> flow.open)
@robot.logger.warning(
"Your bot is not part of any flows and probably won't do much. " +
"Join some flows manually or add the bot to some flows and reconnect.")
if @bot.userName? && @robot.name.toLowerCase() != @bot.userName.toLowerCase()
@robot.logger.warning(
"You have configured this bot to use the wrong name (#{@robot.name}). Flowdock API says " +
"my name is #{@bot.userName}. You will run into problems if you don't fix this!")
@connect()
exports.use = (robot) ->
new Flowdock robot
|
[
{
"context": "ata: (github, username) ->\n\n github.username ?= username\n\n github.homepage ?= \"https://github.com/#{use",
"end": 4116,
"score": 0.7911797165870667,
"start": 4108,
"tag": "USERNAME",
"value": "username"
},
{
"context": "enticate\n\n type: \"basic\"\n\n ... | lib/main.coffee | joaoafrmartins/spaghetty-github | 0 | { EOL } = require 'os'
{ basename, dirname, resolve } = require 'path'
{ readdirSyncRecursive: findAll } = require 'wrench'
async = require 'async'
merge = require 'lodash.merge'
GitHubApi = require 'github'
ACliCommand = require 'a-cli-command'
class Github extends ACliCommand
command:
name: "github"
options:
"version":
type: "string"
default: "3.0.0"
description: [
"the GitHub api version"
]
"timeout":
type: "number"
default: 5000
description: [
"the GitHub api request timeout"
]
"note":
type: "string"
default: "github cli access"
description: [
"the note required by the GitHub",
"authorizations api"
]
"scopes":
type: "array"
default: ["user", "repo", "public_repo", "delete_repo", "gist"]
description: [
"the requested scopes for the GitHub",
"authorizations api"
]
"login":
type: "boolean"
triggers: ["version", "timeout", "note", "scopes"]
description: [
"github account login trigger",
"in order to create an remove",
"github repo a oauth token is required",
"an application token can be created",
"at "
]
"repo":
type: "string"
default: basename(process.cwd())
description: [
"the github repository name"
]
"create":
type: "boolean"
triggers: [
"repo",
"version",
"timeout",
"note",
"scopes",
"init",
"license"
]
description: [
"creates a new github repository"
]
"delete":
type: "boolean"
triggers: ["repo", "version", "timeout", "note", "scopes"]
description: [
"deletes a github repository"
]
"init":
type: "boolean"
triggers: ["templates"]
default: true
description: [
"calls the init command on create",
"with the package-init-github template"
]
"templates":
type: "array"
default: [ "package-init-github" ]
description: [
"specifies with templates should be used",
"by package-init when using init"
]
"force":
type: "boolean"
description: [
"when using init assumes default values",
"without prompting for aditional information"
]
"commit":
type: "boolean"
triggers: ["origin", "message"]
description: [
"when true makes the create trigger",
"performs add, commit and push on repository",
"contents to origin master"
]
"origin":
type: "string"
default: "master"
description: [
"the remote origin name"
]
"message":
type: "string"
default: (new Date).toISOString()
description: [
"the commit message"
]
"recursive":
type: "boolean"
description: [
"when commit trigger is enabled",
"finds all npm packages owned by username",
"and tries to perform commit on all of them"
]
"gh-pages":
type: "string"
triggers: [ "repo", "gh-pages-template" ]
default: resolve "#{process.env.PWD}", "gh-pages"
description: [
"gh-pages branch location"
]
"gh-pages-template":
type: "string"
default: resolve "#{__dirname}", "gh-pages"
description: [
"gh-pages branch template"
]
"license":
type: "string"
default: "MIT"
description: [
"license applied to the software"
]
"author":
type: "string"
description: [
"the author of the software"
]
data: (github, username) ->
github.username ?= username
github.homepage ?= "https://github.com/#{username}"
basic: (github, callback) ->
@cli.prompt [{
type: "input",
name: "username",
message: "github username?",
default: github.username
},{
type: "password",
name: "password"
message: "github password?"
validate: (val) -> return val.length > 0
}], (res) =>
{ username, password } = res
github.basic = res
@data github, username
callback github
twoFactor: (github, callback) ->
@cli.prompt [{
type: "password",
name: "code",
message: "github security code?"
}], (res) =>
{ code } = res
github.payload.headers = 'X-GitHub-OTP': code
@authorize github, callback
authorize: (github, callback) ->
@api.authorization.create github.payload, (err, response) =>
if err
{ message, errors } = JSON.parse err.message
if err.message.match "two-factor"
return @twoFactor github, callback
else return callback err, response
if token = response.token
delete github.basic
delete github.payload
github.authorization = response
@cli.cache.put "github", github
@cli.cache.save()
@cli.console.info "oauth token: #{token}"
return @authenticate github, callback
callback err, response
create: (github, callback) ->
@basic github, (github) =>
@api.authenticate
type: "basic"
username: github.basic.username
password: github.basic.password
github.payload =
scopes: github.scopes
note: github.note
note_url: github.homepage
@authorize github, callback
authenticate: (github, callback) ->
if not github?.authorization?.token
return @create github, callback
@api.authenticate
type: "oauth"
token: github.authorization.token
callback null, github
error: (err, github) ->
return [
"something when wrong!",
"#{JSON.stringify(github, null, 2)}",
"#{err}"
].join EOL
init: (command, repo, next) ->
@allRepos = {}
@isAuthenticated = false
tmp = resolve pwd(), "tmp-#{repo.name}"
@exec "git clone #{repo.ssh_url} #{tmp}", (err, res) =>
if err then return next err, null
mv resolve("#{tmp}",".git"), pwd()
rm "-Rf", tmp
args = [ "init" ]
{ force, templates, commit } = command.args
if force
args.push "--force"
if templates
args.push "--templates"
args.push JSON.stringify templates
@cli.run args, (err, res) =>
if err then return next err, null
next err, res
forceAuthentication: (command, next) ->
delete command.args.login
{ github } = @cli.cache.get()
github = merge github or {}, command.args
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
@allRepos = {}
repos = Object.keys github.repos
repos.map (r) => @allRepos[r] = true
@isAuthenticated = true
next null, github
commit: (message, origin, next) ->
@exec "git add .", (err, res) =>
if err then return next null, err
@exec "git commit -am '#{message}'", (err, res) =>
if err then return next null, err
@exec "git push origin #{origin}", next
getAllRepos: (pwd=process.env.PWD, blacklist={}) ->
repos = []
blacklist = {}
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
{ repos: whitelist } = @cli.cache.get "github"
findAll(pwd).map (file) =>
if file.match(/package.json$/) isnt null
try
file = "#{pwd}/#{file}"
pkg = require(file)
if not whitelist[pkg.name] then return null
if blacklist[pkg.name] then return null
url = pkg?.repository?.url or ''
if url.match(username) isnt null
blacklist[pkg.name] = true
repos.push dirname(file)
catch err
repos
license: (command, next) ->
@shell
_license = (l, a, done) =>
lfile = "#{process.cwd()}/LICENSE.txt"
pkg.license = l or "MIT"
if l is "MIT"
y = new Date().getFullYear()
"""
The MIT License (MIT)
Copyright (c) #{y} #{a}
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
""".to lfile
@cli.console.info "created license file: #{lfile}"
done()
file = "#{process.cwd()}/package"
pkg = require file
{ license, author } = command.args
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
options =
url: "https://api.github.com/users/#{username}"
headers:
'User-Agent': 'spaghetty'
request = require 'request'
request options, (err, response, body) ->
{ name: author } = JSON.parse(body)
pkg.author = author
_license license, author, () ->
JSON.stringify(pkg, null, 2).to "#{file}.json"
next null, "license"
ghPages: (command, next) ->
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
repo = command.args.repo
repoUrl = "git@github.com:#{username}/#{repo}.git"
template = command.args['gh-pages-template']
dir = command.args['gh-pages']
cmds = [
"mkdir #{dir}",
"git clone #{repoUrl} #{dir}/#{repo}",
"cd #{dir}/#{repo}",
"mv #{dir}/#{repo}/.git #{dir}",
"cp -R #{template}/* #{dir}/",
"rm -Rf #{dir}/#{repo}",
"git -C #{dir} checkout --orphan gh-pages",
"git -C #{dir} add .",
"git -C #{dir} commit -am \"gh-pages\"",
"git -C #{dir} push origin gh-pages"
]
_series = () =>
cmd = cmds.shift()
if not cmd
return next null, "gh-pages created successfully!"
@exec cmd, (err, res) =>
if err then return next null, err
_series()
_series()
delete: (command, next) ->
@shell
{ repo } = command.args
{ github } = @cli.cache.get()
github = merge github or {}, command.args
delete github.delete
delete github.repo
delete github.login
@cli.prompt [{
type: "confirm"
name: "confirmed"
message: [
"are you shure you want"
"to delete #{github.username}/#{repo}?"
].join EOL
}], (response) =>
if response.confirmed
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
user = github.username
payload =
"user": "#{user}"
"repo": "#{repo}"
@api.repos.delete payload, (err, response) =>
message = "#{user}/#{repo}"
if data = github?.repos?[repo]
delete github.repos[repo]
@cli.cache.put 'github', github
@cli.cache.save()
data ?= message
rm "-Rf", resolve(pwd(), ".git")
pkgfile = resolve(pwd(), 'package.json')
if test "-e", pkgfile
pkg = JSON.parse cat pkgfile
delete pkg.bugs
delete pkg.repository
delete pkg.homepage
JSON.stringify(pkg, null, 2).to pkgfile
@cli.console.error message
next null, data
create: (command, next) ->
@shell
{ repo } = command.args
{ github } = @cli.cache.get()
github = merge github or {}, command.args
delete github.create
delete github.repo
delete github.login
delete github.init
delete github.templates
delete github.force
delete github.commit
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
payload = { "name": repo }
@api.repos.create payload, (err, response) =>
if err then return next @error(err, response), null
if response.id
github.repos ?= {}
github.repos[response.name] = response
@cli.cache.put "github", github
@cli.cache.save()
user = github.username
@init command, response, (err, res) =>
@cli.console.info "#{user}/#{repo}"
next null, response
"license?": (command, next) ->
if command.args.recursive
@shell
repos = @getAllRepos()
_series = () =>
repo = repos.shift()
if not repo then return next null, "license"
cd repo
@license command, (err, res) =>
if err then return next null, err
_series()
_series()
else
@license command, (err, res) =>
if err then return next null, err
next null, "license"
"gh-pages?": (command, next) ->
@ghPages command, next
"commit?": (command, next) ->
{ origin, recursive, message } = command.args
if not recursive then return @commit message, origin, next
@shell
repos = @getAllRepos()
commitFn = (res, done) =>
dir = repos.shift()
if dir
cd dir
@commit message, origin, (e, r) =>
if e then res.push e else res.push "#{dir}#{EOL}#{EOL}#{r}"
commitFn res, done
else
done null, res
commitFn [], (err, res) =>
next null, res.join EOL
"login?": (command, next) ->
if not @isAuthenticated then @forceAuthentication(
command, next
)
"create?": (command, next) ->
@create command, (err, res) ->
next err, res
"delete?": (command, next) ->
@delete command, (err, res) ->
next err, res
module.exports = Github
| 188954 | { EOL } = require 'os'
{ basename, dirname, resolve } = require 'path'
{ readdirSyncRecursive: findAll } = require 'wrench'
async = require 'async'
merge = require 'lodash.merge'
GitHubApi = require 'github'
ACliCommand = require 'a-cli-command'
class Github extends ACliCommand
command:
name: "github"
options:
"version":
type: "string"
default: "3.0.0"
description: [
"the GitHub api version"
]
"timeout":
type: "number"
default: 5000
description: [
"the GitHub api request timeout"
]
"note":
type: "string"
default: "github cli access"
description: [
"the note required by the GitHub",
"authorizations api"
]
"scopes":
type: "array"
default: ["user", "repo", "public_repo", "delete_repo", "gist"]
description: [
"the requested scopes for the GitHub",
"authorizations api"
]
"login":
type: "boolean"
triggers: ["version", "timeout", "note", "scopes"]
description: [
"github account login trigger",
"in order to create an remove",
"github repo a oauth token is required",
"an application token can be created",
"at "
]
"repo":
type: "string"
default: basename(process.cwd())
description: [
"the github repository name"
]
"create":
type: "boolean"
triggers: [
"repo",
"version",
"timeout",
"note",
"scopes",
"init",
"license"
]
description: [
"creates a new github repository"
]
"delete":
type: "boolean"
triggers: ["repo", "version", "timeout", "note", "scopes"]
description: [
"deletes a github repository"
]
"init":
type: "boolean"
triggers: ["templates"]
default: true
description: [
"calls the init command on create",
"with the package-init-github template"
]
"templates":
type: "array"
default: [ "package-init-github" ]
description: [
"specifies with templates should be used",
"by package-init when using init"
]
"force":
type: "boolean"
description: [
"when using init assumes default values",
"without prompting for aditional information"
]
"commit":
type: "boolean"
triggers: ["origin", "message"]
description: [
"when true makes the create trigger",
"performs add, commit and push on repository",
"contents to origin master"
]
"origin":
type: "string"
default: "master"
description: [
"the remote origin name"
]
"message":
type: "string"
default: (new Date).toISOString()
description: [
"the commit message"
]
"recursive":
type: "boolean"
description: [
"when commit trigger is enabled",
"finds all npm packages owned by username",
"and tries to perform commit on all of them"
]
"gh-pages":
type: "string"
triggers: [ "repo", "gh-pages-template" ]
default: resolve "#{process.env.PWD}", "gh-pages"
description: [
"gh-pages branch location"
]
"gh-pages-template":
type: "string"
default: resolve "#{__dirname}", "gh-pages"
description: [
"gh-pages branch template"
]
"license":
type: "string"
default: "MIT"
description: [
"license applied to the software"
]
"author":
type: "string"
description: [
"the author of the software"
]
data: (github, username) ->
github.username ?= username
github.homepage ?= "https://github.com/#{username}"
basic: (github, callback) ->
@cli.prompt [{
type: "input",
name: "username",
message: "github username?",
default: github.username
},{
type: "password",
name: "password"
message: "github password?"
validate: (val) -> return val.length > 0
}], (res) =>
{ username, password } = res
github.basic = res
@data github, username
callback github
twoFactor: (github, callback) ->
@cli.prompt [{
type: "password",
name: "code",
message: "github security code?"
}], (res) =>
{ code } = res
github.payload.headers = 'X-GitHub-OTP': code
@authorize github, callback
authorize: (github, callback) ->
@api.authorization.create github.payload, (err, response) =>
if err
{ message, errors } = JSON.parse err.message
if err.message.match "two-factor"
return @twoFactor github, callback
else return callback err, response
if token = response.token
delete github.basic
delete github.payload
github.authorization = response
@cli.cache.put "github", github
@cli.cache.save()
@cli.console.info "oauth token: #{token}"
return @authenticate github, callback
callback err, response
create: (github, callback) ->
@basic github, (github) =>
@api.authenticate
type: "basic"
username: github.basic.username
password: <PASSWORD>
github.payload =
scopes: github.scopes
note: github.note
note_url: github.homepage
@authorize github, callback
authenticate: (github, callback) ->
if not github?.authorization?.token
return @create github, callback
@api.authenticate
type: "oauth"
token: github.authorization.token
callback null, github
error: (err, github) ->
return [
"something when wrong!",
"#{JSON.stringify(github, null, 2)}",
"#{err}"
].join EOL
init: (command, repo, next) ->
@allRepos = {}
@isAuthenticated = false
tmp = resolve pwd(), "tmp-#{repo.name}"
@exec "git clone #{repo.ssh_url} #{tmp}", (err, res) =>
if err then return next err, null
mv resolve("#{tmp}",".git"), pwd()
rm "-Rf", tmp
args = [ "init" ]
{ force, templates, commit } = command.args
if force
args.push "--force"
if templates
args.push "--templates"
args.push JSON.stringify templates
@cli.run args, (err, res) =>
if err then return next err, null
next err, res
forceAuthentication: (command, next) ->
delete command.args.login
{ github } = @cli.cache.get()
github = merge github or {}, command.args
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
@allRepos = {}
repos = Object.keys github.repos
repos.map (r) => @allRepos[r] = true
@isAuthenticated = true
next null, github
commit: (message, origin, next) ->
@exec "git add .", (err, res) =>
if err then return next null, err
@exec "git commit -am '#{message}'", (err, res) =>
if err then return next null, err
@exec "git push origin #{origin}", next
getAllRepos: (pwd=process.env.PWD, blacklist={}) ->
repos = []
blacklist = {}
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
{ repos: whitelist } = @cli.cache.get "github"
findAll(pwd).map (file) =>
if file.match(/package.json$/) isnt null
try
file = "#{pwd}/#{file}"
pkg = require(file)
if not whitelist[pkg.name] then return null
if blacklist[pkg.name] then return null
url = pkg?.repository?.url or ''
if url.match(username) isnt null
blacklist[pkg.name] = true
repos.push dirname(file)
catch err
repos
license: (command, next) ->
@shell
_license = (l, a, done) =>
lfile = "#{process.cwd()}/LICENSE.txt"
pkg.license = l or "MIT"
if l is "MIT"
y = new Date().getFullYear()
"""
The MIT License (MIT)
Copyright (c) #{y} #{a}
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
""".to lfile
@cli.console.info "created license file: #{lfile}"
done()
file = "#{process.cwd()}/package"
pkg = require file
{ license, author } = command.args
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
options =
url: "https://api.github.com/users/#{username}"
headers:
'User-Agent': 'spaghetty'
request = require 'request'
request options, (err, response, body) ->
{ name: author } = JSON.parse(body)
pkg.author = author
_license license, author, () ->
JSON.stringify(pkg, null, 2).to "#{file}.json"
next null, "license"
ghPages: (command, next) ->
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
repo = command.args.repo
repoUrl = "<EMAIL>:#{username}/#{repo}.git"
template = command.args['gh-pages-template']
dir = command.args['gh-pages']
cmds = [
"mkdir #{dir}",
"git clone #{repoUrl} #{dir}/#{repo}",
"cd #{dir}/#{repo}",
"mv #{dir}/#{repo}/.git #{dir}",
"cp -R #{template}/* #{dir}/",
"rm -Rf #{dir}/#{repo}",
"git -C #{dir} checkout --orphan gh-pages",
"git -C #{dir} add .",
"git -C #{dir} commit -am \"gh-pages\"",
"git -C #{dir} push origin gh-pages"
]
_series = () =>
cmd = cmds.shift()
if not cmd
return next null, "gh-pages created successfully!"
@exec cmd, (err, res) =>
if err then return next null, err
_series()
_series()
delete: (command, next) ->
@shell
{ repo } = command.args
{ github } = @cli.cache.get()
github = merge github or {}, command.args
delete github.delete
delete github.repo
delete github.login
@cli.prompt [{
type: "confirm"
name: "confirmed"
message: [
"are you shure you want"
"to delete #{github.username}/#{repo}?"
].join EOL
}], (response) =>
if response.confirmed
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
user = github.username
payload =
"user": "#{user}"
"repo": "#{repo}"
@api.repos.delete payload, (err, response) =>
message = "#{user}/#{repo}"
if data = github?.repos?[repo]
delete github.repos[repo]
@cli.cache.put 'github', github
@cli.cache.save()
data ?= message
rm "-Rf", resolve(pwd(), ".git")
pkgfile = resolve(pwd(), 'package.json')
if test "-e", pkgfile
pkg = JSON.parse cat pkgfile
delete pkg.bugs
delete pkg.repository
delete pkg.homepage
JSON.stringify(pkg, null, 2).to pkgfile
@cli.console.error message
next null, data
create: (command, next) ->
@shell
{ repo } = command.args
{ github } = @cli.cache.get()
github = merge github or {}, command.args
delete github.create
delete github.repo
delete github.login
delete github.init
delete github.templates
delete github.force
delete github.commit
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
payload = { "name": repo }
@api.repos.create payload, (err, response) =>
if err then return next @error(err, response), null
if response.id
github.repos ?= {}
github.repos[response.name] = response
@cli.cache.put "github", github
@cli.cache.save()
user = github.username
@init command, response, (err, res) =>
@cli.console.info "#{user}/#{repo}"
next null, response
"license?": (command, next) ->
if command.args.recursive
@shell
repos = @getAllRepos()
_series = () =>
repo = repos.shift()
if not repo then return next null, "license"
cd repo
@license command, (err, res) =>
if err then return next null, err
_series()
_series()
else
@license command, (err, res) =>
if err then return next null, err
next null, "license"
"gh-pages?": (command, next) ->
@ghPages command, next
"commit?": (command, next) ->
{ origin, recursive, message } = command.args
if not recursive then return @commit message, origin, next
@shell
repos = @getAllRepos()
commitFn = (res, done) =>
dir = repos.shift()
if dir
cd dir
@commit message, origin, (e, r) =>
if e then res.push e else res.push "#{dir}#{EOL}#{EOL}#{r}"
commitFn res, done
else
done null, res
commitFn [], (err, res) =>
next null, res.join EOL
"login?": (command, next) ->
if not @isAuthenticated then @forceAuthentication(
command, next
)
"create?": (command, next) ->
@create command, (err, res) ->
next err, res
"delete?": (command, next) ->
@delete command, (err, res) ->
next err, res
module.exports = Github
| true | { EOL } = require 'os'
{ basename, dirname, resolve } = require 'path'
{ readdirSyncRecursive: findAll } = require 'wrench'
async = require 'async'
merge = require 'lodash.merge'
GitHubApi = require 'github'
ACliCommand = require 'a-cli-command'
class Github extends ACliCommand
command:
name: "github"
options:
"version":
type: "string"
default: "3.0.0"
description: [
"the GitHub api version"
]
"timeout":
type: "number"
default: 5000
description: [
"the GitHub api request timeout"
]
"note":
type: "string"
default: "github cli access"
description: [
"the note required by the GitHub",
"authorizations api"
]
"scopes":
type: "array"
default: ["user", "repo", "public_repo", "delete_repo", "gist"]
description: [
"the requested scopes for the GitHub",
"authorizations api"
]
"login":
type: "boolean"
triggers: ["version", "timeout", "note", "scopes"]
description: [
"github account login trigger",
"in order to create an remove",
"github repo a oauth token is required",
"an application token can be created",
"at "
]
"repo":
type: "string"
default: basename(process.cwd())
description: [
"the github repository name"
]
"create":
type: "boolean"
triggers: [
"repo",
"version",
"timeout",
"note",
"scopes",
"init",
"license"
]
description: [
"creates a new github repository"
]
"delete":
type: "boolean"
triggers: ["repo", "version", "timeout", "note", "scopes"]
description: [
"deletes a github repository"
]
"init":
type: "boolean"
triggers: ["templates"]
default: true
description: [
"calls the init command on create",
"with the package-init-github template"
]
"templates":
type: "array"
default: [ "package-init-github" ]
description: [
"specifies with templates should be used",
"by package-init when using init"
]
"force":
type: "boolean"
description: [
"when using init assumes default values",
"without prompting for aditional information"
]
"commit":
type: "boolean"
triggers: ["origin", "message"]
description: [
"when true makes the create trigger",
"performs add, commit and push on repository",
"contents to origin master"
]
"origin":
type: "string"
default: "master"
description: [
"the remote origin name"
]
"message":
type: "string"
default: (new Date).toISOString()
description: [
"the commit message"
]
"recursive":
type: "boolean"
description: [
"when commit trigger is enabled",
"finds all npm packages owned by username",
"and tries to perform commit on all of them"
]
"gh-pages":
type: "string"
triggers: [ "repo", "gh-pages-template" ]
default: resolve "#{process.env.PWD}", "gh-pages"
description: [
"gh-pages branch location"
]
"gh-pages-template":
type: "string"
default: resolve "#{__dirname}", "gh-pages"
description: [
"gh-pages branch template"
]
"license":
type: "string"
default: "MIT"
description: [
"license applied to the software"
]
"author":
type: "string"
description: [
"the author of the software"
]
data: (github, username) ->
github.username ?= username
github.homepage ?= "https://github.com/#{username}"
basic: (github, callback) ->
@cli.prompt [{
type: "input",
name: "username",
message: "github username?",
default: github.username
},{
type: "password",
name: "password"
message: "github password?"
validate: (val) -> return val.length > 0
}], (res) =>
{ username, password } = res
github.basic = res
@data github, username
callback github
twoFactor: (github, callback) ->
@cli.prompt [{
type: "password",
name: "code",
message: "github security code?"
}], (res) =>
{ code } = res
github.payload.headers = 'X-GitHub-OTP': code
@authorize github, callback
authorize: (github, callback) ->
@api.authorization.create github.payload, (err, response) =>
if err
{ message, errors } = JSON.parse err.message
if err.message.match "two-factor"
return @twoFactor github, callback
else return callback err, response
if token = response.token
delete github.basic
delete github.payload
github.authorization = response
@cli.cache.put "github", github
@cli.cache.save()
@cli.console.info "oauth token: #{token}"
return @authenticate github, callback
callback err, response
create: (github, callback) ->
@basic github, (github) =>
@api.authenticate
type: "basic"
username: github.basic.username
password: PI:PASSWORD:<PASSWORD>END_PI
github.payload =
scopes: github.scopes
note: github.note
note_url: github.homepage
@authorize github, callback
authenticate: (github, callback) ->
if not github?.authorization?.token
return @create github, callback
@api.authenticate
type: "oauth"
token: github.authorization.token
callback null, github
error: (err, github) ->
return [
"something when wrong!",
"#{JSON.stringify(github, null, 2)}",
"#{err}"
].join EOL
init: (command, repo, next) ->
@allRepos = {}
@isAuthenticated = false
tmp = resolve pwd(), "tmp-#{repo.name}"
@exec "git clone #{repo.ssh_url} #{tmp}", (err, res) =>
if err then return next err, null
mv resolve("#{tmp}",".git"), pwd()
rm "-Rf", tmp
args = [ "init" ]
{ force, templates, commit } = command.args
if force
args.push "--force"
if templates
args.push "--templates"
args.push JSON.stringify templates
@cli.run args, (err, res) =>
if err then return next err, null
next err, res
forceAuthentication: (command, next) ->
delete command.args.login
{ github } = @cli.cache.get()
github = merge github or {}, command.args
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
@allRepos = {}
repos = Object.keys github.repos
repos.map (r) => @allRepos[r] = true
@isAuthenticated = true
next null, github
commit: (message, origin, next) ->
@exec "git add .", (err, res) =>
if err then return next null, err
@exec "git commit -am '#{message}'", (err, res) =>
if err then return next null, err
@exec "git push origin #{origin}", next
getAllRepos: (pwd=process.env.PWD, blacklist={}) ->
repos = []
blacklist = {}
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
{ repos: whitelist } = @cli.cache.get "github"
findAll(pwd).map (file) =>
if file.match(/package.json$/) isnt null
try
file = "#{pwd}/#{file}"
pkg = require(file)
if not whitelist[pkg.name] then return null
if blacklist[pkg.name] then return null
url = pkg?.repository?.url or ''
if url.match(username) isnt null
blacklist[pkg.name] = true
repos.push dirname(file)
catch err
repos
license: (command, next) ->
@shell
_license = (l, a, done) =>
lfile = "#{process.cwd()}/LICENSE.txt"
pkg.license = l or "MIT"
if l is "MIT"
y = new Date().getFullYear()
"""
The MIT License (MIT)
Copyright (c) #{y} #{a}
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
""".to lfile
@cli.console.info "created license file: #{lfile}"
done()
file = "#{process.cwd()}/package"
pkg = require file
{ license, author } = command.args
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
options =
url: "https://api.github.com/users/#{username}"
headers:
'User-Agent': 'spaghetty'
request = require 'request'
request options, (err, response, body) ->
{ name: author } = JSON.parse(body)
pkg.author = author
_license license, author, () ->
JSON.stringify(pkg, null, 2).to "#{file}.json"
next null, "license"
ghPages: (command, next) ->
{ github } = @cli.cache.get()
{ username } = @cli.cache.get "github"
repo = command.args.repo
repoUrl = "PI:EMAIL:<EMAIL>END_PI:#{username}/#{repo}.git"
template = command.args['gh-pages-template']
dir = command.args['gh-pages']
cmds = [
"mkdir #{dir}",
"git clone #{repoUrl} #{dir}/#{repo}",
"cd #{dir}/#{repo}",
"mv #{dir}/#{repo}/.git #{dir}",
"cp -R #{template}/* #{dir}/",
"rm -Rf #{dir}/#{repo}",
"git -C #{dir} checkout --orphan gh-pages",
"git -C #{dir} add .",
"git -C #{dir} commit -am \"gh-pages\"",
"git -C #{dir} push origin gh-pages"
]
_series = () =>
cmd = cmds.shift()
if not cmd
return next null, "gh-pages created successfully!"
@exec cmd, (err, res) =>
if err then return next null, err
_series()
_series()
delete: (command, next) ->
@shell
{ repo } = command.args
{ github } = @cli.cache.get()
github = merge github or {}, command.args
delete github.delete
delete github.repo
delete github.login
@cli.prompt [{
type: "confirm"
name: "confirmed"
message: [
"are you shure you want"
"to delete #{github.username}/#{repo}?"
].join EOL
}], (response) =>
if response.confirmed
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
user = github.username
payload =
"user": "#{user}"
"repo": "#{repo}"
@api.repos.delete payload, (err, response) =>
message = "#{user}/#{repo}"
if data = github?.repos?[repo]
delete github.repos[repo]
@cli.cache.put 'github', github
@cli.cache.save()
data ?= message
rm "-Rf", resolve(pwd(), ".git")
pkgfile = resolve(pwd(), 'package.json')
if test "-e", pkgfile
pkg = JSON.parse cat pkgfile
delete pkg.bugs
delete pkg.repository
delete pkg.homepage
JSON.stringify(pkg, null, 2).to pkgfile
@cli.console.error message
next null, data
create: (command, next) ->
@shell
{ repo } = command.args
{ github } = @cli.cache.get()
github = merge github or {}, command.args
delete github.create
delete github.repo
delete github.login
delete github.init
delete github.templates
delete github.force
delete github.commit
@api ?= new GitHubApi github
@authenticate github, (err, github) =>
if err then return next @error(err, github), null
payload = { "name": repo }
@api.repos.create payload, (err, response) =>
if err then return next @error(err, response), null
if response.id
github.repos ?= {}
github.repos[response.name] = response
@cli.cache.put "github", github
@cli.cache.save()
user = github.username
@init command, response, (err, res) =>
@cli.console.info "#{user}/#{repo}"
next null, response
"license?": (command, next) ->
if command.args.recursive
@shell
repos = @getAllRepos()
_series = () =>
repo = repos.shift()
if not repo then return next null, "license"
cd repo
@license command, (err, res) =>
if err then return next null, err
_series()
_series()
else
@license command, (err, res) =>
if err then return next null, err
next null, "license"
"gh-pages?": (command, next) ->
@ghPages command, next
"commit?": (command, next) ->
{ origin, recursive, message } = command.args
if not recursive then return @commit message, origin, next
@shell
repos = @getAllRepos()
commitFn = (res, done) =>
dir = repos.shift()
if dir
cd dir
@commit message, origin, (e, r) =>
if e then res.push e else res.push "#{dir}#{EOL}#{EOL}#{r}"
commitFn res, done
else
done null, res
commitFn [], (err, res) =>
next null, res.join EOL
"login?": (command, next) ->
if not @isAuthenticated then @forceAuthentication(
command, next
)
"create?": (command, next) ->
@create command, (err, res) ->
next err, res
"delete?": (command, next) ->
@delete command, (err, res) ->
next err, res
module.exports = Github
|
[
{
"context": "# Copyright © 2014 All rights reserved\n# Author: nhim175@gmail.com\n\nClassLogger =\n\n debug: () ->\n console.debug.",
"end": 66,
"score": 0.9999077320098877,
"start": 49,
"tag": "EMAIL",
"value": "nhim175@gmail.com"
}
] | src/mixins/logger.coffee | nhim175/scorpionsmasher | 0 | # Copyright © 2014 All rights reserved
# Author: nhim175@gmail.com
ClassLogger =
debug: () ->
console.debug.apply console, @_build_msg(arguments)
info: () ->
console.info.apply console, @_build_msg(arguments)
warn: () ->
console.warn.apply console, @_build_msg(arguments)
error: () ->
console.error.apply console, @_build_msg(arguments)
# private
_build_msg: (txt) ->
txt = Array.prototype.slice.call(txt, 0)
if @logPrefix? then txt.splice 0, 0, "[#{@logPrefix}]"
txt
module.exports = ClassLogger | 196996 | # Copyright © 2014 All rights reserved
# Author: <EMAIL>
ClassLogger =
debug: () ->
console.debug.apply console, @_build_msg(arguments)
info: () ->
console.info.apply console, @_build_msg(arguments)
warn: () ->
console.warn.apply console, @_build_msg(arguments)
error: () ->
console.error.apply console, @_build_msg(arguments)
# private
_build_msg: (txt) ->
txt = Array.prototype.slice.call(txt, 0)
if @logPrefix? then txt.splice 0, 0, "[#{@logPrefix}]"
txt
module.exports = ClassLogger | true | # Copyright © 2014 All rights reserved
# Author: PI:EMAIL:<EMAIL>END_PI
ClassLogger =
debug: () ->
console.debug.apply console, @_build_msg(arguments)
info: () ->
console.info.apply console, @_build_msg(arguments)
warn: () ->
console.warn.apply console, @_build_msg(arguments)
error: () ->
console.error.apply console, @_build_msg(arguments)
# private
_build_msg: (txt) ->
txt = Array.prototype.slice.call(txt, 0)
if @logPrefix? then txt.splice 0, 0, "[#{@logPrefix}]"
txt
module.exports = ClassLogger |
[
{
"context": "#\n# Title Caps\n# \n# Ported to JavaScript By John Resig - http://ejohn.org/ - 21 May 2008\n# Original by J",
"end": 54,
"score": 0.9998745918273926,
"start": 44,
"tag": "NAME",
"value": "John Resig"
},
{
"context": "ig - http://ejohn.org/ - 21 May 2008\n# Original by ... | title_caps.js.coffee | Simplero/titleCase | 0 | #
# Title Caps
#
# Ported to JavaScript By John Resig - http://ejohn.org/ - 21 May 2008
# Original by John Gruber - http://daringfireball.net/ - 10 May 2008
# Ported to CoffeeScript and improved by Calvin Correli - 2 April 2015
# License: http://www.opensource.org/licenses/mit-license.php
#
# List comes from here: http://lanecc.libguides.com/content.php?pid=38483&sid=295540
small = "(" + ["a","aboard","about","above","absent","across","after","against","along","alongside","amid","amidst","among","amongst","an","and","around","as","as","aslant","astride","at","athwart","atop","barring","before","behind","below","beneath","beside","besides","between","beyond","but","by","despite","down","during","except","failing","following","for","for","from","in","inside","into","like","mid","minus","near","next","nor","notwithstanding","of","off","on","onto","opposite","or","out","outside","over","past","per","plus","regarding","round","save","since","so","than","the","through","throughout","till","times","to","toward","towards","under","underneath","unlike","until","up","upon","via","v","v.", "vs", "vs.","when","with","within","without","worth","yet"].join("|") + ")"
punct = "([!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]*)"
lower = (word) ->
word.toLowerCase()
capitalize = (word) ->
word.substr(0,1).toUpperCase() + word.substr(1)
upper = (word) ->
word.toUpperCase()
window.titleCaps = (title) ->
parts = []
split = /[:.;?!] |(?: |^)["Ò]/g
index = 0
while true
m = split.exec(title)
parts.push(
title.substring(index, if m then m.index else title.length)
.replace(/\b([A-Za-z][a-z.'Õ]*)\b/g, (all) ->
if /[A-Za-z]\.[A-Za-z]/.test(all) then all else capitalize(all)
)
.replace(RegExp("\\b" + small + "\\b", "ig"), lower)
.replace(RegExp("^" + punct + small + "\\b", "ig"), (all, punct, word) ->
punct + capitalize(word)
)
.replace(RegExp("\\b" + small + punct + "$", "ig"), capitalize))
index = split.lastIndex
if m
parts.push(m[0])
else
break
parts.join("")
.replace(/ V(s?)\. /ig, " v$1. ")
.replace(/(['Õ])S\b/ig, "$1s")
.replace(/\b(AT&T|Q&A)\b/ig, upper)
String.prototype.toTitleCaps = ->
titleCaps(this)
| 69080 | #
# Title Caps
#
# Ported to JavaScript By <NAME> - http://ejohn.org/ - 21 May 2008
# Original by <NAME> - http://daringfireball.net/ - 10 May 2008
# Ported to CoffeeScript and improved by <NAME> - 2 April 2015
# License: http://www.opensource.org/licenses/mit-license.php
#
# List comes from here: http://lanecc.libguides.com/content.php?pid=38483&sid=295540
small = "(" + ["a","aboard","about","above","absent","across","after","against","along","alongside","amid","amidst","among","amongst","an","and","around","as","as","aslant","astride","at","athwart","atop","barring","before","behind","below","beneath","beside","besides","between","beyond","but","by","despite","down","during","except","failing","following","for","for","from","in","inside","into","like","mid","minus","near","next","nor","notwithstanding","of","off","on","onto","opposite","or","out","outside","over","past","per","plus","regarding","round","save","since","so","than","the","through","throughout","till","times","to","toward","towards","under","underneath","unlike","until","up","upon","via","v","v.", "vs", "vs.","when","with","within","without","worth","yet"].join("|") + ")"
punct = "([!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]*)"
lower = (word) ->
word.toLowerCase()
capitalize = (word) ->
word.substr(0,1).toUpperCase() + word.substr(1)
upper = (word) ->
word.toUpperCase()
window.titleCaps = (title) ->
parts = []
split = /[:.;?!] |(?: |^)["Ò]/g
index = 0
while true
m = split.exec(title)
parts.push(
title.substring(index, if m then m.index else title.length)
.replace(/\b([A-Za-z][a-z.'Õ]*)\b/g, (all) ->
if /[A-Za-z]\.[A-Za-z]/.test(all) then all else capitalize(all)
)
.replace(RegExp("\\b" + small + "\\b", "ig"), lower)
.replace(RegExp("^" + punct + small + "\\b", "ig"), (all, punct, word) ->
punct + capitalize(word)
)
.replace(RegExp("\\b" + small + punct + "$", "ig"), capitalize))
index = split.lastIndex
if m
parts.push(m[0])
else
break
parts.join("")
.replace(/ V(s?)\. /ig, " v$1. ")
.replace(/(['Õ])S\b/ig, "$1s")
.replace(/\b(AT&T|Q&A)\b/ig, upper)
String.prototype.toTitleCaps = ->
titleCaps(this)
| true | #
# Title Caps
#
# Ported to JavaScript By PI:NAME:<NAME>END_PI - http://ejohn.org/ - 21 May 2008
# Original by PI:NAME:<NAME>END_PI - http://daringfireball.net/ - 10 May 2008
# Ported to CoffeeScript and improved by PI:NAME:<NAME>END_PI - 2 April 2015
# License: http://www.opensource.org/licenses/mit-license.php
#
# List comes from here: http://lanecc.libguides.com/content.php?pid=38483&sid=295540
small = "(" + ["a","aboard","about","above","absent","across","after","against","along","alongside","amid","amidst","among","amongst","an","and","around","as","as","aslant","astride","at","athwart","atop","barring","before","behind","below","beneath","beside","besides","between","beyond","but","by","despite","down","during","except","failing","following","for","for","from","in","inside","into","like","mid","minus","near","next","nor","notwithstanding","of","off","on","onto","opposite","or","out","outside","over","past","per","plus","regarding","round","save","since","so","than","the","through","throughout","till","times","to","toward","towards","under","underneath","unlike","until","up","upon","via","v","v.", "vs", "vs.","when","with","within","without","worth","yet"].join("|") + ")"
punct = "([!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]*)"
lower = (word) ->
word.toLowerCase()
capitalize = (word) ->
word.substr(0,1).toUpperCase() + word.substr(1)
upper = (word) ->
word.toUpperCase()
window.titleCaps = (title) ->
parts = []
split = /[:.;?!] |(?: |^)["Ò]/g
index = 0
while true
m = split.exec(title)
parts.push(
title.substring(index, if m then m.index else title.length)
.replace(/\b([A-Za-z][a-z.'Õ]*)\b/g, (all) ->
if /[A-Za-z]\.[A-Za-z]/.test(all) then all else capitalize(all)
)
.replace(RegExp("\\b" + small + "\\b", "ig"), lower)
.replace(RegExp("^" + punct + small + "\\b", "ig"), (all, punct, word) ->
punct + capitalize(word)
)
.replace(RegExp("\\b" + small + punct + "$", "ig"), capitalize))
index = split.lastIndex
if m
parts.push(m[0])
else
break
parts.join("")
.replace(/ V(s?)\. /ig, " v$1. ")
.replace(/(['Õ])S\b/ig, "$1s")
.replace(/\b(AT&T|Q&A)\b/ig, upper)
String.prototype.toTitleCaps = ->
titleCaps(this)
|
[
{
"context": "# * https://github.com/jasoncypret/expressionUI\n# *\n# * Copyright (c) 2013 Jason Cyp",
"end": 34,
"score": 0.9995006322860718,
"start": 23,
"tag": "USERNAME",
"value": "jasoncypret"
},
{
"context": "asoncypret/expressionUI\n# *\n# * Copyright (c) 2013 Jason Cypret (ht... | vendor/assets/javascripts/expressionui.notify.js.coffee | jasoncypret/expressionUI | 0 | # * https://github.com/jasoncypret/expressionUI
# *
# * Copyright (c) 2013 Jason Cypret (http://jasoncypret.com/)
# * Licensed under the MIT License
#
((factory) ->
"use strict"
if typeof define is "function" and define.amd
# Register as an anonymous AMD module:
define [
"jquery"
], factory
else
# Not using AMD
factory window.jQuery
return
) (jQuery) ->
((jQuery) ->
methods =
defaults:
selector: ""
message: null
sticky: false
width: "full"
style: "success"
append_type: "prepend"
allow_multiple: false
invert: false
icon: true
icon_character: null
loading: null
position: "top"
duration: 5000
content: "visible"
text_align: "bottom"
beforeOpen: jQuery.noop
afterOpen: jQuery.noop
beforeClose: jQuery.noop
afterClose: jQuery.noop
init: (options) ->
options = jQuery.extend({}, methods.defaults, options)
unless options.allow_multiple
selector = ""
switch options.append_type
when "prepend", "append"
selector = jQuery(this).find(".notify")
when "after"
selector = jQuery(this).next(".notify")
when "before"
selector = jQuery(this).prev(".notify")
jQuery(selector).notify "close", options, false if selector.length > 0
jQuery(this).notify "_add_markup", options
else
jQuery(this).notify "_add_markup", options
_add_markup: (options) ->
markup = undefined
if options.icon_character == null
options.icon_character = "X" if options.style is "error"
options.icon_character = "R" if options.style is "success"
options.icon_character = "!" if options.style is "tip"
options.icon_character = '"' if options.style is "info"
options.message = "" unless options.message
markup = "<div style='display:none;' class='notify" + options.selector + " text_align_" + options.text_align + " style_" + options.style + " position_" + options.position + " width_" + options.width + " invert_" + options.invert + " loading_" + options.loading + " icon_" + options.icon + "'>"
markup += "<div class='notify_wrap'>"
markup += "<div class='icon'>" + options.icon_character + "</div>" if not options.loading? and options.icon
switch options.loading
when "dots"
markup += "<div class='indicator'>"
markup += "<div class='dot1'></div>"
markup += "<div class='dot2'></div>"
markup += "<div class='dot3'></div>"
markup += "<div class='dot4'></div>"
markup += "<div class='dot5'></div>"
markup += "<div class='dot6'></div>"
markup += "<div class='dot7'></div>"
markup += "<div class='dot8'></div>"
markup += "</div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "spinner"
markup += "<div class='indicator'></div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "bars"
markup += "<div class='indicator'><div class='progress'></div></div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "circles"
markup += "<div class='indicator'>"
markup += "<div class='circle1'></div>"
markup += "<div class='circle2'></div>"
markup += "<div class='circle3'></div>"
markup += "</div>"
markup += "<div class='msg'>" + options.message + "</div>"
else
(if (options.style is "loading") then markup += "<div class='progress'></div><div class='msg'>" + options.message + "</div>" else markup += "<div class='msg'>" + options.message + "</div>")
markup += "<a class='notify_close' href='javascript:;'><span>x</span></a>"
markup += "</div>"
markup += "</div>"
jQuery(this).notify "_open", options, markup
_open: (options, markup) ->
selector = undefined
options.beforeOpen()
jQuery(this).addClass("notify_container content_" + options.content)[options.append_type] markup
switch options.append_type
when "prepend", "append"
selector = jQuery(this).find(".notify[style*='display']")
when "after"
selector = jQuery(this).next(".notify[style*='display']")
when "before"
selector = jQuery(this).prev(".notify[style*='display']")
jQuery(selector).slideDown "fast"
options.afterOpen()
jQuery(this).notify "_setupEvents", options, selector
close: (options, animate) ->
selector = undefined
animate = (if typeof animate isnt "undefined" then animate else true)
options = jQuery.extend({}, methods.defaults, options)
options.beforeClose()
switch options.append_type
when "prepend", "append", "before"
selector = jQuery(this).parents(".notify_container")
when "after"
selector = jQuery(this).prev(".notify_container")
if animate
jQuery(this).slideUp "fast", ->
jQuery(selector).removeClass "content_hidden content_overlay content_visible notify_container"
jQuery(this).remove()
else
jQuery(selector).removeClass "content_hidden content_overlay content_visible notify_container"
return jQuery(this).remove()
options.afterClose()
close_all: (options) ->
options = jQuery.extend({}, methods.defaults, options)
options.beforeClose()
jQuery(".notify").slideUp "fast", ->
jQuery(this).closest("[class^=content_]").removeClass "content_hidden content_overlay content_visible"
jQuery(this).remove()
options.afterClose()
loading: (options) ->
options = jQuery.extend({}, methods.defaults, options)
_setupEvents: (options, selector) ->
unless options.sticky
t = setTimeout(->
jQuery(selector).notify "close", options
clearTimeout t
, options.duration)
jQuery(selector).click ->
clearTimeout t
jQuery(selector).notify "close", options
jQuery.fn.notify = (method) ->
if methods[method]
methods[method].apply this, Array::slice.call(arguments, 1)
else if typeof method is "object" or not method
methods.init.apply this, arguments
else
jQuery.error "Method " + method + " does not exist on jQuery.Notify"
) jQuery
| 142166 | # * https://github.com/jasoncypret/expressionUI
# *
# * Copyright (c) 2013 <NAME> (http://jasoncypret.com/)
# * Licensed under the MIT License
#
((factory) ->
"use strict"
if typeof define is "function" and define.amd
# Register as an anonymous AMD module:
define [
"jquery"
], factory
else
# Not using AMD
factory window.jQuery
return
) (jQuery) ->
((jQuery) ->
methods =
defaults:
selector: ""
message: null
sticky: false
width: "full"
style: "success"
append_type: "prepend"
allow_multiple: false
invert: false
icon: true
icon_character: null
loading: null
position: "top"
duration: 5000
content: "visible"
text_align: "bottom"
beforeOpen: jQuery.noop
afterOpen: jQuery.noop
beforeClose: jQuery.noop
afterClose: jQuery.noop
init: (options) ->
options = jQuery.extend({}, methods.defaults, options)
unless options.allow_multiple
selector = ""
switch options.append_type
when "prepend", "append"
selector = jQuery(this).find(".notify")
when "after"
selector = jQuery(this).next(".notify")
when "before"
selector = jQuery(this).prev(".notify")
jQuery(selector).notify "close", options, false if selector.length > 0
jQuery(this).notify "_add_markup", options
else
jQuery(this).notify "_add_markup", options
_add_markup: (options) ->
markup = undefined
if options.icon_character == null
options.icon_character = "X" if options.style is "error"
options.icon_character = "R" if options.style is "success"
options.icon_character = "!" if options.style is "tip"
options.icon_character = '"' if options.style is "info"
options.message = "" unless options.message
markup = "<div style='display:none;' class='notify" + options.selector + " text_align_" + options.text_align + " style_" + options.style + " position_" + options.position + " width_" + options.width + " invert_" + options.invert + " loading_" + options.loading + " icon_" + options.icon + "'>"
markup += "<div class='notify_wrap'>"
markup += "<div class='icon'>" + options.icon_character + "</div>" if not options.loading? and options.icon
switch options.loading
when "dots"
markup += "<div class='indicator'>"
markup += "<div class='dot1'></div>"
markup += "<div class='dot2'></div>"
markup += "<div class='dot3'></div>"
markup += "<div class='dot4'></div>"
markup += "<div class='dot5'></div>"
markup += "<div class='dot6'></div>"
markup += "<div class='dot7'></div>"
markup += "<div class='dot8'></div>"
markup += "</div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "spinner"
markup += "<div class='indicator'></div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "bars"
markup += "<div class='indicator'><div class='progress'></div></div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "circles"
markup += "<div class='indicator'>"
markup += "<div class='circle1'></div>"
markup += "<div class='circle2'></div>"
markup += "<div class='circle3'></div>"
markup += "</div>"
markup += "<div class='msg'>" + options.message + "</div>"
else
(if (options.style is "loading") then markup += "<div class='progress'></div><div class='msg'>" + options.message + "</div>" else markup += "<div class='msg'>" + options.message + "</div>")
markup += "<a class='notify_close' href='javascript:;'><span>x</span></a>"
markup += "</div>"
markup += "</div>"
jQuery(this).notify "_open", options, markup
_open: (options, markup) ->
selector = undefined
options.beforeOpen()
jQuery(this).addClass("notify_container content_" + options.content)[options.append_type] markup
switch options.append_type
when "prepend", "append"
selector = jQuery(this).find(".notify[style*='display']")
when "after"
selector = jQuery(this).next(".notify[style*='display']")
when "before"
selector = jQuery(this).prev(".notify[style*='display']")
jQuery(selector).slideDown "fast"
options.afterOpen()
jQuery(this).notify "_setupEvents", options, selector
close: (options, animate) ->
selector = undefined
animate = (if typeof animate isnt "undefined" then animate else true)
options = jQuery.extend({}, methods.defaults, options)
options.beforeClose()
switch options.append_type
when "prepend", "append", "before"
selector = jQuery(this).parents(".notify_container")
when "after"
selector = jQuery(this).prev(".notify_container")
if animate
jQuery(this).slideUp "fast", ->
jQuery(selector).removeClass "content_hidden content_overlay content_visible notify_container"
jQuery(this).remove()
else
jQuery(selector).removeClass "content_hidden content_overlay content_visible notify_container"
return jQuery(this).remove()
options.afterClose()
close_all: (options) ->
options = jQuery.extend({}, methods.defaults, options)
options.beforeClose()
jQuery(".notify").slideUp "fast", ->
jQuery(this).closest("[class^=content_]").removeClass "content_hidden content_overlay content_visible"
jQuery(this).remove()
options.afterClose()
loading: (options) ->
options = jQuery.extend({}, methods.defaults, options)
_setupEvents: (options, selector) ->
unless options.sticky
t = setTimeout(->
jQuery(selector).notify "close", options
clearTimeout t
, options.duration)
jQuery(selector).click ->
clearTimeout t
jQuery(selector).notify "close", options
jQuery.fn.notify = (method) ->
if methods[method]
methods[method].apply this, Array::slice.call(arguments, 1)
else if typeof method is "object" or not method
methods.init.apply this, arguments
else
jQuery.error "Method " + method + " does not exist on jQuery.Notify"
) jQuery
| true | # * https://github.com/jasoncypret/expressionUI
# *
# * Copyright (c) 2013 PI:NAME:<NAME>END_PI (http://jasoncypret.com/)
# * Licensed under the MIT License
#
((factory) ->
"use strict"
if typeof define is "function" and define.amd
# Register as an anonymous AMD module:
define [
"jquery"
], factory
else
# Not using AMD
factory window.jQuery
return
) (jQuery) ->
((jQuery) ->
methods =
defaults:
selector: ""
message: null
sticky: false
width: "full"
style: "success"
append_type: "prepend"
allow_multiple: false
invert: false
icon: true
icon_character: null
loading: null
position: "top"
duration: 5000
content: "visible"
text_align: "bottom"
beforeOpen: jQuery.noop
afterOpen: jQuery.noop
beforeClose: jQuery.noop
afterClose: jQuery.noop
init: (options) ->
options = jQuery.extend({}, methods.defaults, options)
unless options.allow_multiple
selector = ""
switch options.append_type
when "prepend", "append"
selector = jQuery(this).find(".notify")
when "after"
selector = jQuery(this).next(".notify")
when "before"
selector = jQuery(this).prev(".notify")
jQuery(selector).notify "close", options, false if selector.length > 0
jQuery(this).notify "_add_markup", options
else
jQuery(this).notify "_add_markup", options
_add_markup: (options) ->
markup = undefined
if options.icon_character == null
options.icon_character = "X" if options.style is "error"
options.icon_character = "R" if options.style is "success"
options.icon_character = "!" if options.style is "tip"
options.icon_character = '"' if options.style is "info"
options.message = "" unless options.message
markup = "<div style='display:none;' class='notify" + options.selector + " text_align_" + options.text_align + " style_" + options.style + " position_" + options.position + " width_" + options.width + " invert_" + options.invert + " loading_" + options.loading + " icon_" + options.icon + "'>"
markup += "<div class='notify_wrap'>"
markup += "<div class='icon'>" + options.icon_character + "</div>" if not options.loading? and options.icon
switch options.loading
when "dots"
markup += "<div class='indicator'>"
markup += "<div class='dot1'></div>"
markup += "<div class='dot2'></div>"
markup += "<div class='dot3'></div>"
markup += "<div class='dot4'></div>"
markup += "<div class='dot5'></div>"
markup += "<div class='dot6'></div>"
markup += "<div class='dot7'></div>"
markup += "<div class='dot8'></div>"
markup += "</div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "spinner"
markup += "<div class='indicator'></div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "bars"
markup += "<div class='indicator'><div class='progress'></div></div>"
markup += "<div class='msg'>" + options.message + "</div>"
when "circles"
markup += "<div class='indicator'>"
markup += "<div class='circle1'></div>"
markup += "<div class='circle2'></div>"
markup += "<div class='circle3'></div>"
markup += "</div>"
markup += "<div class='msg'>" + options.message + "</div>"
else
(if (options.style is "loading") then markup += "<div class='progress'></div><div class='msg'>" + options.message + "</div>" else markup += "<div class='msg'>" + options.message + "</div>")
markup += "<a class='notify_close' href='javascript:;'><span>x</span></a>"
markup += "</div>"
markup += "</div>"
jQuery(this).notify "_open", options, markup
_open: (options, markup) ->
selector = undefined
options.beforeOpen()
jQuery(this).addClass("notify_container content_" + options.content)[options.append_type] markup
switch options.append_type
when "prepend", "append"
selector = jQuery(this).find(".notify[style*='display']")
when "after"
selector = jQuery(this).next(".notify[style*='display']")
when "before"
selector = jQuery(this).prev(".notify[style*='display']")
jQuery(selector).slideDown "fast"
options.afterOpen()
jQuery(this).notify "_setupEvents", options, selector
close: (options, animate) ->
selector = undefined
animate = (if typeof animate isnt "undefined" then animate else true)
options = jQuery.extend({}, methods.defaults, options)
options.beforeClose()
switch options.append_type
when "prepend", "append", "before"
selector = jQuery(this).parents(".notify_container")
when "after"
selector = jQuery(this).prev(".notify_container")
if animate
jQuery(this).slideUp "fast", ->
jQuery(selector).removeClass "content_hidden content_overlay content_visible notify_container"
jQuery(this).remove()
else
jQuery(selector).removeClass "content_hidden content_overlay content_visible notify_container"
return jQuery(this).remove()
options.afterClose()
close_all: (options) ->
options = jQuery.extend({}, methods.defaults, options)
options.beforeClose()
jQuery(".notify").slideUp "fast", ->
jQuery(this).closest("[class^=content_]").removeClass "content_hidden content_overlay content_visible"
jQuery(this).remove()
options.afterClose()
loading: (options) ->
options = jQuery.extend({}, methods.defaults, options)
_setupEvents: (options, selector) ->
unless options.sticky
t = setTimeout(->
jQuery(selector).notify "close", options
clearTimeout t
, options.duration)
jQuery(selector).click ->
clearTimeout t
jQuery(selector).notify "close", options
jQuery.fn.notify = (method) ->
if methods[method]
methods[method].apply this, Array::slice.call(arguments, 1)
else if typeof method is "object" or not method
methods.init.apply this, arguments
else
jQuery.error "Method " + method + " does not exist on jQuery.Notify"
) jQuery
|
[
{
"context": "e: \"Lasso Select\"\n icon: \"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABIAAAAQCAYAAAAbBi9cAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAABx0RVh0U29mdHdhcmUAQWRvYmUgRmlyZXdvcmtzIENTNui8sowAAAGlSURBVDiNldNNiM1hFMfxz/3PHQqxoCgWYmNDk0jyUqwsuP/719xnPVkQStl4mYWpsVXKQkYpL1m4qWmyYElZkDLKyiSbk... | bokehjs/src/coffee/tool/gestures/lasso_select_tool.coffee | rothnic/bokeh | 1 | _ = require "underscore"
PolySelection = require "../../renderer/overlay/poly_selection"
SelectTool = require "./select_tool"
class LassoSelectToolView extends SelectTool.View
initialize: (options) ->
super(options)
@listenTo(@model, 'change:active', @_active_change)
@data = null
_active_change: () ->
if not @mget('active')
@_clear_overlay()
_keyup: (e) ->
if e.keyCode == 13
@_clear_overlay()
_pan_start: (e) ->
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
@data = {vx: [vx], vy: [vy]}
return null
_pan: (e) ->
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
@data.vx.push(vx)
@data.vy.push(vy)
overlay = @mget('overlay')
new_data = {}
new_data.vx = _.clone(@data.vx)
new_data.vy = _.clone(@data.vy)
overlay.set('data', new_data)
if @mget('select_every_mousemove')
append = e.srcEvent.shiftKey ? false
@_select(@data.vx, @data.vy, false, append)
_pan_end: (e) ->
@_clear_overlay()
append = e.srcEvent.shiftKey ? false
@_select(@data.vx, @data.vy, true, append)
_clear_overlay: () ->
@mget('overlay').set('data', null)
_select: (vx, vy, final, append) ->
geometry = {
type: 'poly'
vx: vx
vy: vy
}
for r in @mget('renderers')
ds = r.get('data_source')
sm = ds.get('selection_manager')
sm.select(@, @plot_view.renderers[r.id], geometry, final, append)
@_save_geometry(geometry, final, append)
return null
class LassoSelectTool extends SelectTool.Model
default_view: LassoSelectToolView
type: "LassoSelectTool"
tool_name: "Lasso Select"
icon: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABIAAAAQCAYAAAAbBi9cAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAABx0RVh0U29mdHdhcmUAQWRvYmUgRmlyZXdvcmtzIENTNui8sowAAAGlSURBVDiNldNNiM1hFMfxz/3PHQqxoCgWYmNDk0jyUqwsuP/719xnPVkQStl4mYWpsVXKQkYpL1m4qWmyYElZkDLKyiSbkdKYNBovo8m1uM+d/nPd/2TO6nn5nW+/c57zlCwQ9eGRBPuwF7uxAUswjme4V6tWxqFUAFiLXlSxDaswiz9RkqAL79Ffq1YeldoAXTiNs9iIn3iN0Zj0OULWYycORU1fKQdZh5s4ggncxX28DVk6W+D8MG5hrJQr5Ql68AADIUvfFTZvPuw5VpZjOVcjZCBk6eD/ACJkF7ZgMMEJVHB7kZDNeIhXGEpwEg3cWASkFy9i3vFatTJTxvJ4sAcvo3ANpkOW/sold+MgTsUKRlGbm6P68Mh59GvOSR2/cVTzqYfifisOYDtm4vmlkKVTjUZDC5TgIi5gBX7gG7qxVHNuluEjHuN6yNI3LadzoJz1HejDMXzP3X2Njp+GLJ1o79c/oBzwGgK+YHV0cyVk6eV27YKgCNuEKZzBubjeH7J0rAiUdAKFLP0QsnQSdzCp+Wl7Omlb0RGUi0+YRlmz+YXxF2YZkqkolYwKAAAAAElFTkSuQmCC"
event_type: "pan"
default_order: 12
initialize: (attrs, options) ->
super(attrs, options)
@set('overlay', new PolySelection.Model({line_width: 2}))
plot_renderers = @get('plot').get('renderers')
plot_renderers.push(@get('overlay'))
@get('plot').set('renderers', plot_renderers)
defaults: () ->
return _.extend({}, super(), {
select_every_mousemove: true
})
module.exports =
Model: LassoSelectTool
View: LassoSelectToolView | 7169 | _ = require "underscore"
PolySelection = require "../../renderer/overlay/poly_selection"
SelectTool = require "./select_tool"
class LassoSelectToolView extends SelectTool.View
initialize: (options) ->
super(options)
@listenTo(@model, 'change:active', @_active_change)
@data = null
_active_change: () ->
if not @mget('active')
@_clear_overlay()
_keyup: (e) ->
if e.keyCode == 13
@_clear_overlay()
_pan_start: (e) ->
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
@data = {vx: [vx], vy: [vy]}
return null
_pan: (e) ->
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
@data.vx.push(vx)
@data.vy.push(vy)
overlay = @mget('overlay')
new_data = {}
new_data.vx = _.clone(@data.vx)
new_data.vy = _.clone(@data.vy)
overlay.set('data', new_data)
if @mget('select_every_mousemove')
append = e.srcEvent.shiftKey ? false
@_select(@data.vx, @data.vy, false, append)
_pan_end: (e) ->
@_clear_overlay()
append = e.srcEvent.shiftKey ? false
@_select(@data.vx, @data.vy, true, append)
_clear_overlay: () ->
@mget('overlay').set('data', null)
_select: (vx, vy, final, append) ->
geometry = {
type: 'poly'
vx: vx
vy: vy
}
for r in @mget('renderers')
ds = r.get('data_source')
sm = ds.get('selection_manager')
sm.select(@, @plot_view.renderers[r.id], geometry, final, append)
@_save_geometry(geometry, final, append)
return null
class LassoSelectTool extends SelectTool.Model
default_view: LassoSelectToolView
type: "LassoSelectTool"
tool_name: "Lasso Select"
icon: "data:image/png;base64,i<KEY>Vk<KEY>eV<KEY>Zz<KEY>"
event_type: "pan"
default_order: 12
initialize: (attrs, options) ->
super(attrs, options)
@set('overlay', new PolySelection.Model({line_width: 2}))
plot_renderers = @get('plot').get('renderers')
plot_renderers.push(@get('overlay'))
@get('plot').set('renderers', plot_renderers)
defaults: () ->
return _.extend({}, super(), {
select_every_mousemove: true
})
module.exports =
Model: LassoSelectTool
View: LassoSelectToolView | true | _ = require "underscore"
PolySelection = require "../../renderer/overlay/poly_selection"
SelectTool = require "./select_tool"
class LassoSelectToolView extends SelectTool.View
initialize: (options) ->
super(options)
@listenTo(@model, 'change:active', @_active_change)
@data = null
_active_change: () ->
if not @mget('active')
@_clear_overlay()
_keyup: (e) ->
if e.keyCode == 13
@_clear_overlay()
_pan_start: (e) ->
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
@data = {vx: [vx], vy: [vy]}
return null
_pan: (e) ->
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
@data.vx.push(vx)
@data.vy.push(vy)
overlay = @mget('overlay')
new_data = {}
new_data.vx = _.clone(@data.vx)
new_data.vy = _.clone(@data.vy)
overlay.set('data', new_data)
if @mget('select_every_mousemove')
append = e.srcEvent.shiftKey ? false
@_select(@data.vx, @data.vy, false, append)
_pan_end: (e) ->
@_clear_overlay()
append = e.srcEvent.shiftKey ? false
@_select(@data.vx, @data.vy, true, append)
_clear_overlay: () ->
@mget('overlay').set('data', null)
_select: (vx, vy, final, append) ->
geometry = {
type: 'poly'
vx: vx
vy: vy
}
for r in @mget('renderers')
ds = r.get('data_source')
sm = ds.get('selection_manager')
sm.select(@, @plot_view.renderers[r.id], geometry, final, append)
@_save_geometry(geometry, final, append)
return null
class LassoSelectTool extends SelectTool.Model
default_view: LassoSelectToolView
type: "LassoSelectTool"
tool_name: "Lasso Select"
icon: "data:image/png;base64,iPI:KEY:<KEY>END_PIVkPI:KEY:<KEY>END_PIeVPI:KEY:<KEY>END_PIZzPI:KEY:<KEY>END_PI"
event_type: "pan"
default_order: 12
initialize: (attrs, options) ->
super(attrs, options)
@set('overlay', new PolySelection.Model({line_width: 2}))
plot_renderers = @get('plot').get('renderers')
plot_renderers.push(@get('overlay'))
@get('plot').set('renderers', plot_renderers)
defaults: () ->
return _.extend({}, super(), {
select_every_mousemove: true
})
module.exports =
Model: LassoSelectTool
View: LassoSelectToolView |
[
{
"context": "##\n * Federated Wiki : Node Server\n *\n * Copyright Ward Cunningham and other contributors\n * Licensed under the MIT ",
"end": 67,
"score": 0.9998902082443237,
"start": 52,
"tag": "NAME",
"value": "Ward Cunningham"
},
{
"context": "nsed under the MIT license.\n * htt... | node_modules/wiki-server/lib/server.coffee | jpietrok-pnnl/wiki | 0 | ###
* Federated Wiki : Node Server
*
* Copyright Ward Cunningham and other contributors
* Licensed under the MIT license.
* https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt
###
# **server.coffee** is the main guts of the express version
# of (Smallest Federated Wiki)[https://github.com/WardCunningham/Smallest-Federated-Wiki].
# The CLI and Farm are just front ends
# for setting arguments, and spawning servers. In a complex system
# you would probably want to replace the CLI/Farm with your own code,
# and use server.coffee directly.
#
#### Dependencies ####
# anything not in the standard library is included in the repo, or
# can be installed with an:
# npm install
require('coffee-trace')
# Standard lib
fs = require 'fs'
path = require 'path'
http = require 'http'
# From npm
mkdirp = require 'mkdirp'
express = require 'express'
hbs = require 'express-hbs'
glob = require 'glob'
es = require 'event-stream'
JSONStream = require 'JSONStream'
async = require 'async'
f = require('flates')
sanitize = require 'sanitize-caja'
# Express 4 middleware
logger = require 'morgan'
cookieParser = require 'cookie-parser'
methodOverride = require 'method-override'
## session = require 'express-session'
sessions = require 'client-sessions'
bodyParser = require 'body-parser'
errorHandler = require 'errorhandler'
request = require 'request'
# Local files
random = require './random_id'
defargs = require './defaultargs'
wiki = require 'wiki-client/lib/wiki'
pluginsFactory = require './plugins'
sitemapFactory = require './sitemap'
render = (page) ->
return f.div({class: "twins"}, f.p('')) + '\n' +
f.div({class: "header"}, f.h1(
f.a({href: '/', style: 'text-decoration: none'},
f.img({height: '32px', src: '/favicon.png'})) +
' ' + (page.title))) + '\n' +
f.div {class: "story"},
page.story.map((story) ->
if story.type is 'paragraph'
f.div {class: "item paragraph"}, f.p(wiki.resolveLinks(story.text))
else if story.type is 'image'
f.div {class: "item image"},
f.img({class: "thumbnail", src: story.url}),
f.p(wiki.resolveLinks(story.text or story.caption or 'uploaded image'))
else if story.type is 'html'
f.div {class: "item html"},
f.p(wiki.resolveLinks(story.text or '', sanitize))
else f.div {class: "item"}, f.p(wiki.resolveLinks(story.text or ''))
).join('\n')
# Set export objects for node and coffee to a function that generates a sfw server.
module.exports = exports = (argv) ->
# Create the main application object, app.
app = express()
# remove x-powered-by header
app.disable('x-powered-by')
# defaultargs.coffee exports a function that takes the argv object
# that is passed in and then does its
# best to supply sane defaults for any arguments that are missing.
argv = defargs(argv)
app.startOpts = argv
log = (stuff...) ->
console.log stuff if argv.debug
loga = (stuff...) ->
console.log stuff
ourErrorHandler = (req, res, next) ->
fired = false
res.e = (error, status) ->
if !fired
fired = true
res.statusCode = status or 500
res.end 'Server ' + error
log "Res sent:", res.statusCode, error
else
log "Already fired", error
next()
# Require the database adapter and initialize it with options.
app.pagehandler = pagehandler = require(argv.database.type)(argv)
# Require the sitemap adapter and initialize it with options.
app.sitemaphandler = sitemaphandler = sitemapFactory(argv)
# Require the security adapter and initialize it with options.
app.securityhandler = securityhandler = require(argv.security_type)(log, loga, argv)
# If the site is owned, owner will contain the name of the owner
owner = ''
# If the user is logged in, user will contain their identity
user = ''
# Called from authentication when the site is claimed,
# to update the name of the owner held here.
updateOwner = (id) ->
owner = id
#### Middleware ####
#
# Allow json to be got cross origin.
cors = (req, res, next) ->
res.header('Access-Control-Allow-Origin', '*')
next()
remoteGet = (remote, slug, cb) ->
[host, port] = remote.split(':')
getopts = {
host: host
port: port or 80
path: "/#{slug}.json"
}
# TODO: This needs more robust error handling, just trying to
# keep it from taking down the server.
http.get(getopts, (resp) ->
responsedata = ''
resp.on 'data', (chunk) ->
responsedata += chunk
resp.on 'error', (e) ->
cb(e, 'Page not found', 404)
resp.on 'end', ->
if resp.statusCode == 404
cb(null, 'Page not found', 404)
else if responsedata
cb(null, JSON.parse(responsedata), resp.statusCode)
else
cb(null, 'Page not found', 404)
).on 'error', (e) ->
cb(e, 'Page not found', 404)
#### Express configuration ####
# Set up all the standard express server options,
# including hbs to use handlebars/mustache templates
# saved with a .html extension, and no layout.
app.set('views', path.join(__dirname, '..', '..', 'wiki-client', '/views'))
app.set('view engine', 'html')
app.engine('html', hbs.express4())
app.set('view options', layout: false)
# use logger, at least in development, probably needs a param to configure (or turn off).
# use stream to direct to somewhere other than stdout.
app.use(logger('tiny'))
app.use(cookieParser())
app.use(bodyParser.json({ limit: argv.uploadLimit}))
app.use(bodyParser.urlencoded({ extended: true, limit: argv.uploadLimit}))
app.use(methodOverride())
cookieValue = {
httpOnly: true
}
cookieValue['domain'] = argv.wiki_domain if argv.wiki_domain
# use secureProxy as TLS is terminated in outside the node process
cookieValue['secureProxy'] = true if argv.secure_cookie
app.use(sessions({
cookieName: 'wikiSession',
requestKey: 'session',
secret: argv.cookieSecret,
# make the session session_duration days long
duration: argv.session_duration * 24 * 60 * 60 * 1000,
# add 12 hours to session if less than 12 hours to expiry
activeDuration: 24 * 60 * 60 * 1000,
cookie: cookieValue
}))
app.use(ourErrorHandler)
# Add static route to the client
app.use(express.static(argv.client))
# Add static routes to the plugins client.
glob "wiki-plugin-*/client", {cwd: argv.packageDir}, (e, plugins) ->
plugins.map (plugin) ->
pluginName = plugin.slice(12, -7)
pluginPath = '/plugins/' + pluginName
app.use(pluginPath, express.static(path.join(argv.packageDir, plugin)))
# Add static routes to the security client.
if argv.security != './security'
app.use('/security', express.static(path.join(argv.packageDir, argv.security_type, 'client')))
##### Set up standard environments. #####
# In dev mode turn on console.log debugging as well as showing the stack on err.
if 'development' == app.get('env')
app.use(errorHandler())
argv.debug = console? and true
# Show all of the options a server is using.
log argv
#### Routes ####
# Routes currently make up the bulk of the Express port of
# Smallest Federated Wiki. Most routes use literal names,
# or regexes to match, and then access req.params directly.
##### Redirects #####
# Common redirects that may get used throughout the routes.
index = argv.home + '.html'
oops = '/oops'
##### Get routes #####
# Routes have mostly been kept together by http verb, with the exception
# of the openID related routes which are at the end together.
# Main route for initial contact. Allows us to
# link into a specific set of pages, local and remote.
# Can also be handled by the client, but it also sets up
# the login status, and related footer html, which the client
# relies on to know if it is logged in or not.
app.get ///^((/[a-zA-Z0-9:.-]+/[a-z0-9-]+(_rev\d+)?)+)/?$///, (req, res, next) ->
urlPages = (i for i in req.params[0].split('/') by 2)[1..]
urlLocs = (j for j in req.params[0].split('/')[1..] by 2)
if ['plugin', 'auth'].indexOf(urlLocs[0]) > -1
return next()
title = urlPages[..].pop().replace(/-+/g,' ')
user = securityhandler.getUser(req)
info = {
title
pages: []
authenticated: if user
true
else
false
user: user
seedNeighbors: argv.neighbors
owned: if owner
true
else
false
isOwner: if securityhandler.isAuthorized(req)
true
else
false
ownedBy: if owner
owner
else
''
}
for page, idx in urlPages
if urlLocs[idx] is 'view'
pageDiv = {page}
else
pageDiv = {page, origin: """data-site=#{urlLocs[idx]}"""}
info.pages.push(pageDiv)
res.render('static.html', info)
app.get ///([a-z0-9-]+)\.html$///, (req, res, next) ->
slug = req.params[0]
log(slug)
if slug is 'runtests'
return next()
pagehandler.get slug, (e, page, status) ->
if e then return res.e e
if status is 404
return res.status(status).send(page)
page.title ||= slug.replace(/-+/g,' ')
page.story ||= []
user = securityhandler.getUser(req)
info = {
title: page.title
pages: [
page: slug
generated: """data-server-generated=true"""
story: render(page)
]
authenticated: if user
true
else
false
user: user
seedNeighbors: argv.neighbors
owned: if owner
true
else
false
isOwner: if securityhandler.isAuthorized(req)
true
else
false
ownedBy: if owner
owner
else
''
}
res.render('static.html', info)
app.get ///system/factories.json///, (req, res) ->
res.status(200)
res.header('Content-Type', 'application/json')
# Plugins are located in packages in argv.packageDir, with package names of the form wiki-plugin-*
glob path.join(argv.packageDir, 'wiki-plugin-*', 'factory.json'), (e, files) ->
if e then return res.e(e)
files = files.map (file) ->
return fs.createReadStream(file).on('error', res.e).pipe(JSONStream.parse())
es.concat.apply(null, files)
.on('error', res.e)
.pipe(JSONStream.stringify())
.pipe(res)
###### Json Routes ######
# Handle fetching local and remote json pages.
# Local pages are handled by the pagehandler module.
app.get ///^/([a-z0-9-]+)\.json$///, cors, (req, res) ->
file = req.params[0]
pagehandler.get file, (e, page, status) ->
if e then return res.e e
res.status(status or 200).send(page)
# Remote pages use the http client to retrieve the page
# and sends it to the client. TODO: consider caching remote pages locally.
app.get ///^/remote/([a-zA-Z0-9:\.-]+)/([a-z0-9-]+)\.json$///, (req, res) ->
remoteGet req.params[0], req.params[1], (e, page, status) ->
if e
log "remoteGet error:", e
return res.e e
res.status(status or 200).send(page)
###### Theme Routes ######
# If themes doesn't exist send 404 and let the client
# deal with it.
app.get /^\/theme\/(\w+\.\w+)$/, cors, (req,res) ->
res.sendFile(path.join(argv.status, 'theme', req.params[0]), (e) ->
if (e)
# swallow the error if the theme does not exist...
if req.path is '/theme/style.css'
res.set('Content-Type', 'text/css')
res.send('')
else
res.sendStatus(404)
)
###### Favicon Routes ######
# If favLoc doesn't exist send 404 and let the client
# deal with it.
favLoc = path.join(argv.status, 'favicon.png')
app.get '/favicon.png', cors, (req,res) ->
res.sendFile(favLoc)
authorized = (req, res, next) ->
if securityhandler.isAuthorized(req)
next()
else
console.log 'rejecting', req.path
res.sendStatus(403)
# Accept favicon image posted to the server, and if it does not already exist
# save it.
app.post '/favicon.png', authorized, (req, res) ->
favicon = req.body.image.replace(///^data:image/png;base64,///, "")
buf = new Buffer(favicon, 'base64')
fs.exists argv.status, (exists) ->
if exists
fs.writeFile favLoc, buf, (e) ->
if e then return res.e e
res.send('Favicon Saved')
else
mkdirp argv.status, ->
fs.writeFile favLoc, buf, (e) ->
if e then return res.e e
res.send('Favicon Saved')
# Redirect remote favicons to the server they are needed from.
app.get ///^/remote/([a-zA-Z0-9:\.-]+/favicon.png)$///, (req, res) ->
remotefav = "http://#{req.params[0]}"
res.redirect(remotefav)
###### Meta Routes ######
# Send an array of pages in the database via json
app.get '/system/slugs.json', cors, (req, res) ->
fs.readdir argv.db, (e, files) ->
if e then return res.e e
res.send(files)
# Returns a list of installed plugins. (does this get called anymore!)
app.get '/system/plugins.json', cors, (req, res) ->
glob "wiki-plugin-*", {cwd: argv.packageDir}, (e, files) ->
if e then return res.e e
# extract the plugin name from the name of the directory it's installed in
files = files.map (file) -> file.slice(12)
res.send(files)
#
sitemapLoc = path.join(argv.status, 'sitemap.json')
app.get '/system/sitemap.json', cors, (req, res) ->
fs.exists sitemapLoc, (exists) ->
if exists
res.sendFile(sitemapLoc)
else
# only createSitemap if we are not already creating one
sitemaphandler.createSitemap (pagehandler) if !sitemaphandler.isWorking()
# wait for the sitemap file to be written, before sending
sitemaphandler.once 'finished', ->
res.sendFile(sitemapLoc)
xmlSitemapLoc = path.join(argv.status, 'sitemap.xml')
app.get '/sitemap.xml', (req, res) ->
fs.exists sitemapLoc, (exists) ->
if exists
res.sendFile(xmlSitemapLoc)
else
sitemaphandler.createSitemap (pagehandler) if !sitemaphandler.isWorking()
sitemaphandler.once 'finished', ->
res.sendFile(xmlSitemapLoc)
app.get '/system/export.json', cors, (req, res) ->
pagehandler.pages (e, sitemap) ->
return res.e(e) if e
async.map(
sitemap,
(stub, done) ->
pagehandler.get(stub.slug, (error, page) ->
return done(e) if e
done(null, {slug: stub.slug, page})
)
,
(e, pages) ->
return res.e(e) if e
res.json(pages.reduce( (dict, combined) ->
dict[combined.slug] = combined.page
dict
, {}))
)
##### Define security routes #####
securityhandler.defineRoutes app, cors, updateOwner
##### Proxy routes #####
app.get '/proxy/*', authorized, (req, res) ->
pathParts = req.path.split('/')
remoteHost = pathParts[2]
pathParts.splice(0,3)
remoteResource = pathParts.join('/')
requestURL = 'http://' + remoteHost + '/' + remoteResource
console.log("PROXY Request: ", requestURL)
if requestURL.endsWith('.json') or requestURL.endsWith('.png')
requestOptions = {
host: remoteHost
port: 80
path: remoteResource
}
try
request
.get(requestURL, requestOptions)
.on('error', (err) ->
console.log("ERROR: Request ", requestURL, err))
.pipe(res)
catch error
console.log "PROXY Error", error
res.status(500).end()
else
res.status(400).end()
##### Put routes #####
app.put /^\/page\/([a-z0-9-]+)\/action$/i, authorized, (req, res) ->
action = JSON.parse(req.body.action)
# Handle all of the possible actions to be taken on a page,
actionCB = (e, page, status) ->
#if e then return res.e e
if status is 404
res.status(status).send(page)
# Using Coffee-Scripts implicit returns we assign page.story to the
# result of a list comprehension by way of a switch expression.
try
page.story = switch action.type
when 'move'
action.order.map (id) ->
page.story.filter((para) ->
id == para.id
)[0] or throw('Ignoring move. Try reload.')
when 'add'
idx = page.story.map((para) -> para.id).indexOf(action.after) + 1
page.story.splice(idx, 0, action.item)
page.story
when 'remove'
page.story.filter (para) ->
para?.id != action.id
when 'edit'
page.story.map (para) ->
if para.id is action.id
action.item
else
para
when 'create', 'fork'
page.story or []
else
log "Unfamiliar action:", action
#page.story
throw('Unfamiliar action ignored')
catch e
return res.e e
# Add a blank journal if it does not exist.
# And add what happened to the journal.
if not page.journal
page.journal = []
if action.fork
page.journal.push({type: "fork", site: action.fork})
delete action.fork
page.journal.push(action)
pagehandler.put req.params[0], page, (e) ->
if e then return res.e e
res.send('ok')
# log 'saved'
# update sitemap
sitemaphandler.update(req.params[0], page)
# log action
# If the action is a fork, get the page from the remote server,
# otherwise ask pagehandler for it.
if action.fork
remoteGet(action.fork, req.params[0], actionCB)
else if action.type is 'create'
# Prevent attempt to write circular structure
itemCopy = JSON.parse(JSON.stringify(action.item))
pagehandler.get req.params[0], (e, page, status) ->
if e then return actionCB(e)
unless status is 404
res.status(409).send('Page already exists.')
else
actionCB(null, itemCopy)
else if action.type == 'fork'
if action.item # push
itemCopy = JSON.parse(JSON.stringify(action.item))
delete action.item
actionCB(null, itemCopy)
else # pull
remoteGet(action.site, req.params[0], actionCB)
else
pagehandler.get(req.params[0], actionCB)
# Return the oops page when login fails.
app.get '/oops', (req, res) ->
res.statusCode = 403
res.render('oops.html', {msg:'This is not your wiki!'})
# Traditional request to / redirects to index :)
app.get '/', (req, res) ->
res.redirect(index)
#### Start the server ####
# Wait to make sure owner is known before listening.
securityhandler.retrieveOwner (e) ->
# Throw if you can't find the initial owner
if e then throw e
owner = securityhandler.getOwner()
console.log "owner: " + owner
app.emit 'owner-set'
app.on 'running-serv', (server) ->
### Plugins ###
# Should replace most WebSocketServers below.
plugins = pluginsFactory(argv)
plugins.startServers({argv, app})
### Sitemap ###
# create sitemap at start-up
sitemaphandler.createSitemap(pagehandler)
# Return app when called, so that it can be watched for events and shutdown with .close() externally.
app
| 55073 | ###
* Federated Wiki : Node Server
*
* Copyright <NAME> and other contributors
* Licensed under the MIT license.
* https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt
###
# **server.coffee** is the main guts of the express version
# of (Smallest Federated Wiki)[https://github.com/WardCunningham/Smallest-Federated-Wiki].
# The CLI and Farm are just front ends
# for setting arguments, and spawning servers. In a complex system
# you would probably want to replace the CLI/Farm with your own code,
# and use server.coffee directly.
#
#### Dependencies ####
# anything not in the standard library is included in the repo, or
# can be installed with an:
# npm install
require('coffee-trace')
# Standard lib
fs = require 'fs'
path = require 'path'
http = require 'http'
# From npm
mkdirp = require 'mkdirp'
express = require 'express'
hbs = require 'express-hbs'
glob = require 'glob'
es = require 'event-stream'
JSONStream = require 'JSONStream'
async = require 'async'
f = require('flates')
sanitize = require 'sanitize-caja'
# Express 4 middleware
logger = require 'morgan'
cookieParser = require 'cookie-parser'
methodOverride = require 'method-override'
## session = require 'express-session'
sessions = require 'client-sessions'
bodyParser = require 'body-parser'
errorHandler = require 'errorhandler'
request = require 'request'
# Local files
random = require './random_id'
defargs = require './defaultargs'
wiki = require 'wiki-client/lib/wiki'
pluginsFactory = require './plugins'
sitemapFactory = require './sitemap'
render = (page) ->
return f.div({class: "twins"}, f.p('')) + '\n' +
f.div({class: "header"}, f.h1(
f.a({href: '/', style: 'text-decoration: none'},
f.img({height: '32px', src: '/favicon.png'})) +
' ' + (page.title))) + '\n' +
f.div {class: "story"},
page.story.map((story) ->
if story.type is 'paragraph'
f.div {class: "item paragraph"}, f.p(wiki.resolveLinks(story.text))
else if story.type is 'image'
f.div {class: "item image"},
f.img({class: "thumbnail", src: story.url}),
f.p(wiki.resolveLinks(story.text or story.caption or 'uploaded image'))
else if story.type is 'html'
f.div {class: "item html"},
f.p(wiki.resolveLinks(story.text or '', sanitize))
else f.div {class: "item"}, f.p(wiki.resolveLinks(story.text or ''))
).join('\n')
# Set export objects for node and coffee to a function that generates a sfw server.
module.exports = exports = (argv) ->
# Create the main application object, app.
app = express()
# remove x-powered-by header
app.disable('x-powered-by')
# defaultargs.coffee exports a function that takes the argv object
# that is passed in and then does its
# best to supply sane defaults for any arguments that are missing.
argv = defargs(argv)
app.startOpts = argv
log = (stuff...) ->
console.log stuff if argv.debug
loga = (stuff...) ->
console.log stuff
ourErrorHandler = (req, res, next) ->
fired = false
res.e = (error, status) ->
if !fired
fired = true
res.statusCode = status or 500
res.end 'Server ' + error
log "Res sent:", res.statusCode, error
else
log "Already fired", error
next()
# Require the database adapter and initialize it with options.
app.pagehandler = pagehandler = require(argv.database.type)(argv)
# Require the sitemap adapter and initialize it with options.
app.sitemaphandler = sitemaphandler = sitemapFactory(argv)
# Require the security adapter and initialize it with options.
app.securityhandler = securityhandler = require(argv.security_type)(log, loga, argv)
# If the site is owned, owner will contain the name of the owner
owner = ''
# If the user is logged in, user will contain their identity
user = ''
# Called from authentication when the site is claimed,
# to update the name of the owner held here.
updateOwner = (id) ->
owner = id
#### Middleware ####
#
# Allow json to be got cross origin.
cors = (req, res, next) ->
res.header('Access-Control-Allow-Origin', '*')
next()
remoteGet = (remote, slug, cb) ->
[host, port] = remote.split(':')
getopts = {
host: host
port: port or 80
path: "/#{slug}.json"
}
# TODO: This needs more robust error handling, just trying to
# keep it from taking down the server.
http.get(getopts, (resp) ->
responsedata = ''
resp.on 'data', (chunk) ->
responsedata += chunk
resp.on 'error', (e) ->
cb(e, 'Page not found', 404)
resp.on 'end', ->
if resp.statusCode == 404
cb(null, 'Page not found', 404)
else if responsedata
cb(null, JSON.parse(responsedata), resp.statusCode)
else
cb(null, 'Page not found', 404)
).on 'error', (e) ->
cb(e, 'Page not found', 404)
#### Express configuration ####
# Set up all the standard express server options,
# including hbs to use handlebars/mustache templates
# saved with a .html extension, and no layout.
app.set('views', path.join(__dirname, '..', '..', 'wiki-client', '/views'))
app.set('view engine', 'html')
app.engine('html', hbs.express4())
app.set('view options', layout: false)
# use logger, at least in development, probably needs a param to configure (or turn off).
# use stream to direct to somewhere other than stdout.
app.use(logger('tiny'))
app.use(cookieParser())
app.use(bodyParser.json({ limit: argv.uploadLimit}))
app.use(bodyParser.urlencoded({ extended: true, limit: argv.uploadLimit}))
app.use(methodOverride())
cookieValue = {
httpOnly: true
}
cookieValue['domain'] = argv.wiki_domain if argv.wiki_domain
# use secureProxy as TLS is terminated in outside the node process
cookieValue['secureProxy'] = true if argv.secure_cookie
app.use(sessions({
cookieName: 'wikiSession',
requestKey: '<KEY>',
secret: argv.cookieSecret,
# make the session session_duration days long
duration: argv.session_duration * 24 * 60 * 60 * 1000,
# add 12 hours to session if less than 12 hours to expiry
activeDuration: 24 * 60 * 60 * 1000,
cookie: cookieValue
}))
app.use(ourErrorHandler)
# Add static route to the client
app.use(express.static(argv.client))
# Add static routes to the plugins client.
glob "wiki-plugin-*/client", {cwd: argv.packageDir}, (e, plugins) ->
plugins.map (plugin) ->
pluginName = plugin.slice(12, -7)
pluginPath = '/plugins/' + pluginName
app.use(pluginPath, express.static(path.join(argv.packageDir, plugin)))
# Add static routes to the security client.
if argv.security != './security'
app.use('/security', express.static(path.join(argv.packageDir, argv.security_type, 'client')))
##### Set up standard environments. #####
# In dev mode turn on console.log debugging as well as showing the stack on err.
if 'development' == app.get('env')
app.use(errorHandler())
argv.debug = console? and true
# Show all of the options a server is using.
log argv
#### Routes ####
# Routes currently make up the bulk of the Express port of
# Smallest Federated Wiki. Most routes use literal names,
# or regexes to match, and then access req.params directly.
##### Redirects #####
# Common redirects that may get used throughout the routes.
index = argv.home + '.html'
oops = '/oops'
##### Get routes #####
# Routes have mostly been kept together by http verb, with the exception
# of the openID related routes which are at the end together.
# Main route for initial contact. Allows us to
# link into a specific set of pages, local and remote.
# Can also be handled by the client, but it also sets up
# the login status, and related footer html, which the client
# relies on to know if it is logged in or not.
app.get ///^((/[a-zA-Z0-9:.-]+/[a-z0-9-]+(_rev\d+)?)+)/?$///, (req, res, next) ->
urlPages = (i for i in req.params[0].split('/') by 2)[1..]
urlLocs = (j for j in req.params[0].split('/')[1..] by 2)
if ['plugin', 'auth'].indexOf(urlLocs[0]) > -1
return next()
title = urlPages[..].pop().replace(/-+/g,' ')
user = securityhandler.getUser(req)
info = {
title
pages: []
authenticated: if user
true
else
false
user: user
seedNeighbors: argv.neighbors
owned: if owner
true
else
false
isOwner: if securityhandler.isAuthorized(req)
true
else
false
ownedBy: if owner
owner
else
''
}
for page, idx in urlPages
if urlLocs[idx] is 'view'
pageDiv = {page}
else
pageDiv = {page, origin: """data-site=#{urlLocs[idx]}"""}
info.pages.push(pageDiv)
res.render('static.html', info)
app.get ///([a-z0-9-]+)\.html$///, (req, res, next) ->
slug = req.params[0]
log(slug)
if slug is 'runtests'
return next()
pagehandler.get slug, (e, page, status) ->
if e then return res.e e
if status is 404
return res.status(status).send(page)
page.title ||= slug.replace(/-+/g,' ')
page.story ||= []
user = securityhandler.getUser(req)
info = {
title: page.title
pages: [
page: slug
generated: """data-server-generated=true"""
story: render(page)
]
authenticated: if user
true
else
false
user: user
seedNeighbors: argv.neighbors
owned: if owner
true
else
false
isOwner: if securityhandler.isAuthorized(req)
true
else
false
ownedBy: if owner
owner
else
''
}
res.render('static.html', info)
app.get ///system/factories.json///, (req, res) ->
res.status(200)
res.header('Content-Type', 'application/json')
# Plugins are located in packages in argv.packageDir, with package names of the form wiki-plugin-*
glob path.join(argv.packageDir, 'wiki-plugin-*', 'factory.json'), (e, files) ->
if e then return res.e(e)
files = files.map (file) ->
return fs.createReadStream(file).on('error', res.e).pipe(JSONStream.parse())
es.concat.apply(null, files)
.on('error', res.e)
.pipe(JSONStream.stringify())
.pipe(res)
###### Json Routes ######
# Handle fetching local and remote json pages.
# Local pages are handled by the pagehandler module.
app.get ///^/([a-z0-9-]+)\.json$///, cors, (req, res) ->
file = req.params[0]
pagehandler.get file, (e, page, status) ->
if e then return res.e e
res.status(status or 200).send(page)
# Remote pages use the http client to retrieve the page
# and sends it to the client. TODO: consider caching remote pages locally.
app.get ///^/remote/([a-zA-Z0-9:\.-]+)/([a-z0-9-]+)\.json$///, (req, res) ->
remoteGet req.params[0], req.params[1], (e, page, status) ->
if e
log "remoteGet error:", e
return res.e e
res.status(status or 200).send(page)
###### Theme Routes ######
# If themes doesn't exist send 404 and let the client
# deal with it.
app.get /^\/theme\/(\w+\.\w+)$/, cors, (req,res) ->
res.sendFile(path.join(argv.status, 'theme', req.params[0]), (e) ->
if (e)
# swallow the error if the theme does not exist...
if req.path is '/theme/style.css'
res.set('Content-Type', 'text/css')
res.send('')
else
res.sendStatus(404)
)
###### Favicon Routes ######
# If favLoc doesn't exist send 404 and let the client
# deal with it.
favLoc = path.join(argv.status, 'favicon.png')
app.get '/favicon.png', cors, (req,res) ->
res.sendFile(favLoc)
authorized = (req, res, next) ->
if securityhandler.isAuthorized(req)
next()
else
console.log 'rejecting', req.path
res.sendStatus(403)
# Accept favicon image posted to the server, and if it does not already exist
# save it.
app.post '/favicon.png', authorized, (req, res) ->
favicon = req.body.image.replace(///^data:image/png;base64,///, "")
buf = new Buffer(favicon, 'base64')
fs.exists argv.status, (exists) ->
if exists
fs.writeFile favLoc, buf, (e) ->
if e then return res.e e
res.send('Favicon Saved')
else
mkdirp argv.status, ->
fs.writeFile favLoc, buf, (e) ->
if e then return res.e e
res.send('Favicon Saved')
# Redirect remote favicons to the server they are needed from.
app.get ///^/remote/([a-zA-Z0-9:\.-]+/favicon.png)$///, (req, res) ->
remotefav = "http://#{req.params[0]}"
res.redirect(remotefav)
###### Meta Routes ######
# Send an array of pages in the database via json
app.get '/system/slugs.json', cors, (req, res) ->
fs.readdir argv.db, (e, files) ->
if e then return res.e e
res.send(files)
# Returns a list of installed plugins. (does this get called anymore!)
app.get '/system/plugins.json', cors, (req, res) ->
glob "wiki-plugin-*", {cwd: argv.packageDir}, (e, files) ->
if e then return res.e e
# extract the plugin name from the name of the directory it's installed in
files = files.map (file) -> file.slice(12)
res.send(files)
#
sitemapLoc = path.join(argv.status, 'sitemap.json')
app.get '/system/sitemap.json', cors, (req, res) ->
fs.exists sitemapLoc, (exists) ->
if exists
res.sendFile(sitemapLoc)
else
# only createSitemap if we are not already creating one
sitemaphandler.createSitemap (pagehandler) if !sitemaphandler.isWorking()
# wait for the sitemap file to be written, before sending
sitemaphandler.once 'finished', ->
res.sendFile(sitemapLoc)
xmlSitemapLoc = path.join(argv.status, 'sitemap.xml')
app.get '/sitemap.xml', (req, res) ->
fs.exists sitemapLoc, (exists) ->
if exists
res.sendFile(xmlSitemapLoc)
else
sitemaphandler.createSitemap (pagehandler) if !sitemaphandler.isWorking()
sitemaphandler.once 'finished', ->
res.sendFile(xmlSitemapLoc)
app.get '/system/export.json', cors, (req, res) ->
pagehandler.pages (e, sitemap) ->
return res.e(e) if e
async.map(
sitemap,
(stub, done) ->
pagehandler.get(stub.slug, (error, page) ->
return done(e) if e
done(null, {slug: stub.slug, page})
)
,
(e, pages) ->
return res.e(e) if e
res.json(pages.reduce( (dict, combined) ->
dict[combined.slug] = combined.page
dict
, {}))
)
##### Define security routes #####
securityhandler.defineRoutes app, cors, updateOwner
##### Proxy routes #####
app.get '/proxy/*', authorized, (req, res) ->
pathParts = req.path.split('/')
remoteHost = pathParts[2]
pathParts.splice(0,3)
remoteResource = pathParts.join('/')
requestURL = 'http://' + remoteHost + '/' + remoteResource
console.log("PROXY Request: ", requestURL)
if requestURL.endsWith('.json') or requestURL.endsWith('.png')
requestOptions = {
host: remoteHost
port: 80
path: remoteResource
}
try
request
.get(requestURL, requestOptions)
.on('error', (err) ->
console.log("ERROR: Request ", requestURL, err))
.pipe(res)
catch error
console.log "PROXY Error", error
res.status(500).end()
else
res.status(400).end()
##### Put routes #####
app.put /^\/page\/([a-z0-9-]+)\/action$/i, authorized, (req, res) ->
action = JSON.parse(req.body.action)
# Handle all of the possible actions to be taken on a page,
actionCB = (e, page, status) ->
#if e then return res.e e
if status is 404
res.status(status).send(page)
# Using Coffee-Scripts implicit returns we assign page.story to the
# result of a list comprehension by way of a switch expression.
try
page.story = switch action.type
when 'move'
action.order.map (id) ->
page.story.filter((para) ->
id == para.id
)[0] or throw('Ignoring move. Try reload.')
when 'add'
idx = page.story.map((para) -> para.id).indexOf(action.after) + 1
page.story.splice(idx, 0, action.item)
page.story
when 'remove'
page.story.filter (para) ->
para?.id != action.id
when 'edit'
page.story.map (para) ->
if para.id is action.id
action.item
else
para
when 'create', 'fork'
page.story or []
else
log "Unfamiliar action:", action
#page.story
throw('Unfamiliar action ignored')
catch e
return res.e e
# Add a blank journal if it does not exist.
# And add what happened to the journal.
if not page.journal
page.journal = []
if action.fork
page.journal.push({type: "fork", site: action.fork})
delete action.fork
page.journal.push(action)
pagehandler.put req.params[0], page, (e) ->
if e then return res.e e
res.send('ok')
# log 'saved'
# update sitemap
sitemaphandler.update(req.params[0], page)
# log action
# If the action is a fork, get the page from the remote server,
# otherwise ask pagehandler for it.
if action.fork
remoteGet(action.fork, req.params[0], actionCB)
else if action.type is 'create'
# Prevent attempt to write circular structure
itemCopy = JSON.parse(JSON.stringify(action.item))
pagehandler.get req.params[0], (e, page, status) ->
if e then return actionCB(e)
unless status is 404
res.status(409).send('Page already exists.')
else
actionCB(null, itemCopy)
else if action.type == 'fork'
if action.item # push
itemCopy = JSON.parse(JSON.stringify(action.item))
delete action.item
actionCB(null, itemCopy)
else # pull
remoteGet(action.site, req.params[0], actionCB)
else
pagehandler.get(req.params[0], actionCB)
# Return the oops page when login fails.
app.get '/oops', (req, res) ->
res.statusCode = 403
res.render('oops.html', {msg:'This is not your wiki!'})
# Traditional request to / redirects to index :)
app.get '/', (req, res) ->
res.redirect(index)
#### Start the server ####
# Wait to make sure owner is known before listening.
securityhandler.retrieveOwner (e) ->
# Throw if you can't find the initial owner
if e then throw e
owner = securityhandler.getOwner()
console.log "owner: " + owner
app.emit 'owner-set'
app.on 'running-serv', (server) ->
### Plugins ###
# Should replace most WebSocketServers below.
plugins = pluginsFactory(argv)
plugins.startServers({argv, app})
### Sitemap ###
# create sitemap at start-up
sitemaphandler.createSitemap(pagehandler)
# Return app when called, so that it can be watched for events and shutdown with .close() externally.
app
| true | ###
* Federated Wiki : Node Server
*
* Copyright PI:NAME:<NAME>END_PI and other contributors
* Licensed under the MIT license.
* https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt
###
# **server.coffee** is the main guts of the express version
# of (Smallest Federated Wiki)[https://github.com/WardCunningham/Smallest-Federated-Wiki].
# The CLI and Farm are just front ends
# for setting arguments, and spawning servers. In a complex system
# you would probably want to replace the CLI/Farm with your own code,
# and use server.coffee directly.
#
#### Dependencies ####
# anything not in the standard library is included in the repo, or
# can be installed with an:
# npm install
require('coffee-trace')
# Standard lib
fs = require 'fs'
path = require 'path'
http = require 'http'
# From npm
mkdirp = require 'mkdirp'
express = require 'express'
hbs = require 'express-hbs'
glob = require 'glob'
es = require 'event-stream'
JSONStream = require 'JSONStream'
async = require 'async'
f = require('flates')
sanitize = require 'sanitize-caja'
# Express 4 middleware
logger = require 'morgan'
cookieParser = require 'cookie-parser'
methodOverride = require 'method-override'
## session = require 'express-session'
sessions = require 'client-sessions'
bodyParser = require 'body-parser'
errorHandler = require 'errorhandler'
request = require 'request'
# Local files
random = require './random_id'
defargs = require './defaultargs'
wiki = require 'wiki-client/lib/wiki'
pluginsFactory = require './plugins'
sitemapFactory = require './sitemap'
render = (page) ->
return f.div({class: "twins"}, f.p('')) + '\n' +
f.div({class: "header"}, f.h1(
f.a({href: '/', style: 'text-decoration: none'},
f.img({height: '32px', src: '/favicon.png'})) +
' ' + (page.title))) + '\n' +
f.div {class: "story"},
page.story.map((story) ->
if story.type is 'paragraph'
f.div {class: "item paragraph"}, f.p(wiki.resolveLinks(story.text))
else if story.type is 'image'
f.div {class: "item image"},
f.img({class: "thumbnail", src: story.url}),
f.p(wiki.resolveLinks(story.text or story.caption or 'uploaded image'))
else if story.type is 'html'
f.div {class: "item html"},
f.p(wiki.resolveLinks(story.text or '', sanitize))
else f.div {class: "item"}, f.p(wiki.resolveLinks(story.text or ''))
).join('\n')
# Set export objects for node and coffee to a function that generates a sfw server.
module.exports = exports = (argv) ->
# Create the main application object, app.
app = express()
# remove x-powered-by header
app.disable('x-powered-by')
# defaultargs.coffee exports a function that takes the argv object
# that is passed in and then does its
# best to supply sane defaults for any arguments that are missing.
argv = defargs(argv)
app.startOpts = argv
log = (stuff...) ->
console.log stuff if argv.debug
loga = (stuff...) ->
console.log stuff
ourErrorHandler = (req, res, next) ->
fired = false
res.e = (error, status) ->
if !fired
fired = true
res.statusCode = status or 500
res.end 'Server ' + error
log "Res sent:", res.statusCode, error
else
log "Already fired", error
next()
# Require the database adapter and initialize it with options.
app.pagehandler = pagehandler = require(argv.database.type)(argv)
# Require the sitemap adapter and initialize it with options.
app.sitemaphandler = sitemaphandler = sitemapFactory(argv)
# Require the security adapter and initialize it with options.
app.securityhandler = securityhandler = require(argv.security_type)(log, loga, argv)
# If the site is owned, owner will contain the name of the owner
owner = ''
# If the user is logged in, user will contain their identity
user = ''
# Called from authentication when the site is claimed,
# to update the name of the owner held here.
updateOwner = (id) ->
owner = id
#### Middleware ####
#
# Allow json to be got cross origin.
cors = (req, res, next) ->
res.header('Access-Control-Allow-Origin', '*')
next()
remoteGet = (remote, slug, cb) ->
[host, port] = remote.split(':')
getopts = {
host: host
port: port or 80
path: "/#{slug}.json"
}
# TODO: This needs more robust error handling, just trying to
# keep it from taking down the server.
http.get(getopts, (resp) ->
responsedata = ''
resp.on 'data', (chunk) ->
responsedata += chunk
resp.on 'error', (e) ->
cb(e, 'Page not found', 404)
resp.on 'end', ->
if resp.statusCode == 404
cb(null, 'Page not found', 404)
else if responsedata
cb(null, JSON.parse(responsedata), resp.statusCode)
else
cb(null, 'Page not found', 404)
).on 'error', (e) ->
cb(e, 'Page not found', 404)
#### Express configuration ####
# Set up all the standard express server options,
# including hbs to use handlebars/mustache templates
# saved with a .html extension, and no layout.
app.set('views', path.join(__dirname, '..', '..', 'wiki-client', '/views'))
app.set('view engine', 'html')
app.engine('html', hbs.express4())
app.set('view options', layout: false)
# use logger, at least in development, probably needs a param to configure (or turn off).
# use stream to direct to somewhere other than stdout.
app.use(logger('tiny'))
app.use(cookieParser())
app.use(bodyParser.json({ limit: argv.uploadLimit}))
app.use(bodyParser.urlencoded({ extended: true, limit: argv.uploadLimit}))
app.use(methodOverride())
cookieValue = {
httpOnly: true
}
cookieValue['domain'] = argv.wiki_domain if argv.wiki_domain
# use secureProxy as TLS is terminated in outside the node process
cookieValue['secureProxy'] = true if argv.secure_cookie
app.use(sessions({
cookieName: 'wikiSession',
requestKey: 'PI:KEY:<KEY>END_PI',
secret: argv.cookieSecret,
# make the session session_duration days long
duration: argv.session_duration * 24 * 60 * 60 * 1000,
# add 12 hours to session if less than 12 hours to expiry
activeDuration: 24 * 60 * 60 * 1000,
cookie: cookieValue
}))
app.use(ourErrorHandler)
# Add static route to the client
app.use(express.static(argv.client))
# Add static routes to the plugins client.
glob "wiki-plugin-*/client", {cwd: argv.packageDir}, (e, plugins) ->
plugins.map (plugin) ->
pluginName = plugin.slice(12, -7)
pluginPath = '/plugins/' + pluginName
app.use(pluginPath, express.static(path.join(argv.packageDir, plugin)))
# Add static routes to the security client.
if argv.security != './security'
app.use('/security', express.static(path.join(argv.packageDir, argv.security_type, 'client')))
##### Set up standard environments. #####
# In dev mode turn on console.log debugging as well as showing the stack on err.
if 'development' == app.get('env')
app.use(errorHandler())
argv.debug = console? and true
# Show all of the options a server is using.
log argv
#### Routes ####
# Routes currently make up the bulk of the Express port of
# Smallest Federated Wiki. Most routes use literal names,
# or regexes to match, and then access req.params directly.
##### Redirects #####
# Common redirects that may get used throughout the routes.
index = argv.home + '.html'
oops = '/oops'
##### Get routes #####
# Routes have mostly been kept together by http verb, with the exception
# of the openID related routes which are at the end together.
# Main route for initial contact. Allows us to
# link into a specific set of pages, local and remote.
# Can also be handled by the client, but it also sets up
# the login status, and related footer html, which the client
# relies on to know if it is logged in or not.
app.get ///^((/[a-zA-Z0-9:.-]+/[a-z0-9-]+(_rev\d+)?)+)/?$///, (req, res, next) ->
urlPages = (i for i in req.params[0].split('/') by 2)[1..]
urlLocs = (j for j in req.params[0].split('/')[1..] by 2)
if ['plugin', 'auth'].indexOf(urlLocs[0]) > -1
return next()
title = urlPages[..].pop().replace(/-+/g,' ')
user = securityhandler.getUser(req)
info = {
title
pages: []
authenticated: if user
true
else
false
user: user
seedNeighbors: argv.neighbors
owned: if owner
true
else
false
isOwner: if securityhandler.isAuthorized(req)
true
else
false
ownedBy: if owner
owner
else
''
}
for page, idx in urlPages
if urlLocs[idx] is 'view'
pageDiv = {page}
else
pageDiv = {page, origin: """data-site=#{urlLocs[idx]}"""}
info.pages.push(pageDiv)
res.render('static.html', info)
app.get ///([a-z0-9-]+)\.html$///, (req, res, next) ->
slug = req.params[0]
log(slug)
if slug is 'runtests'
return next()
pagehandler.get slug, (e, page, status) ->
if e then return res.e e
if status is 404
return res.status(status).send(page)
page.title ||= slug.replace(/-+/g,' ')
page.story ||= []
user = securityhandler.getUser(req)
info = {
title: page.title
pages: [
page: slug
generated: """data-server-generated=true"""
story: render(page)
]
authenticated: if user
true
else
false
user: user
seedNeighbors: argv.neighbors
owned: if owner
true
else
false
isOwner: if securityhandler.isAuthorized(req)
true
else
false
ownedBy: if owner
owner
else
''
}
res.render('static.html', info)
app.get ///system/factories.json///, (req, res) ->
res.status(200)
res.header('Content-Type', 'application/json')
# Plugins are located in packages in argv.packageDir, with package names of the form wiki-plugin-*
glob path.join(argv.packageDir, 'wiki-plugin-*', 'factory.json'), (e, files) ->
if e then return res.e(e)
files = files.map (file) ->
return fs.createReadStream(file).on('error', res.e).pipe(JSONStream.parse())
es.concat.apply(null, files)
.on('error', res.e)
.pipe(JSONStream.stringify())
.pipe(res)
###### Json Routes ######
# Handle fetching local and remote json pages.
# Local pages are handled by the pagehandler module.
app.get ///^/([a-z0-9-]+)\.json$///, cors, (req, res) ->
file = req.params[0]
pagehandler.get file, (e, page, status) ->
if e then return res.e e
res.status(status or 200).send(page)
# Remote pages use the http client to retrieve the page
# and sends it to the client. TODO: consider caching remote pages locally.
app.get ///^/remote/([a-zA-Z0-9:\.-]+)/([a-z0-9-]+)\.json$///, (req, res) ->
remoteGet req.params[0], req.params[1], (e, page, status) ->
if e
log "remoteGet error:", e
return res.e e
res.status(status or 200).send(page)
###### Theme Routes ######
# If themes doesn't exist send 404 and let the client
# deal with it.
app.get /^\/theme\/(\w+\.\w+)$/, cors, (req,res) ->
res.sendFile(path.join(argv.status, 'theme', req.params[0]), (e) ->
if (e)
# swallow the error if the theme does not exist...
if req.path is '/theme/style.css'
res.set('Content-Type', 'text/css')
res.send('')
else
res.sendStatus(404)
)
###### Favicon Routes ######
# If favLoc doesn't exist send 404 and let the client
# deal with it.
favLoc = path.join(argv.status, 'favicon.png')
app.get '/favicon.png', cors, (req,res) ->
res.sendFile(favLoc)
authorized = (req, res, next) ->
if securityhandler.isAuthorized(req)
next()
else
console.log 'rejecting', req.path
res.sendStatus(403)
# Accept favicon image posted to the server, and if it does not already exist
# save it.
app.post '/favicon.png', authorized, (req, res) ->
favicon = req.body.image.replace(///^data:image/png;base64,///, "")
buf = new Buffer(favicon, 'base64')
fs.exists argv.status, (exists) ->
if exists
fs.writeFile favLoc, buf, (e) ->
if e then return res.e e
res.send('Favicon Saved')
else
mkdirp argv.status, ->
fs.writeFile favLoc, buf, (e) ->
if e then return res.e e
res.send('Favicon Saved')
# Redirect remote favicons to the server they are needed from.
app.get ///^/remote/([a-zA-Z0-9:\.-]+/favicon.png)$///, (req, res) ->
remotefav = "http://#{req.params[0]}"
res.redirect(remotefav)
###### Meta Routes ######
# Send an array of pages in the database via json
app.get '/system/slugs.json', cors, (req, res) ->
fs.readdir argv.db, (e, files) ->
if e then return res.e e
res.send(files)
# Returns a list of installed plugins. (does this get called anymore!)
app.get '/system/plugins.json', cors, (req, res) ->
glob "wiki-plugin-*", {cwd: argv.packageDir}, (e, files) ->
if e then return res.e e
# extract the plugin name from the name of the directory it's installed in
files = files.map (file) -> file.slice(12)
res.send(files)
#
sitemapLoc = path.join(argv.status, 'sitemap.json')
app.get '/system/sitemap.json', cors, (req, res) ->
fs.exists sitemapLoc, (exists) ->
if exists
res.sendFile(sitemapLoc)
else
# only createSitemap if we are not already creating one
sitemaphandler.createSitemap (pagehandler) if !sitemaphandler.isWorking()
# wait for the sitemap file to be written, before sending
sitemaphandler.once 'finished', ->
res.sendFile(sitemapLoc)
xmlSitemapLoc = path.join(argv.status, 'sitemap.xml')
app.get '/sitemap.xml', (req, res) ->
fs.exists sitemapLoc, (exists) ->
if exists
res.sendFile(xmlSitemapLoc)
else
sitemaphandler.createSitemap (pagehandler) if !sitemaphandler.isWorking()
sitemaphandler.once 'finished', ->
res.sendFile(xmlSitemapLoc)
app.get '/system/export.json', cors, (req, res) ->
pagehandler.pages (e, sitemap) ->
return res.e(e) if e
async.map(
sitemap,
(stub, done) ->
pagehandler.get(stub.slug, (error, page) ->
return done(e) if e
done(null, {slug: stub.slug, page})
)
,
(e, pages) ->
return res.e(e) if e
res.json(pages.reduce( (dict, combined) ->
dict[combined.slug] = combined.page
dict
, {}))
)
##### Define security routes #####
securityhandler.defineRoutes app, cors, updateOwner
##### Proxy routes #####
app.get '/proxy/*', authorized, (req, res) ->
pathParts = req.path.split('/')
remoteHost = pathParts[2]
pathParts.splice(0,3)
remoteResource = pathParts.join('/')
requestURL = 'http://' + remoteHost + '/' + remoteResource
console.log("PROXY Request: ", requestURL)
if requestURL.endsWith('.json') or requestURL.endsWith('.png')
requestOptions = {
host: remoteHost
port: 80
path: remoteResource
}
try
request
.get(requestURL, requestOptions)
.on('error', (err) ->
console.log("ERROR: Request ", requestURL, err))
.pipe(res)
catch error
console.log "PROXY Error", error
res.status(500).end()
else
res.status(400).end()
##### Put routes #####
app.put /^\/page\/([a-z0-9-]+)\/action$/i, authorized, (req, res) ->
action = JSON.parse(req.body.action)
# Handle all of the possible actions to be taken on a page,
actionCB = (e, page, status) ->
#if e then return res.e e
if status is 404
res.status(status).send(page)
# Using Coffee-Scripts implicit returns we assign page.story to the
# result of a list comprehension by way of a switch expression.
try
page.story = switch action.type
when 'move'
action.order.map (id) ->
page.story.filter((para) ->
id == para.id
)[0] or throw('Ignoring move. Try reload.')
when 'add'
idx = page.story.map((para) -> para.id).indexOf(action.after) + 1
page.story.splice(idx, 0, action.item)
page.story
when 'remove'
page.story.filter (para) ->
para?.id != action.id
when 'edit'
page.story.map (para) ->
if para.id is action.id
action.item
else
para
when 'create', 'fork'
page.story or []
else
log "Unfamiliar action:", action
#page.story
throw('Unfamiliar action ignored')
catch e
return res.e e
# Add a blank journal if it does not exist.
# And add what happened to the journal.
if not page.journal
page.journal = []
if action.fork
page.journal.push({type: "fork", site: action.fork})
delete action.fork
page.journal.push(action)
pagehandler.put req.params[0], page, (e) ->
if e then return res.e e
res.send('ok')
# log 'saved'
# update sitemap
sitemaphandler.update(req.params[0], page)
# log action
# If the action is a fork, get the page from the remote server,
# otherwise ask pagehandler for it.
if action.fork
remoteGet(action.fork, req.params[0], actionCB)
else if action.type is 'create'
# Prevent attempt to write circular structure
itemCopy = JSON.parse(JSON.stringify(action.item))
pagehandler.get req.params[0], (e, page, status) ->
if e then return actionCB(e)
unless status is 404
res.status(409).send('Page already exists.')
else
actionCB(null, itemCopy)
else if action.type == 'fork'
if action.item # push
itemCopy = JSON.parse(JSON.stringify(action.item))
delete action.item
actionCB(null, itemCopy)
else # pull
remoteGet(action.site, req.params[0], actionCB)
else
pagehandler.get(req.params[0], actionCB)
# Return the oops page when login fails.
app.get '/oops', (req, res) ->
res.statusCode = 403
res.render('oops.html', {msg:'This is not your wiki!'})
# Traditional request to / redirects to index :)
app.get '/', (req, res) ->
res.redirect(index)
#### Start the server ####
# Wait to make sure owner is known before listening.
securityhandler.retrieveOwner (e) ->
# Throw if you can't find the initial owner
if e then throw e
owner = securityhandler.getOwner()
console.log "owner: " + owner
app.emit 'owner-set'
app.on 'running-serv', (server) ->
### Plugins ###
# Should replace most WebSocketServers below.
plugins = pluginsFactory(argv)
plugins.startServers({argv, app})
### Sitemap ###
# create sitemap at start-up
sitemaphandler.createSitemap(pagehandler)
# Return app when called, so that it can be watched for events and shutdown with .close() externally.
app
|
[
{
"context": "_JSON, 'utf8'))\n resultStr = '[{\"firstName\":\"Jihad\",\"lastName\":\"Saladin\",\"address\":{\"street\":\"12 Bea",
"end": 1348,
"score": 0.9998074173927307,
"start": 1343,
"tag": "NAME",
"value": "Jihad"
},
{
"context": " resultStr = '[{\"firstName\":\"Jihad\",... | spec/processFileSpec.coffee | Lxxyx/excel-as-json | 3 | processFile = require('../src/excel-as-json').processFile
fs = require 'fs'
# TODO: How to get chai defined in a more global way
chai = require 'chai'
chai.should()
expect = chai.expect;
ROW_XLSX = 'data/row-oriented.xlsx'
ROW_JSON = 'build/row-oriented.json'
COL_XLSX = 'data/col-oriented.xlsx'
COL_JSON = 'build/col-oriented.json'
describe 'process file', ->
it 'should notify on file does not exist', (done) ->
processFile 'data/doesNotExist.xlsx', null, false, (err, data) ->
err.should.be.a 'string'
expect(data).to.be.an 'undefined'
done()
it 'should not blow up when a file does not exist and no callback is provided', (done) ->
processFile 'data/doesNotExist.xlsx', ->
done()
it 'should notify on read error', (done) ->
processFile 'data/row-oriented.csv', null, false, (err, data) ->
err.should.be.a 'string'
expect(data).to.be.an 'undefined'
done()
it 'should not blow up on read error when no callback is provided', (done) ->
processFile 'data/row-oriented.csv', ->
done()
it 'should process row oriented Excel files and return the parsed object', (done) ->
processFile ROW_XLSX, ROW_JSON, false, (err, data) ->
expect(err).to.be.an 'undefined'
result = JSON.parse(fs.readFileSync(ROW_JSON, 'utf8'))
resultStr = '[{"firstName":"Jihad","lastName":"Saladin","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615}},{"firstName":"Marcus","lastName":"Rivapoli","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657}}]'
JSON.stringify(result).should.equal resultStr
JSON.stringify(data).should.equal resultStr
done()
it 'should process col oriented Excel files', (done) ->
processFile COL_XLSX, COL_JSON, true, (err, data) ->
expect(err).to.be.an 'undefined'
result = JSON.parse(fs.readFileSync(COL_JSON, 'utf8'))
resultStr = '[{"firstName":"Jihad","lastName":"Saladin","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615},"isEmployee":true,"phones":[{"type":"home","number":"123.456.7890"},{"type":"work","number":"098.765.4321"}],"aliases":["stormagedden","bob"]},{"firstName":"Marcus","lastName":"Rivapoli","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657},"isEmployee":false,"phones":[{"type":"home","number":"123.456.7891"},{"type":"work","number":"098.765.4322"}],"aliases":["mac","markie"]}]'
JSON.stringify(result).should.equal resultStr
JSON.stringify(data).should.equal resultStr
done()
it 'should return a parsed object without writing a file', (done) ->
# Ensure result file does not exit
try fs.unlinkSync ROW_JSON
catch # ignore file does not exist
processFile ROW_XLSX, undefined, false, (err, data) ->
expect(err).to.be.an 'undefined'
fs.existsSync(ROW_JSON).should.equal false
resultStr = '[{"firstName":"Jihad","lastName":"Saladin","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615}},{"firstName":"Marcus","lastName":"Rivapoli","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657}}]'
JSON.stringify(data).should.equal resultStr
done()
it 'should notify on write error', (done) ->
processFile ROW_XLSX, 'build', false, (err, data) ->
expect(err).to.be.an 'string'
done()
| 59463 | processFile = require('../src/excel-as-json').processFile
fs = require 'fs'
# TODO: How to get chai defined in a more global way
chai = require 'chai'
chai.should()
expect = chai.expect;
ROW_XLSX = 'data/row-oriented.xlsx'
ROW_JSON = 'build/row-oriented.json'
COL_XLSX = 'data/col-oriented.xlsx'
COL_JSON = 'build/col-oriented.json'
describe 'process file', ->
it 'should notify on file does not exist', (done) ->
processFile 'data/doesNotExist.xlsx', null, false, (err, data) ->
err.should.be.a 'string'
expect(data).to.be.an 'undefined'
done()
it 'should not blow up when a file does not exist and no callback is provided', (done) ->
processFile 'data/doesNotExist.xlsx', ->
done()
it 'should notify on read error', (done) ->
processFile 'data/row-oriented.csv', null, false, (err, data) ->
err.should.be.a 'string'
expect(data).to.be.an 'undefined'
done()
it 'should not blow up on read error when no callback is provided', (done) ->
processFile 'data/row-oriented.csv', ->
done()
it 'should process row oriented Excel files and return the parsed object', (done) ->
processFile ROW_XLSX, ROW_JSON, false, (err, data) ->
expect(err).to.be.an 'undefined'
result = JSON.parse(fs.readFileSync(ROW_JSON, 'utf8'))
resultStr = '[{"firstName":"<NAME>","lastName":"<NAME>","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615}},{"firstName":"<NAME>","lastName":"<NAME>","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657}}]'
JSON.stringify(result).should.equal resultStr
JSON.stringify(data).should.equal resultStr
done()
it 'should process col oriented Excel files', (done) ->
processFile COL_XLSX, COL_JSON, true, (err, data) ->
expect(err).to.be.an 'undefined'
result = JSON.parse(fs.readFileSync(COL_JSON, 'utf8'))
resultStr = '[{"firstName":"<NAME>","lastName":"<NAME>","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615},"isEmployee":true,"phones":[{"type":"home","number":"123.456.7890"},{"type":"work","number":"098.765.4321"}],"aliases":["stormagedden","bob"]},{"firstName":"<NAME>","lastName":"<NAME>","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657},"isEmployee":false,"phones":[{"type":"home","number":"123.456.7891"},{"type":"work","number":"098.765.4322"}],"aliases":["mac","markie"]}]'
JSON.stringify(result).should.equal resultStr
JSON.stringify(data).should.equal resultStr
done()
it 'should return a parsed object without writing a file', (done) ->
# Ensure result file does not exit
try fs.unlinkSync ROW_JSON
catch # ignore file does not exist
processFile ROW_XLSX, undefined, false, (err, data) ->
expect(err).to.be.an 'undefined'
fs.existsSync(ROW_JSON).should.equal false
resultStr = '[{"firstName":"<NAME>","lastName":"<NAME>","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615}},{"firstName":"<NAME>","lastName":"<NAME>","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657}}]'
JSON.stringify(data).should.equal resultStr
done()
it 'should notify on write error', (done) ->
processFile ROW_XLSX, 'build', false, (err, data) ->
expect(err).to.be.an 'string'
done()
| true | processFile = require('../src/excel-as-json').processFile
fs = require 'fs'
# TODO: How to get chai defined in a more global way
chai = require 'chai'
chai.should()
expect = chai.expect;
ROW_XLSX = 'data/row-oriented.xlsx'
ROW_JSON = 'build/row-oriented.json'
COL_XLSX = 'data/col-oriented.xlsx'
COL_JSON = 'build/col-oriented.json'
describe 'process file', ->
it 'should notify on file does not exist', (done) ->
processFile 'data/doesNotExist.xlsx', null, false, (err, data) ->
err.should.be.a 'string'
expect(data).to.be.an 'undefined'
done()
it 'should not blow up when a file does not exist and no callback is provided', (done) ->
processFile 'data/doesNotExist.xlsx', ->
done()
it 'should notify on read error', (done) ->
processFile 'data/row-oriented.csv', null, false, (err, data) ->
err.should.be.a 'string'
expect(data).to.be.an 'undefined'
done()
it 'should not blow up on read error when no callback is provided', (done) ->
processFile 'data/row-oriented.csv', ->
done()
it 'should process row oriented Excel files and return the parsed object', (done) ->
processFile ROW_XLSX, ROW_JSON, false, (err, data) ->
expect(err).to.be.an 'undefined'
result = JSON.parse(fs.readFileSync(ROW_JSON, 'utf8'))
resultStr = '[{"firstName":"PI:NAME:<NAME>END_PI","lastName":"PI:NAME:<NAME>END_PI","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615}},{"firstName":"PI:NAME:<NAME>END_PI","lastName":"PI:NAME:<NAME>END_PI","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657}}]'
JSON.stringify(result).should.equal resultStr
JSON.stringify(data).should.equal resultStr
done()
it 'should process col oriented Excel files', (done) ->
processFile COL_XLSX, COL_JSON, true, (err, data) ->
expect(err).to.be.an 'undefined'
result = JSON.parse(fs.readFileSync(COL_JSON, 'utf8'))
resultStr = '[{"firstName":"PI:NAME:<NAME>END_PI","lastName":"PI:NAME:<NAME>END_PI","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615},"isEmployee":true,"phones":[{"type":"home","number":"123.456.7890"},{"type":"work","number":"098.765.4321"}],"aliases":["stormagedden","bob"]},{"firstName":"PI:NAME:<NAME>END_PI","lastName":"PI:NAME:<NAME>END_PI","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657},"isEmployee":false,"phones":[{"type":"home","number":"123.456.7891"},{"type":"work","number":"098.765.4322"}],"aliases":["mac","markie"]}]'
JSON.stringify(result).should.equal resultStr
JSON.stringify(data).should.equal resultStr
done()
it 'should return a parsed object without writing a file', (done) ->
# Ensure result file does not exit
try fs.unlinkSync ROW_JSON
catch # ignore file does not exist
processFile ROW_XLSX, undefined, false, (err, data) ->
expect(err).to.be.an 'undefined'
fs.existsSync(ROW_JSON).should.equal false
resultStr = '[{"firstName":"PI:NAME:<NAME>END_PI","lastName":"PI:NAME:<NAME>END_PI","address":{"street":"12 Beaver Court","city":"Snowmass","state":"CO","zip":81615}},{"firstName":"PI:NAME:<NAME>END_PI","lastName":"PI:NAME:<NAME>END_PI","address":{"street":"16 Vail Rd","city":"Vail","state":"CO","zip":81657}}]'
JSON.stringify(data).should.equal resultStr
done()
it 'should notify on write error', (done) ->
processFile ROW_XLSX, 'build', false, (err, data) ->
expect(err).to.be.an 'string'
done()
|
[
{
"context": "files parser/validator\n#\n# Copyright (C) 2011-2012 Nikolay Nemshilov\n#\n\n\n#\n# Validates the package content\n#\n# @param ",
"end": 80,
"score": 0.9998878240585327,
"start": 63,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | cli/package.coffee | lovely-io/lovely.io-stl | 2 | #
# Package files parser/validator
#
# Copyright (C) 2011-2012 Nikolay Nemshilov
#
#
# Validates the package content
#
# @param {Object} data
#
validate = (data) ->
errors = [];
data.name || errors.push("miss the 'name' field")
data.version || errors.push("miss the 'version' field")
data.description || errors.push("miss the 'description' field")
data.author || errors.push("miss the 'author' field")
data.license || errors.push("miss the 'license' field")
data.version && (data.version.match(/^\d+\.\d+\.\d+$/) ||
errors.push("'version' should match the '\d+.\d+.\d+' format"))
if errors.length
print "Failed to parse the 'package.json' file:\n".red +
errors.join("\n");
process.exit()
#
# Reads the package data out fo the given dreictory
#
exports.read = read = (directory) ->
fs = require('fs')
directory or= process.cwd()
for name in ['lovely', 'package']
if fs.existsSync("#{directory}/#{name}.json")
data = fs.readFileSync("#{directory}/#{name}.json")
data = JSON.parse(data.toString())
data && validate(data)
data
#
# Parsing the current package
#
try
for key, value of read()
exports[key] = value
catch e
# file does not exists | 55759 | #
# Package files parser/validator
#
# Copyright (C) 2011-2012 <NAME>
#
#
# Validates the package content
#
# @param {Object} data
#
validate = (data) ->
errors = [];
data.name || errors.push("miss the 'name' field")
data.version || errors.push("miss the 'version' field")
data.description || errors.push("miss the 'description' field")
data.author || errors.push("miss the 'author' field")
data.license || errors.push("miss the 'license' field")
data.version && (data.version.match(/^\d+\.\d+\.\d+$/) ||
errors.push("'version' should match the '\d+.\d+.\d+' format"))
if errors.length
print "Failed to parse the 'package.json' file:\n".red +
errors.join("\n");
process.exit()
#
# Reads the package data out fo the given dreictory
#
exports.read = read = (directory) ->
fs = require('fs')
directory or= process.cwd()
for name in ['lovely', 'package']
if fs.existsSync("#{directory}/#{name}.json")
data = fs.readFileSync("#{directory}/#{name}.json")
data = JSON.parse(data.toString())
data && validate(data)
data
#
# Parsing the current package
#
try
for key, value of read()
exports[key] = value
catch e
# file does not exists | true | #
# Package files parser/validator
#
# Copyright (C) 2011-2012 PI:NAME:<NAME>END_PI
#
#
# Validates the package content
#
# @param {Object} data
#
validate = (data) ->
errors = [];
data.name || errors.push("miss the 'name' field")
data.version || errors.push("miss the 'version' field")
data.description || errors.push("miss the 'description' field")
data.author || errors.push("miss the 'author' field")
data.license || errors.push("miss the 'license' field")
data.version && (data.version.match(/^\d+\.\d+\.\d+$/) ||
errors.push("'version' should match the '\d+.\d+.\d+' format"))
if errors.length
print "Failed to parse the 'package.json' file:\n".red +
errors.join("\n");
process.exit()
#
# Reads the package data out fo the given dreictory
#
exports.read = read = (directory) ->
fs = require('fs')
directory or= process.cwd()
for name in ['lovely', 'package']
if fs.existsSync("#{directory}/#{name}.json")
data = fs.readFileSync("#{directory}/#{name}.json")
data = JSON.parse(data.toString())
data && validate(data)
data
#
# Parsing the current package
#
try
for key, value of read()
exports[key] = value
catch e
# file does not exists |
[
{
"context": "org'\n id: 'id_RedmineTimeTracker'\n pass: 'pass_RedmineTimeTracker'\n }\n\n beforeEach () ->\n angular.mock.module(",
"end": 237,
"score": 0.9989056587219238,
"start": 214,
"tag": "PASSWORD",
"value": "pass_RedmineTimeTracker"
},
{
"context": " 'http://... | test/account_test.coffee | Chanshi712/RedmineTimeTracker | 73 | expect = chai.expect
describe 'account.coffee', ->
Account = null
$rootScope = null
$q = null
Platform = null
_auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'pass_RedmineTimeTracker'
}
beforeEach () ->
angular.mock.module('timeTracker')
# initialize object
inject (_Account_, _$rootScope_, _$q_, _Platform_) ->
Account = _Account_
$rootScope = _$rootScope_
$q = _$q_
Platform = _Platform_
###
test for isValid()
###
describe 'create(param)', ->
it 'should return AccountModel which has name.', () ->
param = {
url: 'http://github.com'
id: 'test_id'
apiKey: 'test_apiKey'
pass: 'test_pass'
name: 'test_name'
numProjects: 1
projectList: '1,2,5,8,11'
}
model = Account.create(param)
expect(model.url).to.equal(param.url)
expect(model.id).to.equal(param.id)
expect(model.apiKey).to.equal(param.apiKey)
expect(model.pass).to.equal(param.pass)
expect(model.name).to.equal(param.name)
expect(model.numProjects).to.equal(param.numProjects)
expect(model.projectList).to.eql([1,2,5,8,11])
it 'should return AccountModel which doesn\'t has name.', () ->
param = {
url: 'http://github.com'
id: 'test_id'
apiKey: 'test_apiKey'
pass: 'test_pass'
numProjects: 1
projectList: '1,2,5,8,11'
}
model = Account.create(param)
expect(model.url).to.equal(param.url)
expect(model.id).to.equal(param.id)
expect(model.apiKey).to.equal(param.apiKey)
expect(model.pass).to.equal(param.pass)
expect(model.name).to.equal(param.url)
expect(model.numProjects).to.equal(param.numProjects)
expect(model.projectList).to.eql([1,2,5,8,11])
###
test for isValid()
###
describe 'AccountModel.isValid()', ->
it 'should return true', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: 'test_pass'
}
model = Account.create(auth)
expect(model.isValid()).to.be.true
it 'should return true', () ->
auth = {
url: 'http://github.com'
apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.true
it 'should return false if url missing.', () ->
auth = {
# url: 'http://github.com'
apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if apiKey is missing.', () ->
auth = {
url: 'http://github.com'
# apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if id is missing', () ->
auth = {
url: 'http://github.com'
# id: 'test_id'
pass: 'test_pass'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if password is missing', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
# pass: 'test_pass'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
describe 'AccountModel.parseProjectList()', ->
_auth = {
url: 'http://github.com'
apiKey: 'api key'
}
it 'should return [1, 2, 3]', () ->
str = "1, 2, 3"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([1,2,3])
it 'should return [1]', () ->
str = "1"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([1])
it 'should return [100000000000000000]', () ->
str = "100000000000000000"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([100000000000000000])
# Range is not support.
# it 'should return [[1-3]]', () ->
# str = "1-3"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([[1,3]])
# it 'should return [1, [3-5], 10]', () ->
# str = "1, 3-5, 10"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([1, [3, 5], 10])
# it 'should return [[1-3], [100-102]]', () ->
# str = "1-3, 100-102"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([[1, 3], [100, 102]])
it 'should return null', () ->
str = null
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.be.null
###
test for load()
###
describe 'load', ->
it 'should not returns accounts.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts).to.be.empty
done()
it 'should returns a account.', (done) ->
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'pass_RedmineTimeTracker'
apiKey: 'apiKey_RedmineTimeTracker'
}
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
obj = Account.create(auth).encrypt()
deferred.resolve([obj])
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts[0].id).to.equal(auth.id)
expect(accounts[0].apiKey).to.equal(auth.apiKey)
expect(accounts[0].pass).to.equal(auth.pass)
done()
it 'should returns two accounts.', (done) ->
auth1 = {
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'pass_RedmineTimeTracker1'
}
auth2 = {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'pass_RedmineTimeTracker2'
}
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
obj1 = Account.create(auth1).encrypt()
obj2 = Account.create(auth2).encrypt()
deferred.resolve([obj1, obj2])
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts[0].id).to.equal(auth1.id)
expect(accounts[0].pass).to.equal(auth1.pass)
expect(accounts[1].id).to.equal(auth2.id)
expect(accounts[1].pass).to.equal(auth2.pass)
done()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
Account.load().then () ->
done(new Error())
, () ->
done()
###*
test for addAccount(account)
###
describe 'addAccount(account)', ->
it 'should save a account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'pass_RedmineTimeTracker'
apiKey: 'apiKey_RedmineTimeTracker'
}
# exec
Account.addAccount(auth).then (res) ->
expect(res.url).to.equal(auth.url)
expect(res.id).to.equal(auth.id)
expect(res.apiKey).to.equal(auth.apiKey)
expect(res.pass).to.equal(auth.pass)
done()
it 'should overwrite a account to same url account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'pass_RedmineTimeTracker1'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2b'
pass: 'pass_RedmineTimeTracker2b'
}]
auth2 = {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'pass_RedmineTimeTracker2'
}
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auth2))
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then (res) ->
expect(res.url).to.equal(auths[1].url)
expect(res.id).to.equal(auths[1].id)
expect(res.pass).to.equal(auths[1].pass)
accounts = Account.getAccounts()
for a, i in auths
data = accounts[i]
expect(data.url).to.equal(a.url)
expect(data.id).to.equal(a.id)
expect(data.pass).to.equal(a.pass)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'pass_RedmineTimeTracker'
apiKey: 'apiKey_RedmineTimeTracker'
}
# exec
Account.addAccount(auth).then () ->
done(new Error())
, () ->
done()
###*
test for removeAccount(url)
###
describe 'removeAccount(url)', ->
it 'should remove a account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'pass_RedmineTimeTracker1'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'pass_RedmineTimeTracker2'
}]
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then(-> Account.removeAccount('http://demo.redmine.org1'))
.then ->
accounts = Account.getAccounts()
expect(accounts).to.have.lengthOf(1)
expect(accounts[0].url).to.equal(auths[1].url)
expect(accounts[0].id).to.equal(auths[1].id)
expect(accounts[0].pass).to.equal(auths[1].pass)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'pass_RedmineTimeTracker'
apiKey: 'apiKey_RedmineTimeTracker'
}
# exec
Account.addAccount(auth)
.then(-> Account.removeAccount('pass_RedmineTimeTracker2'))
.then () ->
done(new Error())
, () ->
done()
###*
test for clearAccount()
###
describe 'clearAccount()', ->
it 'should remove all account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'pass_RedmineTimeTracker1'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'pass_RedmineTimeTracker2'
}]
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then(-> Account.clearAccount())
.then ->
accounts = Account.getAccounts()
expect(accounts).to.have.lengthOf(0)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
# exec
Account.clearAccount().then () ->
done(new Error())
, () ->
done()
| 23098 | expect = chai.expect
describe 'account.coffee', ->
Account = null
$rootScope = null
$q = null
Platform = null
_auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: '<PASSWORD>'
}
beforeEach () ->
angular.mock.module('timeTracker')
# initialize object
inject (_Account_, _$rootScope_, _$q_, _Platform_) ->
Account = _Account_
$rootScope = _$rootScope_
$q = _$q_
Platform = _Platform_
###
test for isValid()
###
describe 'create(param)', ->
it 'should return AccountModel which has name.', () ->
param = {
url: 'http://github.com'
id: 'test_id'
apiKey: 'test_apiKey'
pass: '<PASSWORD>'
name: '<NAME>_name'
numProjects: 1
projectList: '1,2,5,8,11'
}
model = Account.create(param)
expect(model.url).to.equal(param.url)
expect(model.id).to.equal(param.id)
expect(model.apiKey).to.equal(param.apiKey)
expect(model.pass).to.equal(param.pass)
expect(model.name).to.equal(param.name)
expect(model.numProjects).to.equal(param.numProjects)
expect(model.projectList).to.eql([1,2,5,8,11])
it 'should return AccountModel which doesn\'t has name.', () ->
param = {
url: 'http://github.com'
id: 'test_id'
apiKey: '<KEY>'
pass: '<PASSWORD>'
numProjects: 1
projectList: '1,2,5,8,11'
}
model = Account.create(param)
expect(model.url).to.equal(param.url)
expect(model.id).to.equal(param.id)
expect(model.apiKey).to.equal(param.apiKey)
expect(model.pass).to.equal(param.pass)
expect(model.name).to.equal(param.url)
expect(model.numProjects).to.equal(param.numProjects)
expect(model.projectList).to.eql([1,2,5,8,11])
###
test for isValid()
###
describe 'AccountModel.isValid()', ->
it 'should return true', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: '<PASSWORD>'
}
model = Account.create(auth)
expect(model.isValid()).to.be.true
it 'should return true', () ->
auth = {
url: 'http://github.com'
apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.true
it 'should return false if url missing.', () ->
auth = {
# url: 'http://github.com'
apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if apiKey is missing.', () ->
auth = {
url: 'http://github.com'
# apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if id is missing', () ->
auth = {
url: 'http://github.com'
# id: 'test_id'
pass: '<PASSWORD>'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if password is missing', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
# pass: '<PASSWORD>'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
describe 'AccountModel.parseProjectList()', ->
_auth = {
url: 'http://github.com'
apiKey: 'api key'
}
it 'should return [1, 2, 3]', () ->
str = "1, 2, 3"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([1,2,3])
it 'should return [1]', () ->
str = "1"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([1])
it 'should return [100000000000000000]', () ->
str = "100000000000000000"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([100000000000000000])
# Range is not support.
# it 'should return [[1-3]]', () ->
# str = "1-3"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([[1,3]])
# it 'should return [1, [3-5], 10]', () ->
# str = "1, 3-5, 10"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([1, [3, 5], 10])
# it 'should return [[1-3], [100-102]]', () ->
# str = "1-3, 100-102"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([[1, 3], [100, 102]])
it 'should return null', () ->
str = null
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.be.null
###
test for load()
###
describe 'load', ->
it 'should not returns accounts.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts).to.be.empty
done()
it 'should returns a account.', (done) ->
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: '<PASSWORD>'
apiKey: '<KEY>'
}
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
obj = Account.create(auth).encrypt()
deferred.resolve([obj])
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts[0].id).to.equal(auth.id)
expect(accounts[0].apiKey).to.equal(auth.apiKey)
expect(accounts[0].pass).to.equal(auth.pass)
done()
it 'should returns two accounts.', (done) ->
auth1 = {
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: '<PASSWORD>'
}
auth2 = {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: '<PASSWORD>'
}
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
obj1 = Account.create(auth1).encrypt()
obj2 = Account.create(auth2).encrypt()
deferred.resolve([obj1, obj2])
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts[0].id).to.equal(auth1.id)
expect(accounts[0].pass).to.equal(auth1.pass)
expect(accounts[1].id).to.equal(auth2.id)
expect(accounts[1].pass).to.equal(auth2.pass)
done()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
Account.load().then () ->
done(new Error())
, () ->
done()
###*
test for addAccount(account)
###
describe 'addAccount(account)', ->
it 'should save a account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: '<PASSWORD>'
apiKey: '<KEY>'
}
# exec
Account.addAccount(auth).then (res) ->
expect(res.url).to.equal(auth.url)
expect(res.id).to.equal(auth.id)
expect(res.apiKey).to.equal(auth.apiKey)
expect(res.pass).to.equal(auth.pass)
done()
it 'should overwrite a account to same url account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: '<PASSWORD>'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2b'
pass: '<PASSWORD>'
}]
auth2 = {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: '<PASSWORD>'
}
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auth2))
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then (res) ->
expect(res.url).to.equal(auths[1].url)
expect(res.id).to.equal(auths[1].id)
expect(res.pass).to.equal(auths[1].pass)
accounts = Account.getAccounts()
for a, i in auths
data = accounts[i]
expect(data.url).to.equal(a.url)
expect(data.id).to.equal(a.id)
expect(data.pass).to.equal(a.pass)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: '<PASSWORD>'
apiKey: '<KEY>'
}
# exec
Account.addAccount(auth).then () ->
done(new Error())
, () ->
done()
###*
test for removeAccount(url)
###
describe 'removeAccount(url)', ->
it 'should remove a account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: '<PASSWORD>'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: '<PASSWORD>'
}]
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then(-> Account.removeAccount('http://demo.redmine.org1'))
.then ->
accounts = Account.getAccounts()
expect(accounts).to.have.lengthOf(1)
expect(accounts[0].url).to.equal(auths[1].url)
expect(accounts[0].id).to.equal(auths[1].id)
expect(accounts[0].pass).to.equal(auths[1].pass)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: '<PASSWORD>'
apiKey: '<KEY>'
}
# exec
Account.addAccount(auth)
.then(-> Account.removeAccount('pass_RedmineTimeTracker<PASSWORD>'))
.then () ->
done(new Error())
, () ->
done()
###*
test for clearAccount()
###
describe 'clearAccount()', ->
it 'should remove all account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: '<PASSWORD>'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: '<PASSWORD>'
}]
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then(-> Account.clearAccount())
.then ->
accounts = Account.getAccounts()
expect(accounts).to.have.lengthOf(0)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
# exec
Account.clearAccount().then () ->
done(new Error())
, () ->
done()
| true | expect = chai.expect
describe 'account.coffee', ->
Account = null
$rootScope = null
$q = null
Platform = null
_auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
beforeEach () ->
angular.mock.module('timeTracker')
# initialize object
inject (_Account_, _$rootScope_, _$q_, _Platform_) ->
Account = _Account_
$rootScope = _$rootScope_
$q = _$q_
Platform = _Platform_
###
test for isValid()
###
describe 'create(param)', ->
it 'should return AccountModel which has name.', () ->
param = {
url: 'http://github.com'
id: 'test_id'
apiKey: 'test_apiKey'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
name: 'PI:NAME:<NAME>END_PI_name'
numProjects: 1
projectList: '1,2,5,8,11'
}
model = Account.create(param)
expect(model.url).to.equal(param.url)
expect(model.id).to.equal(param.id)
expect(model.apiKey).to.equal(param.apiKey)
expect(model.pass).to.equal(param.pass)
expect(model.name).to.equal(param.name)
expect(model.numProjects).to.equal(param.numProjects)
expect(model.projectList).to.eql([1,2,5,8,11])
it 'should return AccountModel which doesn\'t has name.', () ->
param = {
url: 'http://github.com'
id: 'test_id'
apiKey: 'PI:KEY:<KEY>END_PI'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
numProjects: 1
projectList: '1,2,5,8,11'
}
model = Account.create(param)
expect(model.url).to.equal(param.url)
expect(model.id).to.equal(param.id)
expect(model.apiKey).to.equal(param.apiKey)
expect(model.pass).to.equal(param.pass)
expect(model.name).to.equal(param.url)
expect(model.numProjects).to.equal(param.numProjects)
expect(model.projectList).to.eql([1,2,5,8,11])
###
test for isValid()
###
describe 'AccountModel.isValid()', ->
it 'should return true', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
model = Account.create(auth)
expect(model.isValid()).to.be.true
it 'should return true', () ->
auth = {
url: 'http://github.com'
apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.true
it 'should return false if url missing.', () ->
auth = {
# url: 'http://github.com'
apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if apiKey is missing.', () ->
auth = {
url: 'http://github.com'
# apiKey: 'api key'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if id is missing', () ->
auth = {
url: 'http://github.com'
# id: 'test_id'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
it 'should return false if password is missing', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
# pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
model = Account.create(auth)
expect(model.isValid()).to.be.false
describe 'AccountModel.parseProjectList()', ->
_auth = {
url: 'http://github.com'
apiKey: 'api key'
}
it 'should return [1, 2, 3]', () ->
str = "1, 2, 3"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([1,2,3])
it 'should return [1]', () ->
str = "1"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([1])
it 'should return [100000000000000000]', () ->
str = "100000000000000000"
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.eql([100000000000000000])
# Range is not support.
# it 'should return [[1-3]]', () ->
# str = "1-3"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([[1,3]])
# it 'should return [1, [3-5], 10]', () ->
# str = "1, 3-5, 10"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([1, [3, 5], 10])
# it 'should return [[1-3], [100-102]]', () ->
# str = "1-3, 100-102"
# model = Account.create(_auth)
# parsed = model.parseProjectList(str)
# expect(parsed).to.eql([[1, 3], [100, 102]])
it 'should return null', () ->
str = null
model = Account.create(_auth)
parsed = model.parseProjectList(str)
expect(parsed).to.be.null
###
test for load()
###
describe 'load', ->
it 'should not returns accounts.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts).to.be.empty
done()
it 'should returns a account.', (done) ->
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
apiKey: 'PI:KEY:<KEY>END_PI'
}
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
obj = Account.create(auth).encrypt()
deferred.resolve([obj])
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts[0].id).to.equal(auth.id)
expect(accounts[0].apiKey).to.equal(auth.apiKey)
expect(accounts[0].pass).to.equal(auth.pass)
done()
it 'should returns two accounts.', (done) ->
auth1 = {
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
auth2 = {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
obj1 = Account.create(auth1).encrypt()
obj2 = Account.create(auth2).encrypt()
deferred.resolve([obj1, obj2])
$rootScope.$apply()
# exec
Account.load().then (accounts) ->
expect(accounts[0].id).to.equal(auth1.id)
expect(accounts[0].pass).to.equal(auth1.pass)
expect(accounts[1].id).to.equal(auth2.id)
expect(accounts[1].pass).to.equal(auth2.pass)
done()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "load").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
Account.load().then () ->
done(new Error())
, () ->
done()
###*
test for addAccount(account)
###
describe 'addAccount(account)', ->
it 'should save a account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
apiKey: 'PI:KEY:<KEY>END_PI'
}
# exec
Account.addAccount(auth).then (res) ->
expect(res.url).to.equal(auth.url)
expect(res.id).to.equal(auth.id)
expect(res.apiKey).to.equal(auth.apiKey)
expect(res.pass).to.equal(auth.pass)
done()
it 'should overwrite a account to same url account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2b'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}]
auth2 = {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auth2))
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then (res) ->
expect(res.url).to.equal(auths[1].url)
expect(res.id).to.equal(auths[1].id)
expect(res.pass).to.equal(auths[1].pass)
accounts = Account.getAccounts()
for a, i in auths
data = accounts[i]
expect(data.url).to.equal(a.url)
expect(data.id).to.equal(a.id)
expect(data.pass).to.equal(a.pass)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
apiKey: 'PI:KEY:<KEY>END_PI'
}
# exec
Account.addAccount(auth).then () ->
done(new Error())
, () ->
done()
###*
test for removeAccount(url)
###
describe 'removeAccount(url)', ->
it 'should remove a account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}]
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then(-> Account.removeAccount('http://demo.redmine.org1'))
.then ->
accounts = Account.getAccounts()
expect(accounts).to.have.lengthOf(1)
expect(accounts[0].url).to.equal(auths[1].url)
expect(accounts[0].id).to.equal(auths[1].id)
expect(accounts[0].pass).to.equal(auths[1].pass)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
auth = {
url: 'http://demo.redmine.org'
id: 'id_RedmineTimeTracker'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
apiKey: 'PI:KEY:<KEY>END_PI'
}
# exec
Account.addAccount(auth)
.then(-> Account.removeAccount('pass_RedmineTimeTrackerPI:PASSWORD:<PASSWORD>END_PI'))
.then () ->
done(new Error())
, () ->
done()
###*
test for clearAccount()
###
describe 'clearAccount()', ->
it 'should remove all account.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.resolve()
$rootScope.$apply()
auths = [{
url: 'http://demo.redmine.org1'
id: 'id_RedmineTimeTracker1'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}, {
url: 'http://demo.redmine.org2'
id: 'id_RedmineTimeTracker2'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}]
# exec
d = $q.defer()
d.promise
.then(-> Account.addAccount(auths[0]))
.then(-> Account.addAccount(auths[1]))
.then(-> Account.clearAccount())
.then ->
accounts = Account.getAccounts()
expect(accounts).to.have.lengthOf(0)
done()
d.resolve()
it 'should reject if Platform has anything error.', (done) ->
deferred = $q.defer()
sinon.stub(Platform, "save").returns(deferred.promise)
setTimeout () ->
deferred.reject()
$rootScope.$apply()
# exec
Account.clearAccount().then () ->
done(new Error())
, () ->
done()
|
[
{
"context": "t = chai.expect\n\nprocess.env.GIST_ACCESS_TOKEN = \"ee33\"\n\nhelper = new Helper('../src/gist-me.coffee')\n\nd",
"end": 251,
"score": 0.9944036602973938,
"start": 247,
"tag": "PASSWORD",
"value": "ee33"
},
{
"context": "e TOKEN, short text\", ->\n \n room.user.... | test/gist-me_test.coffee | n3tr/hubot-gist-me | 1 | # Hubot classes
Helper = require('hubot-test-helper')
# Load assertion
nock = require 'nock'
chai = require 'chai'
sinon = require 'sinon'
path = require 'path'
chai.use require 'sinon-chai'
expect = chai.expect
process.env.GIST_ACCESS_TOKEN = "ee33"
helper = new Helper('../src/gist-me.coffee')
describe 'gistme', ->
beforeEach ->
@robot =
respond: sinon.spy()
hear: sinon.spy()
require('../src/gist-me.coffee')(@robot)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gistme (\S*\.\S+)?((.*\s*)+)/i)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gist-token:reset/i)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gist-token:set (.*)/i)
describe 'gistme create gist', ->
room = null
beforeEach (done) ->
room = helper.createRoom()
done()
afterEach ->
it "responsd when provide TOKEN, short text", ->
room.user.say 'alice', 'hubot gistme 33ee'
expect(room.messages).to.eql [
['alice', 'hubot gistme 33ee']
['hubot', 'Code is too short, Please try again.']
]
| 121367 | # Hubot classes
Helper = require('hubot-test-helper')
# Load assertion
nock = require 'nock'
chai = require 'chai'
sinon = require 'sinon'
path = require 'path'
chai.use require 'sinon-chai'
expect = chai.expect
process.env.GIST_ACCESS_TOKEN = "<PASSWORD>"
helper = new Helper('../src/gist-me.coffee')
describe 'gistme', ->
beforeEach ->
@robot =
respond: sinon.spy()
hear: sinon.spy()
require('../src/gist-me.coffee')(@robot)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gistme (\S*\.\S+)?((.*\s*)+)/i)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gist-token:reset/i)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gist-token:set (.*)/i)
describe 'gistme create gist', ->
room = null
beforeEach (done) ->
room = helper.createRoom()
done()
afterEach ->
it "responsd when provide TOKEN, short text", ->
room.user.say 'alice', 'hubot gistme 33ee'
expect(room.messages).to.eql [
['alice', 'hubot gistme 33ee']
['hubot', 'Code is too short, Please try again.']
]
| true | # Hubot classes
Helper = require('hubot-test-helper')
# Load assertion
nock = require 'nock'
chai = require 'chai'
sinon = require 'sinon'
path = require 'path'
chai.use require 'sinon-chai'
expect = chai.expect
process.env.GIST_ACCESS_TOKEN = "PI:PASSWORD:<PASSWORD>END_PI"
helper = new Helper('../src/gist-me.coffee')
describe 'gistme', ->
beforeEach ->
@robot =
respond: sinon.spy()
hear: sinon.spy()
require('../src/gist-me.coffee')(@robot)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gistme (\S*\.\S+)?((.*\s*)+)/i)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gist-token:reset/i)
it 'registers a respond listener', ->
expect(@robot.respond).to.have.been.calledWith(/gist-token:set (.*)/i)
describe 'gistme create gist', ->
room = null
beforeEach (done) ->
room = helper.createRoom()
done()
afterEach ->
it "responsd when provide TOKEN, short text", ->
room.user.say 'alice', 'hubot gistme 33ee'
expect(room.messages).to.eql [
['alice', 'hubot gistme 33ee']
['hubot', 'Code is too short, Please try again.']
]
|
[
{
"context": " .set(\"hostname\",\"${2:192.168.0.106}\")#provide windows Server/System hostname.\n ",
"end": 1002,
"score": 0.998449444770813,
"start": 989,
"tag": "IP_ADDRESS",
"value": "192.168.0.106"
},
{
"context": "e \n ... | snippets/flint-windows-connector.cson | manoj-dhadke/flint-atom | 0 |
##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
'.source.ruby':
'flint-windows-connector-template':
'prefix': 'connwindows'
'body': """
@log.trace("Calling Windows Connector...")
${1:connector_name}_response = @call.connector("${1:connector_name}")
.set("hostname","${2:192.168.0.106}")#provide windows Server/System hostname.
.set("protocol","${3:http}") #must be specified as http or https depending on unsecure/secure communication
.set("command",${4:command})#command to be executed on windows machine
.set("user","${5:ABC}")# username associated with windows machine
.set("password","${6:XXXX}")#provide valid password
.set("port",${7:5985})#port number on which the windows System/Server is listening(default:http:5985/https:5986)
.set("timeout",${8:200})#timeout in milliseconds
.sync
#Windows Connector Response Meta Parameters
${9:response_exitcode}=${1:connector_name}_response.exitcode #Exit status code
${10:response_message} = ${1:connector_name}_response.message #Execution status messages
#Windows Connector Response Parameters
${11:response_output} = response.get("output") #command execution results
${12:response_exit_status} = response.get("exit-status") #command execution exit status
${13:response_error} = response.get("error") #error cause
if ${9:response_exitcode} == 0
@log.info('SUCCESS in executing Windows Connector where, exitcode ::' +${9:response_exitcode} '|
message ::' +${10:response_message})
@log.info('Command execution results :: output:: ' +${11:response_output} '|
exit-status:: ' +${12:response_exit_status} '|
error::' + ${13:+response_error})
else
@log.error('ERROR in executing Windows connector where, exitcode ::' +${9:response_exitcode} '|
message :: ' +${6:response_message})
@output.exit(1,${10:response_message})
end
""" | 184888 |
##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
'.source.ruby':
'flint-windows-connector-template':
'prefix': 'connwindows'
'body': """
@log.trace("Calling Windows Connector...")
${1:connector_name}_response = @call.connector("${1:connector_name}")
.set("hostname","${2:192.168.0.106}")#provide windows Server/System hostname.
.set("protocol","${3:http}") #must be specified as http or https depending on unsecure/secure communication
.set("command",${4:command})#command to be executed on windows machine
.set("user","${5:ABC}")# username associated with windows machine
.set("password","<PASSWORD>}")#provide valid password
.set("port",${7:5985})#port number on which the windows System/Server is listening(default:http:5985/https:5986)
.set("timeout",${8:200})#timeout in milliseconds
.sync
#Windows Connector Response Meta Parameters
${9:response_exitcode}=${1:connector_name}_response.exitcode #Exit status code
${10:response_message} = ${1:connector_name}_response.message #Execution status messages
#Windows Connector Response Parameters
${11:response_output} = response.get("output") #command execution results
${12:response_exit_status} = response.get("exit-status") #command execution exit status
${13:response_error} = response.get("error") #error cause
if ${9:response_exitcode} == 0
@log.info('SUCCESS in executing Windows Connector where, exitcode ::' +${9:response_exitcode} '|
message ::' +${10:response_message})
@log.info('Command execution results :: output:: ' +${11:response_output} '|
exit-status:: ' +${12:response_exit_status} '|
error::' + ${13:+response_error})
else
@log.error('ERROR in executing Windows connector where, exitcode ::' +${9:response_exitcode} '|
message :: ' +${6:response_message})
@output.exit(1,${10:response_message})
end
""" | true |
##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
'.source.ruby':
'flint-windows-connector-template':
'prefix': 'connwindows'
'body': """
@log.trace("Calling Windows Connector...")
${1:connector_name}_response = @call.connector("${1:connector_name}")
.set("hostname","${2:192.168.0.106}")#provide windows Server/System hostname.
.set("protocol","${3:http}") #must be specified as http or https depending on unsecure/secure communication
.set("command",${4:command})#command to be executed on windows machine
.set("user","${5:ABC}")# username associated with windows machine
.set("password","PI:PASSWORD:<PASSWORD>END_PI}")#provide valid password
.set("port",${7:5985})#port number on which the windows System/Server is listening(default:http:5985/https:5986)
.set("timeout",${8:200})#timeout in milliseconds
.sync
#Windows Connector Response Meta Parameters
${9:response_exitcode}=${1:connector_name}_response.exitcode #Exit status code
${10:response_message} = ${1:connector_name}_response.message #Execution status messages
#Windows Connector Response Parameters
${11:response_output} = response.get("output") #command execution results
${12:response_exit_status} = response.get("exit-status") #command execution exit status
${13:response_error} = response.get("error") #error cause
if ${9:response_exitcode} == 0
@log.info('SUCCESS in executing Windows Connector where, exitcode ::' +${9:response_exitcode} '|
message ::' +${10:response_message})
@log.info('Command execution results :: output:: ' +${11:response_output} '|
exit-status:: ' +${12:response_exit_status} '|
error::' + ${13:+response_error})
else
@log.error('ERROR in executing Windows connector where, exitcode ::' +${9:response_exitcode} '|
message :: ' +${6:response_message})
@output.exit(1,${10:response_message})
end
""" |
[
{
"context": "ord: \"Mettre à jour le mot de passe\"\n password: \"Mot de passe\"\n usernameOrEmail: \"Nom d'utilisateur ou email\"\n",
"end": 462,
"score": 0.9847304224967957,
"start": 450,
"tag": "PASSWORD",
"value": "Mot de passe"
},
{
"context": "nregistrer avec votre adresse e... | client/t9n/french.coffee | NitroLabs/accounts-entry | 0 | fr =
signIn: "Se Connecter"
signin: "se connecter"
signOut: "Se Deconnecter"
signUp: "S'enregistrer"
OR: "OU"
forgotPassword: "Vous avez oublié votre mot de passe ?"
emailAddress: "Adresse Email"
emailResetLink: "Adresse pour reinitialiser votre mot de passe"
dontHaveAnAccount: "Vous n'avez pas de compte ?"
resetYourPassword: "Reinitialiser votre mot de passe"
updateYourPassword: "Mettre à jour le mot de passe"
password: "Mot de passe"
usernameOrEmail: "Nom d'utilisateur ou email"
email: "Email"
ifYouAlreadyHaveAnAccount: "Si vous avez déjà un compte"
signUpWithYourEmailAddress: "S'enregistrer avec votre adresse email"
username: "Nom d'utilisateur"
optional: "Optionnel"
signupCode: "Code d'inscription"
clickAgree: "En cliquant sur S'enregistrer, vous acceptez notre"
privacyPolicy: "Politique de confidentialité"
terms: "Conditions d'utilisation"
sign: "S'enregistrer"
configure: "Configurer"
with: "avec"
createAccount: "Créer un compte"
and: "et"
error:
minChar: "Votre mot de passe doit contenir au minimum 7 caractères."
pwOneLetter: "Votre mot de passe doit contenir au moins une lettre."
pwOneDigit: "Votre mot de passe doit contenir au moins un chiffre."
usernameRequired: "Un nom d'utilisateur est requis."
emailRequired: "Un email est requis."
signupCodeRequired: "Un code d'inscription est requis."
signupCodeIncorrect: "Le code d'enregistrement est incorrect."
signupEmailDuplicated: "Le courrier électronique d'inscription a déjà été pris."
signInRequired: "Vous devez être connecté pour continuer."
usernameIsEmail: "Le nom d'utilisateur ne peut être le même que l'adresse email."
T9n.map "fr", fr
| 46726 | fr =
signIn: "Se Connecter"
signin: "se connecter"
signOut: "Se Deconnecter"
signUp: "S'enregistrer"
OR: "OU"
forgotPassword: "Vous avez oublié votre mot de passe ?"
emailAddress: "Adresse Email"
emailResetLink: "Adresse pour reinitialiser votre mot de passe"
dontHaveAnAccount: "Vous n'avez pas de compte ?"
resetYourPassword: "Reinitialiser votre mot de passe"
updateYourPassword: "Mettre à jour le mot de passe"
password: "<PASSWORD>"
usernameOrEmail: "Nom d'utilisateur ou email"
email: "Email"
ifYouAlreadyHaveAnAccount: "Si vous avez déjà un compte"
signUpWithYourEmailAddress: "S'enregistrer avec votre adresse email"
username: "Nom d'utilisateur"
optional: "Optionnel"
signupCode: "Code d'inscription"
clickAgree: "En cliquant sur S'enregistrer, vous acceptez notre"
privacyPolicy: "Politique de confidentialité"
terms: "Conditions d'utilisation"
sign: "S'enregistrer"
configure: "Configurer"
with: "avec"
createAccount: "Créer un compte"
and: "et"
error:
minChar: "Votre mot de passe doit contenir au minimum 7 caractères."
pwOneLetter: "Votre mot de passe doit contenir au moins une lettre."
pwOneDigit: "Votre mot de passe doit contenir au moins un chiffre."
usernameRequired: "Un nom d'utilisateur est requis."
emailRequired: "Un email est requis."
signupCodeRequired: "Un code d'inscription est requis."
signupCodeIncorrect: "Le code d'enregistrement est incorrect."
signupEmailDuplicated: "Le courrier électronique d'inscription a déjà été pris."
signInRequired: "Vous devez être connecté pour continuer."
usernameIsEmail: "Le nom d'utilisateur ne peut être le même que l'adresse email."
T9n.map "fr", fr
| true | fr =
signIn: "Se Connecter"
signin: "se connecter"
signOut: "Se Deconnecter"
signUp: "S'enregistrer"
OR: "OU"
forgotPassword: "Vous avez oublié votre mot de passe ?"
emailAddress: "Adresse Email"
emailResetLink: "Adresse pour reinitialiser votre mot de passe"
dontHaveAnAccount: "Vous n'avez pas de compte ?"
resetYourPassword: "Reinitialiser votre mot de passe"
updateYourPassword: "Mettre à jour le mot de passe"
password: "PI:PASSWORD:<PASSWORD>END_PI"
usernameOrEmail: "Nom d'utilisateur ou email"
email: "Email"
ifYouAlreadyHaveAnAccount: "Si vous avez déjà un compte"
signUpWithYourEmailAddress: "S'enregistrer avec votre adresse email"
username: "Nom d'utilisateur"
optional: "Optionnel"
signupCode: "Code d'inscription"
clickAgree: "En cliquant sur S'enregistrer, vous acceptez notre"
privacyPolicy: "Politique de confidentialité"
terms: "Conditions d'utilisation"
sign: "S'enregistrer"
configure: "Configurer"
with: "avec"
createAccount: "Créer un compte"
and: "et"
error:
minChar: "Votre mot de passe doit contenir au minimum 7 caractères."
pwOneLetter: "Votre mot de passe doit contenir au moins une lettre."
pwOneDigit: "Votre mot de passe doit contenir au moins un chiffre."
usernameRequired: "Un nom d'utilisateur est requis."
emailRequired: "Un email est requis."
signupCodeRequired: "Un code d'inscription est requis."
signupCodeIncorrect: "Le code d'enregistrement est incorrect."
signupEmailDuplicated: "Le courrier électronique d'inscription a déjà été pris."
signInRequired: "Vous devez être connecté pour continuer."
usernameIsEmail: "Le nom d'utilisateur ne peut être le même que l'adresse email."
T9n.map "fr", fr
|
[
{
"context": ".\"\n $scope.settings = {\n workspaceDirsKeys: ['gamesDir', 'localLib', 'modelRepository']\n workspaceDir",
"end": 276,
"score": 0.8095805644989014,
"start": 268,
"tag": "KEY",
"value": "gamesDir"
},
{
"context": "settings = {\n workspaceDirsKeys: ['gamesDir',... | tools/settings/controller.coffee | mess110/coffee-engine | 1 | app.controller 'SettingsController', ['$scope', ($scope) ->
Hodler.get().engine.removeDom()
$scope.ui.project.name = 'Settings'
$scope.ui.updateOutput = "Update means stash/pull/stash pop.\nUse if extremely lazy."
$scope.settings = {
workspaceDirsKeys: ['gamesDir', 'localLib', 'modelRepository']
workspaceDirs: []
}
for key in $scope.settings.workspaceDirsKeys
methodName = "load_#{key}"
$scope[methodName]= (event) ->
s = event.target.files[0].path
$scope.workspace[key] = s
Persist.setJson('workspace', $scope.workspace)
$scope.$apply()
$scope.settings.workspaceDirs.push {
key: key, method: $scope[methodName]
}
$scope.updateCE = ->
$scope.ui.updating = true
$scope.ui.updateOutput = "Update started. Please wait."
$scope.ui.updateError = ''
puts = (error, stdout, stderr) ->
$scope.ui.updateOutput = stdout
$scope.ui.updateError = stderr
$scope.ui.updating = false
$scope.$apply()
return
exec 'grunt shell:update', puts
]
| 81221 | app.controller 'SettingsController', ['$scope', ($scope) ->
Hodler.get().engine.removeDom()
$scope.ui.project.name = 'Settings'
$scope.ui.updateOutput = "Update means stash/pull/stash pop.\nUse if extremely lazy."
$scope.settings = {
workspaceDirsKeys: ['<KEY>', '<KEY> '<KEY>']
workspaceDirs: []
}
for key in $scope.settings.workspaceDirsKeys
methodName = "load_#{key}"
$scope[methodName]= (event) ->
s = event.target.files[0].path
$scope.workspace[key] = s
Persist.setJson('workspace', $scope.workspace)
$scope.$apply()
$scope.settings.workspaceDirs.push {
key: key, method: $scope[methodName]
}
$scope.updateCE = ->
$scope.ui.updating = true
$scope.ui.updateOutput = "Update started. Please wait."
$scope.ui.updateError = ''
puts = (error, stdout, stderr) ->
$scope.ui.updateOutput = stdout
$scope.ui.updateError = stderr
$scope.ui.updating = false
$scope.$apply()
return
exec 'grunt shell:update', puts
]
| true | app.controller 'SettingsController', ['$scope', ($scope) ->
Hodler.get().engine.removeDom()
$scope.ui.project.name = 'Settings'
$scope.ui.updateOutput = "Update means stash/pull/stash pop.\nUse if extremely lazy."
$scope.settings = {
workspaceDirsKeys: ['PI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI 'PI:KEY:<KEY>END_PI']
workspaceDirs: []
}
for key in $scope.settings.workspaceDirsKeys
methodName = "load_#{key}"
$scope[methodName]= (event) ->
s = event.target.files[0].path
$scope.workspace[key] = s
Persist.setJson('workspace', $scope.workspace)
$scope.$apply()
$scope.settings.workspaceDirs.push {
key: key, method: $scope[methodName]
}
$scope.updateCE = ->
$scope.ui.updating = true
$scope.ui.updateOutput = "Update started. Please wait."
$scope.ui.updateError = ''
puts = (error, stdout, stderr) ->
$scope.ui.updateOutput = stdout
$scope.ui.updateError = stderr
$scope.ui.updating = false
$scope.$apply()
return
exec 'grunt shell:update', puts
]
|
[
{
"context": ":\n uuid: 'the-nowmen-uuid'\n token: 'the-nowmen-token'\n hostname: 'localhost'\n ",
"end": 952,
"score": 0.7087734341621399,
"start": 949,
"tag": "PASSWORD",
"value": "the"
},
{
"context": " uuid: 'the-nowmen-uuid'\n token: 'the-no... | test/worker-spec.coffee | octoblu/now-man-worker | 0 | Worker = require '../src/worker'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
MeshbluConfig = require 'meshblu-config'
shmock = require 'shmock'
mongojs = require 'mongojs'
{ ObjectId } = require 'mongojs'
enableDestroy = require 'server-destroy'
describe 'Worker', ->
beforeEach (done) ->
client = new Redis 'localhost', { dropBufferSupport: true }
client.on 'ready', =>
@client = new RedisNS 'test-nowmen-worker', client
@client.del 'work'
done()
beforeEach ->
@meshblu = shmock 0xd00d
enableDestroy @meshblu
database = mongojs 'the-nowmen-worker-test', ['soldiers']
@collection = database.collection 'soldiers'
queueName = 'work'
queueTimeout = 1
@consoleError = sinon.spy()
@sut = new Worker {
disableSendTimestamp: true
sendUnixTimestamp: true
meshbluConfig:
uuid: 'the-nowmen-uuid'
token: 'the-nowmen-token'
hostname: 'localhost'
port: 0xd00d
protocol: 'http'
@client,
database
queueName,
queueTimeout,
requestTimeout: 1000,
@consoleError,
}
beforeEach (done) ->
@collection.remove done
afterEach ->
@meshblu.destroy()
describe '->doAndDrain', ->
describe 'when a job queued', ->
describe 'when a transactionId is passed', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
describe 'when a uuid is passed', ->
beforeEach (done) ->
record =
uuid: 'some-uuid'
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
done error
beforeEach (done) ->
record =
metadata:
who: 'not-this-one'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@notThisId = record._id.toString()
done error
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({timestamp:'some-timestamp',uuid: 'some-uuid'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
it 'should not update this record', (done) ->
@collection.findOne { _id: new ObjectId(@notThisId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.exist
done()
describe 'when a uuid and recordId are passed', ->
beforeEach (done) ->
record =
uuid: 'some-uuid'
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
done error
beforeEach (done) ->
record =
metadata:
who: 'not-this-one'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@notThisId = record._id.toString()
done error
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({timestamp:'some-timestamp',uuid: 'some-uuid',@recordId}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
it 'should not update this record', (done) ->
@collection.findOne { _id: new ObjectId(@notThisId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.exist
done()
describe 'when the requests times out', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.delay 1100
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message timeout', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the requests is a 500', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 500
@sut.doAndDrain (error) =>
done error
it 'should call send message', ->
@sendMessage.done()
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message 500', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the requests is a 503', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 503
@sut.doAndDrain (error) =>
done error
it 'should call send message', ->
@sendMessage.done()
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message 503', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the record has been sent in the past', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
lastSent: 'some-old-time'
totalSent: 3
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.equal 'some-old-time'
expect(record.metadata.totalSent).to.equal 4
done()
describe 'when no transactionId is passed', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
describe 'when it is a fireOnce record', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
fireOnce: true
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should delete the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record).to.not.exist
done()
describe 'when a deleted job queued', ->
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({recordId:new ObjectId(),timestamp:'who-cares'}), done
return # stupid promises
beforeEach (done) ->
@sut.doAndDrain (@error) => done()
it 'should not blow up', ->
expect(@error).to.not.exist
| 6577 | Worker = require '../src/worker'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
MeshbluConfig = require 'meshblu-config'
shmock = require 'shmock'
mongojs = require 'mongojs'
{ ObjectId } = require 'mongojs'
enableDestroy = require 'server-destroy'
describe 'Worker', ->
beforeEach (done) ->
client = new Redis 'localhost', { dropBufferSupport: true }
client.on 'ready', =>
@client = new RedisNS 'test-nowmen-worker', client
@client.del 'work'
done()
beforeEach ->
@meshblu = shmock 0xd00d
enableDestroy @meshblu
database = mongojs 'the-nowmen-worker-test', ['soldiers']
@collection = database.collection 'soldiers'
queueName = 'work'
queueTimeout = 1
@consoleError = sinon.spy()
@sut = new Worker {
disableSendTimestamp: true
sendUnixTimestamp: true
meshbluConfig:
uuid: 'the-nowmen-uuid'
token: '<PASSWORD> <KEY>-<PASSWORD>-<KEY>'
hostname: 'localhost'
port: 0xd00d
protocol: 'http'
@client,
database
queueName,
queueTimeout,
requestTimeout: 1000,
@consoleError,
}
beforeEach (done) ->
@collection.remove done
afterEach ->
@meshblu.destroy()
describe '->doAndDrain', ->
describe 'when a job queued', ->
describe 'when a transactionId is passed', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: '<KEY>'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
describe 'when a uuid is passed', ->
beforeEach (done) ->
record =
uuid: 'some-uuid'
metadata:
who: '<NAME>'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
done error
beforeEach (done) ->
record =
metadata:
who: 'not-this-one'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: '<PASSWORD>-<KEY>token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@notThisId = record._id.toString()
done error
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({timestamp:'some-timestamp',uuid: 'some-uuid'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
it 'should not update this record', (done) ->
@collection.findOne { _id: new ObjectId(@notThisId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.exist
done()
describe 'when a uuid and recordId are passed', ->
beforeEach (done) ->
record =
uuid: 'some-uuid'
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-<PASSWORD>-<KEY>'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
done error
beforeEach (done) ->
record =
metadata:
who: 'not-this-one'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: '<PASSWORD>-<KEY>'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@notThisId = record._id.toString()
done error
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({timestamp:'some-timestamp',uuid: 'some-uuid',@recordId}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
it 'should not update this record', (done) ->
@collection.findOne { _id: new ObjectId(@notThisId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.exist
done()
describe 'when the requests times out', ->
beforeEach (done) ->
record =
metadata:
who: '<NAME>'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: '<KEY>'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.delay 1100
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message timeout', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the requests is a 500', ->
beforeEach (done) ->
record =
metadata:
who: '<NAME>'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 500
@sut.doAndDrain (error) =>
done error
it 'should call send message', ->
@sendMessage.done()
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message 500', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the requests is a 503', ->
beforeEach (done) ->
record =
metadata:
who: '<NAME>'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the<KEY>-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 503
@sut.doAndDrain (error) =>
done error
it 'should call send message', ->
@sendMessage.done()
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message 503', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the record has been sent in the past', ->
beforeEach (done) ->
record =
metadata:
who: '<NAME>'
lastSent: 'some-old-time'
totalSent: 3
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.equal 'some-old-time'
expect(record.metadata.totalSent).to.equal 4
done()
describe 'when no transactionId is passed', ->
beforeEach (done) ->
record =
metadata:
who: '<NAME>'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: '<PASSWORD>-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
describe 'when it is a fireOnce record', ->
beforeEach (done) ->
record =
metadata:
who: '<NAME>'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
fireOnce: true
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should delete the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record).to.not.exist
done()
describe 'when a deleted job queued', ->
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({recordId:new ObjectId(),timestamp:'who-cares'}), done
return # stupid promises
beforeEach (done) ->
@sut.doAndDrain (@error) => done()
it 'should not blow up', ->
expect(@error).to.not.exist
| true | Worker = require '../src/worker'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
MeshbluConfig = require 'meshblu-config'
shmock = require 'shmock'
mongojs = require 'mongojs'
{ ObjectId } = require 'mongojs'
enableDestroy = require 'server-destroy'
describe 'Worker', ->
beforeEach (done) ->
client = new Redis 'localhost', { dropBufferSupport: true }
client.on 'ready', =>
@client = new RedisNS 'test-nowmen-worker', client
@client.del 'work'
done()
beforeEach ->
@meshblu = shmock 0xd00d
enableDestroy @meshblu
database = mongojs 'the-nowmen-worker-test', ['soldiers']
@collection = database.collection 'soldiers'
queueName = 'work'
queueTimeout = 1
@consoleError = sinon.spy()
@sut = new Worker {
disableSendTimestamp: true
sendUnixTimestamp: true
meshbluConfig:
uuid: 'the-nowmen-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI-PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI'
hostname: 'localhost'
port: 0xd00d
protocol: 'http'
@client,
database
queueName,
queueTimeout,
requestTimeout: 1000,
@consoleError,
}
beforeEach (done) ->
@collection.remove done
afterEach ->
@meshblu.destroy()
describe '->doAndDrain', ->
describe 'when a job queued', ->
describe 'when a transactionId is passed', ->
beforeEach (done) ->
record =
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'PI:KEY:<KEY>END_PI'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
describe 'when a uuid is passed', ->
beforeEach (done) ->
record =
uuid: 'some-uuid'
metadata:
who: 'PI:NAME:<NAME>END_PI'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
done error
beforeEach (done) ->
record =
metadata:
who: 'not-this-one'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PItoken'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@notThisId = record._id.toString()
done error
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({timestamp:'some-timestamp',uuid: 'some-uuid'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
it 'should not update this record', (done) ->
@collection.findOne { _id: new ObjectId(@notThisId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.exist
done()
describe 'when a uuid and recordId are passed', ->
beforeEach (done) ->
record =
uuid: 'some-uuid'
metadata:
who: 'cares'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'the-PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
done error
beforeEach (done) ->
record =
metadata:
who: 'not-this-one'
data:
nodeId: 'the-node-id'
transactionId: 'the-transaction-id'
uuid: 'the-interval-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@notThisId = record._id.toString()
done error
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({timestamp:'some-timestamp',uuid: 'some-uuid',@recordId}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
transactionId: 'the-transaction-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
it 'should not update this record', (done) ->
@collection.findOne { _id: new ObjectId(@notThisId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.exist
done()
describe 'when the requests times out', ->
beforeEach (done) ->
record =
metadata:
who: 'PI:NAME:<NAME>END_PI'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'PI:PASSWORD:<KEY>END_PI'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.delay 1100
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message timeout', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the requests is a 500', ->
beforeEach (done) ->
record =
metadata:
who: 'PI:NAME:<NAME>END_PI'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 500
@sut.doAndDrain (error) =>
done error
it 'should call send message', ->
@sendMessage.done()
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message 500', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the requests is a 503', ->
beforeEach (done) ->
record =
metadata:
who: 'PI:NAME:<NAME>END_PI'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'thePI:PASSWORD:<KEY>END_PI-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 503
@sut.doAndDrain (error) =>
done error
it 'should call send message', ->
@sendMessage.done()
it 'should log the error', ->
expect(@consoleError).to.have.been.calledWith 'Send message 503', { sendTo: 'the-flow-uuid', nodeId: 'the-node-id' }
it 'should not update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.exist
expect(record.metadata.totalSent).to.not.equal 1
done()
describe 'when the record has been sent in the past', ->
beforeEach (done) ->
record =
metadata:
who: 'PI:NAME:<NAME>END_PI'
lastSent: 'some-old-time'
totalSent: 3
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.not.equal 'some-old-time'
expect(record.metadata.totalSent).to.equal 4
done()
describe 'when no transactionId is passed', ->
beforeEach (done) ->
record =
metadata:
who: 'PI:NAME:<NAME>END_PI'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI-token'
sendTo: 'the-flow-uuid'
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should update the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record.metadata.lastSent).to.exist
expect(record.metadata.totalSent).to.equal 1
done()
describe 'when it is a fireOnce record', ->
beforeEach (done) ->
record =
metadata:
who: 'PI:NAME:<NAME>END_PI'
data:
nodeId: 'the-node-id'
uuid: 'the-interval-uuid'
token: 'the-interval-token'
sendTo: 'the-flow-uuid'
fireOnce: true
@collection.insert record, (error, record) =>
return done error if error?
@recordId = record._id.toString()
@client.lpush 'work', JSON.stringify({@recordId,timestamp:'some-timestamp'}), done
return # stupid promises
beforeEach (done) ->
intervalAuth = new Buffer('the-interval-uuid:the-interval-token').toString('base64')
@sendMessage = @meshblu
.post '/messages'
.set 'Authorization', "Basic #{intervalAuth}"
.send {
devices: ['the-flow-uuid']
payload:
from: 'the-node-id'
unixTimestamp: 'some-timestamp'
}
.reply 201
@sut.doAndDrain (error) =>
done error
it 'should send the message', ->
@sendMessage.done()
it 'should delete the record', (done) ->
@collection.findOne { _id: new ObjectId(@recordId) }, (error, record) =>
return done error if error?
expect(record).to.not.exist
done()
describe 'when a deleted job queued', ->
beforeEach (done) ->
@client.lpush 'work', JSON.stringify({recordId:new ObjectId(),timestamp:'who-cares'}), done
return # stupid promises
beforeEach (done) ->
@sut.doAndDrain (@error) => done()
it 'should not blow up', ->
expect(@error).to.not.exist
|
[
{
"context": " .addClass('show')\n .attr(\"data-team-name\",\"Daltons\")\n spyOn(team, 'subscribe').and.callThrough()\n",
"end": 232,
"score": 0.9907743334770203,
"start": 225,
"tag": "NAME",
"value": "Daltons"
},
{
"context": "\n expect(team.subscribe).toHaveBeenCalledWi... | spec/javascripts/contributors_spec.coffee | johan--/planning-poker | 30 | describe "Contributors", ->
team = App.Channels.Team
contributors = App.Contributors
vote = App.Vote
beforeEach ->
$('body')
.addClass('contributors')
.addClass('show')
.attr("data-team-name","Daltons")
spyOn(team, 'subscribe').and.callThrough()
it "subscribes to the team channel named from the body's data", ->
contributors.onLoad()
expect(team.subscribe).toHaveBeenCalledWith("Daltons")
it "doesn't subscribe to team channel outside contributors show view", ->
$('body')
.removeClass('contributors')
.removeClass('show')
.removeAttr("data-team-name")
contributors.onLoad()
expect(team.subscribe).not.toHaveBeenCalled() | 49140 | describe "Contributors", ->
team = App.Channels.Team
contributors = App.Contributors
vote = App.Vote
beforeEach ->
$('body')
.addClass('contributors')
.addClass('show')
.attr("data-team-name","<NAME>")
spyOn(team, 'subscribe').and.callThrough()
it "subscribes to the team channel named from the body's data", ->
contributors.onLoad()
expect(team.subscribe).toHaveBeenCalledWith("<NAME>")
it "doesn't subscribe to team channel outside contributors show view", ->
$('body')
.removeClass('contributors')
.removeClass('show')
.removeAttr("data-team-name")
contributors.onLoad()
expect(team.subscribe).not.toHaveBeenCalled() | true | describe "Contributors", ->
team = App.Channels.Team
contributors = App.Contributors
vote = App.Vote
beforeEach ->
$('body')
.addClass('contributors')
.addClass('show')
.attr("data-team-name","PI:NAME:<NAME>END_PI")
spyOn(team, 'subscribe').and.callThrough()
it "subscribes to the team channel named from the body's data", ->
contributors.onLoad()
expect(team.subscribe).toHaveBeenCalledWith("PI:NAME:<NAME>END_PI")
it "doesn't subscribe to team channel outside contributors show view", ->
$('body')
.removeClass('contributors')
.removeClass('show')
.removeAttr("data-team-name")
contributors.onLoad()
expect(team.subscribe).not.toHaveBeenCalled() |
[
{
"context": "Device', ->\n beforeEach (done) ->\n @redisKey = UUID.v1()\n database = mongojs 'unregister-device-test', ",
"end": 461,
"score": 0.912755012512207,
"start": 452,
"tag": "KEY",
"value": "UUID.v1()"
},
{
"context": ": 'thank-you-for-considering'\n token... | test/unregister-device-spec.coffee | octoblu/meshblu-core-task-unregister-device | 0 | _ = require 'lodash'
mongojs = require 'mongojs'
Datastore = require 'meshblu-core-datastore'
Cache = require 'meshblu-core-cache'
redis = require 'fakeredis'
UnregisterDevice = require '../'
JobManager = require 'meshblu-core-job-manager'
UUID = require 'uuid'
RedisNS = require '@octoblu/redis-ns'
describe 'UnregisterDevice', ->
beforeEach (done) ->
@redisKey = UUID.v1()
database = mongojs 'unregister-device-test', ['devices']
@jobManager = new JobManager
client: new RedisNS 'job-manager', redis.createClient @redisKey
timeoutSeconds: 1
jobLogSampleRate: 1
@datastore = new Datastore
database: database
collection: 'devices'
database.devices.remove done
@cache = new Cache client: redis.createClient UUID.v1()
beforeEach ->
@uuidAliasResolver = resolve: (uuid, callback) => callback(null, uuid)
@sut = new UnregisterDevice {@datastore, @cache, @uuidAliasResolver, @jobManager}
describe '->do', ->
context 'when given a valid request', ->
beforeEach (done) ->
record =
uuid: 'should-be-removed-uuid'
something: 'else'
@datastore.insert record, done
beforeEach (done) ->
record =
uuid: 'should-not-be-removed-uuid'
@datastore.insert record, done
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
toUuid: 'should-be-removed-uuid'
@sut.do request, (error, @response) => done error
it 'should remove the device', (done) ->
@datastore.findOne {uuid: 'should-be-removed-uuid'}, (error, device) =>
return done error if error?
expect(device).to.not.exist
done()
it 'should not remove the other device', (done) ->
@datastore.findOne {uuid: 'should-not-be-removed-uuid'}, (error, device) =>
return done error if error?
expect(device.uuid).to.equal 'should-not-be-removed-uuid'
done()
it 'should return a 204', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 204
status: 'No Content'
describe 'JobManager gets DeliverUnregisterSent job', (done) ->
beforeEach (done) ->
@jobManager.getRequest ['request'], (error, @request) =>
done error
it 'should be a config messageType', ->
expect(@request).to.exist
message =
uuid:"should-be-removed-uuid"
something: 'else'
auth =
uuid: 'thank-you-for-considering'
token: 'the-environment'
{rawData, metadata} = @request
expect(metadata.auth).to.deep.equal uuid: 'should-be-removed-uuid'
expect(metadata.jobType).to.equal 'DeliverUnregisterSent'
expect(metadata.fromUuid).to.equal 'should-be-removed-uuid'
expect(JSON.parse rawData).to.containSubset message
context 'when given a valid invalid', ->
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
@sut.do request, (error, @response) => done error
it 'should return a 422', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 422
status: 'Unprocessable Entity'
expect(@response).to.deep.equal expectedResponse
| 212940 | _ = require 'lodash'
mongojs = require 'mongojs'
Datastore = require 'meshblu-core-datastore'
Cache = require 'meshblu-core-cache'
redis = require 'fakeredis'
UnregisterDevice = require '../'
JobManager = require 'meshblu-core-job-manager'
UUID = require 'uuid'
RedisNS = require '@octoblu/redis-ns'
describe 'UnregisterDevice', ->
beforeEach (done) ->
@redisKey = <KEY>
database = mongojs 'unregister-device-test', ['devices']
@jobManager = new JobManager
client: new RedisNS 'job-manager', redis.createClient @redisKey
timeoutSeconds: 1
jobLogSampleRate: 1
@datastore = new Datastore
database: database
collection: 'devices'
database.devices.remove done
@cache = new Cache client: redis.createClient UUID.v1()
beforeEach ->
@uuidAliasResolver = resolve: (uuid, callback) => callback(null, uuid)
@sut = new UnregisterDevice {@datastore, @cache, @uuidAliasResolver, @jobManager}
describe '->do', ->
context 'when given a valid request', ->
beforeEach (done) ->
record =
uuid: 'should-be-removed-uuid'
something: 'else'
@datastore.insert record, done
beforeEach (done) ->
record =
uuid: 'should-not-be-removed-uuid'
@datastore.insert record, done
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
toUuid: 'should-be-removed-uuid'
@sut.do request, (error, @response) => done error
it 'should remove the device', (done) ->
@datastore.findOne {uuid: 'should-be-removed-uuid'}, (error, device) =>
return done error if error?
expect(device).to.not.exist
done()
it 'should not remove the other device', (done) ->
@datastore.findOne {uuid: 'should-not-be-removed-uuid'}, (error, device) =>
return done error if error?
expect(device.uuid).to.equal 'should-not-be-removed-uuid'
done()
it 'should return a 204', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 204
status: 'No Content'
describe 'JobManager gets DeliverUnregisterSent job', (done) ->
beforeEach (done) ->
@jobManager.getRequest ['request'], (error, @request) =>
done error
it 'should be a config messageType', ->
expect(@request).to.exist
message =
uuid:"should-be-removed-uuid"
something: 'else'
auth =
uuid: 'thank-you-for-considering'
token: '<PASSWORD>'
{rawData, metadata} = @request
expect(metadata.auth).to.deep.equal uuid: 'should-be-removed-uuid'
expect(metadata.jobType).to.equal 'DeliverUnregisterSent'
expect(metadata.fromUuid).to.equal 'should-be-removed-uuid'
expect(JSON.parse rawData).to.containSubset message
context 'when given a valid invalid', ->
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
@sut.do request, (error, @response) => done error
it 'should return a 422', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 422
status: 'Unprocessable Entity'
expect(@response).to.deep.equal expectedResponse
| true | _ = require 'lodash'
mongojs = require 'mongojs'
Datastore = require 'meshblu-core-datastore'
Cache = require 'meshblu-core-cache'
redis = require 'fakeredis'
UnregisterDevice = require '../'
JobManager = require 'meshblu-core-job-manager'
UUID = require 'uuid'
RedisNS = require '@octoblu/redis-ns'
describe 'UnregisterDevice', ->
beforeEach (done) ->
@redisKey = PI:KEY:<KEY>END_PI
database = mongojs 'unregister-device-test', ['devices']
@jobManager = new JobManager
client: new RedisNS 'job-manager', redis.createClient @redisKey
timeoutSeconds: 1
jobLogSampleRate: 1
@datastore = new Datastore
database: database
collection: 'devices'
database.devices.remove done
@cache = new Cache client: redis.createClient UUID.v1()
beforeEach ->
@uuidAliasResolver = resolve: (uuid, callback) => callback(null, uuid)
@sut = new UnregisterDevice {@datastore, @cache, @uuidAliasResolver, @jobManager}
describe '->do', ->
context 'when given a valid request', ->
beforeEach (done) ->
record =
uuid: 'should-be-removed-uuid'
something: 'else'
@datastore.insert record, done
beforeEach (done) ->
record =
uuid: 'should-not-be-removed-uuid'
@datastore.insert record, done
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
toUuid: 'should-be-removed-uuid'
@sut.do request, (error, @response) => done error
it 'should remove the device', (done) ->
@datastore.findOne {uuid: 'should-be-removed-uuid'}, (error, device) =>
return done error if error?
expect(device).to.not.exist
done()
it 'should not remove the other device', (done) ->
@datastore.findOne {uuid: 'should-not-be-removed-uuid'}, (error, device) =>
return done error if error?
expect(device.uuid).to.equal 'should-not-be-removed-uuid'
done()
it 'should return a 204', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 204
status: 'No Content'
describe 'JobManager gets DeliverUnregisterSent job', (done) ->
beforeEach (done) ->
@jobManager.getRequest ['request'], (error, @request) =>
done error
it 'should be a config messageType', ->
expect(@request).to.exist
message =
uuid:"should-be-removed-uuid"
something: 'else'
auth =
uuid: 'thank-you-for-considering'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
{rawData, metadata} = @request
expect(metadata.auth).to.deep.equal uuid: 'should-be-removed-uuid'
expect(metadata.jobType).to.equal 'DeliverUnregisterSent'
expect(metadata.fromUuid).to.equal 'should-be-removed-uuid'
expect(JSON.parse rawData).to.containSubset message
context 'when given a valid invalid', ->
beforeEach (done) ->
request =
metadata:
responseId: 'its-electric'
@sut.do request, (error, @response) => done error
it 'should return a 422', ->
expectedResponse =
metadata:
responseId: 'its-electric'
code: 422
status: 'Unprocessable Entity'
expect(@response).to.deep.equal expectedResponse
|
[
{
"context": "ndard alert dialog subclass\n#\n# Copyright (C) 2012 Nikolay Nemshilov\n#\nclass Dialog.Alert extends Dialog\n\n constructo",
"end": 79,
"score": 0.9998859167098999,
"start": 62,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | ui/dialog/src/alert.coffee | lovely-io/lovely.io-stl | 2 | #
# The standard alert dialog subclass
#
# Copyright (C) 2012 Nikolay Nemshilov
#
class Dialog.Alert extends Dialog
constructor: (options)->
options or= {}
options.title or= 'Alert'
options.icon or= 'warning-sign'
options.onlyOk = true unless 'onlyOk' in options
super(options)
@addClass 'lui-dialog-alert'
@on 'ok', 'hide' | 166344 | #
# The standard alert dialog subclass
#
# Copyright (C) 2012 <NAME>
#
class Dialog.Alert extends Dialog
constructor: (options)->
options or= {}
options.title or= 'Alert'
options.icon or= 'warning-sign'
options.onlyOk = true unless 'onlyOk' in options
super(options)
@addClass 'lui-dialog-alert'
@on 'ok', 'hide' | true | #
# The standard alert dialog subclass
#
# Copyright (C) 2012 PI:NAME:<NAME>END_PI
#
class Dialog.Alert extends Dialog
constructor: (options)->
options or= {}
options.title or= 'Alert'
options.icon or= 'warning-sign'
options.onlyOk = true unless 'onlyOk' in options
super(options)
@addClass 'lui-dialog-alert'
@on 'ok', 'hide' |
[
{
"context": " = {\n name: if body.name? then body.name else \"麻辣香锅\"\n price: if body.price? then body.price else 1",
"end": 393,
"score": 0.9997392296791077,
"start": 389,
"tag": "NAME",
"value": "麻辣香锅"
},
{
"context": "dy.end_time else \"12:00\"\n }\n\n\n# {\n# \"name\"... | bizs/foodBiz.coffee | leftjs/simple_uphall_api | 0 | jwt = require('jsonwebtoken')
db = require('./../libs/db')
config = require('./../config/config')
moment = require 'moment'
md5Util = require('./../utils/md5Util')
Utils = require './../utils/Utils'
commonBiz = require './commonBiz'
_ = require('underscore')
buildFoodWithBody = (body) ->
moment.locale('zh_cn')
return postData = {
name: if body.name? then body.name else "麻辣香锅"
price: if body.price? then body.price else 10
discount: if body.discount? then body.discount else 1
is_recommended: if (body.is_recommended? && typeof body.is_recommended is 'boolean' && body.is_recommended is true) then true else false
is_hot: if(body.is_hot? && typeof body.is_hot is "boolean" && body.is_hot is true) then true else false
is_breakfast: if(body.is_breakfast? && typeof body.is_breakfast && body.is_breakfast is true) then true else false
is_lunch: if(body.is_lunch? && typeof body.is_lunch && body.is_lunch is true) then true else false
is_dinner: if(body.is_dinner? && typeof body.is_dinner && body.is_dinner is true) then true else false
address: if body.address? then body.address else "食堂"
pic_url: if body.pic_url? then body.pic_url else ""
like: if body.like? then body.like else 0
description: if body.description? then body.description else "位于食堂的美食,永远有让人回味无穷的感觉"
orderCount: 0
start_time: if body.start_time? then body.start_time else "12:00"
end_time: if body.end_time? then body.end_time else "12:00"
}
# {
# "name": "麻辣香锅",
# "price": 10,
# "discount": 0.4,
# "is_recommended": true,
# "is_hot": true,
# "is_breakfast": true,
# "is_lunch": true,
# "is_dinner": true,
# "address": "食堂三楼",
# "start_time": "2012-12-12 12:33",
# "end_time": "2012-12-12 12:33"
# }
publishFood = (req,res,next) ->
body = req.body
postData = buildFoodWithBody(body)
db.foods.insert(postData,(err,food) ->
return next(err) if err
res.json(food)
)
updateFood = (req,res,next) ->
body = req.body
postData = buildFoodWithBody(body)
foodId = req.params["foodId"]
db.foods.findOne({_id: foodId}, (err,food) ->
return next(err) if err
return next(commonBiz.customError(404, "所需更改的菜品不存在")) if not food
db.foods.update({_id: food._id}, {$set: postData},(err,numReplaced) ->
return next(err) if err
return next(commonBiz.customError(400, "更新失败")) if numReplaced is 0
res.json({msg: "更新成功"})
)
)
getFood = (req,res,next) ->
foodId = req.params["foodId"]
db.foods.findOne({_id: foodId},(err,food) ->
return next(err) if err
return next(commonBiz.customError(404, '该菜品没有找到')) if not food
res.json(food)
)
getFoods = (req,res,next) ->
db.foods.find({}, (err,list) ->
return next(err) if err
res.json(list)
)
deleteFood = (req,res,next) ->
db.foods.remove({_id: req.params["foodId"]},(err,numRemoved) ->
return next(err) if err
return next(commonBiz.customError(404, "该商品不存在")) if numRemoved is 0
res.json({msg: "删除成功"})
)
likeFood = (req,res,next) ->
db.foods.update({_id: req.params['foodId']},{$inc: {like: 1}},(err,numReplaced) ->
return next(err) if err
return next(commonBiz.customError(400,"喜欢失败")) if numReplaced is 0
res.json({msg: "喜欢成功"})
)
module.exports = {
publishFood: publishFood
updateFood: updateFood
getFood: getFood
getFoods: getFoods
deleteFood: deleteFood
likeFood: likeFood
} | 186791 | jwt = require('jsonwebtoken')
db = require('./../libs/db')
config = require('./../config/config')
moment = require 'moment'
md5Util = require('./../utils/md5Util')
Utils = require './../utils/Utils'
commonBiz = require './commonBiz'
_ = require('underscore')
buildFoodWithBody = (body) ->
moment.locale('zh_cn')
return postData = {
name: if body.name? then body.name else "<NAME>"
price: if body.price? then body.price else 10
discount: if body.discount? then body.discount else 1
is_recommended: if (body.is_recommended? && typeof body.is_recommended is 'boolean' && body.is_recommended is true) then true else false
is_hot: if(body.is_hot? && typeof body.is_hot is "boolean" && body.is_hot is true) then true else false
is_breakfast: if(body.is_breakfast? && typeof body.is_breakfast && body.is_breakfast is true) then true else false
is_lunch: if(body.is_lunch? && typeof body.is_lunch && body.is_lunch is true) then true else false
is_dinner: if(body.is_dinner? && typeof body.is_dinner && body.is_dinner is true) then true else false
address: if body.address? then body.address else "食堂"
pic_url: if body.pic_url? then body.pic_url else ""
like: if body.like? then body.like else 0
description: if body.description? then body.description else "位于食堂的美食,永远有让人回味无穷的感觉"
orderCount: 0
start_time: if body.start_time? then body.start_time else "12:00"
end_time: if body.end_time? then body.end_time else "12:00"
}
# {
# "name": "<NAME>",
# "price": 10,
# "discount": 0.4,
# "is_recommended": true,
# "is_hot": true,
# "is_breakfast": true,
# "is_lunch": true,
# "is_dinner": true,
# "address": "食堂三楼",
# "start_time": "2012-12-12 12:33",
# "end_time": "2012-12-12 12:33"
# }
publishFood = (req,res,next) ->
body = req.body
postData = buildFoodWithBody(body)
db.foods.insert(postData,(err,food) ->
return next(err) if err
res.json(food)
)
updateFood = (req,res,next) ->
body = req.body
postData = buildFoodWithBody(body)
foodId = req.params["foodId"]
db.foods.findOne({_id: foodId}, (err,food) ->
return next(err) if err
return next(commonBiz.customError(404, "所需更改的菜品不存在")) if not food
db.foods.update({_id: food._id}, {$set: postData},(err,numReplaced) ->
return next(err) if err
return next(commonBiz.customError(400, "更新失败")) if numReplaced is 0
res.json({msg: "更新成功"})
)
)
getFood = (req,res,next) ->
foodId = req.params["foodId"]
db.foods.findOne({_id: foodId},(err,food) ->
return next(err) if err
return next(commonBiz.customError(404, '该菜品没有找到')) if not food
res.json(food)
)
getFoods = (req,res,next) ->
db.foods.find({}, (err,list) ->
return next(err) if err
res.json(list)
)
deleteFood = (req,res,next) ->
db.foods.remove({_id: req.params["foodId"]},(err,numRemoved) ->
return next(err) if err
return next(commonBiz.customError(404, "该商品不存在")) if numRemoved is 0
res.json({msg: "删除成功"})
)
likeFood = (req,res,next) ->
db.foods.update({_id: req.params['foodId']},{$inc: {like: 1}},(err,numReplaced) ->
return next(err) if err
return next(commonBiz.customError(400,"喜欢失败")) if numReplaced is 0
res.json({msg: "喜欢成功"})
)
module.exports = {
publishFood: publishFood
updateFood: updateFood
getFood: getFood
getFoods: getFoods
deleteFood: deleteFood
likeFood: likeFood
} | true | jwt = require('jsonwebtoken')
db = require('./../libs/db')
config = require('./../config/config')
moment = require 'moment'
md5Util = require('./../utils/md5Util')
Utils = require './../utils/Utils'
commonBiz = require './commonBiz'
_ = require('underscore')
buildFoodWithBody = (body) ->
moment.locale('zh_cn')
return postData = {
name: if body.name? then body.name else "PI:NAME:<NAME>END_PI"
price: if body.price? then body.price else 10
discount: if body.discount? then body.discount else 1
is_recommended: if (body.is_recommended? && typeof body.is_recommended is 'boolean' && body.is_recommended is true) then true else false
is_hot: if(body.is_hot? && typeof body.is_hot is "boolean" && body.is_hot is true) then true else false
is_breakfast: if(body.is_breakfast? && typeof body.is_breakfast && body.is_breakfast is true) then true else false
is_lunch: if(body.is_lunch? && typeof body.is_lunch && body.is_lunch is true) then true else false
is_dinner: if(body.is_dinner? && typeof body.is_dinner && body.is_dinner is true) then true else false
address: if body.address? then body.address else "食堂"
pic_url: if body.pic_url? then body.pic_url else ""
like: if body.like? then body.like else 0
description: if body.description? then body.description else "位于食堂的美食,永远有让人回味无穷的感觉"
orderCount: 0
start_time: if body.start_time? then body.start_time else "12:00"
end_time: if body.end_time? then body.end_time else "12:00"
}
# {
# "name": "PI:NAME:<NAME>END_PI",
# "price": 10,
# "discount": 0.4,
# "is_recommended": true,
# "is_hot": true,
# "is_breakfast": true,
# "is_lunch": true,
# "is_dinner": true,
# "address": "食堂三楼",
# "start_time": "2012-12-12 12:33",
# "end_time": "2012-12-12 12:33"
# }
publishFood = (req,res,next) ->
body = req.body
postData = buildFoodWithBody(body)
db.foods.insert(postData,(err,food) ->
return next(err) if err
res.json(food)
)
updateFood = (req,res,next) ->
body = req.body
postData = buildFoodWithBody(body)
foodId = req.params["foodId"]
db.foods.findOne({_id: foodId}, (err,food) ->
return next(err) if err
return next(commonBiz.customError(404, "所需更改的菜品不存在")) if not food
db.foods.update({_id: food._id}, {$set: postData},(err,numReplaced) ->
return next(err) if err
return next(commonBiz.customError(400, "更新失败")) if numReplaced is 0
res.json({msg: "更新成功"})
)
)
getFood = (req,res,next) ->
foodId = req.params["foodId"]
db.foods.findOne({_id: foodId},(err,food) ->
return next(err) if err
return next(commonBiz.customError(404, '该菜品没有找到')) if not food
res.json(food)
)
getFoods = (req,res,next) ->
db.foods.find({}, (err,list) ->
return next(err) if err
res.json(list)
)
deleteFood = (req,res,next) ->
db.foods.remove({_id: req.params["foodId"]},(err,numRemoved) ->
return next(err) if err
return next(commonBiz.customError(404, "该商品不存在")) if numRemoved is 0
res.json({msg: "删除成功"})
)
likeFood = (req,res,next) ->
db.foods.update({_id: req.params['foodId']},{$inc: {like: 1}},(err,numReplaced) ->
return next(err) if err
return next(commonBiz.customError(400,"喜欢失败")) if numReplaced is 0
res.json({msg: "喜欢成功"})
)
module.exports = {
publishFood: publishFood
updateFood: updateFood
getFood: getFood
getFoods: getFoods
deleteFood: deleteFood
likeFood: likeFood
} |
[
{
"context": "students = new studentCollection [ \n {name: 'John'}\n {name: 'Steve'}\n {name: 'Laura'}\n ",
"end": 513,
"score": 0.9998823404312134,
"start": 509,
"tag": "NAME",
"value": "John"
},
{
"context": "tCollection [ \n {name: 'John'}\n {name: 'Stev... | app/source/collections.coffee | alexserver/learning-backbone | 0 | # Collections testing
# At this point I'm doing sort of BDD describes, next step, do real BDD.
class studentModel extends Backbone.Model
defaults:
name: 'Unknown'
age: null
sex: ''
class studentCollection extends Backbone.Collection
url: '/students'
model: studentModel
collectionCreation = ->
# Creating a collection
students = new studentCollection
console.log 'Collection created ', students
collectionCreationWithModels = ->
students = new studentCollection [
{name: 'John'}
{name: 'Steve'}
{name: 'Laura'}
]
console.log 'Collection created with models ', students.toJSON()
collectionFetching = ->
# Creating a collection
students = new studentCollection
# Fetch existing students
students.fetch
success: (coll, data) ->
console.log 'Collection Student - fetched ', data
collectionUnderscore = ->
students = new studentCollection
# Fetch students
students.fetch()
students.on 'sync', (coll, data) ->
console.log 'Collection Student - Running an each method'
coll.each (model) ->
console.log 'Model Student: ', model.attributes
console.log 'Collection Student - Running a map method'
studentNames = coll.map (model) ->
model.get 'name'
console.log "Model Student, Name: #{name}" for name in studentNames
collectionAdd = ->
students = new studentCollection
students.on 'add', (model, coll) ->
console.log "Collection Student - Model Added: #{model.get('name')}, Coll Size: #{coll.length}"
students.add [
{name: 'Jim Carrey'}
{name: 'Jimmy Fallon'}
{name: 'Will Ferrell'}
]
collectionRemove = ->
students = new studentCollection
students.add [
{name: 'Jim Carrey'}
{name: 'Jimmy Fallon'}
{name: 'Will Ferrell'}
]
students.on 'remove', (model, coll) ->
console.log "Collection Student - Model Removed: #{model.get 'name'}, Coll Size: #{coll.length}"
jim = students.find (student) ->
(student.get 'name') == 'Jim Carrey'
console.log 'Collection - Jim was found! ', jim
students.remove [jim]
collectionReset = ->
students = new studentCollection
students.add [
{name: 'Jim'}
{name: 'Alonso'}
{name: 'Patricia'}
]
console.log 'Collection Student - Model Added', students.toJSON()
students.reset()
console.log 'Collection Student - Collection Reset', students.toJSON()
collectionUpdate = ->
students = new studentCollection
students.add [
{name: 'Jim'}
{name: 'Alonso'}
{name: 'Patricia'}
]
console.log 'Collection Student - Model Added', students.toJSON()
students.update [
{name: 'Jim'}
{name: 'Lawrence'}
{name: 'Sofia'}
]
console.log 'Collection Student - Model Updated', students.toJSON()
collectionPluck = ->
students = new studentCollection
students.fetch
success: (coll, data) ->
console.log 'Collection Student - Fetch successful: ', data
console.log "Collection Student - Model Name: #{name}" for name in coll.pluck 'name'
collectionWhere = ->
students = new studentCollection
students.add [
{name: 'Jim', sex: 'M', age: 34}
{name: 'Alonso', sex: 'M', age: 28}
{name: 'Patricia', sex: 'F', age: 24}
]
maleStudents = students.where {sex: 'M'}
console.log "Collection Student - Select only male students: #{s.get 'name'}, #{s.get 'age'}" for s in maleStudents
collectionCreation()
collectionCreationWithModels()
collectionFetching()
collectionUnderscore()
collectionAdd()
collectionRemove()
collectionReset()
collectionUpdate()
collectionPluck()
collectionWhere() | 7913 | # Collections testing
# At this point I'm doing sort of BDD describes, next step, do real BDD.
class studentModel extends Backbone.Model
defaults:
name: 'Unknown'
age: null
sex: ''
class studentCollection extends Backbone.Collection
url: '/students'
model: studentModel
collectionCreation = ->
# Creating a collection
students = new studentCollection
console.log 'Collection created ', students
collectionCreationWithModels = ->
students = new studentCollection [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
console.log 'Collection created with models ', students.toJSON()
collectionFetching = ->
# Creating a collection
students = new studentCollection
# Fetch existing students
students.fetch
success: (coll, data) ->
console.log 'Collection Student - fetched ', data
collectionUnderscore = ->
students = new studentCollection
# Fetch students
students.fetch()
students.on 'sync', (coll, data) ->
console.log 'Collection Student - Running an each method'
coll.each (model) ->
console.log 'Model Student: ', model.attributes
console.log 'Collection Student - Running a map method'
studentNames = coll.map (model) ->
model.get 'name'
console.log "Model Student, Name: #{name}" for name in studentNames
collectionAdd = ->
students = new studentCollection
students.on 'add', (model, coll) ->
console.log "Collection Student - Model Added: #{model.get('name')}, Coll Size: #{coll.length}"
students.add [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
collectionRemove = ->
students = new studentCollection
students.add [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
students.on 'remove', (model, coll) ->
console.log "Collection Student - Model Removed: #{model.get 'name'}, Coll Size: #{coll.length}"
jim = students.find (student) ->
(student.get 'name') == '<NAME>'
console.log 'Collection - Jim was found! ', jim
students.remove [jim]
collectionReset = ->
students = new studentCollection
students.add [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
console.log 'Collection Student - Model Added', students.toJSON()
students.reset()
console.log 'Collection Student - Collection Reset', students.toJSON()
collectionUpdate = ->
students = new studentCollection
students.add [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
console.log 'Collection Student - Model Added', students.toJSON()
students.update [
{name: '<NAME>'}
{name: '<NAME>'}
{name: '<NAME>'}
]
console.log 'Collection Student - Model Updated', students.toJSON()
collectionPluck = ->
students = new studentCollection
students.fetch
success: (coll, data) ->
console.log 'Collection Student - Fetch successful: ', data
console.log "Collection Student - Model Name: #{name}" for name in coll.pluck 'name'
collectionWhere = ->
students = new studentCollection
students.add [
{name: '<NAME>', sex: 'M', age: 34}
{name: '<NAME>', sex: 'M', age: 28}
{name: '<NAME>', sex: 'F', age: 24}
]
maleStudents = students.where {sex: 'M'}
console.log "Collection Student - Select only male students: #{s.get 'name'}, #{s.get 'age'}" for s in maleStudents
collectionCreation()
collectionCreationWithModels()
collectionFetching()
collectionUnderscore()
collectionAdd()
collectionRemove()
collectionReset()
collectionUpdate()
collectionPluck()
collectionWhere() | true | # Collections testing
# At this point I'm doing sort of BDD describes, next step, do real BDD.
class studentModel extends Backbone.Model
defaults:
name: 'Unknown'
age: null
sex: ''
class studentCollection extends Backbone.Collection
url: '/students'
model: studentModel
collectionCreation = ->
# Creating a collection
students = new studentCollection
console.log 'Collection created ', students
collectionCreationWithModels = ->
students = new studentCollection [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
console.log 'Collection created with models ', students.toJSON()
collectionFetching = ->
# Creating a collection
students = new studentCollection
# Fetch existing students
students.fetch
success: (coll, data) ->
console.log 'Collection Student - fetched ', data
collectionUnderscore = ->
students = new studentCollection
# Fetch students
students.fetch()
students.on 'sync', (coll, data) ->
console.log 'Collection Student - Running an each method'
coll.each (model) ->
console.log 'Model Student: ', model.attributes
console.log 'Collection Student - Running a map method'
studentNames = coll.map (model) ->
model.get 'name'
console.log "Model Student, Name: #{name}" for name in studentNames
collectionAdd = ->
students = new studentCollection
students.on 'add', (model, coll) ->
console.log "Collection Student - Model Added: #{model.get('name')}, Coll Size: #{coll.length}"
students.add [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
collectionRemove = ->
students = new studentCollection
students.add [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
students.on 'remove', (model, coll) ->
console.log "Collection Student - Model Removed: #{model.get 'name'}, Coll Size: #{coll.length}"
jim = students.find (student) ->
(student.get 'name') == 'PI:NAME:<NAME>END_PI'
console.log 'Collection - Jim was found! ', jim
students.remove [jim]
collectionReset = ->
students = new studentCollection
students.add [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
console.log 'Collection Student - Model Added', students.toJSON()
students.reset()
console.log 'Collection Student - Collection Reset', students.toJSON()
collectionUpdate = ->
students = new studentCollection
students.add [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
console.log 'Collection Student - Model Added', students.toJSON()
students.update [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
console.log 'Collection Student - Model Updated', students.toJSON()
collectionPluck = ->
students = new studentCollection
students.fetch
success: (coll, data) ->
console.log 'Collection Student - Fetch successful: ', data
console.log "Collection Student - Model Name: #{name}" for name in coll.pluck 'name'
collectionWhere = ->
students = new studentCollection
students.add [
{name: 'PI:NAME:<NAME>END_PI', sex: 'M', age: 34}
{name: 'PI:NAME:<NAME>END_PI', sex: 'M', age: 28}
{name: 'PI:NAME:<NAME>END_PI', sex: 'F', age: 24}
]
maleStudents = students.where {sex: 'M'}
console.log "Collection Student - Select only male students: #{s.get 'name'}, #{s.get 'age'}" for s in maleStudents
collectionCreation()
collectionCreationWithModels()
collectionFetching()
collectionUnderscore()
collectionAdd()
collectionRemove()
collectionReset()
collectionUpdate()
collectionPluck()
collectionWhere() |
[
{
"context": "##\n# ga\n# https://github.com/mickey/videojs-ga\n#\n# Copyright (c) 2013 Michael Bensous",
"end": 35,
"score": 0.9995578527450562,
"start": 29,
"tag": "USERNAME",
"value": "mickey"
},
{
"context": "ithub.com/mickey/videojs-ga\n#\n# Copyright (c) 2013 Michael Bensoussan... | src/videojs.ga.coffee | ChannelMeter/videojs-ga | 0 | ##
# ga
# https://github.com/mickey/videojs-ga
#
# Copyright (c) 2013 Michael Bensoussan
# Licensed under the MIT license.
##
videojs.plugin 'ga', (options) ->
# this loads options from the data-setup attribute of the video tag
dataSetupOptions = {}
if @options()["data-setup"]
parsedOptions = JSON.parse(@options()["data-setup"])
dataSetupOptions = parsedOptions.ga if parsedOptions.ga
defaultsEventsToTrack = [
'loaded', 'percentsPlayed', 'start', 'srcType'
'end', 'seek', 'play', 'pause', 'resize',
'volumeChange', 'error', 'fullscreen'
]
eventsToTrack = options.eventsToTrack || dataSetupOptions.eventsToTrack || defaultsEventsToTrack
percentsPlayedInterval = options.percentsPlayedInterval || dataSetupOptions.percentsPlayedInterval || 10
eventCategory = options.eventCategory || dataSetupOptions.eventCategory || 'Video'
# if you didn't specify a name, it will be 'guessed' from the video src after metadatas are loaded
eventLabel = options.eventLabel || dataSetupOptions.eventLabel
# determine if we are using ga.js or analytics.js
gaLibrary = options.gaLibrary || dataSetupOptions.gaLibrary || 'ga.js'
# init a few variables
percentsAlreadyTracked = []
seekStart = seekEnd = 0
seeking = false
loaded = ->
unless eventLabel
eventLabel = @currentSrc().split("/").slice(-1)[0].replace(/\.(\w{3,4})(\?.*)?$/i,'')
if "loadedmetadata" in eventsToTrack
sendbeacon( 'loadedmetadata', true )
if "srcType" in eventsToTrack
tmpSrcArray = @currentSrc().split(".")
sourceType = tmpSrcArray[tmpSrcArray.length - 1]
sendbeacon( 'source type - ' + "#{@techName}/#{sourceType}", true )
return
timeupdate = ->
currentTime = Math.round(@currentTime())
duration = Math.round(@duration())
percentPlayed = Math.round(currentTime/duration*100)
for percent in [0..99] by percentsPlayedInterval
if percentPlayed >= percent && percent not in percentsAlreadyTracked
if "start" in eventsToTrack && percent == 0 && percentPlayed > 0
sendbeacon( 'start', true )
else if "percentsPlayed" in eventsToTrack && percentPlayed != 0
sendbeacon( 'percent played', true, percent )
if percentPlayed > 0
percentsAlreadyTracked.push(percent)
if "seek" in eventsToTrack
seekStart = seekEnd
seekEnd = currentTime
# if the difference between the start and the end are greater than 1 it's a seek.
if Math.abs(seekStart - seekEnd) > 1
seeking = true
sendbeacon( 'seek start', false, seekStart )
sendbeacon( 'seek end', false, seekEnd )
return
end = ->
sendbeacon( 'end', true )
return
play = ->
currentTime = Math.round(@currentTime())
if currentTime > 0 && !seeking
sendbeacon( 'play', true, currentTime )
seeking = true
return
pause = ->
currentTime = Math.round(@currentTime())
duration = Math.round(@duration())
if currentTime != duration && !seeking
sendbeacon( 'pause', false, currentTime )
return
# value between 0 (muted) and 1
volumeChange = ->
volume = if @muted() == true then 0 else @volume()
sendbeacon( 'volume change', false, volume )
return
resize = ->
sendbeacon( 'resize - ' + @width() + "*" + @height(), true )
return
error = ->
currentTime = Math.round(@currentTime())
# XXX: Is there some informations about the error somewhere ?
sendbeacon( 'error', true, currentTime )
return
fullscreen = ->
currentTime = Math.round(@currentTime())
if @isFullScreen
sendbeacon( 'enter fullscreen', false, currentTime )
else
sendbeacon( 'exit fullscreen', false, currentTime )
return
sendbeacon = ( action, nonInteraction, value ) ->
try
if 'analytics.js' == gaLibrary
ga('send', 'event', {
'eventCategory' : eventCategory,
'eventAction' : action,
'eventLabel' : eventLabel,
'eventValue' : value,
'nonInteraction' : nonInteraction
});
else
_gaq.push(['_trackEvent', eventCategory, action, eventLabel, value, nonInteraction])
return
@on("loadedmetadata", loaded)
@on("timeupdate", timeupdate)
@on("ended", end) if "end" in eventsToTrack
@on("play", play) if "play" in eventsToTrack
@on("pause", pause) if "pause" in eventsToTrack
@on("volumechange", volumeChange) if "volumeChange" in eventsToTrack
@on("resize", resize) if "resize" in eventsToTrack
@on("error", error) if "error" in eventsToTrack
@on("fullscreenchange", fullscreen) if "fullscreen" in eventsToTrack
return
| 9126 | ##
# ga
# https://github.com/mickey/videojs-ga
#
# Copyright (c) 2013 <NAME>
# Licensed under the MIT license.
##
videojs.plugin 'ga', (options) ->
# this loads options from the data-setup attribute of the video tag
dataSetupOptions = {}
if @options()["data-setup"]
parsedOptions = JSON.parse(@options()["data-setup"])
dataSetupOptions = parsedOptions.ga if parsedOptions.ga
defaultsEventsToTrack = [
'loaded', 'percentsPlayed', 'start', 'srcType'
'end', 'seek', 'play', 'pause', 'resize',
'volumeChange', 'error', 'fullscreen'
]
eventsToTrack = options.eventsToTrack || dataSetupOptions.eventsToTrack || defaultsEventsToTrack
percentsPlayedInterval = options.percentsPlayedInterval || dataSetupOptions.percentsPlayedInterval || 10
eventCategory = options.eventCategory || dataSetupOptions.eventCategory || 'Video'
# if you didn't specify a name, it will be 'guessed' from the video src after metadatas are loaded
eventLabel = options.eventLabel || dataSetupOptions.eventLabel
# determine if we are using ga.js or analytics.js
gaLibrary = options.gaLibrary || dataSetupOptions.gaLibrary || 'ga.js'
# init a few variables
percentsAlreadyTracked = []
seekStart = seekEnd = 0
seeking = false
loaded = ->
unless eventLabel
eventLabel = @currentSrc().split("/").slice(-1)[0].replace(/\.(\w{3,4})(\?.*)?$/i,'')
if "loadedmetadata" in eventsToTrack
sendbeacon( 'loadedmetadata', true )
if "srcType" in eventsToTrack
tmpSrcArray = @currentSrc().split(".")
sourceType = tmpSrcArray[tmpSrcArray.length - 1]
sendbeacon( 'source type - ' + "#{@techName}/#{sourceType}", true )
return
timeupdate = ->
currentTime = Math.round(@currentTime())
duration = Math.round(@duration())
percentPlayed = Math.round(currentTime/duration*100)
for percent in [0..99] by percentsPlayedInterval
if percentPlayed >= percent && percent not in percentsAlreadyTracked
if "start" in eventsToTrack && percent == 0 && percentPlayed > 0
sendbeacon( 'start', true )
else if "percentsPlayed" in eventsToTrack && percentPlayed != 0
sendbeacon( 'percent played', true, percent )
if percentPlayed > 0
percentsAlreadyTracked.push(percent)
if "seek" in eventsToTrack
seekStart = seekEnd
seekEnd = currentTime
# if the difference between the start and the end are greater than 1 it's a seek.
if Math.abs(seekStart - seekEnd) > 1
seeking = true
sendbeacon( 'seek start', false, seekStart )
sendbeacon( 'seek end', false, seekEnd )
return
end = ->
sendbeacon( 'end', true )
return
play = ->
currentTime = Math.round(@currentTime())
if currentTime > 0 && !seeking
sendbeacon( 'play', true, currentTime )
seeking = true
return
pause = ->
currentTime = Math.round(@currentTime())
duration = Math.round(@duration())
if currentTime != duration && !seeking
sendbeacon( 'pause', false, currentTime )
return
# value between 0 (muted) and 1
volumeChange = ->
volume = if @muted() == true then 0 else @volume()
sendbeacon( 'volume change', false, volume )
return
resize = ->
sendbeacon( 'resize - ' + @width() + "*" + @height(), true )
return
error = ->
currentTime = Math.round(@currentTime())
# XXX: Is there some informations about the error somewhere ?
sendbeacon( 'error', true, currentTime )
return
fullscreen = ->
currentTime = Math.round(@currentTime())
if @isFullScreen
sendbeacon( 'enter fullscreen', false, currentTime )
else
sendbeacon( 'exit fullscreen', false, currentTime )
return
sendbeacon = ( action, nonInteraction, value ) ->
try
if 'analytics.js' == gaLibrary
ga('send', 'event', {
'eventCategory' : eventCategory,
'eventAction' : action,
'eventLabel' : eventLabel,
'eventValue' : value,
'nonInteraction' : nonInteraction
});
else
_gaq.push(['_trackEvent', eventCategory, action, eventLabel, value, nonInteraction])
return
@on("loadedmetadata", loaded)
@on("timeupdate", timeupdate)
@on("ended", end) if "end" in eventsToTrack
@on("play", play) if "play" in eventsToTrack
@on("pause", pause) if "pause" in eventsToTrack
@on("volumechange", volumeChange) if "volumeChange" in eventsToTrack
@on("resize", resize) if "resize" in eventsToTrack
@on("error", error) if "error" in eventsToTrack
@on("fullscreenchange", fullscreen) if "fullscreen" in eventsToTrack
return
| true | ##
# ga
# https://github.com/mickey/videojs-ga
#
# Copyright (c) 2013 PI:NAME:<NAME>END_PI
# Licensed under the MIT license.
##
videojs.plugin 'ga', (options) ->
# this loads options from the data-setup attribute of the video tag
dataSetupOptions = {}
if @options()["data-setup"]
parsedOptions = JSON.parse(@options()["data-setup"])
dataSetupOptions = parsedOptions.ga if parsedOptions.ga
defaultsEventsToTrack = [
'loaded', 'percentsPlayed', 'start', 'srcType'
'end', 'seek', 'play', 'pause', 'resize',
'volumeChange', 'error', 'fullscreen'
]
eventsToTrack = options.eventsToTrack || dataSetupOptions.eventsToTrack || defaultsEventsToTrack
percentsPlayedInterval = options.percentsPlayedInterval || dataSetupOptions.percentsPlayedInterval || 10
eventCategory = options.eventCategory || dataSetupOptions.eventCategory || 'Video'
# if you didn't specify a name, it will be 'guessed' from the video src after metadatas are loaded
eventLabel = options.eventLabel || dataSetupOptions.eventLabel
# determine if we are using ga.js or analytics.js
gaLibrary = options.gaLibrary || dataSetupOptions.gaLibrary || 'ga.js'
# init a few variables
percentsAlreadyTracked = []
seekStart = seekEnd = 0
seeking = false
loaded = ->
unless eventLabel
eventLabel = @currentSrc().split("/").slice(-1)[0].replace(/\.(\w{3,4})(\?.*)?$/i,'')
if "loadedmetadata" in eventsToTrack
sendbeacon( 'loadedmetadata', true )
if "srcType" in eventsToTrack
tmpSrcArray = @currentSrc().split(".")
sourceType = tmpSrcArray[tmpSrcArray.length - 1]
sendbeacon( 'source type - ' + "#{@techName}/#{sourceType}", true )
return
timeupdate = ->
currentTime = Math.round(@currentTime())
duration = Math.round(@duration())
percentPlayed = Math.round(currentTime/duration*100)
for percent in [0..99] by percentsPlayedInterval
if percentPlayed >= percent && percent not in percentsAlreadyTracked
if "start" in eventsToTrack && percent == 0 && percentPlayed > 0
sendbeacon( 'start', true )
else if "percentsPlayed" in eventsToTrack && percentPlayed != 0
sendbeacon( 'percent played', true, percent )
if percentPlayed > 0
percentsAlreadyTracked.push(percent)
if "seek" in eventsToTrack
seekStart = seekEnd
seekEnd = currentTime
# if the difference between the start and the end are greater than 1 it's a seek.
if Math.abs(seekStart - seekEnd) > 1
seeking = true
sendbeacon( 'seek start', false, seekStart )
sendbeacon( 'seek end', false, seekEnd )
return
end = ->
sendbeacon( 'end', true )
return
play = ->
currentTime = Math.round(@currentTime())
if currentTime > 0 && !seeking
sendbeacon( 'play', true, currentTime )
seeking = true
return
pause = ->
currentTime = Math.round(@currentTime())
duration = Math.round(@duration())
if currentTime != duration && !seeking
sendbeacon( 'pause', false, currentTime )
return
# value between 0 (muted) and 1
volumeChange = ->
volume = if @muted() == true then 0 else @volume()
sendbeacon( 'volume change', false, volume )
return
resize = ->
sendbeacon( 'resize - ' + @width() + "*" + @height(), true )
return
error = ->
currentTime = Math.round(@currentTime())
# XXX: Is there some informations about the error somewhere ?
sendbeacon( 'error', true, currentTime )
return
fullscreen = ->
currentTime = Math.round(@currentTime())
if @isFullScreen
sendbeacon( 'enter fullscreen', false, currentTime )
else
sendbeacon( 'exit fullscreen', false, currentTime )
return
sendbeacon = ( action, nonInteraction, value ) ->
try
if 'analytics.js' == gaLibrary
ga('send', 'event', {
'eventCategory' : eventCategory,
'eventAction' : action,
'eventLabel' : eventLabel,
'eventValue' : value,
'nonInteraction' : nonInteraction
});
else
_gaq.push(['_trackEvent', eventCategory, action, eventLabel, value, nonInteraction])
return
@on("loadedmetadata", loaded)
@on("timeupdate", timeupdate)
@on("ended", end) if "end" in eventsToTrack
@on("play", play) if "play" in eventsToTrack
@on("pause", pause) if "pause" in eventsToTrack
@on("volumechange", volumeChange) if "volumeChange" in eventsToTrack
@on("resize", resize) if "resize" in eventsToTrack
@on("error", error) if "error" in eventsToTrack
@on("fullscreenchange", fullscreen) if "fullscreen" in eventsToTrack
return
|
[
{
"context": "nt\n\t@id: \"thanksgivingLoginAchievement\"\n\t@title: \"HAPPY THANKSGIVING\"\n\t@description: \"WE'RE THANKFUL TODAY FOR OUR LOV",
"end": 296,
"score": 0.9356926679611206,
"start": 278,
"tag": "NAME",
"value": "HAPPY THANKSGIVING"
}
] | app/sdk/achievements/loginBasedAchievements/thanksgivingLoginAchievement.coffee | willroberts/duelyst | 5 | Achievement = require 'app/sdk/achievements/achievement'
moment = require 'moment'
GiftCrateLookup = require 'app/sdk/giftCrates/giftCrateLookup'
i18next = require('i18next')
class ThanksgivingLoginAchievement extends Achievement
@id: "thanksgivingLoginAchievement"
@title: "HAPPY THANKSGIVING"
@description: "WE'RE THANKFUL TODAY FOR OUR LOVING FANS, SO WE'RE GIVING BACK WITH A SPECIAL GIFT"
@progressRequired: 1
@rewards:
giftChests: [GiftCrateLookup.ThanksgivingLogin]
@enabled: true
@progressForLoggingIn: (currentLoginMoment) ->
if currentLoginMoment != null && currentLoginMoment.isAfter(moment.utc("2018-11-16T11:00-08:00")) and currentLoginMoment.isBefore(moment.utc("2018-11-23T11:00-08:00"))
return 1
else
return 0
@getLoginAchievementStartsMoment: () ->
return moment.utc("2018-11-16T11:00-08:00")
module.exports = ThanksgivingLoginAchievement
| 52782 | Achievement = require 'app/sdk/achievements/achievement'
moment = require 'moment'
GiftCrateLookup = require 'app/sdk/giftCrates/giftCrateLookup'
i18next = require('i18next')
class ThanksgivingLoginAchievement extends Achievement
@id: "thanksgivingLoginAchievement"
@title: "<NAME>"
@description: "WE'RE THANKFUL TODAY FOR OUR LOVING FANS, SO WE'RE GIVING BACK WITH A SPECIAL GIFT"
@progressRequired: 1
@rewards:
giftChests: [GiftCrateLookup.ThanksgivingLogin]
@enabled: true
@progressForLoggingIn: (currentLoginMoment) ->
if currentLoginMoment != null && currentLoginMoment.isAfter(moment.utc("2018-11-16T11:00-08:00")) and currentLoginMoment.isBefore(moment.utc("2018-11-23T11:00-08:00"))
return 1
else
return 0
@getLoginAchievementStartsMoment: () ->
return moment.utc("2018-11-16T11:00-08:00")
module.exports = ThanksgivingLoginAchievement
| true | Achievement = require 'app/sdk/achievements/achievement'
moment = require 'moment'
GiftCrateLookup = require 'app/sdk/giftCrates/giftCrateLookup'
i18next = require('i18next')
class ThanksgivingLoginAchievement extends Achievement
@id: "thanksgivingLoginAchievement"
@title: "PI:NAME:<NAME>END_PI"
@description: "WE'RE THANKFUL TODAY FOR OUR LOVING FANS, SO WE'RE GIVING BACK WITH A SPECIAL GIFT"
@progressRequired: 1
@rewards:
giftChests: [GiftCrateLookup.ThanksgivingLogin]
@enabled: true
@progressForLoggingIn: (currentLoginMoment) ->
if currentLoginMoment != null && currentLoginMoment.isAfter(moment.utc("2018-11-16T11:00-08:00")) and currentLoginMoment.isBefore(moment.utc("2018-11-23T11:00-08:00"))
return 1
else
return 0
@getLoginAchievementStartsMoment: () ->
return moment.utc("2018-11-16T11:00-08:00")
module.exports = ThanksgivingLoginAchievement
|
[
{
"context": " realName: my.username,\n password: my.oauth_token,\n channels: _.union(my.channel, db.get '",
"end": 330,
"score": 0.9991192817687988,
"start": 316,
"tag": "PASSWORD",
"value": "my.oauth_token"
}
] | inc/chat.coffee | glacials/housebot | 4 | _ = require 'underscore'
irc = require 'irc'
db = require './db'
my = require './my'
module.exports =
connect_to: (chat_server) ->
process.setMaxListeners 0
this.client = new irc.Client chat_server, my.username, {
userName: my.username,
realName: my.username,
password: my.oauth_token,
channels: _.union(my.channel, db.get 'channels').map((channel) -> '#'+channel)
debug: true,
showErrors: true
}
disconnect: ->
this.client.disconnect
join: (channel) ->
this.client.join '#'+channel
db.union_with 'channels', channel
leave: (channel) ->
this.client.part '#'+channel
db.remove_from 'channels', channel
say_in: (channel, message) ->
this.client.say '#'+channel, message
in: (channel) ->
_.contains(_.union(my.channel, db.get 'channels'), channel)
# callback should take args (channel, user, match)
on: (regex, callback) ->
callback_wrapper = (user, channel, text) ->
if regex.test text
callback channel.slice(1), user, text.match regex
this.client.addListener 'message#', callback_wrapper
| 146365 | _ = require 'underscore'
irc = require 'irc'
db = require './db'
my = require './my'
module.exports =
connect_to: (chat_server) ->
process.setMaxListeners 0
this.client = new irc.Client chat_server, my.username, {
userName: my.username,
realName: my.username,
password: <PASSWORD>,
channels: _.union(my.channel, db.get 'channels').map((channel) -> '#'+channel)
debug: true,
showErrors: true
}
disconnect: ->
this.client.disconnect
join: (channel) ->
this.client.join '#'+channel
db.union_with 'channels', channel
leave: (channel) ->
this.client.part '#'+channel
db.remove_from 'channels', channel
say_in: (channel, message) ->
this.client.say '#'+channel, message
in: (channel) ->
_.contains(_.union(my.channel, db.get 'channels'), channel)
# callback should take args (channel, user, match)
on: (regex, callback) ->
callback_wrapper = (user, channel, text) ->
if regex.test text
callback channel.slice(1), user, text.match regex
this.client.addListener 'message#', callback_wrapper
| true | _ = require 'underscore'
irc = require 'irc'
db = require './db'
my = require './my'
module.exports =
connect_to: (chat_server) ->
process.setMaxListeners 0
this.client = new irc.Client chat_server, my.username, {
userName: my.username,
realName: my.username,
password: PI:PASSWORD:<PASSWORD>END_PI,
channels: _.union(my.channel, db.get 'channels').map((channel) -> '#'+channel)
debug: true,
showErrors: true
}
disconnect: ->
this.client.disconnect
join: (channel) ->
this.client.join '#'+channel
db.union_with 'channels', channel
leave: (channel) ->
this.client.part '#'+channel
db.remove_from 'channels', channel
say_in: (channel, message) ->
this.client.say '#'+channel, message
in: (channel) ->
_.contains(_.union(my.channel, db.get 'channels'), channel)
# callback should take args (channel, user, match)
on: (regex, callback) ->
callback_wrapper = (user, channel, text) ->
if regex.test text
callback channel.slice(1), user, text.match regex
this.client.addListener 'message#', callback_wrapper
|
[
{
"context": "r in [\"visibleContent\", \"hiddenContent\"]\r\n\t\t\tkey = \"_#{container}\"\r\n\t\t\tif @[key]?.length\r\n\t\t\t\t@_render(",
"end": 1268,
"score": 0.5412507653236389,
"start": 1266,
"tag": "KEY",
"value": "\"_"
}
] | src/widget/base/reveal.coffee | homeant/cola-ui | 90 | ###
Reveal 组件
###
class cola.Reveal extends cola.Widget
@tagName: "c-reveal"
@className: "ui reveal"
@attributes:
type:
refreshDom: true
defaultValue: "fade"
enum: ["fade", "move", "rotate"]
setter: (value)->
oldValue = @["_type"]
if oldValue and @_dom and oldValue isnt value
@get$Dom().removeClass(oldValue)
@["_type"] = value
return
direction:
refreshDom: true
enum: ["left", "right", "up", "down"]
defaultValue: "left"
setter: (value)->
oldValue = @["_direction"]
if oldValue and @_dom and oldValue isnt value
@get$Dom().removeClass(oldValue)
@["_direction"] = value
return
active:
type: "boolean"
refreshDom: true
defaultValue: false
instant:
type: "boolean"
refreshDom: true
defaultValue: false
disabled:
type: "boolean"
refreshDom: true
defaultValue: false
visibleContent:
refreshDom: true
setter: (value)->
@_setContent(value, "visibleContent")
return @
hiddenContent:
refreshDom: true
setter: (value)->
@_setContent(value, "hiddenContent")
return @
_initDom: (dom)->
super(dom)
for container in ["visibleContent", "hiddenContent"]
key = "_#{container}"
if @[key]?.length
@_render(el, container) for el in @[key]
return
_parseDom: (dom)->
return unless dom
@_doms ?= {}
child = dom.firstElementChild
while child
if child.nodeType == 1
widget = cola.widget(child)
if widget
widget$Dom = widget.get$Dom()
@_visibleContent = widget if widget$Dom.has("visible content")
@_hiddenContent = widget if widget$Dom.has("hidden content")
else
$child = $(child)
@_doms.visibleContent = widget if $child.has("visible content")
@_doms.hiddenContent = widget if $child.has("hidden content")
child = child.nextElementSibling
_clearContent: (target)->
old = @["_#{target}"]
if old
for el in old
el.destroy() if el instanceof cola.widget
@["_#{target}"] = []
@_doms ?= {}
$fly(@_doms[target]).empty() if @_doms[target]
return
_setContent: (value, target)->
@_clearContent(target)
if value instanceof Array
for el in value
result = cola.xRender(el, @_scope)
@_addContentElement(result, target) if result
else
result = cola.xRender(value, @_scope)
@_addContentElement(result, target) if result
return
_makeContentDom: (target)->
@_doms ?= {}
if not @_doms[target]
@_doms[target] = document.createElement("div")
@_doms[target].className = "#{if target is "visibleContent" then "visible" else "hidden"} content"
@_dom.appendChild(@_doms[target])
return @_doms[target]
_addContentElement: (element, target)->
name = "_#{target}"
@[name] ?= []
targetList = @[name]
targetList.push(element)
@_render(element, target) if element and @_dom
return
_render: (node, target)->
@_doms ?= {}
@_makeContentDom(target) unless @_doms[target]
dom = node
if node instanceof cola.Widget
dom = node.getDom()
@_doms[target].appendChild(dom) if dom.parentNode isnt @_doms[target]
return
_doRefreshDom: ()->
return unless @_dom
super()
classNamePool = @_classNamePool
["active", "instant", "disabled"].forEach((property)=>
value = @get(property)
classNamePool.toggle(property, !!value)
)
type = @get("type")
classNamePool.add(type) if type
direction = @get("direction")
classNamePool.add(direction) if direction
return
_getContentContainer: (target)->
return unless @_dom
@_makeContentDom(target) unless @_doms[target]
return @_doms[target]
getVisibleContentContainer: ()-> return @_getContentContainer("visible")
getHiddenContentContainer: ()-> return @_getContentContainer("hidden")
cola.registerWidget(cola.Reveal) | 135814 | ###
Reveal 组件
###
class cola.Reveal extends cola.Widget
@tagName: "c-reveal"
@className: "ui reveal"
@attributes:
type:
refreshDom: true
defaultValue: "fade"
enum: ["fade", "move", "rotate"]
setter: (value)->
oldValue = @["_type"]
if oldValue and @_dom and oldValue isnt value
@get$Dom().removeClass(oldValue)
@["_type"] = value
return
direction:
refreshDom: true
enum: ["left", "right", "up", "down"]
defaultValue: "left"
setter: (value)->
oldValue = @["_direction"]
if oldValue and @_dom and oldValue isnt value
@get$Dom().removeClass(oldValue)
@["_direction"] = value
return
active:
type: "boolean"
refreshDom: true
defaultValue: false
instant:
type: "boolean"
refreshDom: true
defaultValue: false
disabled:
type: "boolean"
refreshDom: true
defaultValue: false
visibleContent:
refreshDom: true
setter: (value)->
@_setContent(value, "visibleContent")
return @
hiddenContent:
refreshDom: true
setter: (value)->
@_setContent(value, "hiddenContent")
return @
_initDom: (dom)->
super(dom)
for container in ["visibleContent", "hiddenContent"]
key = <KEY>#{container}"
if @[key]?.length
@_render(el, container) for el in @[key]
return
_parseDom: (dom)->
return unless dom
@_doms ?= {}
child = dom.firstElementChild
while child
if child.nodeType == 1
widget = cola.widget(child)
if widget
widget$Dom = widget.get$Dom()
@_visibleContent = widget if widget$Dom.has("visible content")
@_hiddenContent = widget if widget$Dom.has("hidden content")
else
$child = $(child)
@_doms.visibleContent = widget if $child.has("visible content")
@_doms.hiddenContent = widget if $child.has("hidden content")
child = child.nextElementSibling
_clearContent: (target)->
old = @["_#{target}"]
if old
for el in old
el.destroy() if el instanceof cola.widget
@["_#{target}"] = []
@_doms ?= {}
$fly(@_doms[target]).empty() if @_doms[target]
return
_setContent: (value, target)->
@_clearContent(target)
if value instanceof Array
for el in value
result = cola.xRender(el, @_scope)
@_addContentElement(result, target) if result
else
result = cola.xRender(value, @_scope)
@_addContentElement(result, target) if result
return
_makeContentDom: (target)->
@_doms ?= {}
if not @_doms[target]
@_doms[target] = document.createElement("div")
@_doms[target].className = "#{if target is "visibleContent" then "visible" else "hidden"} content"
@_dom.appendChild(@_doms[target])
return @_doms[target]
_addContentElement: (element, target)->
name = "_#{target}"
@[name] ?= []
targetList = @[name]
targetList.push(element)
@_render(element, target) if element and @_dom
return
_render: (node, target)->
  # Append `node` (a cola widget or a raw element) into the container that
  # belongs to `target`, creating the container on demand.
  @_doms ?= {}
  @_makeContentDom(target) unless @_doms[target]
  dom = if node instanceof cola.Widget then node.getDom() else node
  @_doms[target].appendChild(dom) unless dom.parentNode is @_doms[target]
  return
_doRefreshDom: ()->
  # Sync the boolean state classes plus the type/direction effect classes
  # onto the widget's DOM class-name pool.
  return unless @_dom
  super()
  pool = @_classNamePool
  for property in ["active", "instant", "disabled"]
    pool.toggle(property, !!@get(property))
  for attr in ["type", "direction"]
    value = @get(attr)
    pool.add(value) if value
  return
_getContentContainer: (target)->
  # Return (creating on demand) the container element for `target`.
  return unless @_dom
  # Fix: guard @_doms like every sibling method does; otherwise the first
  # call before any content is set dereferences undefined.
  @_doms ?= {}
  @_makeContentDom(target) unless @_doms[target]
  return @_doms[target]
# Fix: containers are keyed "visibleContent"/"hiddenContent" everywhere else
# (_makeContentDom, _render); passing "visible"/"hidden" created a fresh,
# wrongly-classed div instead of returning the real container.
getVisibleContentContainer: ()-> return @_getContentContainer("visibleContent")
getHiddenContentContainer: ()-> return @_getContentContainer("hiddenContent")
cola.registerWidget(cola.Reveal) | true | ###
Reveal 组件
###
class cola.Reveal extends cola.Widget
  # Semantic-UI "reveal" widget: shows `visibleContent` normally and
  # `hiddenContent` on activation, with a configurable effect and direction.
  @tagName: "c-reveal"
  @className: "ui reveal"
  @attributes:
    type:
      refreshDom: true
      defaultValue: "fade"
      enum: ["fade", "move", "rotate"]
      setter: (value)->
        # Drop the stale effect class before the refresh applies the new one.
        oldValue = @["_type"]
        if oldValue and @_dom and oldValue isnt value
          @get$Dom().removeClass(oldValue)
        @["_type"] = value
        return
    direction:
      refreshDom: true
      enum: ["left", "right", "up", "down"]
      defaultValue: "left"
      setter: (value)->
        oldValue = @["_direction"]
        if oldValue and @_dom and oldValue isnt value
          @get$Dom().removeClass(oldValue)
        @["_direction"] = value
        return
    active:
      type: "boolean"
      refreshDom: true
      defaultValue: false
    instant:
      type: "boolean"
      refreshDom: true
      defaultValue: false
    disabled:
      type: "boolean"
      refreshDom: true
      defaultValue: false
    visibleContent:
      refreshDom: true
      setter: (value)->
        @_setContent(value, "visibleContent")
        return @
    hiddenContent:
      refreshDom: true
      setter: (value)->
        @_setContent(value, "hiddenContent")
        return @

  _initDom: (dom)->
    # Attach any content elements created before the DOM existed.
    super(dom)
    for container in ["visibleContent", "hiddenContent"]
      # Fix: reconstruct the garbled key expression — element lists live
      # under "_visibleContent"/"_hiddenContent" (see _addContentElement).
      key = "_#{container}"
      if @[key]?.length
        @_render(el, container) for el in @[key]
    return

  _parseDom: (dom)->
    # Scan direct children and capture visible/hidden content nodes,
    # whether they are cola widgets or plain DOM elements.
    return unless dom
    @_doms ?= {}
    child = dom.firstElementChild
    while child
      if child.nodeType == 1
        widget = cola.widget(child)
        if widget
          widget$Dom = widget.get$Dom()
          # NOTE(review): jQuery .has() takes a descendant selector; matching
          # the "visible content" class names may need .hasClass — confirm.
          @_visibleContent = widget if widget$Dom.has("visible content")
          @_hiddenContent = widget if widget$Dom.has("hidden content")
        else
          $child = $(child)
          # Fix: store the raw element; `widget` is null on this branch.
          @_doms.visibleContent = child if $child.has("visible content")
          @_doms.hiddenContent = child if $child.has("hidden content")
      child = child.nextElementSibling
    return

  _clearContent: (target)->
    # Destroy widgets previously rendered for `target`, reset the list and
    # empty its container DOM.
    old = @["_#{target}"]
    if old
      for el in old
        # Fix: compare against the Widget class, not the cola.widget()
        # lookup function; the original check never matched (leak).
        el.destroy() if el instanceof cola.Widget
    @["_#{target}"] = []
    @_doms ?= {}
    $fly(@_doms[target]).empty() if @_doms[target]
    return

  _setContent: (value, target)->
    # Replace the current content of `target` with rendered element(s).
    @_clearContent(target)
    if value instanceof Array
      for el in value
        result = cola.xRender(el, @_scope)
        @_addContentElement(result, target) if result
    else
      result = cola.xRender(value, @_scope)
      @_addContentElement(result, target) if result
    return

  _makeContentDom: (target)->
    # Lazily create and append the container <div> for `target`.
    @_doms ?= {}
    if not @_doms[target]
      @_doms[target] = document.createElement("div")
      @_doms[target].className = "#{if target is "visibleContent" then "visible" else "hidden"} content"
      @_dom.appendChild(@_doms[target])
    return @_doms[target]

  _addContentElement: (element, target)->
    # Track `element` under "_<target>" and render it if the DOM is ready.
    name = "_#{target}"
    @[name] ?= []
    targetList = @[name]
    targetList.push(element)
    @_render(element, target) if element and @_dom
    return

  _render: (node, target)->
    # Append `node` (widget or raw element) into the `target` container.
    @_doms ?= {}
    @_makeContentDom(target) unless @_doms[target]
    dom = node
    if node instanceof cola.Widget
      dom = node.getDom()
    @_doms[target].appendChild(dom) if dom.parentNode isnt @_doms[target]
    return

  _doRefreshDom: ()->
    # Sync boolean state classes plus type/direction classes to the DOM.
    return unless @_dom
    super()
    classNamePool = @_classNamePool
    ["active", "instant", "disabled"].forEach((property)=>
      value = @get(property)
      classNamePool.toggle(property, !!value)
    )
    type = @get("type")
    classNamePool.add(type) if type
    direction = @get("direction")
    classNamePool.add(direction) if direction
    return

  _getContentContainer: (target)->
    # Return (creating on demand) the container element for `target`.
    return unless @_dom
    # Fix: guard @_doms like every sibling method does.
    @_doms ?= {}
    @_makeContentDom(target) unless @_doms[target]
    return @_doms[target]

  # Fix: containers are keyed "visibleContent"/"hiddenContent" everywhere
  # else; "visible"/"hidden" created a wrongly-classed orphan container.
  getVisibleContentContainer: ()-> return @_getContentContainer("visibleContent")
  getHiddenContentContainer: ()-> return @_getContentContainer("hiddenContent")
cola.registerWidget(cola.Reveal) |
[
{
"context": "############\n##\n## Copyright 2018 M. Hoppe & N. Justus\n##\n## Licensed under the Apache Licen",
"end": 42,
"score": 0.9998503923416138,
"start": 34,
"tag": "NAME",
"value": "M. Hoppe"
},
{
"context": "############\n##\n## Copyright 2018 M. Hoppe & N. Justus\n##\n## L... | app/assets/javascripts/hideable.coffee | LiScI-Lab/Guardian-of-Times | 3 | ############
##
## Copyright 2018 M. Hoppe & N. Justus
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
############
window.timetracker or= {}
timetracker.hideable = {}

# Wire up hide/show behavior for all existing hideable containers, and for
# any containers cocoon inserts later.
timetracker.hideable.init = ->
  console.log('hideable init')
  timetracker.hideable.init_element($('.has-hideable-content'))
  $(document).on 'cocoon:after-insert', (ev, elem) ->
    timetracker.hideable.init_element(elem)
    return

# Attach a change handler to each hide-selector <select> inside `elem`.
timetracker.hideable.init_element = (elem) ->
  $('.hide-selector select', elem).on 'change', (ev) ->
    timetracker.hideable.disable_block ev.target
    return
  return

# Show only the .hideable blocks matching the select's current value.
timetracker.hideable.disable_block = (elem) ->
  elem = $(elem)
  hideable_parent = elem.parents('.has-hideable-content').first()
  val = elem.val()
  $('.hideable', hideable_parent).hide()
  $(".hideable.#{val}", hideable_parent).show()
return | 119035 | ############
##
## Copyright 2018 <NAME> & <NAME>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
############
# Namespace guard: create window.timetracker only if it does not exist yet.
window.timetracker || (window.timetracker = {})
timetracker.hideable = {}
# Initialize hide/show behavior for all current containers and for any
# containers inserted later by cocoon's nested-form helper.
timetracker.hideable.init = () ->
  console.log('hideable init')
  timetracker.hideable.init_element($('.has-hideable-content'))
  $(document).on 'cocoon:after-insert', (ev, elem) ->
    timetracker.hideable.init_element(elem)
    return
# Attach a change handler to each hide-selector <select> inside `elem`.
timetracker.hideable.init_element = (elem) ->
  $('.hide-selector select', elem).on 'change', (ev) ->
    timetracker.hideable.disable_block ev.target
    return
  return
# Hide every .hideable block in the surrounding container, then show only
# the blocks whose class matches the select's current value.
timetracker.hideable.disable_block = (elem) ->
  elem = $(elem)
  hideable_parent = elem.parents('.has-hideable-content').first()
  val = elem.val()
  $('.hideable', hideable_parent).hide()
  $(".hideable.#{val}", hideable_parent).show()
return | true | ############
##
## Copyright 2018 PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
############
# Namespace guard: create window.timetracker only if it does not exist yet.
window.timetracker || (window.timetracker = {})
timetracker.hideable = {}
# Initialize hide/show behavior for all current containers and for any
# containers inserted later by cocoon's nested-form helper.
timetracker.hideable.init = () ->
  console.log('hideable init')
  timetracker.hideable.init_element($('.has-hideable-content'))
  $(document).on 'cocoon:after-insert', (ev, elem) ->
    timetracker.hideable.init_element(elem)
    return
# Attach a change handler to each hide-selector <select> inside `elem`.
timetracker.hideable.init_element = (elem) ->
  $('.hide-selector select', elem).on 'change', (ev) ->
    timetracker.hideable.disable_block ev.target
    return
  return
# Hide every .hideable block in the surrounding container, then show only
# the blocks whose class matches the select's current value.
timetracker.hideable.disable_block = (elem) ->
  elem = $(elem)
  hideable_parent = elem.parents('.has-hideable-content').first()
  val = elem.val()
  $('.hideable', hideable_parent).hide()
  $(".hideable.#{val}", hideable_parent).show()
return |
[
{
"context": "->\n for i in [1..count]\n key: separator+'key'+Math.random()\n value: Math.random()\ntestOps = (ops, option",
"end": 1168,
"score": 0.8405200839042664,
"start": 1157,
"tag": "KEY",
"value": "Math.random"
},
{
"context": "10)->\n data = for i in [1..count]\n ... | test/subkey-test.coffee | marwahaha/node-nosql-subkey | 0 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
assert = chai.assert
SubkeyNoSQL = require '../src/nosql-subkey'
#AbstractNoSQL = require 'abstract-nosql'
Errors = require 'abstract-object/Error'
Codec = require 'buffer-codec'
EncodingIterator= require 'encoding-iterator'
inherits = require 'inherits-ex/lib/inherits'
isInheritedFrom = require 'inherits-ex/lib/isInheritedFrom'
isObject = require 'util-ex/lib/is/type/object'
FakeDB = require './fake-nosql'
codec = require '../src/codec'
path = require '../src/path'
setImmediate = setImmediate || process.nextTick
InvalidArgumentError = Errors.InvalidArgumentError
PATH_SEP = codec.PATH_SEP
SUBKEY_SEP = codec.SUBKEY_SEP
_encodeKey = codec._encodeKey
encodeKey = codec.encodeKey
toPath = path.join
chai.use(sinonChai)
FakeDB = SubkeyNoSQL(FakeDB)
# Produce `count` {key, value} operations with random key suffixes and
# random values; keys are optionally prefixed by `separator`.
genOps = (separator='', count=10)->
  for i in [1..count]
    key: "#{separator}key#{Math.random()}"
    value: Math.random()
# Assert that each op in `ops` landed in the fake store under its encoded
# key with its encoded value. Must be called with a mocha context as `this`
# (reads @db and @subkey).
testOps = (ops, options) ->
  #console.log 'data', @db.data
  for op in ops
    encodedKey = getEncodedKey @db, op.key, options, @subkey
    result = @db.data[encodedKey]
    result.should.be.equal getEncodedValue @db, op.value
# Batch-write `count` random items into `db` under `path`, then verify each
# encoded key/value pair is present in the backing store. Returns the raw
# (unencoded) data array for further assertions.
genData = (db, path = "op", opts, count = 10)->
  data = for i in [1..count]
    key: myKey: Math.random()
    value: Math.random()
    path: path
  db.batch data, opts
  vParentPath = opts.path if opts
  _opts = {}
  for item in data
    key = getEncodedKey db, item.key, item, vParentPath
    _opts.valueEncoding = item.valueEncoding
    # Batch-level valueEncoding only applies when the item specifies none.
    _opts.valueEncoding = opts.valueEncoding if opts and not _opts.valueEncoding
    valueEncoding = db.valueEncoding _opts
    value = if valueEncoding then valueEncoding.encode(item.value) else item.value
    db.data.should.have.property key, value
  data
# Encode `key` exactly the way the store does for the given options and
# parent path (falls back to an empty options object).
getEncodedKey = (db, key, options, parentPath) ->
  options or= {}
  encodeKey db.getPathArray(options, parentPath), key, db.keyEncoding(options), options
# Encode `value` with the db's value encoding for `options`, or pass it
# through unchanged when no encoding applies.
getEncodedValue = (db, value, options) ->
  encoding = db.valueEncoding options
  if encoding then encoding.encode value else value
# Mirror the encoding the store applies to a batch, without mutating `ops`.
getEncodedOps = (db, ops, opts) ->
  vParentPath = opts.path if opts
  ops.slice().map (op) ->
    # Fix: CoffeeScript plain assignments do not shadow — the original
    # `path: path = db.getPathArray(...)` clobbered the module-level
    # `path` require. Use a distinct local name instead.
    opPath = db.getPathArray(op, vParentPath)
    key: getEncodedKey db, op.key, op, vParentPath
    value: if op.value then JSON.stringify op.value else op.value
    type: op.type
    path: opPath
    _keyPath: [opPath, op.key]
describe "Subkey", ->
before ->
@db = new FakeDB()
@db.open({keyEncoding:'json', valueEncoding: 'json'})
@root = @db.root()
after ->
@db.close()
testPath = (subkey, expectedPath) ->
subkey.fullName.should.be.equal expectedPath
subkey.path().should.be.equal expectedPath
it "should get root subkey from db", ->
@root.fullName.should.be.equal PATH_SEP
@root.pathAsArray().should.be.deep.equal []
result = @db.cache.get @root.fullName
result.should.be.equal @root
result.should.be.equal @db.root()
it "should not enumerable buildin properties", ->
keys = Object.keys @root
keys.should.have.length 0
describe ".parent()", ->
it "should be null for root's parent", ->
should.not.exist @root.parent()
it "should get subkey's parent", ->
subkey = @root.subkey('test')
child = subkey.path('child')
testPath subkey, '/test'
testPath child, '/test/child'
@db.cache.isExists('/test').should.be.true
@db.cache.isExists('/test/child').should.be.true
child.parent().should.be.equal subkey
subkey.free()
@db.cache.isExists('/test').should.be.false
@db.cache.isExists('/test/child').should.be.false
it "should raise error if subkey's parent not found in cache", ->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
should.throw child.parent.bind(child)
myparent = @root.path('myparent')
testPath myparent, '/myparent'
should.throw child.parent.bind(child)
child.free()
myparent.free()
it "should get subkey's latest parent if latestParent is true and it is not in cache", ->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
parent = child.parent latestParent:true
parent.should.be.equal @root
myparent = @root.path('myparent')
testPath myparent, '/myparent'
parent = child.parent latestParent:true
parent.should.be.equal myparent
child.free()
myparent.free()
it "should get subkey's latest parent via callback if it's is not in cache", (done)->
child = @root.createPath('myparent/god/child')
testPath child, '/myparent/god/child'
parent = child.parent latestParent:true, (err, result)=>
should.not.exist err
result.should.be.equal @root
myparent = @root.createPath('myparent')
testPath myparent, '/myparent'
parent = child.parent latestParent:true, (err, result)->
result.should.be.equal myparent
myparent.free()
child.free()
child.RefCount.should.be.equal 0
done()
it "should get subkey's parent even it's not in cache when createIfMissing", ->
child = @root.createPath('myparent/god/child')
child.RefCount.should.be.equal 1
testPath child, '/myparent/god/child'
# this parent is not exists, so createIfMissing:
parent = child.parent createIfMissing: true
testPath parent, '/myparent/god'
p2 = @root.createPath('/myparent/god')
p2.should.be.equal parent
child.free()
parent.free()
p2.free()
p2.RefCount.should.be.equal 0
p2.free()
p2.isDestroyed().should.be.equal true
it "should get subkey's parent via callback even it's not in cache when createIfMissing", (done)->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
@db.cache.has('/myparent/god').should.be.false
parent = child.parent createIfMissing: true, (err, result)=>
should.not.exist err
testPath result, '/myparent/god',
@root.createPath('/myparent/god').should.be.equal result
result.RefCount.should.be.equal 2
result.destroy()
result.isDestroyed().should.be.equal true
child.free()
@db.cache.has('/myparent/god').should.be.false
@db.cache.has('/myparent/god/child').should.be.false
done()
it "should get subkey's parent via callback when createIfMissing", (done)->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
parent = @root.createPath 'myparent/god'
child.parent createIfMissing: true, (err, result)=>
should.not.exist err
testPath result, '/myparent/god',
result.should.be.equal parent
result.should.be.equal @root.createPath('/myparent/god')
result.destroy()
child.free()
@db.cache.has('/myparent/god').should.be.false
@db.cache.has('/myparent/god/child').should.be.false
done()
describe ".setPath(path, callback)", ->
it "should set myself to another path", ->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
subkey.setPath('/my/other').should.be.true
# setPath will remove itself from cache.
@db.cache.isExists('/my/subkey').should.be.false
testPath subkey, '/my/other'
subkey.RefCount.should.be.equal 0
it "should set myself to another path via callback", (done)->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
subkey.setPath '/my/other', (err, result)=>
should.not.exist err
result.should.be.equal subkey
# setPath will remove itself from cache.
@db.cache.isExists('/my/subkey').should.be.false
testPath result, '/my/other'
done()
describe ".path()/.fullName", ->
it "should get myself path", ->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
describe ".createPath(path)/.createSubkey(path)", ->
before -> @subkey = @root.path 'myparent'
after -> @subkey.free()
it "should create subkey", ->
key = @subkey.createPath('subkey1')
testPath key, '/myparent/subkey1'
key.free()
it "should create many subkeys", ->
keys = for i in [0...10]
@subkey.createPath 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/subkey'+i
key.free()
for key,i in keys
key.RefCount.should.be.equal 0
it "should create the same subkey more once", ->
key = @subkey.createPath('subkey1')
key.RefCount.should.be.equal 1
testPath key, '/myparent/subkey1'
keys = for i in [0...10]
k = @subkey.createPath('subkey1')
k.should.be.equal key
k
key.RefCount.should.be.equal keys.length+1
for k in keys
k.free()
key.RefCount.should.be.equal 1
key.free()
describe ".path(path)/.subkey(path)", ->
before -> @subkey = @root.path 'myparent'
after -> @subkey.free()
it "should get subkey", ->
key = @subkey.path('subkey1')
testPath key, '/myparent/subkey1'
key.free()
key.isDestroyed().should.be.equal true
it "should get many subkeys", ->
keys = for i in [0...10]
@subkey.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/subkey'+i
key.free()
for key,i in keys
key.isDestroyed().should.be.true
it "should get the same subkey more once", ->
key = @subkey.path('subkey1')
key.RefCount.should.be.equal 0
testPath key, '/myparent/subkey1'
keys = for i in [0...10]
k = @subkey.path('subkey1')
k.should.be.equal key
k
key.RefCount.should.be.equal 0
key.free()
key.isDestroyed().should.be.true
it "should free subkeys after parent is freed ", ->
parent = @subkey.path 'parent'
keys = for i in [0...10]
parent.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/parent/subkey'+i
parent.free()
for key in keys
assert.equal key.isDestroyed(), true
it "should not free subkeys after parent is freed if pass free(false)", ->
parent = @subkey.path 'parent'
keys = for i in [0...10]
parent.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/parent/subkey'+i
# pass false to do not free subkeys:
parent.free(false)
for key in keys
assert.equal key.isDestroyed(), false
describe "put operation", ->
before -> @subkey = @root.path 'myputParent'
after -> @subkey.free()
it "should put key value .putSync", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put key value .putAsync", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putAsync key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put value to itself .putSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put value to itself .putAsync", (done)->
value = Math.random()
@subkey.putAsync value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put attribute via separator (.putSync)", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value, separator: '.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put attribute (.putSync)", ->
key = ".myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put attribute via separator (.putAsync)", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putAsync key, value, separator:'.', (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put attribute (.putAsync)", (done)->
key = ".myput"+Math.random()
value = Math.random()
@subkey.putAsync key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put another path key value .putSync", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put another path key value via .putAsync", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putAsync key, value, {path: 'hahe'}, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put key value via .put", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put key value via .put async", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.put key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put value to itself .put", ->
value = Math.random()
key = @subkey.path("tkey")
key.put value
encodedKey = getEncodedKey @db, '.', undefined, key
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put value to itself .put async", (done)->
value = Math.random()
@subkey.put value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
describe "get operation", ->
before -> @subkey = @root.path 'myGetParent'
after -> @subkey.free()
it "should get key .getSync", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key
result.should.be.equal value
it "should get key .getAsync", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get itself .getSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync()
result.should.be.equal value
it "should get itself .getAsync", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute .getSync", ->
key = ".myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key
result.should.be.equal value
result = @subkey.getSync key.slice(1), separator:'.'
result.should.be.equal value
it "should get attribute with path .getSync", ->
key = ".myput"+Math.random()
path = "mypath"
value = Math.random()
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, path: path
result.should.be.equal value
result = @subkey.getSync key.slice(1), separator:'.', path:path
result.should.be.equal value
it "should get attribute via separator .getSync", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, separator:'.'
result.should.be.equal value
result = @subkey.getSync '.'+key
result.should.be.equal value
it "should get attribute .getAsync", (done)->
key = ".myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync key.slice(1), separator:'.', (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute with path .getAsync", (done)->
key = ".myput"+Math.random()
value = Math.random()
path = "mypath"
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync key.slice(1), separator:'.', path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute via separator .getAsync", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, separator:'.', (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync '.'+key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get another path key value via .getSync", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, {path: 'hahe'}
result.should.be.equal value
it "should get another path key value via .getAsync", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, {path: 'hahe'}, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get key value via .get", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get key
result.should.be.equal value
it "should get key value via .get async", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute with path .get sync", ->
key = ".myput"+Math.random()
path = "mypath"
value = Math.random()
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get key, path: path
result.should.be.equal value
result = @subkey.get key.slice(1), separator:'.', path:path
result.should.be.equal value
it "should get attribute with path .get async", (done)->
key = ".myput"+Math.random()
value = Math.random()
path = "mypath"
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get key, path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.get key.slice(1), separator:'.', path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get itself .get", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get()
result.should.be.equal value
it "should get itself .get async", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get (err, result)=>
should.not.exist err
result.should.be.equal value
done()
describe "del operation", ->
before -> @subkey = @root.path 'myDelParent'
after -> @subkey.free()
it "should del key .delSync", ->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del key .delAsync", (done)->
key = "myput"+Math.random()
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
# del-operation tests (interior of the "del operation" describe).
# Pattern for every case: write via putSync/put, verify the encoded entry
# exists in the fake backend's raw store (@db.data), delete through the
# API under test, then assert the raw entry is gone.
it "should del itself .delSync", ->
  value = Math.random()
  @subkey.putSync value
  # key '.' addresses the subkey itself rather than a child entry
  encodedKey = getEncodedKey @db, '.', undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  result = @subkey.delSync()
  result.should.be.equal true
  result = @db.data[encodedKey]
  should.not.exist result
it "should del itself .delAsync", (done)->
  value = Math.random()
  @subkey.putSync value
  encodedKey = getEncodedKey @db, '.', undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  @subkey.delAsync (err, result)=>
    should.not.exist err
    result = @db.data[encodedKey]
    should.not.exist result
    done()
it "should del attribute .delSync", ->
  # a leading '.' marks the key as an attribute of the subkey
  key = ".myput"+Math.random()
  value = Math.random()
  @subkey.putSync key, value
  encodedKey = getEncodedKey @db, key, undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  result = @subkey.delSync key
  result.should.be.equal true
  result = @db.data[encodedKey]
  should.not.exist result
it "should del attribute via separator .delSync", ->
  key = "myput"+Math.random()
  value = Math.random()
  # separator:'.' makes a plain key behave like an attribute key
  @subkey.putSync key, value, separator:'.'
  encodedKey = getEncodedKey @db, key, separator:'.', @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  result = @subkey.delSync key, separator:'.'
  result.should.be.equal true
  result = @db.data[encodedKey]
  should.not.exist result
it "should del attribute .delAsync", (done)->
  key = ".myput"+Math.random()
  value = Math.random()
  @subkey.putSync key, value
  encodedKey = getEncodedKey @db, key, undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  @subkey.delAsync key, (err, result)=>
    should.not.exist err
    result = @db.data[encodedKey]
    should.not.exist result
    done()
it "should del attribute via separator .delAsync", (done)->
  key = "myput"+Math.random()
  value = Math.random()
  @subkey.putSync key, value, separator:'.'
  encodedKey = getEncodedKey @db, key, separator:'.', @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  @subkey.delAsync key, separator:'.', (err, result)=>
    should.not.exist err
    result = @db.data[encodedKey]
    should.not.exist result
    done()
it "should del another path key value via .delSync", ->
  key = "myput"+Math.random()
  value = Math.random()
  # path:'hahe' stores/deletes under a sibling path instead of @subkey's own
  @subkey.putSync key, value, {path: 'hahe'}
  encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  result = @subkey.delSync key, {path: 'hahe'}
  result.should.be.equal true
  result = @db.data[encodedKey]
  should.not.exist result
it "should del another path key value via .delAsync", (done)->
  key = "myput"+Math.random()
  value = Math.random()
  @subkey.putSync key, value, {path: 'hahe'}
  encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  @subkey.delAsync key, {path: 'hahe'}, (err, result)=>
    should.not.exist err
    result = @db.data[encodedKey]
    should.not.exist result
    done()
it "should del key value via .del", ->
  key = "myput"+Math.random()
  value = Math.random()
  @subkey.put key, value
  encodedKey = getEncodedKey @db, key, undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  # .del with no callback runs on the synchronous code path
  result = @subkey.del key
  result.should.be.equal true
  result = @db.data[encodedKey]
  should.not.exist result
it "should del key value via .del async", (done)->
  key = "myput"+Math.random()
  value = Math.random()
  @subkey.put key, value
  encodedKey = getEncodedKey @db, key, undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  # passing a callback switches .del to the async code path
  @subkey.del key, (err, result)=>
    should.not.exist err
    result = @db.data[encodedKey]
    should.not.exist result
    done()
it "should del itself .del Sync", ->
  value = Math.random()
  @subkey.putSync value
  encodedKey = getEncodedKey @db, '.', undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  result = @subkey.del()
  result.should.be.equal true
  result = @db.data[encodedKey]
  should.not.exist result
it "should del itself .del Async", (done)->
  value = Math.random()
  @subkey.putSync value
  encodedKey = getEncodedKey @db, '.', undefined, @subkey
  result = @db.data[encodedKey]
  result.should.be.equal getEncodedValue @db, value
  @subkey.del (err, result)=>
    should.not.exist err
    result = @db.data[encodedKey]
    should.not.exist result
    done()
# batch-operation tests: genOps builds a list of put ops, batchSync /
# batchAsync / batch applies them, and testOps verifies every encoded
# key/value reached the fake backend's raw store.
describe "batch operation", ->
  before -> @subkey = @root.path 'myBatchParent'
  after -> @subkey.free()
  it ".batchSync", ->
    ops = genOps()
    @subkey.batchSync ops
    testOps.call @, ops, undefined
  it ".batchAsync", (done)->
    ops = genOps()
    @subkey.batchAsync ops, (err)=>
      should.not.exist err
      testOps.call @, ops
      done()
  it "should batch attribute via separator (.batchSync)", ->
    ops = genOps()
    # separator:'.' applies attribute-style encoding to every op
    @subkey.batchSync ops, separator:'.'
    testOps.call @, ops, separator:'.'
  it "should batch attribute (.batchSync)", ->
    # genOps('.') prefixes each key with '.' (attribute keys)
    ops = genOps('.')
    @subkey.batchSync ops
    testOps.call @, ops
  it "should batch attribute via separator (.batchAsync)", (done)->
    ops = genOps()
    @subkey.batchAsync ops, separator:'.', (err)=>
      should.not.exist err
      testOps.call @, ops, separator:'.'
      done()
  it "should batch attribute (.batchAsync)", (done)->
    ops = genOps('.')
    @subkey.batchAsync ops, (err)=>
      should.not.exist err
      testOps.call @, ops
      done()
  it "should batch another path key value via .batchSync", ->
    ops = genOps()
    # path:'hahe' routes the whole batch to a sibling path
    @subkey.batchSync ops, path: 'hahe'
    testOps.call @, ops, path: 'hahe'
  it "should batch another path key value via .batchAsync", (done)->
    ops = genOps()
    @subkey.batchAsync ops, path: 'hahe', (err)=>
      should.not.exist err
      testOps.call @, ops, path: 'hahe'
      done()
  it ".batch", ->
    ops = genOps()
    @subkey.batch ops, path: 'hahe'
    testOps.call @, ops, path: 'hahe'
  it ".batch async", (done)->
    ops = genOps()
    @subkey.batchAsync ops, path: 'hahe', (err)=>
      should.not.exist err
      testOps.call @, ops, path: 'hahe'
      done()
# find-operation tests: seed the subkey with a batch of ops, then check
# findSync / findAsync / find return them sorted ascending by key.
describe "find operation", ->
  before -> @subkey = @root.path 'myFindParent'
  after -> @subkey.free()

  # Comparator matching the ascending key order that find* returns.
  byKey = (a, b) ->
    return 1 if a.key > b.key
    return -1 if a.key < b.key
    0

  # Assert `result` contains exactly `ops` (pre-sorted) in order.
  expectFound = (result, ops) ->
    result.should.have.length ops.length
    for item, i in ops
      result[i].should.have.property 'key', item.key
      result[i].should.have.property 'value', item.value
    return

  # Flip every op to a deletion and apply it, leaving the store empty for
  # the next test. The original used `ops = ops.filter (i)->i.type='del'`,
  # which mutated each op via assignment inside filter and only kept the
  # elements because 'del' is truthy — replaced with an explicit loop.
  cleanup = (subkey, ops) ->
    op.type = 'del' for op in ops
    subkey.batchSync ops

  it "should find (.findSync)", ->
    ops = genOps()
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    expectFound @subkey.findSync(), ops
    cleanup @subkey, ops
  it "should find attributes (.findSync)", ->
    ops = genOps('.')
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    expectFound @subkey.findSync(separator:'.'), ops
    cleanup @subkey, ops
  it "should find (.findAsync)", (done)->
    ops = genOps()
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    @subkey.findAsync (err, result)=>
      should.not.exist err
      expectFound result, ops
      cleanup @subkey, ops
      done()
  it "should find attributes (.findAsync)", (done)->
    ops = genOps('.')
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    @subkey.findAsync separator:'.', (err, result)=>
      should.not.exist err
      expectFound result, ops
      cleanup @subkey, ops
      done()
  it "should find (.find) Sync", ->
    ops = genOps()
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    expectFound @subkey.find(), ops
    cleanup @subkey, ops
  it "should find attributes (.find) Sync", ->
    ops = genOps('.')
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    expectFound @subkey.find(separator:'.'), ops
    cleanup @subkey, ops
  it "should find (.find) Async", (done)->
    ops = genOps()
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    @subkey.find (err, result)=>
      should.not.exist err
      expectFound result, ops
      cleanup @subkey, ops
      done()
  it "should find attributes (.find) Async", (done)->
    ops = genOps('.')
    @subkey.batchSync ops
    testOps.call @, ops
    ops.sort byKey
    @subkey.find separator:'.', (err, result)=>
      should.not.exist err
      expectFound result, ops
      cleanup @subkey, ops
      done()
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
assert = chai.assert
SubkeyNoSQL = require '../src/nosql-subkey'
#AbstractNoSQL = require 'abstract-nosql'
Errors = require 'abstract-object/Error'
Codec = require 'buffer-codec'
EncodingIterator= require 'encoding-iterator'
inherits = require 'inherits-ex/lib/inherits'
isInheritedFrom = require 'inherits-ex/lib/isInheritedFrom'
isObject = require 'util-ex/lib/is/type/object'
FakeDB = require './fake-nosql'
codec = require '../src/codec'
path = require '../src/path'
# Fall back to process.nextTick on platforms without setImmediate.
setImmediate = setImmediate || process.nextTick
InvalidArgumentError = Errors.InvalidArgumentError
# Separators used by the subkey codec when building encoded keys.
PATH_SEP = codec.PATH_SEP
SUBKEY_SEP = codec.SUBKEY_SEP
_encodeKey = codec._encodeKey
encodeKey = codec.encodeKey
toPath = path.join
chai.use(sinonChai)
# Mix the subkey layer into the in-memory fake backend used by all tests.
FakeDB = SubkeyNoSQL(FakeDB)
# Build `count` put-style ops, each with a unique random key and value.
# `separator` is prefixed to every key (e.g. '.' to generate attribute keys).
# NOTE(review): the key-suffix expression was lost to redaction ("<KEY>") in
# this copy; restored as Math.random() to match the random-key pattern used
# throughout the suite — confirm against the canonical source.
genOps = (separator = '', count = 10)->
  for i in [1..count]
    key: separator + 'key' + Math.random()
    value: Math.random()
# Verify every op in `ops` reached the backend: called with a test context
# (`@db`, `@subkey`) via testOps.call @, ...; re-encodes each key/value with
# `options` and checks the raw entry in @db.data.
testOps = (ops, options) ->
  #console.log 'data', @db.data
  for op in ops
    encodedKey = getEncodedKey @db, op.key, options, @subkey
    result = @db.data[encodedKey]
    result.should.be.equal getEncodedValue @db, op.value
# Seed `count` entries under `path` via db.batch, then verify each encoded
# key/value landed in the raw store; returns the generated data array.
# NOTE(review): the object-key expression on the `key:` line was lost to
# redaction ("<KEY>") in this copy — restore from the canonical source
# before running; presumably a Math.random()-style unique value.
genData = (db, path = "op", opts, count = 10)->
  data = for i in [1..count]
    key: myKey: <KEY>
    value: Math.random()
    path: path
  db.batch data, opts
  # batch-level parent path, if any, participates in key encoding
  vParentPath = opts.path if opts
  _opts = {}
  for item in data
    key = getEncodedKey db, item.key, item, vParentPath
    # per-item valueEncoding wins; fall back to the batch-level one
    _opts.valueEncoding = item.valueEncoding
    _opts.valueEncoding = opts.valueEncoding if opts and not _opts.valueEncoding
    valueEncoding = db.valueEncoding _opts
    value = if valueEncoding then valueEncoding.encode(item.value) else item.value
    db.data.should.have.property key, value
  data
# Encode `key` the way the subkey layer stores it: resolve the path array
# from `options`/`parentPath`, then apply the db's key encoding.
getEncodedKey = (db, key, options, parentPath) ->
  options = {} unless options
  encodeKey db.getPathArray(options, parentPath), key, db.keyEncoding(options), options
# Encode `value` with the db's value encoding for `options`;
# returns the value unchanged when no encoding is configured.
getEncodedValue = (db, value, options) ->
  encoding = db.valueEncoding options
  value = encoding.encode value if encoding
  value
# Map `ops` to their encoded form (key encoded, value JSON-stringified when
# truthy) without mutating the input array; each result also carries the
# resolved path and the [path, key] pair under _keyPath.
getEncodedOps = (db, ops, opts) ->
  # batch-level parent path, if any, participates in key encoding
  vParentPath = opts.path if opts
  ops.slice().map (op) ->
    key: getEncodedKey db, op.key, op, vParentPath
    value: if op.value then JSON.stringify op.value else op.value
    type: op.type
    path: path = db.getPathArray(op, vParentPath)
    _keyPath: [path, op.key]
describe "Subkey", ->
before ->
@db = new FakeDB()
@db.open({keyEncoding:'json', valueEncoding: 'json'})
@root = @db.root()
after ->
@db.close()
testPath = (subkey, expectedPath) ->
subkey.fullName.should.be.equal expectedPath
subkey.path().should.be.equal expectedPath
it "should get root subkey from db", ->
@root.fullName.should.be.equal PATH_SEP
@root.pathAsArray().should.be.deep.equal []
result = @db.cache.get @root.fullName
result.should.be.equal @root
result.should.be.equal @db.root()
it "should not enumerable buildin properties", ->
keys = Object.keys @root
keys.should.have.length 0
describe ".parent()", ->
it "should be null for root's parent", ->
should.not.exist @root.parent()
it "should get subkey's parent", ->
subkey = @root.subkey('test')
child = subkey.path('child')
testPath subkey, '/test'
testPath child, '/test/child'
@db.cache.isExists('/test').should.be.true
@db.cache.isExists('/test/child').should.be.true
child.parent().should.be.equal subkey
subkey.free()
@db.cache.isExists('/test').should.be.false
@db.cache.isExists('/test/child').should.be.false
it "should raise error if subkey's parent not found in cache", ->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
should.throw child.parent.bind(child)
myparent = @root.path('myparent')
testPath myparent, '/myparent'
should.throw child.parent.bind(child)
child.free()
myparent.free()
it "should get subkey's latest parent if latestParent is true and it is not in cache", ->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
parent = child.parent latestParent:true
parent.should.be.equal @root
myparent = @root.path('myparent')
testPath myparent, '/myparent'
parent = child.parent latestParent:true
parent.should.be.equal myparent
child.free()
myparent.free()
it "should get subkey's latest parent via callback if it's is not in cache", (done)->
child = @root.createPath('myparent/god/child')
testPath child, '/myparent/god/child'
parent = child.parent latestParent:true, (err, result)=>
should.not.exist err
result.should.be.equal @root
myparent = @root.createPath('myparent')
testPath myparent, '/myparent'
parent = child.parent latestParent:true, (err, result)->
result.should.be.equal myparent
myparent.free()
child.free()
child.RefCount.should.be.equal 0
done()
it "should get subkey's parent even it's not in cache when createIfMissing", ->
child = @root.createPath('myparent/god/child')
child.RefCount.should.be.equal 1
testPath child, '/myparent/god/child'
# this parent is not exists, so createIfMissing:
parent = child.parent createIfMissing: true
testPath parent, '/myparent/god'
p2 = @root.createPath('/myparent/god')
p2.should.be.equal parent
child.free()
parent.free()
p2.free()
p2.RefCount.should.be.equal 0
p2.free()
p2.isDestroyed().should.be.equal true
it "should get subkey's parent via callback even it's not in cache when createIfMissing", (done)->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
@db.cache.has('/myparent/god').should.be.false
parent = child.parent createIfMissing: true, (err, result)=>
should.not.exist err
testPath result, '/myparent/god',
@root.createPath('/myparent/god').should.be.equal result
result.RefCount.should.be.equal 2
result.destroy()
result.isDestroyed().should.be.equal true
child.free()
@db.cache.has('/myparent/god').should.be.false
@db.cache.has('/myparent/god/child').should.be.false
done()
it "should get subkey's parent via callback when createIfMissing", (done)->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
parent = @root.createPath 'myparent/god'
child.parent createIfMissing: true, (err, result)=>
should.not.exist err
testPath result, '/myparent/god',
result.should.be.equal parent
result.should.be.equal @root.createPath('/myparent/god')
result.destroy()
child.free()
@db.cache.has('/myparent/god').should.be.false
@db.cache.has('/myparent/god/child').should.be.false
done()
describe ".setPath(path, callback)", ->
it "should set myself to another path", ->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
subkey.setPath('/my/other').should.be.true
# setPath will remove itself from cache.
@db.cache.isExists('/my/subkey').should.be.false
testPath subkey, '/my/other'
subkey.RefCount.should.be.equal 0
it "should set myself to another path via callback", (done)->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
subkey.setPath '/my/other', (err, result)=>
should.not.exist err
result.should.be.equal subkey
# setPath will remove itself from cache.
@db.cache.isExists('/my/subkey').should.be.false
testPath result, '/my/other'
done()
describe ".path()/.fullName", ->
it "should get myself path", ->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
describe ".createPath(path)/.createSubkey(path)", ->
before -> @subkey = @root.path 'myparent'
after -> @subkey.free()
it "should create subkey", ->
key = @subkey.createPath('subkey1')
testPath key, '/myparent/subkey1'
key.free()
it "should create many subkeys", ->
keys = for i in [0...10]
@subkey.createPath 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/subkey'+i
key.free()
for key,i in keys
key.RefCount.should.be.equal 0
it "should create the same subkey more once", ->
key = @subkey.createPath('subkey1')
key.RefCount.should.be.equal 1
testPath key, '/myparent/subkey1'
keys = for i in [0...10]
k = @subkey.createPath('subkey1')
k.should.be.equal key
k
key.RefCount.should.be.equal keys.length+1
for k in keys
k.free()
key.RefCount.should.be.equal 1
key.free()
describe ".path(path)/.subkey(path)", ->
before -> @subkey = @root.path 'myparent'
after -> @subkey.free()
it "should get subkey", ->
key = @subkey.path('subkey1')
testPath key, '/myparent/subkey1'
key.free()
key.isDestroyed().should.be.equal true
it "should get many subkeys", ->
keys = for i in [0...10]
@subkey.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/subkey'+i
key.free()
for key,i in keys
key.isDestroyed().should.be.true
it "should get the same subkey more once", ->
key = @subkey.path('subkey1')
key.RefCount.should.be.equal 0
testPath key, '/myparent/subkey1'
keys = for i in [0...10]
k = @subkey.path('subkey1')
k.should.be.equal key
k
key.RefCount.should.be.equal 0
key.free()
key.isDestroyed().should.be.true
it "should free subkeys after parent is freed ", ->
parent = @subkey.path 'parent'
keys = for i in [0...10]
parent.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/parent/subkey'+i
parent.free()
for key in keys
assert.equal key.isDestroyed(), true
it "should not free subkeys after parent is freed if pass free(false)", ->
parent = @subkey.path 'parent'
keys = for i in [0...10]
parent.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/parent/subkey'+i
# pass false to do not free subkeys:
parent.free(false)
for key in keys
assert.equal key.isDestroyed(), false
describe "put operation", ->
before -> @subkey = @root.path 'myputParent'
after -> @subkey.free()
it "should put key value .putSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put key value .putAsync", (done)->
key = "<KEY>
value = Math.random()
@subkey.putAsync key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put value to itself .putSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put value to itself .putAsync", (done)->
value = Math.random()
@subkey.putAsync value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put attribute via separator (.putSync)", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, separator: '.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put attribute (.putSync)", ->
key = <KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put attribute via separator (.putAsync)", (done)->
key = "<KEY>
value = Math.random()
@subkey.putAsync key, value, separator:'.', (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put attribute (.putAsync)", (done)->
key = <KEY>
value = Math.random()
@subkey.putAsync key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put another path key value .putSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put another path key value via .putAsync", (done)->
key = "<KEY>
value = Math.random()
@subkey.putAsync key, value, {path: 'hahe'}, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put key value via .put", ->
key = "<KEY>
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put key value via .put async", (done)->
key = "<KEY>
value = Math.random()
@subkey.put key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put value to itself .put", ->
value = Math.random()
key = @subkey.path("tkey")
key.put value
encodedKey = getEncodedKey @db, '.', undefined, key
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put value to itself .put async", (done)->
value = Math.random()
@subkey.put value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
describe "get operation", ->
before -> @subkey = @root.path 'myGetParent'
after -> @subkey.free()
it "should get key .getSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key
result.should.be.equal value
it "should get key .getAsync", (done)->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get itself .getSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync()
result.should.be.equal value
it "should get itself .getAsync", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute .getSync", ->
key = <KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key
result.should.be.equal value
result = @subkey.getSync key.slice(1), separator:'.'
result.should.be.equal value
it "should get attribute with path .getSync", ->
key = <KEY>
path = "mypath"
value = Math.random()
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, path: path
result.should.be.equal value
result = @subkey.getSync key.slice(1), separator:'.', path:path
result.should.be.equal value
it "should get attribute via separator .getSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, separator:'.'
result.should.be.equal value
result = @subkey.getSync '.'+key
result.should.be.equal value
it "should get attribute .getAsync", (done)->
key = <KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync key.slice(1), separator:'.', (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute with path .getAsync", (done)->
key = <KEY>
value = Math.random()
path = "mypath"
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync key.slice(1), separator:'.', path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute via separator .getAsync", (done)->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, separator:'.', (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync '.'+key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get another path key value via .getSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, {path: 'hahe'}
result.should.be.equal value
it "should get another path key value via .getAsync", (done)->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, {path: 'hahe'}, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get key value via .get", ->
key = "<KEY>
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get key
result.should.be.equal value
it "should get key value via .get async", (done)->
key = "<KEY>
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute with path .get sync", ->
key = <KEY>
path = "mypath"
value = Math.random()
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get key, path: path
result.should.be.equal value
result = @subkey.get key.slice(1), separator:'.', path:path
result.should.be.equal value
it "should get attribute with path .get async", (done)->
key = <KEY>
value = Math.random()
path = "mypath"
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get key, path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.get key.slice(1), separator:'.', path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get itself .get", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get()
result.should.be.equal value
it "should get itself .get async", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get (err, result)=>
should.not.exist err
result.should.be.equal value
done()
describe "del operation", ->
before -> @subkey = @root.path 'myDelParent'
after -> @subkey.free()
it "should del key .delSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del key .delAsync", (done)->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del itself .delSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync()
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del itself .delAsync", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del attribute .delSync", ->
key = <KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del attribute via separator .delSync", ->
key = "<KEY>
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key, separator:'.'
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del attribute .delAsync", (done)->
key = <KEY>
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
    # Remaining del-operation cases: separator (attribute) keys, alternate
    # paths, the dispatching .del (sync and callback forms), and deleting the
    # subkey's own value (no key argument => the '.' self key).
    # NOTE(review): the `"<KEY>` fragments below are redaction placeholders
    # for the original random-key expressions — restore before running.
    it "should del attribute via separator .delAsync", (done)->
      key = "<KEY>()
      value = Math.random()
      @subkey.putSync key, value, separator:'.'
      encodedKey = getEncodedKey @db, key, separator:'.', @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      @subkey.delAsync key, separator:'.', (err, result)=>
        should.not.exist err
        result = @db.data[encodedKey]
        should.not.exist result
        done()
    it "should del another path key value via .delSync", ->
      key = "<KEY>
      value = Math.random()
      @subkey.putSync key, value, {path: 'hahe'}
      encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      result = @subkey.delSync key, {path: 'hahe'}
      result.should.be.equal true
      result = @db.data[encodedKey]
      should.not.exist result
    it "should del another path key value via .delAsync", (done)->
      key = "<KEY>
      value = Math.random()
      @subkey.putSync key, value, {path: 'hahe'}
      encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      @subkey.delAsync key, {path: 'hahe'}, (err, result)=>
        should.not.exist err
        result = @db.data[encodedKey]
        should.not.exist result
        done()
    it "should del key value via .del", ->
      key = "<KEY>
      value = Math.random()
      @subkey.put key, value
      encodedKey = getEncodedKey @db, key, undefined, @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      result = @subkey.del key
      result.should.be.equal true
      result = @db.data[encodedKey]
      should.not.exist result
    it "should del key value via .del async", (done)->
      key = "<KEY>
      value = Math.random()
      @subkey.put key, value
      encodedKey = getEncodedKey @db, key, undefined, @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      @subkey.del key, (err, result)=>
        should.not.exist err
        result = @db.data[encodedKey]
        should.not.exist result
        done()
    # .del() with no key deletes the value stored at the subkey itself
    # (encoded under the '.' self key).
    it "should del itself .del Sync", ->
      value = Math.random()
      @subkey.putSync value
      encodedKey = getEncodedKey @db, '.', undefined, @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      result = @subkey.del()
      result.should.be.equal true
      result = @db.data[encodedKey]
      should.not.exist result
    it "should del itself .del Async", (done)->
      value = Math.random()
      @subkey.putSync value
      encodedKey = getEncodedKey @db, '.', undefined, @subkey
      result = @db.data[encodedKey]
      result.should.be.equal getEncodedValue @db, value
      @subkey.del (err, result)=>
        should.not.exist err
        result = @db.data[encodedKey]
        should.not.exist result
        done()
  # Multi-op batch API coverage: batchSync / batchAsync / the dispatching
  # .batch, with separator ('.'-prefixed attribute keys) and path variants.
  # Ops come from genOps(); testOps (invoked with the mocha context via
  # .call @) asserts the encoded key/value pairs landed in the fake db.
  describe "batch operation", ->
    before -> @subkey = @root.path 'myBatchParent'
    after -> @subkey.free()
    it ".batchSync", ->
      ops = genOps()
      @subkey.batchSync ops
      testOps.call @, ops, undefined
    it ".batchAsync", (done)->
      ops = genOps()
      @subkey.batchAsync ops, (err)=>
        should.not.exist err
        testOps.call @, ops
        done()
    it "should batch attribute via separator (.batchSync)", ->
      ops = genOps()
      @subkey.batchSync ops, separator:'.'
      testOps.call @, ops, separator:'.'
    it "should batch attribute (.batchSync)", ->
      ops = genOps('.')
      @subkey.batchSync ops
      testOps.call @, ops
    it "should batch attribute via separator (.batchAsync)", (done)->
      ops = genOps()
      @subkey.batchAsync ops, separator:'.', (err)=>
        should.not.exist err
        testOps.call @, ops, separator:'.'
        done()
    it "should batch attribute (.batchAsync)", (done)->
      ops = genOps('.')
      @subkey.batchAsync ops, (err)=>
        should.not.exist err
        testOps.call @, ops
        done()
    it "should batch another path key value via .batchSync", ->
      ops = genOps()
      @subkey.batchSync ops, path: 'hahe'
      testOps.call @, ops, path: 'hahe'
    it "should batch another path key value via .batchAsync", (done)->
      ops = genOps()
      @subkey.batchAsync ops, path: 'hahe', (err)=>
        should.not.exist err
        testOps.call @, ops, path: 'hahe'
        done()
    it ".batch", ->
      ops = genOps()
      @subkey.batch ops, path: 'hahe'
      testOps.call @, ops, path: 'hahe'
    # NOTE(review): this test calls batchAsync, not .batch with a callback —
    # presumably a copy-paste; the async path of .batch itself goes untested.
    it ".batch async", (done)->
      ops = genOps()
      @subkey.batchAsync ops, path: 'hahe', (err)=>
        should.not.exist err
        testOps.call @, ops, path: 'hahe'
        done()
describe "find operation", ->
before -> @subkey = @root.path 'myFindParent'
after -> @subkey.free()
it "should find (.findSync)", ->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.findSync()
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find attributes (.findSync)", ->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.findSync separator:'.'
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find (.findAsync)", (done)->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.findAsync (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
it "should find attributes (.findAsync)", (done)->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.findAsync separator:'.', (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
it "should find (.find) Sync", ->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.find()
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find attributes (.find) Sync", ->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.find separator:'.'
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find (.find) Async", (done)->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.find (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
it "should find attributes (.find) Async", (done)->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.find separator:'.', (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
assert = chai.assert
SubkeyNoSQL = require '../src/nosql-subkey'
#AbstractNoSQL = require 'abstract-nosql'
Errors = require 'abstract-object/Error'
Codec = require 'buffer-codec'
EncodingIterator= require 'encoding-iterator'
inherits = require 'inherits-ex/lib/inherits'
isInheritedFrom = require 'inherits-ex/lib/isInheritedFrom'
isObject = require 'util-ex/lib/is/type/object'
FakeDB = require './fake-nosql'
codec = require '../src/codec'
path = require '../src/path'
setImmediate = setImmediate || process.nextTick
InvalidArgumentError = Errors.InvalidArgumentError
PATH_SEP = codec.PATH_SEP
SUBKEY_SEP = codec.SUBKEY_SEP
_encodeKey = codec._encodeKey
encodeKey = codec.encodeKey
toPath = path.join
chai.use(sinonChai)
FakeDB = SubkeyNoSQL(FakeDB)
# Build `count` random put-op records; `separator` prefixes every key
# ('.' produces attribute-style keys). Values are random so tests never
# collide with stale data.
# NOTE(review): the 'PI:KEY:<KEY>END_PI' token below is a redaction
# placeholder for the original key expression (presumably Math.random()) —
# restore it before running this suite.
genOps = (separator='', count=10)->
  for i in [1..count]
    key: separator+'key'+PI:KEY:<KEY>END_PI()
    value: Math.random()
testOps = (ops, options) ->
  # Assert that every op in `ops` is present, encoded, in the fake db's raw
  # store. Must be invoked with the mocha context (`testOps.call @, ...`) so
  # that `@db` and `@subkey` resolve.
  for {key, value} in ops
    stored = @db.data[getEncodedKey(@db, key, options, @subkey)]
    stored.should.be.equal getEncodedValue @db, value
# Generate `count` random {key, value, path} items, batch them into `db`,
# then assert each encoded key/value landed in db.data; returns the raw
# items. Appears unused in this chunk of the file.
# NOTE(review): 'PI:KEY:<KEY>END_PI' below is a redaction placeholder for
# the original key expression — restore before use.
genData = (db, path = "op", opts, count = 10)->
  data = for i in [1..count]
    key: myKey: PI:KEY:<KEY>END_PI
    value: Math.random()
    path: path
  db.batch data, opts
  vParentPath = opts.path if opts
  _opts = {}
  for item in data
    key = getEncodedKey db, item.key, item, vParentPath
    # per-item valueEncoding wins; fall back to the batch-level opts encoding
    _opts.valueEncoding = item.valueEncoding
    _opts.valueEncoding = opts.valueEncoding if opts and not _opts.valueEncoding
    valueEncoding = db.valueEncoding _opts
    value = if valueEncoding then valueEncoding.encode(item.value) else item.value
    db.data.should.have.property key, value
  data
getEncodedKey = (db, key, options, parentPath) ->
  # Encode `key` exactly as the subkey layer would store it: resolve the
  # path array from options/parentPath, then apply the db's key encoding.
  options or= {}
  keyPath = db.getPathArray options, parentPath
  encodeKey keyPath, key, db.keyEncoding(options), options
getEncodedValue = (db, value, options) ->
  # Run `value` through the db's configured value encoding, if any;
  # otherwise return it unchanged.
  enc = db.valueEncoding options
  if enc then enc.encode value else value
getEncodedOps = (db, ops, opts) ->
  # Map raw batch ops to their encoded/internal form (encoded key, JSON
  # value, resolved path) for deep-equality assertions.
  # Fixes: dropped the redundant `.slice()` before `.map` (map never mutates
  # its input), and renamed the local `path` so it no longer shadows the
  # `path` module required at the top of the file.
  vParentPath = opts.path if opts
  ops.map (op) ->
    opPath = db.getPathArray op, vParentPath
    key: getEncodedKey db, op.key, op, vParentPath
    value: if op.value then JSON.stringify op.value else op.value
    type: op.type
    path: opPath
    _keyPath: [opPath, op.key]
describe "Subkey", ->
before ->
@db = new FakeDB()
@db.open({keyEncoding:'json', valueEncoding: 'json'})
@root = @db.root()
after ->
@db.close()
testPath = (subkey, expectedPath) ->
subkey.fullName.should.be.equal expectedPath
subkey.path().should.be.equal expectedPath
it "should get root subkey from db", ->
@root.fullName.should.be.equal PATH_SEP
@root.pathAsArray().should.be.deep.equal []
result = @db.cache.get @root.fullName
result.should.be.equal @root
result.should.be.equal @db.root()
it "should not enumerable buildin properties", ->
keys = Object.keys @root
keys.should.have.length 0
describe ".parent()", ->
it "should be null for root's parent", ->
should.not.exist @root.parent()
it "should get subkey's parent", ->
subkey = @root.subkey('test')
child = subkey.path('child')
testPath subkey, '/test'
testPath child, '/test/child'
@db.cache.isExists('/test').should.be.true
@db.cache.isExists('/test/child').should.be.true
child.parent().should.be.equal subkey
subkey.free()
@db.cache.isExists('/test').should.be.false
@db.cache.isExists('/test/child').should.be.false
it "should raise error if subkey's parent not found in cache", ->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
should.throw child.parent.bind(child)
myparent = @root.path('myparent')
testPath myparent, '/myparent'
should.throw child.parent.bind(child)
child.free()
myparent.free()
it "should get subkey's latest parent if latestParent is true and it is not in cache", ->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
parent = child.parent latestParent:true
parent.should.be.equal @root
myparent = @root.path('myparent')
testPath myparent, '/myparent'
parent = child.parent latestParent:true
parent.should.be.equal myparent
child.free()
myparent.free()
it "should get subkey's latest parent via callback if it's is not in cache", (done)->
child = @root.createPath('myparent/god/child')
testPath child, '/myparent/god/child'
parent = child.parent latestParent:true, (err, result)=>
should.not.exist err
result.should.be.equal @root
myparent = @root.createPath('myparent')
testPath myparent, '/myparent'
parent = child.parent latestParent:true, (err, result)->
result.should.be.equal myparent
myparent.free()
child.free()
child.RefCount.should.be.equal 0
done()
it "should get subkey's parent even it's not in cache when createIfMissing", ->
child = @root.createPath('myparent/god/child')
child.RefCount.should.be.equal 1
testPath child, '/myparent/god/child'
# this parent is not exists, so createIfMissing:
parent = child.parent createIfMissing: true
testPath parent, '/myparent/god'
p2 = @root.createPath('/myparent/god')
p2.should.be.equal parent
child.free()
parent.free()
p2.free()
p2.RefCount.should.be.equal 0
p2.free()
p2.isDestroyed().should.be.equal true
it "should get subkey's parent via callback even it's not in cache when createIfMissing", (done)->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
@db.cache.has('/myparent/god').should.be.false
parent = child.parent createIfMissing: true, (err, result)=>
should.not.exist err
testPath result, '/myparent/god',
@root.createPath('/myparent/god').should.be.equal result
result.RefCount.should.be.equal 2
result.destroy()
result.isDestroyed().should.be.equal true
child.free()
@db.cache.has('/myparent/god').should.be.false
@db.cache.has('/myparent/god/child').should.be.false
done()
it "should get subkey's parent via callback when createIfMissing", (done)->
child = @root.path('myparent/god/child')
testPath child, '/myparent/god/child'
parent = @root.createPath 'myparent/god'
child.parent createIfMissing: true, (err, result)=>
should.not.exist err
testPath result, '/myparent/god',
result.should.be.equal parent
result.should.be.equal @root.createPath('/myparent/god')
result.destroy()
child.free()
@db.cache.has('/myparent/god').should.be.false
@db.cache.has('/myparent/god/child').should.be.false
done()
describe ".setPath(path, callback)", ->
it "should set myself to another path", ->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
subkey.setPath('/my/other').should.be.true
# setPath will remove itself from cache.
@db.cache.isExists('/my/subkey').should.be.false
testPath subkey, '/my/other'
subkey.RefCount.should.be.equal 0
it "should set myself to another path via callback", (done)->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
subkey.setPath '/my/other', (err, result)=>
should.not.exist err
result.should.be.equal subkey
# setPath will remove itself from cache.
@db.cache.isExists('/my/subkey').should.be.false
testPath result, '/my/other'
done()
describe ".path()/.fullName", ->
it "should get myself path", ->
subkey = @root.createPath('/my/subkey')
testPath subkey, '/my/subkey'
describe ".createPath(path)/.createSubkey(path)", ->
before -> @subkey = @root.path 'myparent'
after -> @subkey.free()
it "should create subkey", ->
key = @subkey.createPath('subkey1')
testPath key, '/myparent/subkey1'
key.free()
it "should create many subkeys", ->
keys = for i in [0...10]
@subkey.createPath 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/subkey'+i
key.free()
for key,i in keys
key.RefCount.should.be.equal 0
it "should create the same subkey more once", ->
key = @subkey.createPath('subkey1')
key.RefCount.should.be.equal 1
testPath key, '/myparent/subkey1'
keys = for i in [0...10]
k = @subkey.createPath('subkey1')
k.should.be.equal key
k
key.RefCount.should.be.equal keys.length+1
for k in keys
k.free()
key.RefCount.should.be.equal 1
key.free()
describe ".path(path)/.subkey(path)", ->
before -> @subkey = @root.path 'myparent'
after -> @subkey.free()
it "should get subkey", ->
key = @subkey.path('subkey1')
testPath key, '/myparent/subkey1'
key.free()
key.isDestroyed().should.be.equal true
it "should get many subkeys", ->
keys = for i in [0...10]
@subkey.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/subkey'+i
key.free()
for key,i in keys
key.isDestroyed().should.be.true
it "should get the same subkey more once", ->
key = @subkey.path('subkey1')
key.RefCount.should.be.equal 0
testPath key, '/myparent/subkey1'
keys = for i in [0...10]
k = @subkey.path('subkey1')
k.should.be.equal key
k
key.RefCount.should.be.equal 0
key.free()
key.isDestroyed().should.be.true
it "should free subkeys after parent is freed ", ->
parent = @subkey.path 'parent'
keys = for i in [0...10]
parent.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/parent/subkey'+i
parent.free()
for key in keys
assert.equal key.isDestroyed(), true
it "should not free subkeys after parent is freed if pass free(false)", ->
parent = @subkey.path 'parent'
keys = for i in [0...10]
parent.path 'subkey'+i
keys.should.have.length 10
for key,i in keys
testPath key, '/myparent/parent/subkey'+i
# pass false to do not free subkeys:
parent.free(false)
for key in keys
assert.equal key.isDestroyed(), false
describe "put operation", ->
before -> @subkey = @root.path 'myputParent'
after -> @subkey.free()
it "should put key value .putSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put key value .putAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putAsync key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put value to itself .putSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put value to itself .putAsync", (done)->
value = Math.random()
@subkey.putAsync value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put attribute via separator (.putSync)", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, separator: '.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put attribute (.putSync)", ->
key = PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put attribute via separator (.putAsync)", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putAsync key, value, separator:'.', (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put attribute (.putAsync)", (done)->
key = PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putAsync key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put another path key value .putSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put another path key value via .putAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putAsync key, value, {path: 'hahe'}, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put key value via .put", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put key value via .put async", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.put key, value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
it "should put value to itself .put", ->
value = Math.random()
key = @subkey.path("tkey")
key.put value
encodedKey = getEncodedKey @db, '.', undefined, key
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
it "should put value to itself .put async", (done)->
value = Math.random()
@subkey.put value, (err)=>
should.not.exist err
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
done()
describe "get operation", ->
before -> @subkey = @root.path 'myGetParent'
after -> @subkey.free()
it "should get key .getSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key
result.should.be.equal value
it "should get key .getAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get itself .getSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync()
result.should.be.equal value
it "should get itself .getAsync", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute .getSync", ->
key = PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key
result.should.be.equal value
result = @subkey.getSync key.slice(1), separator:'.'
result.should.be.equal value
it "should get attribute with path .getSync", ->
key = PI:KEY:<KEY>END_PI
path = "mypath"
value = Math.random()
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, path: path
result.should.be.equal value
result = @subkey.getSync key.slice(1), separator:'.', path:path
result.should.be.equal value
it "should get attribute via separator .getSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, separator:'.'
result.should.be.equal value
result = @subkey.getSync '.'+key
result.should.be.equal value
it "should get attribute .getAsync", (done)->
key = PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync key.slice(1), separator:'.', (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute with path .getAsync", (done)->
key = PI:KEY:<KEY>END_PI
value = Math.random()
path = "mypath"
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync key.slice(1), separator:'.', path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute via separator .getAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, separator:'.', (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.getAsync '.'+key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get another path key value via .getSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.getSync key, {path: 'hahe'}
result.should.be.equal value
it "should get another path key value via .getAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.getAsync key, {path: 'hahe'}, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get key value via .get", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get key
result.should.be.equal value
it "should get key value via .get async", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get key, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get attribute with path .get sync", ->
key = PI:KEY:<KEY>END_PI
path = "mypath"
value = Math.random()
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get key, path: path
result.should.be.equal value
result = @subkey.get key.slice(1), separator:'.', path:path
result.should.be.equal value
it "should get attribute with path .get async", (done)->
key = PI:KEY:<KEY>END_PI
value = Math.random()
path = "mypath"
@subkey.putSync key, value, path:path
encodedKey = getEncodedKey @db, key, path:path, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get key, path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
@subkey.get key.slice(1), separator:'.', path:path, (err, result)=>
should.not.exist err
result.should.be.equal value
done()
it "should get itself .get", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.get()
result.should.be.equal value
it "should get itself .get async", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.get (err, result)=>
should.not.exist err
result.should.be.equal value
done()
describe "del operation", ->
before -> @subkey = @root.path 'myDelParent'
after -> @subkey.free()
it "should del key .delSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del key .delAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del itself .delSync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync()
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del itself .delAsync", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del attribute .delSync", ->
key = PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del attribute via separator .delSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key, separator:'.'
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del attribute .delAsync", (done)->
key = PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del attribute via separator .delAsync", (done)->
key = "PI:KEY:<KEY>END_PI()
value = Math.random()
@subkey.putSync key, value, separator:'.'
encodedKey = getEncodedKey @db, key, separator:'.', @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, separator:'.', (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del another path key value via .delSync", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.delSync key, {path: 'hahe'}
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del another path key value via .delAsync", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.putSync key, value, {path: 'hahe'}
encodedKey = getEncodedKey @db, key, {path: 'hahe'}, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.delAsync key, {path: 'hahe'}, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del key value via .del", ->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.del key
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del key value via .del async", (done)->
key = "PI:KEY:<KEY>END_PI
value = Math.random()
@subkey.put key, value
encodedKey = getEncodedKey @db, key, undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.del key, (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
it "should del itself .del Sync", ->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
result = @subkey.del()
result.should.be.equal true
result = @db.data[encodedKey]
should.not.exist result
it "should del itself .del Async", (done)->
value = Math.random()
@subkey.putSync value
encodedKey = getEncodedKey @db, '.', undefined, @subkey
result = @db.data[encodedKey]
result.should.be.equal getEncodedValue @db, value
@subkey.del (err, result)=>
should.not.exist err
result = @db.data[encodedKey]
should.not.exist result
done()
describe "batch operation", ->
before -> @subkey = @root.path 'myBatchParent'
after -> @subkey.free()
it ".batchSync", ->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops, undefined
it ".batchAsync", (done)->
ops = genOps()
@subkey.batchAsync ops, (err)=>
should.not.exist err
testOps.call @, ops
done()
it "should batch attribute via separator (.batchSync)", ->
ops = genOps()
@subkey.batchSync ops, separator:'.'
testOps.call @, ops, separator:'.'
it "should batch attribute (.batchSync)", ->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
it "should batch attribute via separator (.batchAsync)", (done)->
ops = genOps()
@subkey.batchAsync ops, separator:'.', (err)=>
should.not.exist err
testOps.call @, ops, separator:'.'
done()
it "should batch attribute (.batchAsync)", (done)->
ops = genOps('.')
@subkey.batchAsync ops, (err)=>
should.not.exist err
testOps.call @, ops
done()
it "should batch another path key value via .batchSync", ->
ops = genOps()
@subkey.batchSync ops, path: 'hahe'
testOps.call @, ops, path: 'hahe'
it "should batch another path key value via .batchAsync", (done)->
ops = genOps()
@subkey.batchAsync ops, path: 'hahe', (err)=>
should.not.exist err
testOps.call @, ops, path: 'hahe'
done()
it ".batch", ->
ops = genOps()
@subkey.batch ops, path: 'hahe'
testOps.call @, ops, path: 'hahe'
it ".batch async", (done)->
ops = genOps()
@subkey.batchAsync ops, path: 'hahe', (err)=>
should.not.exist err
testOps.call @, ops, path: 'hahe'
done()
describe "find operation", ->
before -> @subkey = @root.path 'myFindParent'
after -> @subkey.free()
it "should find (.findSync)", ->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.findSync()
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find attributes (.findSync)", ->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.findSync separator:'.'
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find (.findAsync)", (done)->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.findAsync (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
it "should find attributes (.findAsync)", (done)->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.findAsync separator:'.', (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
it "should find (.find) Sync", ->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.find()
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find attributes (.find) Sync", ->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
result = @subkey.find separator:'.'
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
it "should find (.find) Async", (done)->
ops = genOps()
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.find (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
it "should find attributes (.find) Async", (done)->
ops = genOps('.')
@subkey.batchSync ops
testOps.call @, ops
ops.sort (a,b)->
a = a.key
b = b.key
return 1 if a > b
return -1 if a < b
return 0
@subkey.find separator:'.', (err, result)=>
should.not.exist err
result.should.have.length ops.length
for item, i in ops
result[i].should.have.property 'key', item.key
result[i].should.have.property 'value', item.value
ops = ops.filter (i)->i.type='del'
@subkey.batchSync ops
done()
|
[
{
"context": "ext) ->\n passport = @\n passport._key = 'passport'\n passport._userProperty = 'currentUser'\n ",
"end": 113,
"score": 0.9025252461433411,
"start": 105,
"tag": "KEY",
"value": "passport"
},
{
"context": "user: require('./db')().users.syncfindByUsername(... | src/lib/mockPassport.coffee | johann8384/cloudykangaroo | 0 | module.exports =
initialize: () ->
(req, res, next) ->
passport = @
passport._key = 'passport'
passport._userProperty = 'currentUser'
passport.serializeUser = (user, ptp, done) -> done null, user
passport.deserializeUser = (user, ptp, done) -> done null, user
req._passport = instance: passport
req._passport.session = user: require('./db')().users.syncfindByUsername('test')
next() | 70259 | module.exports =
initialize: () ->
(req, res, next) ->
passport = @
passport._key = '<KEY>'
passport._userProperty = 'currentUser'
passport.serializeUser = (user, ptp, done) -> done null, user
passport.deserializeUser = (user, ptp, done) -> done null, user
req._passport = instance: passport
req._passport.session = user: require('./db')().users.syncfindByUsername('test')
next() | true | module.exports =
initialize: () ->
(req, res, next) ->
passport = @
passport._key = 'PI:KEY:<KEY>END_PI'
passport._userProperty = 'currentUser'
passport.serializeUser = (user, ptp, done) -> done null, user
passport.deserializeUser = (user, ptp, done) -> done null, user
req._passport = instance: passport
req._passport.session = user: require('./db')().users.syncfindByUsername('test')
next() |
[
{
"context": "Exports', ->\n mockCrudModel =\n name: \"mockModel\"\n create: ->\n update: ->\n ",
"end": 2123,
"score": 0.7791184782981873,
"start": 2119,
"tag": "NAME",
"value": "mock"
}
] | test/server/models/resources.coffee | valueflowquality/gi-util-update | 0 | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
mocks = require '../mocks'
proxyquire = require 'proxyquire'
dir = path.normalize __dirname + '../../../../server'
module.exports = () ->
describe 'Resources', ->
modelFactory = require dir + '/models/resources'
model = null
expectedDefinition =
name: 'Resource'
schemaDefinition:
systemId: 'ObjectId'
name: 'String'
it 'Exports a factory function', (done) ->
expect(modelFactory).to.be.a 'function'
done()
describe 'Constructor: (dal) -> { object }', ->
beforeEach ->
sinon.spy mocks.dal, 'schemaFactory'
sinon.spy mocks.dal, 'modelFactory'
sinon.spy mocks.dal, 'crudFactory'
model = modelFactory mocks.dal
afterEach ->
mocks.dal.modelFactory.restore()
mocks.dal.schemaFactory.restore()
mocks.dal.crudFactory.restore()
it 'Creates a resources schema', (done) ->
expect(mocks.dal.schemaFactory.calledWithMatch(expectedDefinition))
.to.be.true
done()
it 'Creates a resources model', (done) ->
returnedDefinition = mocks.dal.schemaFactory.returnValues[0]
expect(mocks.dal.modelFactory.calledWithMatch(expectedDefinition))
.to.be.true
done()
it 'Uses Crud Factory with returned model', (done) ->
returnedModel = mocks.dal.modelFactory.returnValues[0]
expect(mocks.dal.crudFactory.calledWithMatch(returnedModel))
.to.be.true
done()
describe 'Schema', ->
schema = null
beforeEach ->
sinon.spy mocks.dal, 'schemaFactory'
model = modelFactory mocks.dal
schema = mocks.dal.schemaFactory.returnValues[0]
afterEach ->
mocks.dal.schemaFactory.restore()
it 'systemId: ObjectId', (done) ->
expect(schema).to.have.property 'systemId', 'ObjectId'
done()
it 'name: String', (done) ->
expect(schema).to.have.property 'name', 'String'
done()
describe 'Exports', ->
mockCrudModel =
name: "mockModel"
create: ->
update: ->
destroy: ->
findById: ->
findOne: ->
findOneBy: ->
find: ->
count: ->
mockCrudModelFactory = () ->
mockCrudModel
beforeEach ->
mocks.dal.crudFactory = mockCrudModelFactory
model = modelFactory mocks.dal
mocks.exportsCrudModel 'Resource'
, modelFactory(mocks.dal)
describe 'Other', ->
stubs = null
beforeEach ->
stubs = {}
model = proxyquire(dir + '/models/resources', stubs)(
mocks.dal
)
sinon.stub mockCrudModel, 'update'
afterEach ->
mockCrudModel.update.restore()
describe 'registerTypes: function(systemId, models, callback)' +
' -> (err, obj)', ->
it 'calls crud.update with a resource type for each model', (done) ->
models =
"model1":
name: "model1 name"
"model2":
name: "model2 name"
expectedType1 =
systemId: 'a'
name: 'model1 name'
expectedType2 =
systemId: 'a'
name: 'model2 name'
model.registerTypes "a", models, "c"
expect(mockCrudModel.update.calledWith(
expectedType1,expectedType1,{upsert: true}, "c")
).to.be.true
expect(mockCrudModel.update.calledWith(
expectedType2,expectedType2,{upsert: true}, "c")
).to.be.true
done()
| 132745 | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
mocks = require '../mocks'
proxyquire = require 'proxyquire'
dir = path.normalize __dirname + '../../../../server'
module.exports = () ->
describe 'Resources', ->
modelFactory = require dir + '/models/resources'
model = null
expectedDefinition =
name: 'Resource'
schemaDefinition:
systemId: 'ObjectId'
name: 'String'
it 'Exports a factory function', (done) ->
expect(modelFactory).to.be.a 'function'
done()
describe 'Constructor: (dal) -> { object }', ->
beforeEach ->
sinon.spy mocks.dal, 'schemaFactory'
sinon.spy mocks.dal, 'modelFactory'
sinon.spy mocks.dal, 'crudFactory'
model = modelFactory mocks.dal
afterEach ->
mocks.dal.modelFactory.restore()
mocks.dal.schemaFactory.restore()
mocks.dal.crudFactory.restore()
it 'Creates a resources schema', (done) ->
expect(mocks.dal.schemaFactory.calledWithMatch(expectedDefinition))
.to.be.true
done()
it 'Creates a resources model', (done) ->
returnedDefinition = mocks.dal.schemaFactory.returnValues[0]
expect(mocks.dal.modelFactory.calledWithMatch(expectedDefinition))
.to.be.true
done()
it 'Uses Crud Factory with returned model', (done) ->
returnedModel = mocks.dal.modelFactory.returnValues[0]
expect(mocks.dal.crudFactory.calledWithMatch(returnedModel))
.to.be.true
done()
describe 'Schema', ->
schema = null
beforeEach ->
sinon.spy mocks.dal, 'schemaFactory'
model = modelFactory mocks.dal
schema = mocks.dal.schemaFactory.returnValues[0]
afterEach ->
mocks.dal.schemaFactory.restore()
it 'systemId: ObjectId', (done) ->
expect(schema).to.have.property 'systemId', 'ObjectId'
done()
it 'name: String', (done) ->
expect(schema).to.have.property 'name', 'String'
done()
describe 'Exports', ->
mockCrudModel =
name: "<NAME>Model"
create: ->
update: ->
destroy: ->
findById: ->
findOne: ->
findOneBy: ->
find: ->
count: ->
mockCrudModelFactory = () ->
mockCrudModel
beforeEach ->
mocks.dal.crudFactory = mockCrudModelFactory
model = modelFactory mocks.dal
mocks.exportsCrudModel 'Resource'
, modelFactory(mocks.dal)
describe 'Other', ->
stubs = null
beforeEach ->
stubs = {}
model = proxyquire(dir + '/models/resources', stubs)(
mocks.dal
)
sinon.stub mockCrudModel, 'update'
afterEach ->
mockCrudModel.update.restore()
describe 'registerTypes: function(systemId, models, callback)' +
' -> (err, obj)', ->
it 'calls crud.update with a resource type for each model', (done) ->
models =
"model1":
name: "model1 name"
"model2":
name: "model2 name"
expectedType1 =
systemId: 'a'
name: 'model1 name'
expectedType2 =
systemId: 'a'
name: 'model2 name'
model.registerTypes "a", models, "c"
expect(mockCrudModel.update.calledWith(
expectedType1,expectedType1,{upsert: true}, "c")
).to.be.true
expect(mockCrudModel.update.calledWith(
expectedType2,expectedType2,{upsert: true}, "c")
).to.be.true
done()
| true | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
mocks = require '../mocks'
proxyquire = require 'proxyquire'
dir = path.normalize __dirname + '../../../../server'
module.exports = () ->
describe 'Resources', ->
modelFactory = require dir + '/models/resources'
model = null
expectedDefinition =
name: 'Resource'
schemaDefinition:
systemId: 'ObjectId'
name: 'String'
it 'Exports a factory function', (done) ->
expect(modelFactory).to.be.a 'function'
done()
describe 'Constructor: (dal) -> { object }', ->
beforeEach ->
sinon.spy mocks.dal, 'schemaFactory'
sinon.spy mocks.dal, 'modelFactory'
sinon.spy mocks.dal, 'crudFactory'
model = modelFactory mocks.dal
afterEach ->
mocks.dal.modelFactory.restore()
mocks.dal.schemaFactory.restore()
mocks.dal.crudFactory.restore()
it 'Creates a resources schema', (done) ->
expect(mocks.dal.schemaFactory.calledWithMatch(expectedDefinition))
.to.be.true
done()
it 'Creates a resources model', (done) ->
returnedDefinition = mocks.dal.schemaFactory.returnValues[0]
expect(mocks.dal.modelFactory.calledWithMatch(expectedDefinition))
.to.be.true
done()
it 'Uses Crud Factory with returned model', (done) ->
returnedModel = mocks.dal.modelFactory.returnValues[0]
expect(mocks.dal.crudFactory.calledWithMatch(returnedModel))
.to.be.true
done()
describe 'Schema', ->
schema = null
beforeEach ->
sinon.spy mocks.dal, 'schemaFactory'
model = modelFactory mocks.dal
schema = mocks.dal.schemaFactory.returnValues[0]
afterEach ->
mocks.dal.schemaFactory.restore()
it 'systemId: ObjectId', (done) ->
expect(schema).to.have.property 'systemId', 'ObjectId'
done()
it 'name: String', (done) ->
expect(schema).to.have.property 'name', 'String'
done()
describe 'Exports', ->
mockCrudModel =
name: "PI:NAME:<NAME>END_PIModel"
create: ->
update: ->
destroy: ->
findById: ->
findOne: ->
findOneBy: ->
find: ->
count: ->
mockCrudModelFactory = () ->
mockCrudModel
beforeEach ->
mocks.dal.crudFactory = mockCrudModelFactory
model = modelFactory mocks.dal
mocks.exportsCrudModel 'Resource'
, modelFactory(mocks.dal)
describe 'Other', ->
stubs = null
beforeEach ->
stubs = {}
model = proxyquire(dir + '/models/resources', stubs)(
mocks.dal
)
sinon.stub mockCrudModel, 'update'
afterEach ->
mockCrudModel.update.restore()
describe 'registerTypes: function(systemId, models, callback)' +
' -> (err, obj)', ->
it 'calls crud.update with a resource type for each model', (done) ->
models =
"model1":
name: "model1 name"
"model2":
name: "model2 name"
expectedType1 =
systemId: 'a'
name: 'model1 name'
expectedType2 =
systemId: 'a'
name: 'model2 name'
model.registerTypes "a", models, "c"
expect(mockCrudModel.update.calledWith(
expectedType1,expectedType1,{upsert: true}, "c")
).to.be.true
expect(mockCrudModel.update.calledWith(
expectedType2,expectedType2,{upsert: true}, "c")
).to.be.true
done()
|
[
{
"context": " # Grid-light theme for Highcharts JS\n # @author Torstein Honsi\n #\n # Taken from https://github.com/highslide-s",
"end": 1617,
"score": 0.9998664855957031,
"start": 1603,
"tag": "NAME",
"value": "Torstein Honsi"
},
{
"context": "stein Honsi\n #\n # Taken from h... | flink-runtime-web/web-dashboard/app/scripts/index.coffee | sekruse/flink | 0 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
angular.module('flinkApp', ['ui.router', 'angularMoment'])
# --------------------------------------
.run ($rootScope) ->
$rootScope.sidebarVisible = false
$rootScope.showSidebar = ->
$rootScope.sidebarVisible = !$rootScope.sidebarVisible
$rootScope.sidebarClass = 'force-show'
# --------------------------------------
.value 'flinkConfig', {
"refresh-interval": 10000
}
# --------------------------------------
.run (JobsService, MainService, flinkConfig, $interval) ->
MainService.loadConfig().then (config) ->
angular.extend flinkConfig, config
JobsService.listJobs()
$interval ->
JobsService.listJobs()
, flinkConfig["refresh-interval"]
Highcharts.setOptions({
global: {
useUTC: false
}
})
#
# Grid-light theme for Highcharts JS
# @author Torstein Honsi
#
# Taken from https://github.com/highslide-software/highcharts.com
#
Highcharts.createElement('link', {
href: '//fonts.googleapis.com/css?family=Dosis:400,600',
rel: 'stylesheet',
type: 'text/css'
}, null, document.getElementsByTagName('head')[0]);
Highcharts.theme = {
colors: ["#7cb5ec", "#f7a35c", "#90ee7e", "#7798BF", "#aaeeee", "#ff0066", "#eeaaee",
"#55BF3B", "#DF5353", "#7798BF", "#aaeeee"],
chart: {
backgroundColor: null,
style: {
fontFamily: "Dosis, sans-serif"
}
},
title: {
style: {
fontSize: '16px',
fontWeight: 'bold',
textTransform: 'uppercase'
}
},
tooltip: {
borderWidth: 0,
backgroundColor: 'rgba(219,219,216,0.8)',
shadow: false
},
legend: {
itemStyle: {
fontWeight: 'bold',
fontSize: '13px'
}
},
xAxis: {
gridLineWidth: 1,
labels: {
style: {
fontSize: '12px'
}
}
},
yAxis: {
minorTickInterval: 'auto',
title: {
style: {
textTransform: 'uppercase'
}
},
labels: {
style: {
fontSize: '12px'
}
}
},
plotOptions: {
candlestick: {
lineColor: '#404048'
}
},
background2: '#F0F0EA'
};
Highcharts.setOptions(Highcharts.theme);
# --------------------------------------
.config ($uiViewScrollProvider) ->
$uiViewScrollProvider.useAnchorScroll()
# --------------------------------------
.config ($stateProvider, $urlRouterProvider) ->
$stateProvider.state "overview",
url: "/overview"
views:
main:
templateUrl: "partials/overview.html"
controller: 'OverviewController'
.state "running-jobs",
url: "/running-jobs"
views:
main:
templateUrl: "partials/jobs/running-jobs.html"
controller: 'RunningJobsController'
.state "completed-jobs",
url: "/completed-jobs"
views:
main:
templateUrl: "partials/jobs/completed-jobs.html"
controller: 'CompletedJobsController'
.state "single-job",
url: "/jobs/{jobid}"
abstract: true
views:
main:
templateUrl: "partials/jobs/job.html"
controller: 'SingleJobController'
.state "single-job.plan",
url: ""
abstract: true
views:
details:
templateUrl: "partials/jobs/job.plan.html"
controller: 'JobPlanController'
.state "single-job.plan.overview",
url: ""
views:
'node-details':
templateUrl: "partials/jobs/job.plan.node-list.overview.html"
controller: 'JobPlanOverviewController'
.state "single-job.plan.accumulators",
url: "/accumulators"
views:
'node-details':
templateUrl: "partials/jobs/job.plan.node-list.accumulators.html"
controller: 'JobPlanAccumulatorsController'
.state "single-job.timeline",
url: "/timeline"
views:
details:
templateUrl: "partials/jobs/job.timeline.html"
.state "single-job.timeline.vertex",
url: "/{vertexId}"
views:
vertex:
templateUrl: "partials/jobs/job.timeline.vertex.html"
controller: 'JobTimelineVertexController'
.state "single-job.exceptions",
url: "/exceptions"
views:
details:
templateUrl: "partials/jobs/job.exceptions.html"
controller: 'JobExceptionsController'
.state "single-job.properties",
url: "/properties"
views:
details:
templateUrl: "partials/jobs/job.properties.html"
controller: 'JobPropertiesController'
.state "single-job.config",
url: "/config"
views:
details:
templateUrl: "partials/jobs/job.config.html"
.state "all-manager",
url: "/taskmanagers"
views:
main:
templateUrl: "partials/taskmanager/index.html"
controller: 'AllTaskManagersController'
.state "single-manager",
url: "/taskmanager/{taskmanagerid}"
views:
main:
templateUrl: "partials/taskmanager/taskmanager.html"
controller: 'SingleTaskManagerController'
.state "single-manager.metrics",
url: "/metrics"
views:
details:
templateUrl: "partials/taskmanager/taskmanager.metrics.html"
.state "jobmanager",
url: "/jobmanager"
views:
main:
templateUrl: "partials/jobmanager/index.html"
.state "jobmanager.config",
url: "/config"
views:
details:
templateUrl: "partials/jobmanager/config.html"
controller: 'JobManagerConfigController'
.state "jobmanager.stdout",
url: "/stdout"
views:
details:
templateUrl: "partials/jobmanager/stdout.html"
controller: 'JobManagerStdoutController'
.state "jobmanager.log",
url: "/log"
views:
details:
templateUrl: "partials/jobmanager/log.html"
controller: 'JobManagerLogsController'
$urlRouterProvider.otherwise "/overview"
| 128222 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
angular.module('flinkApp', ['ui.router', 'angularMoment'])
# --------------------------------------
.run ($rootScope) ->
$rootScope.sidebarVisible = false
$rootScope.showSidebar = ->
$rootScope.sidebarVisible = !$rootScope.sidebarVisible
$rootScope.sidebarClass = 'force-show'
# --------------------------------------
.value 'flinkConfig', {
"refresh-interval": 10000
}
# --------------------------------------
.run (JobsService, MainService, flinkConfig, $interval) ->
MainService.loadConfig().then (config) ->
angular.extend flinkConfig, config
JobsService.listJobs()
$interval ->
JobsService.listJobs()
, flinkConfig["refresh-interval"]
Highcharts.setOptions({
global: {
useUTC: false
}
})
#
# Grid-light theme for Highcharts JS
# @author <NAME>
#
# Taken from https://github.com/highslide-software/highcharts.com
#
Highcharts.createElement('link', {
href: '//fonts.googleapis.com/css?family=Dosis:400,600',
rel: 'stylesheet',
type: 'text/css'
}, null, document.getElementsByTagName('head')[0]);
Highcharts.theme = {
colors: ["#7cb5ec", "#f7a35c", "#90ee7e", "#7798BF", "#aaeeee", "#ff0066", "#eeaaee",
"#55BF3B", "#DF5353", "#7798BF", "#aaeeee"],
chart: {
backgroundColor: null,
style: {
fontFamily: "Dosis, sans-serif"
}
},
title: {
style: {
fontSize: '16px',
fontWeight: 'bold',
textTransform: 'uppercase'
}
},
tooltip: {
borderWidth: 0,
backgroundColor: 'rgba(219,219,216,0.8)',
shadow: false
},
legend: {
itemStyle: {
fontWeight: 'bold',
fontSize: '13px'
}
},
xAxis: {
gridLineWidth: 1,
labels: {
style: {
fontSize: '12px'
}
}
},
yAxis: {
minorTickInterval: 'auto',
title: {
style: {
textTransform: 'uppercase'
}
},
labels: {
style: {
fontSize: '12px'
}
}
},
plotOptions: {
candlestick: {
lineColor: '#404048'
}
},
background2: '#F0F0EA'
};
Highcharts.setOptions(Highcharts.theme);
# --------------------------------------
.config ($uiViewScrollProvider) ->
$uiViewScrollProvider.useAnchorScroll()
# --------------------------------------
.config ($stateProvider, $urlRouterProvider) ->
$stateProvider.state "overview",
url: "/overview"
views:
main:
templateUrl: "partials/overview.html"
controller: 'OverviewController'
.state "running-jobs",
url: "/running-jobs"
views:
main:
templateUrl: "partials/jobs/running-jobs.html"
controller: 'RunningJobsController'
.state "completed-jobs",
url: "/completed-jobs"
views:
main:
templateUrl: "partials/jobs/completed-jobs.html"
controller: 'CompletedJobsController'
.state "single-job",
url: "/jobs/{jobid}"
abstract: true
views:
main:
templateUrl: "partials/jobs/job.html"
controller: 'SingleJobController'
.state "single-job.plan",
url: ""
abstract: true
views:
details:
templateUrl: "partials/jobs/job.plan.html"
controller: 'JobPlanController'
.state "single-job.plan.overview",
url: ""
views:
'node-details':
templateUrl: "partials/jobs/job.plan.node-list.overview.html"
controller: 'JobPlanOverviewController'
.state "single-job.plan.accumulators",
url: "/accumulators"
views:
'node-details':
templateUrl: "partials/jobs/job.plan.node-list.accumulators.html"
controller: 'JobPlanAccumulatorsController'
.state "single-job.timeline",
url: "/timeline"
views:
details:
templateUrl: "partials/jobs/job.timeline.html"
.state "single-job.timeline.vertex",
url: "/{vertexId}"
views:
vertex:
templateUrl: "partials/jobs/job.timeline.vertex.html"
controller: 'JobTimelineVertexController'
.state "single-job.exceptions",
url: "/exceptions"
views:
details:
templateUrl: "partials/jobs/job.exceptions.html"
controller: 'JobExceptionsController'
.state "single-job.properties",
url: "/properties"
views:
details:
templateUrl: "partials/jobs/job.properties.html"
controller: 'JobPropertiesController'
.state "single-job.config",
url: "/config"
views:
details:
templateUrl: "partials/jobs/job.config.html"
.state "all-manager",
url: "/taskmanagers"
views:
main:
templateUrl: "partials/taskmanager/index.html"
controller: 'AllTaskManagersController'
.state "single-manager",
url: "/taskmanager/{taskmanagerid}"
views:
main:
templateUrl: "partials/taskmanager/taskmanager.html"
controller: 'SingleTaskManagerController'
.state "single-manager.metrics",
url: "/metrics"
views:
details:
templateUrl: "partials/taskmanager/taskmanager.metrics.html"
.state "jobmanager",
url: "/jobmanager"
views:
main:
templateUrl: "partials/jobmanager/index.html"
.state "jobmanager.config",
url: "/config"
views:
details:
templateUrl: "partials/jobmanager/config.html"
controller: 'JobManagerConfigController'
.state "jobmanager.stdout",
url: "/stdout"
views:
details:
templateUrl: "partials/jobmanager/stdout.html"
controller: 'JobManagerStdoutController'
.state "jobmanager.log",
url: "/log"
views:
details:
templateUrl: "partials/jobmanager/log.html"
controller: 'JobManagerLogsController'
$urlRouterProvider.otherwise "/overview"
| true | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
angular.module('flinkApp', ['ui.router', 'angularMoment'])
# --------------------------------------
.run ($rootScope) ->
$rootScope.sidebarVisible = false
$rootScope.showSidebar = ->
$rootScope.sidebarVisible = !$rootScope.sidebarVisible
$rootScope.sidebarClass = 'force-show'
# --------------------------------------
.value 'flinkConfig', {
"refresh-interval": 10000
}
# --------------------------------------
.run (JobsService, MainService, flinkConfig, $interval) ->
MainService.loadConfig().then (config) ->
angular.extend flinkConfig, config
JobsService.listJobs()
$interval ->
JobsService.listJobs()
, flinkConfig["refresh-interval"]
Highcharts.setOptions({
global: {
useUTC: false
}
})
#
# Grid-light theme for Highcharts JS
# @author PI:NAME:<NAME>END_PI
#
# Taken from https://github.com/highslide-software/highcharts.com
#
Highcharts.createElement('link', {
href: '//fonts.googleapis.com/css?family=Dosis:400,600',
rel: 'stylesheet',
type: 'text/css'
}, null, document.getElementsByTagName('head')[0]);
Highcharts.theme = {
colors: ["#7cb5ec", "#f7a35c", "#90ee7e", "#7798BF", "#aaeeee", "#ff0066", "#eeaaee",
"#55BF3B", "#DF5353", "#7798BF", "#aaeeee"],
chart: {
backgroundColor: null,
style: {
fontFamily: "Dosis, sans-serif"
}
},
title: {
style: {
fontSize: '16px',
fontWeight: 'bold',
textTransform: 'uppercase'
}
},
tooltip: {
borderWidth: 0,
backgroundColor: 'rgba(219,219,216,0.8)',
shadow: false
},
legend: {
itemStyle: {
fontWeight: 'bold',
fontSize: '13px'
}
},
xAxis: {
gridLineWidth: 1,
labels: {
style: {
fontSize: '12px'
}
}
},
yAxis: {
minorTickInterval: 'auto',
title: {
style: {
textTransform: 'uppercase'
}
},
labels: {
style: {
fontSize: '12px'
}
}
},
plotOptions: {
candlestick: {
lineColor: '#404048'
}
},
background2: '#F0F0EA'
};
Highcharts.setOptions(Highcharts.theme);
# --------------------------------------
.config ($uiViewScrollProvider) ->
$uiViewScrollProvider.useAnchorScroll()
# --------------------------------------
.config ($stateProvider, $urlRouterProvider) ->
$stateProvider.state "overview",
url: "/overview"
views:
main:
templateUrl: "partials/overview.html"
controller: 'OverviewController'
.state "running-jobs",
url: "/running-jobs"
views:
main:
templateUrl: "partials/jobs/running-jobs.html"
controller: 'RunningJobsController'
.state "completed-jobs",
url: "/completed-jobs"
views:
main:
templateUrl: "partials/jobs/completed-jobs.html"
controller: 'CompletedJobsController'
.state "single-job",
url: "/jobs/{jobid}"
abstract: true
views:
main:
templateUrl: "partials/jobs/job.html"
controller: 'SingleJobController'
.state "single-job.plan",
url: ""
abstract: true
views:
details:
templateUrl: "partials/jobs/job.plan.html"
controller: 'JobPlanController'
.state "single-job.plan.overview",
url: ""
views:
'node-details':
templateUrl: "partials/jobs/job.plan.node-list.overview.html"
controller: 'JobPlanOverviewController'
.state "single-job.plan.accumulators",
url: "/accumulators"
views:
'node-details':
templateUrl: "partials/jobs/job.plan.node-list.accumulators.html"
controller: 'JobPlanAccumulatorsController'
.state "single-job.timeline",
url: "/timeline"
views:
details:
templateUrl: "partials/jobs/job.timeline.html"
.state "single-job.timeline.vertex",
url: "/{vertexId}"
views:
vertex:
templateUrl: "partials/jobs/job.timeline.vertex.html"
controller: 'JobTimelineVertexController'
.state "single-job.exceptions",
url: "/exceptions"
views:
details:
templateUrl: "partials/jobs/job.exceptions.html"
controller: 'JobExceptionsController'
.state "single-job.properties",
url: "/properties"
views:
details:
templateUrl: "partials/jobs/job.properties.html"
controller: 'JobPropertiesController'
.state "single-job.config",
url: "/config"
views:
details:
templateUrl: "partials/jobs/job.config.html"
.state "all-manager",
url: "/taskmanagers"
views:
main:
templateUrl: "partials/taskmanager/index.html"
controller: 'AllTaskManagersController'
.state "single-manager",
url: "/taskmanager/{taskmanagerid}"
views:
main:
templateUrl: "partials/taskmanager/taskmanager.html"
controller: 'SingleTaskManagerController'
.state "single-manager.metrics",
url: "/metrics"
views:
details:
templateUrl: "partials/taskmanager/taskmanager.metrics.html"
.state "jobmanager",
url: "/jobmanager"
views:
main:
templateUrl: "partials/jobmanager/index.html"
.state "jobmanager.config",
url: "/config"
views:
details:
templateUrl: "partials/jobmanager/config.html"
controller: 'JobManagerConfigController'
.state "jobmanager.stdout",
url: "/stdout"
views:
details:
templateUrl: "partials/jobmanager/stdout.html"
controller: 'JobManagerStdoutController'
.state "jobmanager.log",
url: "/log"
views:
details:
templateUrl: "partials/jobmanager/log.html"
controller: 'JobManagerLogsController'
$urlRouterProvider.otherwise "/overview"
|
[
{
"context": " YOUR_GITHUB_PERSONAL_ACCESS_TOKENS\n password : 'x-oauth-basic'",
"end": 228,
"score": 0.9992597699165344,
"start": 215,
"tag": "PASSWORD",
"value": "x-oauth-basic"
}
] | server/EXAMPLE_github_credentials.coffee | briznad/stgr | 0 | # example of github_credentials.coffee file which should live is same dir as stgr.coffee
# credentials to be kept in a gitignored file
module.exports =
username : YOUR_GITHUB_PERSONAL_ACCESS_TOKENS
password : 'x-oauth-basic' | 213036 | # example of github_credentials.coffee file which should live is same dir as stgr.coffee
# credentials to be kept in a gitignored file
module.exports =
username : YOUR_GITHUB_PERSONAL_ACCESS_TOKENS
password : '<PASSWORD>' | true | # example of github_credentials.coffee file which should live is same dir as stgr.coffee
# credentials to be kept in a gitignored file
module.exports =
username : YOUR_GITHUB_PERSONAL_ACCESS_TOKENS
password : 'PI:PASSWORD:<PASSWORD>END_PI' |
[
{
"context": " session: {}\n client:\n _id: 'uuid1'\n user:\n _id: 'uuid2'\n con",
"end": 2113,
"score": 0.8217484354972839,
"start": 2112,
"tag": "USERNAME",
"value": "1"
},
{
"context": " _id: 'uuid1'\n user:\n _id: 'uui... | test/unit/oidc/authorize.coffee | LorianeE/connect | 0 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
chai.use sinonChai
chai.should()
IDToken = require '../../../models/IDToken'
AccessToken = require '../../../models/AccessToken'
AuthorizationCode = require '../../../models/AuthorizationCode'
authorize = require('../../../oidc').authorize
describe 'Authorize', ->
{req,res,next,err} = {}
describe 'with consent and "code" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should not provide session_state', ->
res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
describe 'with consent, "code" response type and "form_post" response_mode', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
response_mode: 'form_post'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
set: sinon.spy()
render: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should set cache-control headers', ->
res.set.should.have.been.calledWithExactly({
'Cache-Control': 'no-cache, no-store',
'Pragma': 'no-cache'
})
it 'should respond with the form_post', ->
res.render.should.have.been.calledWithExactly(
"form_post", {
redirect_uri: req.connectParams.redirect_uri
state: req.connectParams.state
access_token: undefined
id_token: undefined
code: '1234'
}
)
describe 'with consent and "code" response type and "max_age" param', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
max_age: 1000
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set max_age from params', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: req.connectParams.max_age
})
describe 'with consent and "code" response type and client "default_max_age"', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
default_max_age: 2000
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set max_age from client default_max_age', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: req.client.default_max_age
})
describe 'with consent and "code token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code token'
redirect_uri: 'https://host/callback'
scope: 'openid profile'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
AccessToken.issue.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should not provide id_token', ->
res.redirect.should.not.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
describe 'with consent and "code id_token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['sms', 'otp']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code id_token'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
IDToken.prototype.initializePayload.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should not provide access_token', ->
res.redirect.should.not.have.been.calledWith sinon.match('access_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "id_token token" response type', ->
before (done) ->
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['otp']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
IDToken.prototype.initializePayload.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "code id_token token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['pwd']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code id_token token'
redirect_uri: 'https://host/callback'
scope: 'openid profile'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
AccessToken.issue.restore()
IDToken.prototype.initializePayload.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "none" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'none'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
it 'should not provide authorization code', ->
res.redirect.should.not.have.been.calledWith sinon.match 'code=1234'
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should not provide session_state', ->
res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
describe 'with consent and response mode query', ->
before (done) ->
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
response_mode: 'query'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
describe 'with consent and response mode form_post', ->
response = AccessToken.initialize().project('issue')
before (done) ->
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
response_mode: 'form_post'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
set: sinon.spy()
render: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
it 'should set cache-control headers', ->
res.set.should.have.been.calledWithExactly({
'Cache-Control': 'no-cache, no-store',
'Pragma': 'no-cache'
})
it "should respond with form_post", ->
res.render.should.have.been.calledWithExactly(
"form_post", {
redirect_uri: req.connectParams.redirect_uri
state: req.connectParams.state
access_token: response.access_token
id_token: response.id_token
code: undefined
}
)
describe 'without consent', ->
before (done) ->
req =
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
response_type: 'id_token token'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide an "access_denied" error', ->
res.redirect.should.have.been.calledWith sinon.match('error=access_denied')
| 207449 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
chai.use sinonChai
chai.should()
IDToken = require '../../../models/IDToken'
AccessToken = require '../../../models/AccessToken'
AuthorizationCode = require '../../../models/AuthorizationCode'
authorize = require('../../../oidc').authorize
describe 'Authorize', ->
{req,res,next,err} = {}
describe 'with consent and "code" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should not provide session_state', ->
res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
describe 'with consent, "code" response type and "form_post" response_mode', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
response_mode: 'form_post'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
set: sinon.spy()
render: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should set cache-control headers', ->
res.set.should.have.been.calledWithExactly({
'Cache-Control': 'no-cache, no-store',
'Pragma': 'no-cache'
})
it 'should respond with the form_post', ->
res.render.should.have.been.calledWithExactly(
"form_post", {
redirect_uri: req.connectParams.redirect_uri
state: req.connectParams.state
access_token: undefined
id_token: undefined
code: '1234'
}
)
describe 'with consent and "code" response type and "max_age" param', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
max_age: 1000
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set max_age from params', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: req.connectParams.max_age
})
describe 'with consent and "code" response type and client "default_max_age"', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
default_max_age: 2000
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set max_age from client default_max_age', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: req.client.default_max_age
})
describe 'with consent and "code token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code token'
redirect_uri: 'https://host/callback'
scope: 'openid profile'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
AccessToken.issue.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should not provide id_token', ->
res.redirect.should.not.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
describe 'with consent and "code id_token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['sms', 'otp']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code id_token'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
IDToken.prototype.initializePayload.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should not provide access_token', ->
res.redirect.should.not.have.been.calledWith sinon.match('access_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "id_token token" response type', ->
before (done) ->
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['otp']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
IDToken.prototype.initializePayload.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "code id_token token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['<PASSWORD>']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code id_token token'
redirect_uri: 'https://host/callback'
scope: 'openid profile'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
AccessToken.issue.restore()
IDToken.prototype.initializePayload.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "none" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'none'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
it 'should not provide authorization code', ->
res.redirect.should.not.have.been.calledWith sinon.match 'code=1234'
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should not provide session_state', ->
res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
describe 'with consent and response mode query', ->
before (done) ->
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
response_mode: 'query'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
describe 'with consent and response mode form_post', ->
response = AccessToken.initialize().project('issue')
before (done) ->
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
response_mode: 'form_post'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
set: sinon.spy()
render: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
it 'should set cache-control headers', ->
res.set.should.have.been.calledWithExactly({
'Cache-Control': 'no-cache, no-store',
'Pragma': 'no-cache'
})
it "should respond with form_post", ->
res.render.should.have.been.calledWithExactly(
"form_post", {
redirect_uri: req.connectParams.redirect_uri
state: req.connectParams.state
access_token: response.access_token
id_token: response.id_token
code: undefined
}
)
describe 'without consent', ->
before (done) ->
req =
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
response_type: 'id_token token'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide an "access_denied" error', ->
res.redirect.should.have.been.calledWith sinon.match('error=access_denied')
| true | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
chai.use sinonChai
chai.should()
IDToken = require '../../../models/IDToken'
AccessToken = require '../../../models/AccessToken'
AuthorizationCode = require '../../../models/AuthorizationCode'
authorize = require('../../../oidc').authorize
describe 'Authorize', ->
{req,res,next,err} = {}
describe 'with consent and "code" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should not provide session_state', ->
res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
describe 'with consent, "code" response type and "form_post" response_mode', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
response_mode: 'form_post'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
set: sinon.spy()
render: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should set cache-control headers', ->
res.set.should.have.been.calledWithExactly({
'Cache-Control': 'no-cache, no-store',
'Pragma': 'no-cache'
})
it 'should respond with the form_post', ->
res.render.should.have.been.calledWithExactly(
"form_post", {
redirect_uri: req.connectParams.redirect_uri
state: req.connectParams.state
access_token: undefined
id_token: undefined
code: '1234'
}
)
describe 'with consent and "code" response type and "max_age" param', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
max_age: 1000
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set max_age from params', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: req.connectParams.max_age
})
describe 'with consent and "code" response type and client "default_max_age"', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
default_max_age: 2000
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should set max_age from client default_max_age', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: req.client.default_max_age
})
describe 'with consent and "code token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code token'
redirect_uri: 'https://host/callback'
scope: 'openid profile'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
AccessToken.issue.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should not provide id_token', ->
res.redirect.should.not.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
describe 'with consent and "code id_token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['sms', 'otp']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code id_token'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
IDToken.prototype.initializePayload.restore()
it 'should set default max_age if none is provided', ->
AuthorizationCode.insert.should.have.been.calledWith sinon.match({
max_age: undefined
})
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should not provide access_token', ->
res.redirect.should.not.have.been.calledWith sinon.match('access_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "id_token token" response type', ->
before (done) ->
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['otp']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
IDToken.prototype.initializePayload.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "code id_token token" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
sinon.spy(IDToken.prototype, 'initializePayload')
req =
session:
amr: ['PI:PASSWORD:<PASSWORD>END_PI']
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'code id_token token'
redirect_uri: 'https://host/callback'
scope: 'openid profile'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
AccessToken.issue.restore()
IDToken.prototype.initializePayload.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a uri fragment', ->
res.redirect.should.have.been.calledWith sinon.match('#')
it 'should provide authorization code', ->
res.redirect.should.have.been.calledWith sinon.match 'code=1234'
it 'should provide access_token', ->
res.redirect.should.have.been.calledWith sinon.match('access_token=')
it 'should provide token_type', ->
res.redirect.should.have.been.calledWith sinon.match('token_type=Bearer')
it 'should provide expires_in', ->
res.redirect.should.have.been.calledWith sinon.match('expires_in=3600')
it 'should provide id_token', ->
res.redirect.should.have.been.calledWith sinon.match('id_token=')
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match req.connectParams.state
it 'should provide session_state', ->
res.redirect.should.have.been.calledWith sinon.match('session_state=')
it 'should include `amr` claim in id_token', ->
IDToken.prototype.initializePayload.should.have.been.calledWith(
sinon.match amr: req.session.amr
)
describe 'with consent and "none" response type', ->
before (done) ->
sinon.stub(AuthorizationCode, 'insert').callsArgWith(1, null, {
code: '1234'
})
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'none'
redirect_uri: 'https://host/callback'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AuthorizationCode.insert.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
it 'should not provide authorization code', ->
res.redirect.should.not.have.been.calledWith sinon.match 'code=1234'
it 'should provide state', ->
res.redirect.should.have.been.calledWith sinon.match 'state=r4nd0m'
it 'should not provide session_state', ->
res.redirect.should.not.have.been.calledWith sinon.match('session_state=')
describe 'with consent and response mode query', ->
before (done) ->
response = AccessToken.initialize().project('issue')
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
response_mode: 'query'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide a query string', ->
res.redirect.should.have.been.calledWith sinon.match('?')
describe 'with consent and response mode form_post', ->
response = AccessToken.initialize().project('issue')
before (done) ->
sinon.stub(AccessToken, 'issue').callsArgWith(1, null, response)
req =
session: {}
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
authorize: 'true'
response_type: 'id_token token'
response_mode: 'form_post'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
set: sinon.spy()
render: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
after ->
AccessToken.issue.restore()
it 'should set cache-control headers', ->
res.set.should.have.been.calledWithExactly({
'Cache-Control': 'no-cache, no-store',
'Pragma': 'no-cache'
})
it "should respond with form_post", ->
res.render.should.have.been.calledWithExactly(
"form_post", {
redirect_uri: req.connectParams.redirect_uri
state: req.connectParams.state
access_token: response.access_token
id_token: response.id_token
code: undefined
}
)
describe 'without consent', ->
before (done) ->
req =
client:
_id: 'uuid1'
user:
_id: 'uuid2'
connectParams:
response_type: 'id_token token'
redirect_uri: 'https://host/callback'
nonce: 'n0nc3'
state: 'r4nd0m'
res =
redirect: sinon.spy()
next = sinon.spy()
authorize req, res, next
done()
it 'should redirect to the redirect_uri', ->
res.redirect.should.have.been.calledWith sinon.match(
req.connectParams.redirect_uri
)
it 'should provide an "access_denied" error', ->
res.redirect.should.have.been.calledWith sinon.match('error=access_denied')
|
[
{
"context": "remote'\n ssh:\n host: 'target', username: 'nikita',\n sudo: true\n password: 'secret' # pri",
"end": 127,
"score": 0.9995833039283752,
"start": 121,
"tag": "USERNAME",
"value": "nikita"
},
{
"context": "name: 'nikita',\n sudo: true\n passwor... | packages/tools/env/centos6/test.coffee | wdavidw/node-mecano | 0 |
module.exports =
tags:
tools_repo: true
config: [
label: 'remote'
ssh:
host: 'target', username: 'nikita',
sudo: true
password: 'secret' # private_key_path: '~/.ssh/id_rsa'
]
| 219912 |
module.exports =
tags:
tools_repo: true
config: [
label: 'remote'
ssh:
host: 'target', username: 'nikita',
sudo: true
password: '<PASSWORD>' # private_key_path: '~/.ssh/id_rsa'
]
| true |
module.exports =
tags:
tools_repo: true
config: [
label: 'remote'
ssh:
host: 'target', username: 'nikita',
sudo: true
password: 'PI:PASSWORD:<PASSWORD>END_PI' # private_key_path: '~/.ssh/id_rsa'
]
|
[
{
"context": "t fs.existsSync storage_file\n catch err\n key = \"#{home}+#{process.platform}+permission+denied\"\n console.log ''\n console.log chalk.red \" #{",
"end": 539,
"score": 0.9989232420921326,
"start": 492,
"tag": "KEY",
"value": "\"#{home}+#{process.platform}+permission+... | src/utils/storage.coffee | WittBulter/fine | 155 | os = require 'os'
fs = require 'fs'
path = require 'path'
chalk = require 'chalk'
emoji = require 'node-emoji'
child = require 'child_process'
home = path.join os.homedir(), '.fine'
storage_file = path.join home, 'storage.json'
cache_dir = path.join home, 'cache'
init_home = () ->
try
fs.mkdirSync home if not fs.existsSync home
fs.mkdirSync cache_dir if not fs.existsSync cache_dir
fs.writeFileSync storage_file, '{}' if not fs.existsSync storage_file
catch err
key = "#{home}+#{process.platform}+permission+denied"
console.log ''
console.log chalk.red " #{emoji.get 'lock'} directory '#{home}' does not have write permission."
console.log chalk.cyan " #{emoji.get 'link'} https://stackoverflow.com/search?q=#{key}"
class Cache
path: cache_dir
clear: () ->
await fine.rm cache_dir
fs.mkdirSync cache_dir
class BaseIO
find_all: () ->
json
try
json = JSON.parse fs.readFileSync storage_file, 'utf-8'
catch err
json = {}
json
save_dict: (dict = {}) ->
json = Object.assign {}, do @find_all, dict
str = JSON.stringify json
fs.writeFileSync storage_file, str
json
class Storage extends BaseIO
constructor: (arg) ->
super arg
do init_home
@cache = new Cache
@cache: {}
save: (key, value = null) ->
return {} if not key
key_value = { "#{key}": value }
next = Object.assign {}, do @find_all, key_value
@save_dict next
key_value
find: (key) ->
return null if not key
json = do @find_all
json[key] or null
clear: () ->
@save_dict {}
true
module.exports = new Storage
| 221098 | os = require 'os'
fs = require 'fs'
path = require 'path'
chalk = require 'chalk'
emoji = require 'node-emoji'
child = require 'child_process'
home = path.join os.homedir(), '.fine'
storage_file = path.join home, 'storage.json'
cache_dir = path.join home, 'cache'
init_home = () ->
try
fs.mkdirSync home if not fs.existsSync home
fs.mkdirSync cache_dir if not fs.existsSync cache_dir
fs.writeFileSync storage_file, '{}' if not fs.existsSync storage_file
catch err
key = <KEY>
console.log ''
console.log chalk.red " #{emoji.get 'lock'} directory '#{home}' does not have write permission."
console.log chalk.cyan " #{emoji.get 'link'} https://stackoverflow.com/search?q=#{key}"
class Cache
path: cache_dir
clear: () ->
await fine.rm cache_dir
fs.mkdirSync cache_dir
class BaseIO
find_all: () ->
json
try
json = JSON.parse fs.readFileSync storage_file, 'utf-8'
catch err
json = {}
json
save_dict: (dict = {}) ->
json = Object.assign {}, do @find_all, dict
str = JSON.stringify json
fs.writeFileSync storage_file, str
json
class Storage extends BaseIO
constructor: (arg) ->
super arg
do init_home
@cache = new Cache
@cache: {}
save: (key, value = null) ->
return {} if not key
key_value = { "#{key}": value }
next = Object.assign {}, do @find_all, key_value
@save_dict next
key_value
find: (key) ->
return null if not key
json = do @find_all
json[key] or null
clear: () ->
@save_dict {}
true
module.exports = new Storage
| true | os = require 'os'
fs = require 'fs'
path = require 'path'
chalk = require 'chalk'
emoji = require 'node-emoji'
child = require 'child_process'
home = path.join os.homedir(), '.fine'
storage_file = path.join home, 'storage.json'
cache_dir = path.join home, 'cache'
init_home = () ->
try
fs.mkdirSync home if not fs.existsSync home
fs.mkdirSync cache_dir if not fs.existsSync cache_dir
fs.writeFileSync storage_file, '{}' if not fs.existsSync storage_file
catch err
key = PI:KEY:<KEY>END_PI
console.log ''
console.log chalk.red " #{emoji.get 'lock'} directory '#{home}' does not have write permission."
console.log chalk.cyan " #{emoji.get 'link'} https://stackoverflow.com/search?q=#{key}"
class Cache
path: cache_dir
clear: () ->
await fine.rm cache_dir
fs.mkdirSync cache_dir
class BaseIO
find_all: () ->
json
try
json = JSON.parse fs.readFileSync storage_file, 'utf-8'
catch err
json = {}
json
save_dict: (dict = {}) ->
json = Object.assign {}, do @find_all, dict
str = JSON.stringify json
fs.writeFileSync storage_file, str
json
class Storage extends BaseIO
constructor: (arg) ->
super arg
do init_home
@cache = new Cache
@cache: {}
save: (key, value = null) ->
return {} if not key
key_value = { "#{key}": value }
next = Object.assign {}, do @find_all, key_value
@save_dict next
key_value
find: (key) ->
return null if not key
json = do @find_all
json[key] or null
clear: () ->
@save_dict {}
true
module.exports = new Storage
|
[
{
"context": "id001'] = \n id: \"id001\"\n username: 'martin'\n password: 'secret'\n email: 'hello",
"end": 550,
"score": 0.9995678067207336,
"start": 544,
"tag": "USERNAME",
"value": "martin"
},
{
"context": "01\"\n username: 'martin'\n pass... | test/t14-embed-resolve-test.coffee | codedoctor/node-api-facade | 1 | should = require 'should'
_ = require 'underscore'
model = ->
targetId : "id001"
module.exports = class ResolverUsers
constructor: () ->
# Ususally have a link to persistent store here.
@kinds = ['User'] # Supported types
resolve: (kind,userIdsToRetrieve = [],options = {},cb) =>
userIdsToRetrieve = _.uniq(userIdsToRetrieve)
result =
kind : kind
collectionName: 'users'
items : {}
if _.contains(userIdsToRetrieve,'id001')
result.items['id001'] =
id: "id001"
username: 'martin'
password: 'secret'
email: 'hello@world.com'
cb null,result
describe 'WHEN resolving stuff', ->
index = require '../lib/index'
apiFacade = index.client()
apiFacade.addSchema "TypeA",
mappings:
target:
name : 'targetId'
type: 'User'
embed : true
resolve: true
apiFacade.addSchema "User",
mappings:
id : 'id'
username: 'username'
password: 'password'
email: 'email'
apiFacade.registerResolver new ResolverUsers
it 'IT should transform values', (done) ->
apiFacade.mapRoot 'TypeA', model(), {}, (err,jsonObj) ->
console.log "I GOT: #{JSON.stringify(jsonObj)}"
should.not.exist err
should.exist jsonObj
jsonObj.should.have.property 'target'
jsonObj.target.should.have.property 'id'
jsonObj.target.should.have.property 'username'
jsonObj.target.should.have.property 'password'
jsonObj.target.should.have.property 'email'
jsonObj.should.have.property '_embedded'
jsonObj._embedded.should.have.property 'users'
jsonObj._embedded.users.should.have.property 'id001'
jsonObj._embedded.users.id001.should.have.property 'id'
jsonObj._embedded.users.id001.should.have.property 'username'
jsonObj._embedded.users.id001.should.have.property 'password'
jsonObj._embedded.users.id001.should.have.property 'email'
done null
| 152641 | should = require 'should'
_ = require 'underscore'
model = ->
targetId : "id001"
module.exports = class ResolverUsers
constructor: () ->
# Ususally have a link to persistent store here.
@kinds = ['User'] # Supported types
resolve: (kind,userIdsToRetrieve = [],options = {},cb) =>
userIdsToRetrieve = _.uniq(userIdsToRetrieve)
result =
kind : kind
collectionName: 'users'
items : {}
if _.contains(userIdsToRetrieve,'id001')
result.items['id001'] =
id: "id001"
username: 'martin'
password: '<PASSWORD>'
email: '<EMAIL>'
cb null,result
describe 'WHEN resolving stuff', ->
index = require '../lib/index'
apiFacade = index.client()
apiFacade.addSchema "TypeA",
mappings:
target:
name : 'targetId'
type: 'User'
embed : true
resolve: true
apiFacade.addSchema "User",
mappings:
id : 'id'
username: 'username'
password: '<PASSWORD>'
email: 'email'
apiFacade.registerResolver new ResolverUsers
it 'IT should transform values', (done) ->
apiFacade.mapRoot 'TypeA', model(), {}, (err,jsonObj) ->
console.log "I GOT: #{JSON.stringify(jsonObj)}"
should.not.exist err
should.exist jsonObj
jsonObj.should.have.property 'target'
jsonObj.target.should.have.property 'id'
jsonObj.target.should.have.property 'username'
jsonObj.target.should.have.property 'password'
jsonObj.target.should.have.property 'email'
jsonObj.should.have.property '_embedded'
jsonObj._embedded.should.have.property 'users'
jsonObj._embedded.users.should.have.property 'id001'
jsonObj._embedded.users.id001.should.have.property 'id'
jsonObj._embedded.users.id001.should.have.property 'username'
jsonObj._embedded.users.id001.should.have.property 'password'
jsonObj._embedded.users.id001.should.have.property 'email'
done null
| true | should = require 'should'
_ = require 'underscore'
model = ->
targetId : "id001"
module.exports = class ResolverUsers
constructor: () ->
# Ususally have a link to persistent store here.
@kinds = ['User'] # Supported types
resolve: (kind,userIdsToRetrieve = [],options = {},cb) =>
userIdsToRetrieve = _.uniq(userIdsToRetrieve)
result =
kind : kind
collectionName: 'users'
items : {}
if _.contains(userIdsToRetrieve,'id001')
result.items['id001'] =
id: "id001"
username: 'martin'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
cb null,result
describe 'WHEN resolving stuff', ->
index = require '../lib/index'
apiFacade = index.client()
apiFacade.addSchema "TypeA",
mappings:
target:
name : 'targetId'
type: 'User'
embed : true
resolve: true
apiFacade.addSchema "User",
mappings:
id : 'id'
username: 'username'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
email: 'email'
apiFacade.registerResolver new ResolverUsers
it 'IT should transform values', (done) ->
apiFacade.mapRoot 'TypeA', model(), {}, (err,jsonObj) ->
console.log "I GOT: #{JSON.stringify(jsonObj)}"
should.not.exist err
should.exist jsonObj
jsonObj.should.have.property 'target'
jsonObj.target.should.have.property 'id'
jsonObj.target.should.have.property 'username'
jsonObj.target.should.have.property 'password'
jsonObj.target.should.have.property 'email'
jsonObj.should.have.property '_embedded'
jsonObj._embedded.should.have.property 'users'
jsonObj._embedded.users.should.have.property 'id001'
jsonObj._embedded.users.id001.should.have.property 'id'
jsonObj._embedded.users.id001.should.have.property 'username'
jsonObj._embedded.users.id001.should.have.property 'password'
jsonObj._embedded.users.id001.should.have.property 'email'
done null
|
[
{
"context": "priority=4 right_assoc=1') .strict('$1.hash_key==binary_operator') \nq('bin_op', '**') ",
"end": 7766,
"score": 0.5116285681724548,
"start": 7760,
"tag": "KEY",
"value": "binary"
}
] | src/grammar.coffee | hu2prod/scriptscript | 1 | require 'fy'
# Grammar definition for the scriptscript language, expressed as gram2 rules.
{Gram} = require 'gram2'
module = @
# ###################################################################################################
# specific
# ###################################################################################################
# API should be async by default in case we make some optimizations in future
g = new Gram
{_tokenizer} = require './tokenizer'
# Register every tokenizer-produced token name so rules can reference them via #name.
do ()->
  for v in _tokenizer.parser_list
    g.extra_hash_key_list.push v.name
# q(name, pattern): shorthand for registering a grammar rule.
# NOTE(review): .mx appears to attach attributes to the produced node and
# .strict to add acceptance predicates — confirm against the gram2 docs.
q = (a, b)->g.rule a,b
# ###################################################################################################
# 1-position tokens/const
# ###################################################################################################
# Very low priority marks atomic / fully-bracketed nodes, so they bind tighter
# than any operator (operator rules require operand priority < operator priority).
base_priority = -9000
q('lvalue', '#identifier') .mx("priority=#{base_priority} tail_space=$1.tail_space ult=value ti=id")
q('rvalue', '#lvalue') .mx("priority=#{base_priority} tail_space=$1.tail_space ult=deep ti=pass")
q('num_const', '#decimal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#octal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#hexadecimal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#binary_literal') .mx("ult=value ti=const type=int")
q('num_const', '#float_literal') .mx("ult=value ti=const type=float")
q('const', '#num_const') .mx("ult=deep ti=pass")
q('str_const', '#string_literal_singleq') .mx("ult=string_singleq ti=const type=string")
q('str_const', '#block_string_literal_singleq') .mx("ult=block_string_singleq ti=const type=string")
q('str_const', '#string_literal_doubleq') .mx("ult=string_doubleq ti=const type=string")
q('str_const', '#block_string_literal_doubleq') .mx("ult=block_string_doubleq ti=const type=string")
q('const', '#str_const') .mx("ult=deep ti=pass")
q('rvalue','#const') .mx("priority=#{base_priority} ult=deep ti=pass")
q('lvalue','@') .mx("priority=#{base_priority} ult=value ti=this block_assign=1")
q('lvalue','@ #identifier') .mx("priority=#{base_priority} ult=value")
# ###################################################################################################
# string interpolation
# ###################################################################################################
q('st1_start', '#inline_string_template_start') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st3_start', '#block_string_template_start') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st_mid', '#string_template_mid') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st1_end', '#inline_string_template_end') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st3_end', '#block_string_template_end') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st1_start', '#st1_start #st_mid') .mx("ult=string_interpolation_put_together_m1 ti=string_inter_pass")
q('st3_start', '#st3_start #st_mid') .mx("ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st1_start #st1_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st3_start #st3_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('st1_start', '#st1_start #rvalue #st_mid') .mx("ult=string_interpolation_put_together_m1 ti=string_inter_pass")
q('st3_start', '#st3_start #rvalue #st_mid') .mx("ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st1_start #rvalue #st1_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st3_start #rvalue #st3_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
# ###################################################################################################
# regexp
# ###################################################################################################
q('regexp', '#regexp_literal') .mx("ult=value ti=const type=regexp")
q('regexp', '#here_regexp_literal') .mx("ult=block_regexp ti=const type=regexp")
q('rvalue', '#regexp') .mx("ult=deep ti=pass")
# ###################################################################################################
# regexp interpolation
# ###################################################################################################
q('rextem_start', '#regexp_template_start') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_mid', '#regexp_template_mid') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_end', '#regexp_template_end') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_start', '#rextem_start #rvalue #rextem_mid') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#rextem_start #rvalue #rextem_end') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rextem_start', '#rextem_start #rextem_mid') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#rextem_start #rextem_end') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
# ###################################################################################################
# operators define
# ###################################################################################################
q('pre_op', '!') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', 'not') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', '~') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', '-') .mx('priority=1') .strict('$1.hash_key==unary_operator !$1.tail_space')
q('pre_op', '+') .mx('priority=1') .strict('$1.hash_key==unary_operator !$1.tail_space')
q('pre_op', 'typeof') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', 'void') .mx('priority=15') .strict('$1.hash_key==unary_operator')
q('pre_op', 'new') .mx('priority=15') .strict('$1.hash_key==unary_operator')
q('pre_op', 'delete') .mx('priority=15') .strict('$1.hash_key==unary_operator')
# ++ -- pre_op is banned.
q('post_op', '++') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('post_op', '--') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('post_op', '[QUESTION]') .mx('priority=1') #.strict('$1.hash_key==unary_operator')
# https://developer.mozilla.org/ru/docs/Web/JavaScript/Reference/Operators/Operator_Precedence
# TODO all ops
pipe_priority = 100
q('bin_op', '//|%%') .mx('priority=4 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '**') .mx('priority=4 left_assoc=1') .strict('$1.hash_key==binary_operator') # because JS
q('bin_op', '*|/|%') .mx('priority=5 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '+|-') .mx('priority=6 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '<<|>>|>>>') .mx('priority=7 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', 'instanceof') .mx('priority=8 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '<|<=|>|>=') .mx('priority=9') .strict('$1.hash_key==binary_operator') # NOTE NOT associative, because chained comparison
q('bin_op', '!=|==') .mx('priority=9 right_assoc=1') .strict('$1.hash_key==binary_operator') # NOTE == <= has same priority
# WARNING a == b < c is bad style. So all fuckups are yours
q('bin_op', '&&|and|or|xor|[PIPE][PIPE]') .mx('priority=10 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('assign_bin_op', '=|+=|-=|*=|/=|%=|<<=|>>=|>>>=|**=|//=|%%=|and=|or=|xor=|[QUESTION]=').mx('priority=3') .strict('$1.hash_key==binary_operator')
# ###################################################################################################
# operators constructions
# ###################################################################################################
# PIPE special
q('bin_op', '#multipipe') .mx("priority=#{pipe_priority} right_assoc=1") # возможно стоит это сделать отдельной конструкцией языка дабы проверять всё более тсчательно
q('multipipe', '[PIPE] #multipipe?')
# NOTE need ~same rule for lvalue ???
q('rvalue', '( #rvalue )') .mx("priority=#{base_priority} ult=bracket ti=bracket")
# Three variants below disambiguate operand/operator priority:
# strictly-lower on both sides, or equal on the associative side only.
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority<#bin_op.priority !#rvalue[1].func_decl')
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority==#bin_op.priority !#rvalue[1].func_decl #bin_op.left_assoc')
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority==#bin_op.priority #rvalue[2].priority<#bin_op.priority !#rvalue[1].func_decl #bin_op.right_assoc')
# BUG in gram2
# # indent set
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority<#bin_op.priority')
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority==#bin_op.priority #bin_op.left_assoc')
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority==#bin_op.priority #rvalue[2].priority<#bin_op.priority #bin_op.right_assoc')
# indent+pipe
q('pre_pipe_rvalue', '#multipipe #rvalue') #.strict("#rvalue.priority<#{pipe_priority}")
q('pre_pipe_rvalue', '#pre_pipe_rvalue #eol #multipipe #rvalue') #.strict("#rvalue.priority<#{pipe_priority}")
q('rvalue', '#rvalue #multipipe #indent #pre_pipe_rvalue #dedent').mx("priority=#{pipe_priority}") .strict("#rvalue[1].priority<=#{pipe_priority}")
# assign
q('rvalue', '#lvalue #assign_bin_op #rvalue') .mx('priority=#assign_bin_op.priority ult=bin_op ti=assign_bin_op func_decl=#lvalue.func_decl').strict('#lvalue.priority<#assign_bin_op.priority #rvalue.priority<=#assign_bin_op.priority !#lvalue.func_decl !#lvalue.block_assign')
q('rvalue', '#pre_op #rvalue') .mx('priority=#pre_op.priority ult=pre_op ti=pre_op') .strict('#rvalue[1].priority<=#pre_op.priority')
q('rvalue', '#rvalue #post_op') .mx('priority=#post_op.priority ult=post_op ti=post_op').strict('#rvalue[1].priority<#post_op.priority !#rvalue.tail_space') # a++ ++ is not allowed
# ###################################################################################################
# ternary
# ###################################################################################################
q('rvalue', '#rvalue [QUESTION] #rvalue : #rvalue') .mx("priority=#{base_priority} ult=ternary delimiter='[SPACE]' ti=ternary")
# ###################################################################################################
# array
# ###################################################################################################
q('comma_rvalue', '#rvalue') .mx("ult=deep")
# q('comma_rvalue', '#eol #comma_rvalue') .mx("ult=deep") # NOTE eol in back will not work. Gram bug
q('comma_rvalue', '#comma_rvalue #eol #rvalue') .mx("ult=deep delimiter=','")
q('comma_rvalue', '#comma_rvalue #eol? , #eol? #rvalue').mx("ult=deep")
q('array', '[ #eol? ]') .mx("priority=#{base_priority} ult=deep")
q('array', '[ #eol? #comma_rvalue #eol? ]') .mx("priority=#{base_priority} ult=deep")
q('array', '[ #indent #comma_rvalue? #dedent ]') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#array') .mx("priority=#{base_priority} ult=deep ti=array")
# NOTE lvalue array come later
q('array', '[ #num_const .. #num_const ]') .mx("priority=#{base_priority} ult=num_array")
# ###################################################################################################
# hash
# ###################################################################################################
# hash with brackets
q('pair', '#identifier : #rvalue') .mx("ult=hash_pair_simple")
q('pair', '#const : #rvalue') .mx("ult=deep")
q('pair', '( #rvalue ) : #rvalue') .mx("ult=hash_pair_eval")
q('pair', '#identifier') .mx("ult=hash_pair_auto auto=1")
q('pair_comma_rvalue', '#pair') .mx("ult=deep")
q('pair_comma_rvalue', '#pair_comma_rvalue #eol #pair').mx("ult=deep delimiter=','")
q('pair_comma_rvalue', '#pair_comma_rvalue #eol? , #eol? #pair').mx("ult=deep")
q('hash', '{ #eol? }') .mx("priority=#{base_priority} ult=deep")
q('hash', '{ #eol? #pair_comma_rvalue #eol? }') .mx("priority=#{base_priority} ult=deep")
q('hash', '{ #indent #pair_comma_rvalue? #dedent }') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#hash') .mx("priority=#{base_priority} ult=deep ti=hash")
# Bracket-less hashes: shorthand auto-pairs (bare identifiers) are rejected here.
q('BL_pair_comma_rvalue', '#pair') .mx("ult=deep") .strict("!#pair.auto")
q('BL_pair_comma_rvalue', '#eol #pair') .mx("ult=deep") .strict("!#pair.auto")
q('BL_pair_comma_rvalue', '#BL_pair_comma_rvalue , #pair').mx("ult=deep") .strict("!#pair.auto")
q('bracket_less_hash', '#BL_pair_comma_rvalue') .mx("priority=#{base_priority} ult=deep")
q('bracket_less_hash', '#indent #BL_pair_comma_rvalue #dedent') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#bracket_less_hash') .mx("priority=#{base_priority} ult=hash_wrap ti=hash")
# LATER bracket-less hash
# fuckup sample
# a a:b,c:d
# a({a:b,c:d})
# a({a:b},{c:d})
# ###################################################################################################
# access
# ###################################################################################################
# [] access
q('lvalue', '#lvalue [ #rvalue ]') .mx("priority=#{base_priority} ult=array_access ti=array_access")
# . access
q('lvalue', '#lvalue . #identifier') .mx("priority=#{base_priority} ult=field_access ti=id_access")
# opencl-like access
# proper
q('lvalue', '#lvalue . #decimal_literal') .mx("priority=#{base_priority} ult=opencl_access ti=opencl_access")
q('lvalue', '#lvalue . #octal_literal') .mx("priority=#{base_priority} ult=opencl_access ti=opencl_access")
# hack for a.0123 float_enabled
# q('lvalue', '#lvalue #float_literal') .mx("priority=#{base_priority}") .strict('#lvalue.tail_space=0 #float_literal[0:0]="."')
# ###################################################################################################
# function call
# ###################################################################################################
q('rvalue', '#rvalue ( #comma_rvalue? #eol? )') .mx("priority=#{base_priority} ult=func_call ti=func_call").strict('!#rvalue.func_decl')
# ###################################################################################################
# function decl
# ###################################################################################################
q('rvalue', '-> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '=> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) -> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) => #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) : #type -> #function_body?').mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) : #type => #function_body?').mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('arg_list', '#arg') .mx("priority=#{base_priority}")
q('arg_list', '#arg_list , #arg') .mx("priority=#{base_priority}")
q('arg', '#identifier') .mx("priority=#{base_priority}")
q('arg', '#identifier : #type') .mx("priority=#{base_priority}")
q('arg', '#identifier = #rvalue') .mx("priority=#{base_priority}")
q('type', '#identifier') .mx("priority=#{base_priority}")
# LATER array<T> support
q('function_body', '#stmt') .mx("priority=#{base_priority} ult=func_decl_return ti=pass")
q('function_body', '#block') .mx("priority=#{base_priority} ult=deep ti=pass")
# ###################################################################################################
# block
# ###################################################################################################
q('block', '#indent #stmt_plus #dedent') .mx("priority=#{base_priority} ult=block ti=block")
q('stmt_plus', '#stmt') .mx("priority=#{base_priority} ult=deep ti=pass")
q('stmt_plus', '#stmt_plus #eol #stmt') .mx("priority=#{base_priority} ult=deep ti=stmt_plus_last eol_pass=1")
# ###################################################################################################
# macro-block
# ###################################################################################################
q('rvalue', '#identifier #rvalue? #block') .mx("priority=#{base_priority} ult=macro_block ti=macro_stub")
# ###################################################################################################
q('stmt', '#rvalue') .mx("ult=deep ti=pass")
q('stmt', '#stmt #comment') .mx("ult=deep ti=pass")
q('stmt', '#comment') .mx("ult=deep ti=skip")
q('stmt', '#empty') .mx("ult=value ti=skip")
q('stmt', '__test_untranslated') .mx("ti=skip") # FOR test purposes only
# Walk two parse trees in lock-step and report (via perr) the first point of
# divergence: rule, raw value, hash_key, full mx_hash, then child counts,
# finally recursing into matching children. Used to debug ambiguous parses.
show_diff = (a,b)->
  ### !pragma coverage-skip-block ###
  unless a.rule == b.rule
    perr "RULE mismatch"
    perr "a="
    perr a.rule
    perr "b="
    perr b.rule
    return
  unless a.value == b.value
    perr "a=#{a.value}"
    perr "b=#{b.value}"
    return
  unless a.mx_hash.hash_key == b.mx_hash.hash_key
    perr "a.hash_key = #{a.mx_hash.hash_key}"
    perr "b.hash_key = #{b.mx_hash.hash_key}"
    return
  mx_a = JSON.stringify a.mx_hash
  mx_b = JSON.stringify b.mx_hash
  unless mx_a == mx_b
    perr "a.mx_hash = #{mx_a}"
    perr "b.mx_hash = #{mx_b}"
    return
  unless a.value_array.length == b.value_array.length
    perr "list length mismatch #{a.value_array.length} != #{b.value_array.length}"
    perr "a=#{a.value_array.map((t)->t.value).join ','}"
    perr "b=#{b.value_array.map((t)->t.value).join ','}"
    return
  show_diff child, b.value_array[idx] for child, idx in a.value_array
  return
# Run the grammar over tokenizer output. Throws unless exactly one parse
# combination is found; on ambiguity, diffs the first two candidates first.
@_parse = (tok_res, opt={})->
  gram_res = g.go tok_res,
    expected_token : 'stmt_plus'
    mode_full : opt.mode_full or false
  count = gram_res.length
  if count == 0
    throw new Error "Parsing error. No proper combination found"
  unless count == 1
    [first, second] = gram_res
    show_diff first, second
    ### !pragma coverage-skip-block ###
    throw new Error "Parsing error. More than one proper combination found #{count}"
  gram_res
# Node-style callback facade over the synchronous _parse:
# on_end(err) on failure, on_end(null, gram_res) on success.
@parse = (tok_res, opt, on_end)->
  result = null
  try
    result = module._parse tok_res, opt
  catch err
    return on_end err
  on_end null, result
| 7784 | require 'fy'
{Gram} = require 'gram2'
module = @
# ###################################################################################################
# specific
# ###################################################################################################
# API should be async by default in case we make some optimizations in future
g = new Gram
{_tokenizer} = require './tokenizer'
do ()->
for v in _tokenizer.parser_list
g.extra_hash_key_list.push v.name
q = (a, b)->g.rule a,b
# ###################################################################################################
# 1-position tokens/const
# ###################################################################################################
base_priority = -9000
q('lvalue', '#identifier') .mx("priority=#{base_priority} tail_space=$1.tail_space ult=value ti=id")
q('rvalue', '#lvalue') .mx("priority=#{base_priority} tail_space=$1.tail_space ult=deep ti=pass")
q('num_const', '#decimal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#octal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#hexadecimal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#binary_literal') .mx("ult=value ti=const type=int")
q('num_const', '#float_literal') .mx("ult=value ti=const type=float")
q('const', '#num_const') .mx("ult=deep ti=pass")
q('str_const', '#string_literal_singleq') .mx("ult=string_singleq ti=const type=string")
q('str_const', '#block_string_literal_singleq') .mx("ult=block_string_singleq ti=const type=string")
q('str_const', '#string_literal_doubleq') .mx("ult=string_doubleq ti=const type=string")
q('str_const', '#block_string_literal_doubleq') .mx("ult=block_string_doubleq ti=const type=string")
q('const', '#str_const') .mx("ult=deep ti=pass")
q('rvalue','#const') .mx("priority=#{base_priority} ult=deep ti=pass")
q('lvalue','@') .mx("priority=#{base_priority} ult=value ti=this block_assign=1")
q('lvalue','@ #identifier') .mx("priority=#{base_priority} ult=value")
# ###################################################################################################
# string interpolation
# ###################################################################################################
q('st1_start', '#inline_string_template_start') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st3_start', '#block_string_template_start') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st_mid', '#string_template_mid') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st1_end', '#inline_string_template_end') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st3_end', '#block_string_template_end') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st1_start', '#st1_start #st_mid') .mx("ult=string_interpolation_put_together_m1 ti=string_inter_pass")
q('st3_start', '#st3_start #st_mid') .mx("ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st1_start #st1_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st3_start #st3_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('st1_start', '#st1_start #rvalue #st_mid') .mx("ult=string_interpolation_put_together_m1 ti=string_inter_pass")
q('st3_start', '#st3_start #rvalue #st_mid') .mx("ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st1_start #rvalue #st1_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st3_start #rvalue #st3_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
# ###################################################################################################
# regexp
# ###################################################################################################
q('regexp', '#regexp_literal') .mx("ult=value ti=const type=regexp")
q('regexp', '#here_regexp_literal') .mx("ult=block_regexp ti=const type=regexp")
q('rvalue', '#regexp') .mx("ult=deep ti=pass")
# ###################################################################################################
# regexp interpolation
# ###################################################################################################
q('rextem_start', '#regexp_template_start') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_mid', '#regexp_template_mid') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_end', '#regexp_template_end') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_start', '#rextem_start #rvalue #rextem_mid') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#rextem_start #rvalue #rextem_end') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rextem_start', '#rextem_start #rextem_mid') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#rextem_start #rextem_end') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
# ###################################################################################################
# operators define
# ###################################################################################################
q('pre_op', '!') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', 'not') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', '~') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', '-') .mx('priority=1') .strict('$1.hash_key==unary_operator !$1.tail_space')
q('pre_op', '+') .mx('priority=1') .strict('$1.hash_key==unary_operator !$1.tail_space')
q('pre_op', 'typeof') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', 'void') .mx('priority=15') .strict('$1.hash_key==unary_operator')
q('pre_op', 'new') .mx('priority=15') .strict('$1.hash_key==unary_operator')
q('pre_op', 'delete') .mx('priority=15') .strict('$1.hash_key==unary_operator')
# ++ -- pre_op is banned.
q('post_op', '++') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('post_op', '--') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('post_op', '[QUESTION]') .mx('priority=1') #.strict('$1.hash_key==unary_operator')
# https://developer.mozilla.org/ru/docs/Web/JavaScript/Reference/Operators/Operator_Precedence
# TODO all ops
pipe_priority = 100
q('bin_op', '//|%%') .mx('priority=4 right_assoc=1') .strict('$1.hash_key==<KEY>_operator')
q('bin_op', '**') .mx('priority=4 left_assoc=1') .strict('$1.hash_key==binary_operator') # because JS
q('bin_op', '*|/|%') .mx('priority=5 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '+|-') .mx('priority=6 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '<<|>>|>>>') .mx('priority=7 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', 'instanceof') .mx('priority=8 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '<|<=|>|>=') .mx('priority=9') .strict('$1.hash_key==binary_operator') # NOTE NOT associative, because chained comparison
q('bin_op', '!=|==') .mx('priority=9 right_assoc=1') .strict('$1.hash_key==binary_operator') # NOTE == <= has same priority
# WARNING a == b < c is bad style. So all fuckups are yours
q('bin_op', '&&|and|or|xor|[PIPE][PIPE]') .mx('priority=10 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('assign_bin_op', '=|+=|-=|*=|/=|%=|<<=|>>=|>>>=|**=|//=|%%=|and=|or=|xor=|[QUESTION]=').mx('priority=3') .strict('$1.hash_key==binary_operator')
# ###################################################################################################
# operators constructions
# ###################################################################################################
# PIPE special
q('bin_op', '#multipipe') .mx("priority=#{pipe_priority} right_assoc=1") # возможно стоит это сделать отдельной конструкцией языка дабы проверять всё более тсчательно
q('multipipe', '[PIPE] #multipipe?')
# NOTE need ~same rule for lvalue ???
q('rvalue', '( #rvalue )') .mx("priority=#{base_priority} ult=bracket ti=bracket")
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority<#bin_op.priority !#rvalue[1].func_decl')
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority==#bin_op.priority !#rvalue[1].func_decl #bin_op.left_assoc')
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority==#bin_op.priority #rvalue[2].priority<#bin_op.priority !#rvalue[1].func_decl #bin_op.right_assoc')
# BUG in gram2
# # indent set
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority<#bin_op.priority')
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority==#bin_op.priority #bin_op.left_assoc')
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority==#bin_op.priority #rvalue[2].priority<#bin_op.priority #bin_op.right_assoc')
# indent+pipe
q('pre_pipe_rvalue', '#multipipe #rvalue') #.strict("#rvalue.priority<#{pipe_priority}")
q('pre_pipe_rvalue', '#pre_pipe_rvalue #eol #multipipe #rvalue') #.strict("#rvalue.priority<#{pipe_priority}")
q('rvalue', '#rvalue #multipipe #indent #pre_pipe_rvalue #dedent').mx("priority=#{pipe_priority}") .strict("#rvalue[1].priority<=#{pipe_priority}")
# assign
q('rvalue', '#lvalue #assign_bin_op #rvalue') .mx('priority=#assign_bin_op.priority ult=bin_op ti=assign_bin_op func_decl=#lvalue.func_decl').strict('#lvalue.priority<#assign_bin_op.priority #rvalue.priority<=#assign_bin_op.priority !#lvalue.func_decl !#lvalue.block_assign')
q('rvalue', '#pre_op #rvalue') .mx('priority=#pre_op.priority ult=pre_op ti=pre_op') .strict('#rvalue[1].priority<=#pre_op.priority')
q('rvalue', '#rvalue #post_op') .mx('priority=#post_op.priority ult=post_op ti=post_op').strict('#rvalue[1].priority<#post_op.priority !#rvalue.tail_space') # a++ ++ is not allowed
# ###################################################################################################
# ternary
# ###################################################################################################
q('rvalue', '#rvalue [QUESTION] #rvalue : #rvalue') .mx("priority=#{base_priority} ult=ternary delimiter='[SPACE]' ti=ternary")
# ###################################################################################################
# array
# ###################################################################################################
q('comma_rvalue', '#rvalue') .mx("ult=deep")
# q('comma_rvalue', '#eol #comma_rvalue') .mx("ult=deep") # NOTE eol in back will not work. Gram bug
q('comma_rvalue', '#comma_rvalue #eol #rvalue') .mx("ult=deep delimiter=','")
q('comma_rvalue', '#comma_rvalue #eol? , #eol? #rvalue').mx("ult=deep")
q('array', '[ #eol? ]') .mx("priority=#{base_priority} ult=deep")
q('array', '[ #eol? #comma_rvalue #eol? ]') .mx("priority=#{base_priority} ult=deep")
q('array', '[ #indent #comma_rvalue? #dedent ]') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#array') .mx("priority=#{base_priority} ult=deep ti=array")
# NOTE lvalue array come later
q('array', '[ #num_const .. #num_const ]') .mx("priority=#{base_priority} ult=num_array")
# ###################################################################################################
# hash
# ###################################################################################################
# hash with brackets
q('pair', '#identifier : #rvalue') .mx("ult=hash_pair_simple")
q('pair', '#const : #rvalue') .mx("ult=deep")
q('pair', '( #rvalue ) : #rvalue') .mx("ult=hash_pair_eval")
q('pair', '#identifier') .mx("ult=hash_pair_auto auto=1")
q('pair_comma_rvalue', '#pair') .mx("ult=deep")
q('pair_comma_rvalue', '#pair_comma_rvalue #eol #pair').mx("ult=deep delimiter=','")
q('pair_comma_rvalue', '#pair_comma_rvalue #eol? , #eol? #pair').mx("ult=deep")
q('hash', '{ #eol? }') .mx("priority=#{base_priority} ult=deep")
q('hash', '{ #eol? #pair_comma_rvalue #eol? }') .mx("priority=#{base_priority} ult=deep")
q('hash', '{ #indent #pair_comma_rvalue? #dedent }') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#hash') .mx("priority=#{base_priority} ult=deep ti=hash")
q('BL_pair_comma_rvalue', '#pair') .mx("ult=deep") .strict("!#pair.auto")
q('BL_pair_comma_rvalue', '#eol #pair') .mx("ult=deep") .strict("!#pair.auto")
q('BL_pair_comma_rvalue', '#BL_pair_comma_rvalue , #pair').mx("ult=deep") .strict("!#pair.auto")
q('bracket_less_hash', '#BL_pair_comma_rvalue') .mx("priority=#{base_priority} ult=deep")
q('bracket_less_hash', '#indent #BL_pair_comma_rvalue #dedent') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#bracket_less_hash') .mx("priority=#{base_priority} ult=hash_wrap ti=hash")
# LATER bracket-less hash
# fuckup sample
# a a:b,c:d
# a({a:b,c:d})
# a({a:b},{c:d})
# ###################################################################################################
# access
# ###################################################################################################
# [] access
q('lvalue', '#lvalue [ #rvalue ]') .mx("priority=#{base_priority} ult=array_access ti=array_access")
# . access
q('lvalue', '#lvalue . #identifier') .mx("priority=#{base_priority} ult=field_access ti=id_access")
# opencl-like access
# proper
q('lvalue', '#lvalue . #decimal_literal') .mx("priority=#{base_priority} ult=opencl_access ti=opencl_access")
q('lvalue', '#lvalue . #octal_literal') .mx("priority=#{base_priority} ult=opencl_access ti=opencl_access")
# hack for a.0123 float_enabled
# q('lvalue', '#lvalue #float_literal') .mx("priority=#{base_priority}") .strict('#lvalue.tail_space=0 #float_literal[0:0]="."')
# ###################################################################################################
# function call
# ###################################################################################################
q('rvalue', '#rvalue ( #comma_rvalue? #eol? )') .mx("priority=#{base_priority} ult=func_call ti=func_call").strict('!#rvalue.func_decl')
# ###################################################################################################
# function decl
# ###################################################################################################
q('rvalue', '-> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '=> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) -> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) => #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) : #type -> #function_body?').mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) : #type => #function_body?').mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('arg_list', '#arg') .mx("priority=#{base_priority}")
q('arg_list', '#arg_list , #arg') .mx("priority=#{base_priority}")
q('arg', '#identifier') .mx("priority=#{base_priority}")
q('arg', '#identifier : #type') .mx("priority=#{base_priority}")
q('arg', '#identifier = #rvalue') .mx("priority=#{base_priority}")
q('type', '#identifier') .mx("priority=#{base_priority}")
# LATER array<T> support
q('function_body', '#stmt') .mx("priority=#{base_priority} ult=func_decl_return ti=pass")
q('function_body', '#block') .mx("priority=#{base_priority} ult=deep ti=pass")
# ###################################################################################################
# block
# ###################################################################################################
q('block', '#indent #stmt_plus #dedent') .mx("priority=#{base_priority} ult=block ti=block")
q('stmt_plus', '#stmt') .mx("priority=#{base_priority} ult=deep ti=pass")
q('stmt_plus', '#stmt_plus #eol #stmt') .mx("priority=#{base_priority} ult=deep ti=stmt_plus_last eol_pass=1")
# ###################################################################################################
# macro-block
# ###################################################################################################
q('rvalue', '#identifier #rvalue? #block') .mx("priority=#{base_priority} ult=macro_block ti=macro_stub")
# ###################################################################################################
q('stmt', '#rvalue') .mx("ult=deep ti=pass")
q('stmt', '#stmt #comment') .mx("ult=deep ti=pass")
q('stmt', '#comment') .mx("ult=deep ti=skip")
q('stmt', '#empty') .mx("ult=value ti=skip")
q('stmt', '__test_untranslated') .mx("ti=skip") # FOR test purposes only
show_diff = (a,b)->
### !pragma coverage-skip-block ###
if a.rule != b.rule
perr "RULE mismatch"
perr "a="
perr a.rule
perr "b="
perr b.rule
return
if a.value != b.value
perr "a=#{a.value}"
perr "b=#{b.value}"
return
if a.mx_hash.hash_key != b.mx_hash.hash_key
perr "a.hash_key = #{a.mx_hash.hash_key}"
perr "b.hash_key = #{b.mx_hash.hash_key}"
return
js_a = JSON.stringify a.mx_hash
js_b = JSON.stringify b.mx_hash
if js_a != js_b
perr "a.mx_hash = #{js_a}"
perr "b.mx_hash = #{js_b}"
return
if a.value_array.length != b.value_array.length
perr "list length mismatch #{a.value_array.length} != #{b.value_array.length}"
perr "a=#{a.value_array.map((t)->t.value).join ','}"
perr "b=#{b.value_array.map((t)->t.value).join ','}"
return
for i in [0 ... a.value_array.length]
show_diff a.value_array[i], b.value_array[i]
return
@_parse = (tok_res, opt={})->
gram_res = g.go tok_res,
expected_token : 'stmt_plus'
mode_full : opt.mode_full or false
if gram_res.length == 0
throw new Error "Parsing error. No proper combination found"
if gram_res.length != 1
[a,b] = gram_res
show_diff a,b
### !pragma coverage-skip-block ###
throw new Error "Parsing error. More than one proper combination found #{gram_res.length}"
gram_res
@parse = (tok_res, opt, on_end)->
try
gram_res = module._parse tok_res, opt
catch e
return on_end e
on_end null, gram_res
| true | require 'fy'
{Gram} = require 'gram2'
module = @
# ###################################################################################################
# specific
# ###################################################################################################
# API should be async by default in case we make some optimizations in future
g = new Gram
{_tokenizer} = require './tokenizer'
do ()->
for v in _tokenizer.parser_list
g.extra_hash_key_list.push v.name
q = (a, b)->g.rule a,b
# ###################################################################################################
# 1-position tokens/const
# ###################################################################################################
base_priority = -9000
q('lvalue', '#identifier') .mx("priority=#{base_priority} tail_space=$1.tail_space ult=value ti=id")
q('rvalue', '#lvalue') .mx("priority=#{base_priority} tail_space=$1.tail_space ult=deep ti=pass")
q('num_const', '#decimal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#octal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#hexadecimal_literal') .mx("ult=value ti=const type=int")
q('num_const', '#binary_literal') .mx("ult=value ti=const type=int")
q('num_const', '#float_literal') .mx("ult=value ti=const type=float")
q('const', '#num_const') .mx("ult=deep ti=pass")
q('str_const', '#string_literal_singleq') .mx("ult=string_singleq ti=const type=string")
q('str_const', '#block_string_literal_singleq') .mx("ult=block_string_singleq ti=const type=string")
q('str_const', '#string_literal_doubleq') .mx("ult=string_doubleq ti=const type=string")
q('str_const', '#block_string_literal_doubleq') .mx("ult=block_string_doubleq ti=const type=string")
q('const', '#str_const') .mx("ult=deep ti=pass")
q('rvalue','#const') .mx("priority=#{base_priority} ult=deep ti=pass")
q('lvalue','@') .mx("priority=#{base_priority} ult=value ti=this block_assign=1")
q('lvalue','@ #identifier') .mx("priority=#{base_priority} ult=value")
# ###################################################################################################
# string interpolation
# ###################################################################################################
q('st1_start', '#inline_string_template_start') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st3_start', '#block_string_template_start') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st_mid', '#string_template_mid') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st1_end', '#inline_string_template_end') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st3_end', '#block_string_template_end') .mx("ult=string_interpolation_prepare ti=string_inter_pass")
q('st1_start', '#st1_start #st_mid') .mx("ult=string_interpolation_put_together_m1 ti=string_inter_pass")
q('st3_start', '#st3_start #st_mid') .mx("ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st1_start #st1_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st3_start #st3_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('st1_start', '#st1_start #rvalue #st_mid') .mx("ult=string_interpolation_put_together_m1 ti=string_inter_pass")
q('st3_start', '#st3_start #rvalue #st_mid') .mx("ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st1_start #rvalue #st1_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#st3_start #rvalue #st3_end') .mx("priority=#{base_priority} ult=string_interpolation_put_together ti=string_inter_pass")
# ###################################################################################################
# regexp
# ###################################################################################################
q('regexp', '#regexp_literal') .mx("ult=value ti=const type=regexp")
q('regexp', '#here_regexp_literal') .mx("ult=block_regexp ti=const type=regexp")
q('rvalue', '#regexp') .mx("ult=deep ti=pass")
# ###################################################################################################
# regexp interpolation
# ###################################################################################################
q('rextem_start', '#regexp_template_start') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_mid', '#regexp_template_mid') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_end', '#regexp_template_end') .mx("ult=regexp_interpolation_prepare ti=string_inter_pass")
q('rextem_start', '#rextem_start #rvalue #rextem_mid') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#rextem_start #rvalue #rextem_end') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rextem_start', '#rextem_start #rextem_mid') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
q('rvalue', '#rextem_start #rextem_end') .mx("ult=regexp_interpolation_put_together ti=string_inter_pass")
# ###################################################################################################
# operators define
# ###################################################################################################
q('pre_op', '!') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', 'not') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', '~') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', '-') .mx('priority=1') .strict('$1.hash_key==unary_operator !$1.tail_space')
q('pre_op', '+') .mx('priority=1') .strict('$1.hash_key==unary_operator !$1.tail_space')
q('pre_op', 'typeof') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('pre_op', 'void') .mx('priority=15') .strict('$1.hash_key==unary_operator')
q('pre_op', 'new') .mx('priority=15') .strict('$1.hash_key==unary_operator')
q('pre_op', 'delete') .mx('priority=15') .strict('$1.hash_key==unary_operator')
# ++ -- pre_op is banned.
q('post_op', '++') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('post_op', '--') .mx('priority=1') .strict('$1.hash_key==unary_operator')
q('post_op', '[QUESTION]') .mx('priority=1') #.strict('$1.hash_key==unary_operator')
# https://developer.mozilla.org/ru/docs/Web/JavaScript/Reference/Operators/Operator_Precedence
# TODO all ops
pipe_priority = 100
q('bin_op', '//|%%') .mx('priority=4 right_assoc=1') .strict('$1.hash_key==PI:KEY:<KEY>END_PI_operator')
q('bin_op', '**') .mx('priority=4 left_assoc=1') .strict('$1.hash_key==binary_operator') # because JS
q('bin_op', '*|/|%') .mx('priority=5 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '+|-') .mx('priority=6 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '<<|>>|>>>') .mx('priority=7 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', 'instanceof') .mx('priority=8 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('bin_op', '<|<=|>|>=') .mx('priority=9') .strict('$1.hash_key==binary_operator') # NOTE NOT associative, because chained comparison
q('bin_op', '!=|==') .mx('priority=9 right_assoc=1') .strict('$1.hash_key==binary_operator') # NOTE == <= has same priority
# WARNING a == b < c is bad style. So all fuckups are yours
q('bin_op', '&&|and|or|xor|[PIPE][PIPE]') .mx('priority=10 right_assoc=1') .strict('$1.hash_key==binary_operator')
q('assign_bin_op', '=|+=|-=|*=|/=|%=|<<=|>>=|>>>=|**=|//=|%%=|and=|or=|xor=|[QUESTION]=').mx('priority=3') .strict('$1.hash_key==binary_operator')
# ###################################################################################################
# operators constructions
# ###################################################################################################
# PIPE special
q('bin_op', '#multipipe') .mx("priority=#{pipe_priority} right_assoc=1") # возможно стоит это сделать отдельной конструкцией языка дабы проверять всё более тсчательно
q('multipipe', '[PIPE] #multipipe?')
# NOTE need ~same rule for lvalue ???
q('rvalue', '( #rvalue )') .mx("priority=#{base_priority} ult=bracket ti=bracket")
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority<#bin_op.priority !#rvalue[1].func_decl')
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority==#bin_op.priority !#rvalue[1].func_decl #bin_op.left_assoc')
q('rvalue', '#rvalue #bin_op #rvalue') .mx('priority=#bin_op.priority ult=bin_op ti=bin_op func_decl=#rvalue[1].func_decl') .strict('#rvalue[1].priority==#bin_op.priority #rvalue[2].priority<#bin_op.priority !#rvalue[1].func_decl #bin_op.right_assoc')
# BUG in gram2
# # indent set
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority<#bin_op.priority')
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority<#bin_op.priority #rvalue[2].priority==#bin_op.priority #bin_op.left_assoc')
# q('rvalue', '#rvalue #bin_op #indent #rvalue #dedent') .mx('priority=#bin_op.priority ti=bin_op') .strict('#rvalue[1].priority==#bin_op.priority #rvalue[2].priority<#bin_op.priority #bin_op.right_assoc')
# indent+pipe
q('pre_pipe_rvalue', '#multipipe #rvalue') #.strict("#rvalue.priority<#{pipe_priority}")
q('pre_pipe_rvalue', '#pre_pipe_rvalue #eol #multipipe #rvalue') #.strict("#rvalue.priority<#{pipe_priority}")
q('rvalue', '#rvalue #multipipe #indent #pre_pipe_rvalue #dedent').mx("priority=#{pipe_priority}") .strict("#rvalue[1].priority<=#{pipe_priority}")
# assign
q('rvalue', '#lvalue #assign_bin_op #rvalue') .mx('priority=#assign_bin_op.priority ult=bin_op ti=assign_bin_op func_decl=#lvalue.func_decl').strict('#lvalue.priority<#assign_bin_op.priority #rvalue.priority<=#assign_bin_op.priority !#lvalue.func_decl !#lvalue.block_assign')
q('rvalue', '#pre_op #rvalue') .mx('priority=#pre_op.priority ult=pre_op ti=pre_op') .strict('#rvalue[1].priority<=#pre_op.priority')
q('rvalue', '#rvalue #post_op') .mx('priority=#post_op.priority ult=post_op ti=post_op').strict('#rvalue[1].priority<#post_op.priority !#rvalue.tail_space') # a++ ++ is not allowed
# ###################################################################################################
# ternary
# ###################################################################################################
q('rvalue', '#rvalue [QUESTION] #rvalue : #rvalue') .mx("priority=#{base_priority} ult=ternary delimiter='[SPACE]' ti=ternary")
# ###################################################################################################
# array
# ###################################################################################################
q('comma_rvalue', '#rvalue') .mx("ult=deep")
# q('comma_rvalue', '#eol #comma_rvalue') .mx("ult=deep") # NOTE eol in back will not work. Gram bug
q('comma_rvalue', '#comma_rvalue #eol #rvalue') .mx("ult=deep delimiter=','")
q('comma_rvalue', '#comma_rvalue #eol? , #eol? #rvalue').mx("ult=deep")
q('array', '[ #eol? ]') .mx("priority=#{base_priority} ult=deep")
q('array', '[ #eol? #comma_rvalue #eol? ]') .mx("priority=#{base_priority} ult=deep")
q('array', '[ #indent #comma_rvalue? #dedent ]') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#array') .mx("priority=#{base_priority} ult=deep ti=array")
# NOTE lvalue array come later
q('array', '[ #num_const .. #num_const ]') .mx("priority=#{base_priority} ult=num_array")
# ###################################################################################################
# hash
# ###################################################################################################
# hash with brackets
q('pair', '#identifier : #rvalue') .mx("ult=hash_pair_simple")
q('pair', '#const : #rvalue') .mx("ult=deep")
q('pair', '( #rvalue ) : #rvalue') .mx("ult=hash_pair_eval")
q('pair', '#identifier') .mx("ult=hash_pair_auto auto=1")
q('pair_comma_rvalue', '#pair') .mx("ult=deep")
q('pair_comma_rvalue', '#pair_comma_rvalue #eol #pair').mx("ult=deep delimiter=','")
q('pair_comma_rvalue', '#pair_comma_rvalue #eol? , #eol? #pair').mx("ult=deep")
q('hash', '{ #eol? }') .mx("priority=#{base_priority} ult=deep")
q('hash', '{ #eol? #pair_comma_rvalue #eol? }') .mx("priority=#{base_priority} ult=deep")
q('hash', '{ #indent #pair_comma_rvalue? #dedent }') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#hash') .mx("priority=#{base_priority} ult=deep ti=hash")
q('BL_pair_comma_rvalue', '#pair') .mx("ult=deep") .strict("!#pair.auto")
q('BL_pair_comma_rvalue', '#eol #pair') .mx("ult=deep") .strict("!#pair.auto")
q('BL_pair_comma_rvalue', '#BL_pair_comma_rvalue , #pair').mx("ult=deep") .strict("!#pair.auto")
q('bracket_less_hash', '#BL_pair_comma_rvalue') .mx("priority=#{base_priority} ult=deep")
q('bracket_less_hash', '#indent #BL_pair_comma_rvalue #dedent') .mx("priority=#{base_priority} ult=deep")
q('rvalue', '#bracket_less_hash') .mx("priority=#{base_priority} ult=hash_wrap ti=hash")
# LATER bracket-less hash
# fuckup sample
# a a:b,c:d
# a({a:b,c:d})
# a({a:b},{c:d})
# ###################################################################################################
# access
# ###################################################################################################
# [] access
q('lvalue', '#lvalue [ #rvalue ]') .mx("priority=#{base_priority} ult=array_access ti=array_access")
# . access
q('lvalue', '#lvalue . #identifier') .mx("priority=#{base_priority} ult=field_access ti=id_access")
# opencl-like access
# proper
q('lvalue', '#lvalue . #decimal_literal') .mx("priority=#{base_priority} ult=opencl_access ti=opencl_access")
q('lvalue', '#lvalue . #octal_literal') .mx("priority=#{base_priority} ult=opencl_access ti=opencl_access")
# hack for a.0123 float_enabled
# q('lvalue', '#lvalue #float_literal') .mx("priority=#{base_priority}") .strict('#lvalue.tail_space=0 #float_literal[0:0]="."')
# ###################################################################################################
# function call
# ###################################################################################################
q('rvalue', '#rvalue ( #comma_rvalue? #eol? )') .mx("priority=#{base_priority} ult=func_call ti=func_call").strict('!#rvalue.func_decl')
# ###################################################################################################
# function decl
# ###################################################################################################
q('rvalue', '-> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '=> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) -> #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) => #function_body?') .mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) : #type -> #function_body?').mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('rvalue', '( #arg_list? ) : #type => #function_body?').mx("priority=#{base_priority} ult=func_decl ti=func_decl func_decl=1")
q('arg_list', '#arg') .mx("priority=#{base_priority}")
q('arg_list', '#arg_list , #arg') .mx("priority=#{base_priority}")
q('arg', '#identifier') .mx("priority=#{base_priority}")
q('arg', '#identifier : #type') .mx("priority=#{base_priority}")
q('arg', '#identifier = #rvalue') .mx("priority=#{base_priority}")
q('type', '#identifier') .mx("priority=#{base_priority}")
# LATER array<T> support
q('function_body', '#stmt') .mx("priority=#{base_priority} ult=func_decl_return ti=pass")
q('function_body', '#block') .mx("priority=#{base_priority} ult=deep ti=pass")
# ###################################################################################################
# block
# ###################################################################################################
q('block', '#indent #stmt_plus #dedent') .mx("priority=#{base_priority} ult=block ti=block")
q('stmt_plus', '#stmt') .mx("priority=#{base_priority} ult=deep ti=pass")
q('stmt_plus', '#stmt_plus #eol #stmt') .mx("priority=#{base_priority} ult=deep ti=stmt_plus_last eol_pass=1")
# ###################################################################################################
# macro-block
# ###################################################################################################
q('rvalue', '#identifier #rvalue? #block') .mx("priority=#{base_priority} ult=macro_block ti=macro_stub")
# ###################################################################################################
q('stmt', '#rvalue') .mx("ult=deep ti=pass")
q('stmt', '#stmt #comment') .mx("ult=deep ti=pass")
q('stmt', '#comment') .mx("ult=deep ti=skip")
q('stmt', '#empty') .mx("ult=value ti=skip")
q('stmt', '__test_untranslated') .mx("ti=skip") # FOR test purposes only
show_diff = (a,b)->
### !pragma coverage-skip-block ###
if a.rule != b.rule
perr "RULE mismatch"
perr "a="
perr a.rule
perr "b="
perr b.rule
return
if a.value != b.value
perr "a=#{a.value}"
perr "b=#{b.value}"
return
if a.mx_hash.hash_key != b.mx_hash.hash_key
perr "a.hash_key = #{a.mx_hash.hash_key}"
perr "b.hash_key = #{b.mx_hash.hash_key}"
return
js_a = JSON.stringify a.mx_hash
js_b = JSON.stringify b.mx_hash
if js_a != js_b
perr "a.mx_hash = #{js_a}"
perr "b.mx_hash = #{js_b}"
return
if a.value_array.length != b.value_array.length
perr "list length mismatch #{a.value_array.length} != #{b.value_array.length}"
perr "a=#{a.value_array.map((t)->t.value).join ','}"
perr "b=#{b.value_array.map((t)->t.value).join ','}"
return
for i in [0 ... a.value_array.length]
show_diff a.value_array[i], b.value_array[i]
return
@_parse = (tok_res, opt={})->
gram_res = g.go tok_res,
expected_token : 'stmt_plus'
mode_full : opt.mode_full or false
if gram_res.length == 0
throw new Error "Parsing error. No proper combination found"
if gram_res.length != 1
[a,b] = gram_res
show_diff a,b
### !pragma coverage-skip-block ###
throw new Error "Parsing error. More than one proper combination found #{gram_res.length}"
gram_res
@parse = (tok_res, opt, on_end)->
try
gram_res = module._parse tok_res, opt
catch e
return on_end e
on_end null, gram_res
|
[
{
"context": "rts.config = \n app :\n name : 'capn'\n id : '39eee3039930039aaa39394930e90aa05'\n",
"end": 61,
"score": 0.44976431131362915,
"start": 55,
"tag": "KEY",
"value": "39eee3"
},
{
"context": "nfig = \n app :\n name : 'capn'\n id : '39eee3039930039aaa39394930e9... | src/config.iced | AngelKey/Angelkey.capn | 0 |
exports.config =
app :
name : 'capn'
id : '39eee3039930039aaa39394930e90aa05'
| 224044 |
exports.config =
app :
name : 'capn'
id : '<KEY> <PASSWORD>9<PASSWORD> <KEY> <PASSWORD> <KEY> <PASSWORD> <KEY> <PASSWORD> <KEY> <PASSWORD> <PASSWORD> <PASSWORD> <KEY> <PASSWORD>'
| true |
exports.config =
app :
name : 'capn'
id : 'PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI9PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<PASSWORD>END_PI PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI'
|
[
{
"context": "key: 'image-macro'\n\npatterns: [\n\n # Matches an image or icon inlin",
"end": 17,
"score": 0.9904477000236511,
"start": 6,
"tag": "KEY",
"value": "image-macro"
}
] | grammars/repositories/inlines/image-macro-grammar.cson | andrewcarver/atom-language-asciidoc | 45 | key: 'image-macro'
patterns: [
# Matches an image or icon inline macro.
#
# Examples
#
# image:filename.png[Alt Text]
# image:http://example.com/images/filename.png[Alt Text]
# image:filename.png[More [Alt\] Text] (alt text becomes "More [Alt] Text")
# icon:github[large]
#
name: 'markup.macro.image.asciidoc'
match: '(?<!\\\\)(image|icon):([^:\\[][^\\[]*)\\[((?:\\\\\\]|[^\\]])*?)\\]'
captures:
1: name: 'entity.name.function.asciidoc'
2: name: 'markup.link.asciidoc'
3: name: 'string.unquoted.asciidoc'
]
| 190839 | key: '<KEY>'
patterns: [
# Matches an image or icon inline macro.
#
# Examples
#
# image:filename.png[Alt Text]
# image:http://example.com/images/filename.png[Alt Text]
# image:filename.png[More [Alt\] Text] (alt text becomes "More [Alt] Text")
# icon:github[large]
#
name: 'markup.macro.image.asciidoc'
match: '(?<!\\\\)(image|icon):([^:\\[][^\\[]*)\\[((?:\\\\\\]|[^\\]])*?)\\]'
captures:
1: name: 'entity.name.function.asciidoc'
2: name: 'markup.link.asciidoc'
3: name: 'string.unquoted.asciidoc'
]
| true | key: 'PI:KEY:<KEY>END_PI'
patterns: [
# Matches an image or icon inline macro.
#
# Examples
#
# image:filename.png[Alt Text]
# image:http://example.com/images/filename.png[Alt Text]
# image:filename.png[More [Alt\] Text] (alt text becomes "More [Alt] Text")
# icon:github[large]
#
name: 'markup.macro.image.asciidoc'
match: '(?<!\\\\)(image|icon):([^:\\[][^\\[]*)\\[((?:\\\\\\]|[^\\]])*?)\\]'
captures:
1: name: 'entity.name.function.asciidoc'
2: name: 'markup.link.asciidoc'
3: name: 'string.unquoted.asciidoc'
]
|
[
{
"context": "sion! Check us out on Github: https://github.com/Pagedraw/pagedraw\"}>\n {@props.children}\n ",
"end": 1175,
"score": 0.9996277093887329,
"start": 1167,
"tag": "USERNAME",
"value": "Pagedraw"
},
{
"context": "blem persists, please contact the Pagedraw ... | src/pagedraw-requires.cjsx | caffed/pagedraw | 3,213 | _l = require 'lodash'
React = require 'react'
createReactClass = require 'create-react-class'
CodeShower = require './frontend/code-shower'
queryString = require 'query-string'
{PdButtonOne, Modal, Tabs, Tab} = require './editor/component-lib'
{track_error, assert} = require './util'
analytics = require './frontend/analytics'
Dropzone = require('react-dropzone').default
SketchImporterView = require './pagedraw/sketch-importer'
modal = require './frontend/modal'
{server} = require './editor/server'
FormControl = require './frontend/form-control'
{figma_import} = require './figma-import'
PagedrawnPricingCards = require './pagedraw/pricingcards'
config = require './config'
exports.SketchDropzone = createReactClass
componentWillMount: ->
@current_state = 'none' # | 'loading' | 'error'
@error_message = null # a string, if @current_state == 'error'
@import_canceler = null # a function, if @current_state == 'loading'
render: ->
if config.disableFigmaSketchImport
return <div onClick={-> alert "Sketch importing is only available in the Open Source version! Check us out on Github: https://github.com/Pagedraw/pagedraw"}>
{@props.children}
</div>
<div>
<div className="bootstrap">
<div ref="modal_container" />
</div>
{
# we do the modal_container shenanigans for bootstrap css...
switch @current_state
when 'none'
# no modal
<Modal show={false} container={@refs.modal_container} />
when 'loading'
<Modal show container={@refs.modal_container}>
<Modal.Header>
<Modal.Title>Importing Sketch...</Modal.Title>
</Modal.Header>
<Modal.Body>
{### this is just a loading spinner... ###}
<SketchImporterView importing={yes} />
</Modal.Body>
<Modal.Footer>
<div style={textAlign: 'left'}>
<PdButtonOne onClick={@cancelImport}>Cancel</PdButtonOne>
</div>
</Modal.Footer>
</Modal>
when 'error'
<Modal show container={@refs.modal_container} onHide={@errorOkay}>
<Modal.Header>
<Modal.Title>Error</Modal.Title>
</Modal.Header>
<Modal.Body>
<SketchImporterView error={@error_message ? ""} />
</Modal.Body>
<Modal.Footer>
<PdButtonOne type="primary" onClick={@errorOkay}>Okay</PdButtonOne>
</Modal.Footer>
</Modal>
}
<Dropzone onDrop={@handleDrop} style={display: 'flex', flexDirection: 'column'}>
{@props.children}
</Dropzone>
</div>
handleDrop: (files) ->
assert -> files?.length > 0
doc_name = files[0].name
doc_name = doc_name.slice(0, -('.sketch'.length)) if doc_name.endsWith('.sketch')
assert => @current_state == 'none'
@current_state = 'loading'
@forceUpdate()
# use local variable to track cancellation so it's per-run of import
should_cancel = false
@import_canceler = ->
should_cancel = true
server.importFromSketch(files[0], ((doc_json) =>
return if should_cancel
return @showError(@sketchImportErrorMessage, new Error('Returned empty doc')) if Object.keys(doc_json.blocks).length <= 1
server.createNewDoc(@props.app.id, doc_name, @props.app.default_language, _l.cloneDeep(doc_json))
.then ({docRef, docjson}) =>
server.saveLatestSketchImportForDoc(docRef, docjson)
.then =>
window.location = "/pages/#{docRef.page_id}"
.catch (e) =>
@showError(@metaserverUnreachableErrorMessage, e)
), ((err) =>
# Assume any non 500 error comes with a custom responseText
@showError((
switch err.status
when 500 then @sketchImportErrorMessage
when 0 then @sketchServerUnavailableErrorMessage
else err.responseText
), new Error("sketch server error #{err.status}"))
))
showError: (msg, err) ->
assert => @current_state in ['none', 'loading']
track_error(err, msg)
analytics.track("Sketch importer error", {msg, where: 'dashboard'})
@current_state = 'error'
@error_message = msg
@forceUpdate()
cancelImport: ->
assert => @current_state == 'loading'
# do the cancel
@import_canceler?()
@current_state = 'none'
@forceUpdate()
errorOkay: ->
assert => @current_state == 'error'
@current_state = 'none'
@forceUpdate()
sketchImportErrorMessage: """
We weren't able to recognize your upload as a Sketch file.
If this problem persists, please contact the Pagedraw team at team@pagedraw.io
"""
metaserverUnreachableErrorMessage: """
Unable to create a new doc.
If this problem persists, please contact us at team@pagedraw.io
"""
sketchServerUnavailableErrorMessage: """
Couldn't reach the server to do a Sketch import. Please try again.
If this problem persists, please contact the Pagedraw team at team@pagedraw.io
"""
exports.FigmaModal = createReactClass
componentWillMount: ->
@show = false
@import_in_flight = false
@status = 'default' # | 'loading' | 'error'
@figma_url = ""
componentDidMount: ->
if @props.show_figma_modal
@show = true
@forceUpdate()
figma_url_vl: ->
value: @figma_url
requestChange: (newVal) => @figma_url = newVal; @forceUpdate()
render: ->
if config.disableFigmaSketchImport
return <div onClick={-> alert "Figma importing is only available in the Open Source version! Check us out on Github: https://github.com/Pagedraw/pagedraw"}>
{@props.children}
</div>
if not @props.figma_access_token
<a href="/oauth/figma_redirect?app_id=#{@props.app.id}">
{@props.children}
</a>
else
<div>
<form onSubmit={(evt) =>
evt.preventDefault()
figma_import(@figma_url_vl().value, @props.figma_access_token)
.then ({doc_json, fileName}) =>
server.createNewDoc(@props.app.id, fileName, @props.app.default_language, _l.cloneDeep(doc_json))
.then ({docRef, docjson}) =>
server.saveLatestFigmaImportForDoc(docRef, docjson)
.then =>
window.location = "/pages/#{docRef.page_id}"
.catch (e) =>
throw new Error()
.catch (e) =>
@status = "error"
.then =>
@import_in_flight = false
@forceUpdate()
@import_in_flight = true
@status = "loading"
@forceUpdate()
}>
<div className="bootstrap">
<div ref="modal_container" />
</div>
<Modal show={@show} container={@refs.modal_container}>
<Modal.Header>
<Modal.Title>Import from Figma</Modal.Title>
</Modal.Header>
<Modal.Body>
{
if @status == "default"
<div>
<p>Paste the URL of the Figma design you'd like to import</p>
<label htmlFor="figma_url">Figma link</label>
<FormControl tag="input" valueLink={@figma_url_vl()}
name="figma_url" style={width: '100%'}
placeholder="https://figma.com/file/XXXXXXXXXXXXXXXXXXXXXX/Sample-File-Name" />
</div>
else if @status == "loading"
<img style={display: 'block', marginLeft: 'auto', marginRight: 'auto'} src="https://complex-houses.surge.sh/59ec0968-b6e3-4a00-b082-932b7fcf41a5/loading.gif" />
else
<p style={color: 'red'}>We weren't able to recognize your upload as a Figma file.
If this problem persists, please contact the Pagedraw team at team@pagedraw.io</p>
}
</Modal.Body>
<Modal.Footer>
{<PdButtonOne onClick={=> @show = false; @status = "default"; @forceUpdate()}>Close</PdButtonOne> if @status in ["default", "error"]}
{<PdButtonOne type="primary" submit disabled={@import_in_flight}>Import</PdButtonOne> if @status == "default"}
</Modal.Footer>
</Modal>
</form>
<div onClick={=> @show = true; @forceUpdate()}>
{@props.children}
</div>
</div>
exports.PricingCardsWrapper = (props) ->
<div style={position: 'relative', flexGrow: '1'}>
<div style={position: 'absolute', top: 0, left: 0}>
<PagedrawnPricingCards />
</div>
</div>
| 25694 | _l = require 'lodash'
React = require 'react'
createReactClass = require 'create-react-class'
CodeShower = require './frontend/code-shower'
queryString = require 'query-string'
{PdButtonOne, Modal, Tabs, Tab} = require './editor/component-lib'
{track_error, assert} = require './util'
analytics = require './frontend/analytics'
Dropzone = require('react-dropzone').default
SketchImporterView = require './pagedraw/sketch-importer'
modal = require './frontend/modal'
{server} = require './editor/server'
FormControl = require './frontend/form-control'
{figma_import} = require './figma-import'
PagedrawnPricingCards = require './pagedraw/pricingcards'
config = require './config'
exports.SketchDropzone = createReactClass
componentWillMount: ->
@current_state = 'none' # | 'loading' | 'error'
@error_message = null # a string, if @current_state == 'error'
@import_canceler = null # a function, if @current_state == 'loading'
render: ->
if config.disableFigmaSketchImport
return <div onClick={-> alert "Sketch importing is only available in the Open Source version! Check us out on Github: https://github.com/Pagedraw/pagedraw"}>
{@props.children}
</div>
<div>
<div className="bootstrap">
<div ref="modal_container" />
</div>
{
# we do the modal_container shenanigans for bootstrap css...
switch @current_state
when 'none'
# no modal
<Modal show={false} container={@refs.modal_container} />
when 'loading'
<Modal show container={@refs.modal_container}>
<Modal.Header>
<Modal.Title>Importing Sketch...</Modal.Title>
</Modal.Header>
<Modal.Body>
{### this is just a loading spinner... ###}
<SketchImporterView importing={yes} />
</Modal.Body>
<Modal.Footer>
<div style={textAlign: 'left'}>
<PdButtonOne onClick={@cancelImport}>Cancel</PdButtonOne>
</div>
</Modal.Footer>
</Modal>
when 'error'
<Modal show container={@refs.modal_container} onHide={@errorOkay}>
<Modal.Header>
<Modal.Title>Error</Modal.Title>
</Modal.Header>
<Modal.Body>
<SketchImporterView error={@error_message ? ""} />
</Modal.Body>
<Modal.Footer>
<PdButtonOne type="primary" onClick={@errorOkay}>Okay</PdButtonOne>
</Modal.Footer>
</Modal>
}
<Dropzone onDrop={@handleDrop} style={display: 'flex', flexDirection: 'column'}>
{@props.children}
</Dropzone>
</div>
handleDrop: (files) ->
assert -> files?.length > 0
doc_name = files[0].name
doc_name = doc_name.slice(0, -('.sketch'.length)) if doc_name.endsWith('.sketch')
assert => @current_state == 'none'
@current_state = 'loading'
@forceUpdate()
# use local variable to track cancellation so it's per-run of import
should_cancel = false
@import_canceler = ->
should_cancel = true
server.importFromSketch(files[0], ((doc_json) =>
return if should_cancel
return @showError(@sketchImportErrorMessage, new Error('Returned empty doc')) if Object.keys(doc_json.blocks).length <= 1
server.createNewDoc(@props.app.id, doc_name, @props.app.default_language, _l.cloneDeep(doc_json))
.then ({docRef, docjson}) =>
server.saveLatestSketchImportForDoc(docRef, docjson)
.then =>
window.location = "/pages/#{docRef.page_id}"
.catch (e) =>
@showError(@metaserverUnreachableErrorMessage, e)
), ((err) =>
# Assume any non 500 error comes with a custom responseText
@showError((
switch err.status
when 500 then @sketchImportErrorMessage
when 0 then @sketchServerUnavailableErrorMessage
else err.responseText
), new Error("sketch server error #{err.status}"))
))
showError: (msg, err) ->
assert => @current_state in ['none', 'loading']
track_error(err, msg)
analytics.track("Sketch importer error", {msg, where: 'dashboard'})
@current_state = 'error'
@error_message = msg
@forceUpdate()
cancelImport: ->
assert => @current_state == 'loading'
# do the cancel
@import_canceler?()
@current_state = 'none'
@forceUpdate()
errorOkay: ->
assert => @current_state == 'error'
@current_state = 'none'
@forceUpdate()
sketchImportErrorMessage: """
We weren't able to recognize your upload as a Sketch file.
If this problem persists, please contact the Pagedraw team at <EMAIL>
"""
metaserverUnreachableErrorMessage: """
Unable to create a new doc.
If this problem persists, please contact us at <EMAIL>
"""
sketchServerUnavailableErrorMessage: """
Couldn't reach the server to do a Sketch import. Please try again.
If this problem persists, please contact the Pagedraw team at <EMAIL>
"""
exports.FigmaModal = createReactClass
componentWillMount: ->
@show = false
@import_in_flight = false
@status = 'default' # | 'loading' | 'error'
@figma_url = ""
componentDidMount: ->
if @props.show_figma_modal
@show = true
@forceUpdate()
figma_url_vl: ->
value: @figma_url
requestChange: (newVal) => @figma_url = newVal; @forceUpdate()
render: ->
if config.disableFigmaSketchImport
return <div onClick={-> alert "Figma importing is only available in the Open Source version! Check us out on Github: https://github.com/Pagedraw/pagedraw"}>
{@props.children}
</div>
if not @props.figma_access_token
<a href="/oauth/figma_redirect?app_id=#{@props.app.id}">
{@props.children}
</a>
else
<div>
<form onSubmit={(evt) =>
evt.preventDefault()
figma_import(@figma_url_vl().value, @props.figma_access_token)
.then ({doc_json, fileName}) =>
server.createNewDoc(@props.app.id, fileName, @props.app.default_language, _l.cloneDeep(doc_json))
.then ({docRef, docjson}) =>
server.saveLatestFigmaImportForDoc(docRef, docjson)
.then =>
window.location = "/pages/#{docRef.page_id}"
.catch (e) =>
throw new Error()
.catch (e) =>
@status = "error"
.then =>
@import_in_flight = false
@forceUpdate()
@import_in_flight = true
@status = "loading"
@forceUpdate()
}>
<div className="bootstrap">
<div ref="modal_container" />
</div>
<Modal show={@show} container={@refs.modal_container}>
<Modal.Header>
<Modal.Title>Import from Figma</Modal.Title>
</Modal.Header>
<Modal.Body>
{
if @status == "default"
<div>
<p>Paste the URL of the Figma design you'd like to import</p>
<label htmlFor="figma_url">Figma link</label>
<FormControl tag="input" valueLink={@figma_url_vl()}
name="figma_url" style={width: '100%'}
placeholder="https://figma.com/file/XXXXXXXXXXXXXXXXXXXXXX/Sample-File-Name" />
</div>
else if @status == "loading"
<img style={display: 'block', marginLeft: 'auto', marginRight: 'auto'} src="https://complex-houses.surge.sh/59ec0968-b6e3-4a00-b082-932b7fcf41a5/loading.gif" />
else
<p style={color: 'red'}>We weren't able to recognize your upload as a Figma file.
If this problem persists, please contact the Pagedraw team at <EMAIL>.io</p>
}
</Modal.Body>
<Modal.Footer>
{<PdButtonOne onClick={=> @show = false; @status = "default"; @forceUpdate()}>Close</PdButtonOne> if @status in ["default", "error"]}
{<PdButtonOne type="primary" submit disabled={@import_in_flight}>Import</PdButtonOne> if @status == "default"}
</Modal.Footer>
</Modal>
</form>
<div onClick={=> @show = true; @forceUpdate()}>
{@props.children}
</div>
</div>
exports.PricingCardsWrapper = (props) ->
<div style={position: 'relative', flexGrow: '1'}>
<div style={position: 'absolute', top: 0, left: 0}>
<PagedrawnPricingCards />
</div>
</div>
| true | _l = require 'lodash'
React = require 'react'
createReactClass = require 'create-react-class'
CodeShower = require './frontend/code-shower'
queryString = require 'query-string'
{PdButtonOne, Modal, Tabs, Tab} = require './editor/component-lib'
{track_error, assert} = require './util'
analytics = require './frontend/analytics'
Dropzone = require('react-dropzone').default
SketchImporterView = require './pagedraw/sketch-importer'
modal = require './frontend/modal'
{server} = require './editor/server'
FormControl = require './frontend/form-control'
{figma_import} = require './figma-import'
PagedrawnPricingCards = require './pagedraw/pricingcards'
config = require './config'
exports.SketchDropzone = createReactClass
componentWillMount: ->
@current_state = 'none' # | 'loading' | 'error'
@error_message = null # a string, if @current_state == 'error'
@import_canceler = null # a function, if @current_state == 'loading'
render: ->
if config.disableFigmaSketchImport
return <div onClick={-> alert "Sketch importing is only available in the Open Source version! Check us out on Github: https://github.com/Pagedraw/pagedraw"}>
{@props.children}
</div>
<div>
<div className="bootstrap">
<div ref="modal_container" />
</div>
{
# we do the modal_container shenanigans for bootstrap css...
switch @current_state
when 'none'
# no modal
<Modal show={false} container={@refs.modal_container} />
when 'loading'
<Modal show container={@refs.modal_container}>
<Modal.Header>
<Modal.Title>Importing Sketch...</Modal.Title>
</Modal.Header>
<Modal.Body>
{### this is just a loading spinner... ###}
<SketchImporterView importing={yes} />
</Modal.Body>
<Modal.Footer>
<div style={textAlign: 'left'}>
<PdButtonOne onClick={@cancelImport}>Cancel</PdButtonOne>
</div>
</Modal.Footer>
</Modal>
when 'error'
<Modal show container={@refs.modal_container} onHide={@errorOkay}>
<Modal.Header>
<Modal.Title>Error</Modal.Title>
</Modal.Header>
<Modal.Body>
<SketchImporterView error={@error_message ? ""} />
</Modal.Body>
<Modal.Footer>
<PdButtonOne type="primary" onClick={@errorOkay}>Okay</PdButtonOne>
</Modal.Footer>
</Modal>
}
<Dropzone onDrop={@handleDrop} style={display: 'flex', flexDirection: 'column'}>
{@props.children}
</Dropzone>
</div>
handleDrop: (files) ->
assert -> files?.length > 0
doc_name = files[0].name
doc_name = doc_name.slice(0, -('.sketch'.length)) if doc_name.endsWith('.sketch')
assert => @current_state == 'none'
@current_state = 'loading'
@forceUpdate()
# use local variable to track cancellation so it's per-run of import
should_cancel = false
@import_canceler = ->
should_cancel = true
server.importFromSketch(files[0], ((doc_json) =>
return if should_cancel
return @showError(@sketchImportErrorMessage, new Error('Returned empty doc')) if Object.keys(doc_json.blocks).length <= 1
server.createNewDoc(@props.app.id, doc_name, @props.app.default_language, _l.cloneDeep(doc_json))
.then ({docRef, docjson}) =>
server.saveLatestSketchImportForDoc(docRef, docjson)
.then =>
window.location = "/pages/#{docRef.page_id}"
.catch (e) =>
@showError(@metaserverUnreachableErrorMessage, e)
), ((err) =>
# Assume any non 500 error comes with a custom responseText
@showError((
switch err.status
when 500 then @sketchImportErrorMessage
when 0 then @sketchServerUnavailableErrorMessage
else err.responseText
), new Error("sketch server error #{err.status}"))
))
showError: (msg, err) ->
assert => @current_state in ['none', 'loading']
track_error(err, msg)
analytics.track("Sketch importer error", {msg, where: 'dashboard'})
@current_state = 'error'
@error_message = msg
@forceUpdate()
cancelImport: ->
assert => @current_state == 'loading'
# do the cancel
@import_canceler?()
@current_state = 'none'
@forceUpdate()
errorOkay: ->
assert => @current_state == 'error'
@current_state = 'none'
@forceUpdate()
sketchImportErrorMessage: """
We weren't able to recognize your upload as a Sketch file.
If this problem persists, please contact the Pagedraw team at PI:EMAIL:<EMAIL>END_PI
"""
metaserverUnreachableErrorMessage: """
Unable to create a new doc.
If this problem persists, please contact us at PI:EMAIL:<EMAIL>END_PI
"""
sketchServerUnavailableErrorMessage: """
Couldn't reach the server to do a Sketch import. Please try again.
If this problem persists, please contact the Pagedraw team at PI:EMAIL:<EMAIL>END_PI
"""
exports.FigmaModal = createReactClass
componentWillMount: ->
@show = false
@import_in_flight = false
@status = 'default' # | 'loading' | 'error'
@figma_url = ""
componentDidMount: ->
if @props.show_figma_modal
@show = true
@forceUpdate()
figma_url_vl: ->
value: @figma_url
requestChange: (newVal) => @figma_url = newVal; @forceUpdate()
render: ->
if config.disableFigmaSketchImport
return <div onClick={-> alert "Figma importing is only available in the Open Source version! Check us out on Github: https://github.com/Pagedraw/pagedraw"}>
{@props.children}
</div>
if not @props.figma_access_token
<a href="/oauth/figma_redirect?app_id=#{@props.app.id}">
{@props.children}
</a>
else
<div>
<form onSubmit={(evt) =>
evt.preventDefault()
figma_import(@figma_url_vl().value, @props.figma_access_token)
.then ({doc_json, fileName}) =>
server.createNewDoc(@props.app.id, fileName, @props.app.default_language, _l.cloneDeep(doc_json))
.then ({docRef, docjson}) =>
server.saveLatestFigmaImportForDoc(docRef, docjson)
.then =>
window.location = "/pages/#{docRef.page_id}"
.catch (e) =>
throw new Error()
.catch (e) =>
@status = "error"
.then =>
@import_in_flight = false
@forceUpdate()
@import_in_flight = true
@status = "loading"
@forceUpdate()
}>
<div className="bootstrap">
<div ref="modal_container" />
</div>
<Modal show={@show} container={@refs.modal_container}>
<Modal.Header>
<Modal.Title>Import from Figma</Modal.Title>
</Modal.Header>
<Modal.Body>
{
if @status == "default"
<div>
<p>Paste the URL of the Figma design you'd like to import</p>
<label htmlFor="figma_url">Figma link</label>
<FormControl tag="input" valueLink={@figma_url_vl()}
name="figma_url" style={width: '100%'}
placeholder="https://figma.com/file/XXXXXXXXXXXXXXXXXXXXXX/Sample-File-Name" />
</div>
else if @status == "loading"
<img style={display: 'block', marginLeft: 'auto', marginRight: 'auto'} src="https://complex-houses.surge.sh/59ec0968-b6e3-4a00-b082-932b7fcf41a5/loading.gif" />
else
<p style={color: 'red'}>We weren't able to recognize your upload as a Figma file.
If this problem persists, please contact the Pagedraw team at PI:EMAIL:<EMAIL>END_PI.io</p>
}
</Modal.Body>
<Modal.Footer>
{<PdButtonOne onClick={=> @show = false; @status = "default"; @forceUpdate()}>Close</PdButtonOne> if @status in ["default", "error"]}
{<PdButtonOne type="primary" submit disabled={@import_in_flight}>Import</PdButtonOne> if @status == "default"}
</Modal.Footer>
</Modal>
</form>
<div onClick={=> @show = true; @forceUpdate()}>
{@props.children}
</div>
</div>
exports.PricingCardsWrapper = (props) ->
<div style={position: 'relative', flexGrow: '1'}>
<div style={position: 'absolute', top: 0, left: 0}>
<PagedrawnPricingCards />
</div>
</div>
|
[
{
"context": " protocol: 'http'\n hostname: '127.0.0.1'\n port: 5984\n options:\n ",
"end": 1252,
"score": 0.9997619986534119,
"start": 1243,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " #auth:\n # ... | src/server/settings.coffee | LaPingvino/rizzoma | 0 | ###
Settings module. Contains all available settings with default or best fit for development values.
First level is environment name (for running code in 'dev', 'prod'; when starting node
this value will be got from NODE_ENV system environment variable).
Second level is block name.
If you want to change some settings locally you should use 'settings_local.coffee' file
(it's added to '.gitignore'; example at file 'settings_local.coffee.template').
###
path = require('path')
redisCacheBackendOptions =
host: 'localhost'
port: 6379
db: 1
module.exports.dev =
app:
listenPort: 8000
waveUrl: '/topic/'
waveEmbeddedUrl: '/embedded/'
waveDriveUrl: '/drive/'
# название cookie для флага о том, что пользователь согласился, что его браузер не поддерживается
showIfUnsupportedBrowserCookie: "show_if_unsupported_browser"
# какую подпись с версией и др. информацией о приложении показывать пользователям: 'dev', 'prod'
signatureType: 'dev'
# url of this site (for links in emails, XMPP, auth, ...)
baseUrl: 'http://localhost:8000'
db:
main:
type: 'cradle'
protocol: 'http'
hostname: '127.0.0.1'
port: 5984
options:
cache: false
#auth:
# username: 'root'
# password: 'secret'
db: 'project_rizzoma'
designsDir: '/src/server/couch_views/main/'
operations:
type: 'cradle'
protocol: 'http'
hostname: '127.0.0.1'
port: 5984
options:
cache: false
#auth:
# username: 'root'
# password: 'secret'
db: 'project_rizzoma'
designsDir: '/src/server/couch_views/operations/'
sharejs:
opsBeforeCommit: 1
numCachedOps: 1
reapTime: 1
forceReaping: true
maximumAge: 1000000
ot:
amqpOptions: {} # use default from the "amqpConnect"
search:
searchType: 'local' # default
sphinxPort: 9306
sphinxHost: 'localhost'
# searchType: 'amqp'
# amqpOptions: {} # use default options
# searchTimeout: 15 # default search timeout is 15 s
# sphinxTimeout: 10 # default SphinxSearch connect timeout is 10 s
searchIndexer:
indexes: [
# выполнять каждые threshold секунд, если попадает в between: [starthour, endhour], starthour included in server offset
{threshold: 24 * 60 * 60, between: [2, 4]},
{threshold: 15 * 60},
{threshold: 3}
]
indexesPath: '/var/lib/sphinxsearch/data'
docsAtOnce: 10000
indexCommand: path.join(__dirname, '/../../bin/run_sphinx_indexer.sh')
mergeCommand: path.join(__dirname, '/../../bin/merge_sphinx_indexes.sh')
# backup:
# # выполнять каждые threshold секунд, если попадает в between: [starthour, endhour], starthour included in server offset
# threshold: 24 * 60 * 60
# between: [3, 4]
# command: path.join(__dirname, '/../../bin/run_sphinx_backup.sh')
indexPrefix: 'dev' # prefix for indexes directory - allow to use one sphinxsearch with many nodes
indexerType: 'local' # default
# indexerType: 'amqp'
# amqpOptions: {} # use default
ui:
# Константы для интерфейса пользователя
search:
refreshInterval:
# Интервал обновления для списков в панели поиска
visible: 240
hidden: 800
# Время обновления невидимого таба, берется на клиенте случайно из интервала
hiddenTab:
lbound: 900 # Нижняя граница интервала
ubound: 1000 # Верхняя граница интервала
session:
secret: 'zQnNJ272fqRwjP0WyNAZ+UYdDOl3tO4uHz1di+9pTaMChLnl'
key: 'connect.sid'
cookie:
maxAge: null # session cookie
# /ping/ request interval, in seconds
refreshInterval: 1200
storeType: 'memory' # (default session store)
# storeType: 'redis'
storeOptions: # for redis:
ttl: 90 * 60 # 90 minutes
# host: 'localhost'
# port: 6379
# db: 1
# pass: ''
# prefix: 'sess:'
socialSharing:
url: '/!/'
signSalt: 'MvRfesxTEVTn+uWT'
signLength: 6
timeout: 120
gadget:
enabled: true
# URL контейнера для гаджетов Shindig без завершающего слеша.
# (возможно, должен быть на совсем отдельном от основного домене)
shindigUrl: 'https://d1twizu1s7sme1.cloudfront.net'
logger:
# настройки логгера
defaultLoggerName: 'system'
logLevel: 'DEBUG'
# Число означает глубину логирования. По умолчанию в ноде 2.
logRequest: 3
logResponse: 3
# Использовать X-Forwarded-For для определения ip клиента
useXForwardedFor: true
# Список адресов от которых принимать X-Forwarded-For
trustedAddress: ['127.0.0.1',]
transports:
"*":[
{
transportClass: require('./common/logger/categoried_stdout').CategoriedStdoutLogger
colorize: true
loggerNameField: 'category'
}
# {
# transportClass: require('./common/logger/graylog2').Graylog2Logger
# loggerNameField: 'graylogFacility'
# graylogHost: '127.0.0.1'
# graylogPort: 12201
# appInfo: true
# }
],
"http": [
{
transportClass: require('./common/logger/categoried_stdout').CategoriedStdoutLogger
colorize: true
loggerNameField: 'category'
#писать али нет meta, по умолчанию писать
meta: false
}
# {
# transportClass: require('./common/logger/graylog2').Graylog2Logger
# loggerNameField: 'graylogFacility'
# graylogHost: '127.0.0.1'
# graylogPort: 12201
# appInfo: true
# }
]
amqpConnect:
# Подключение к AMQP брокеру (RabbitMQ) для отправки логов, запросов на индексацию и поиск и т.д.
# Здесь находятся настройки соединения по умолчанию. Они могут быть переписаны в search.amqpOptions и пр.
port: 5672
host: '127.0.0.1'
login: 'guest'
password: 'guest'
vhost: '/'
implOptions:
reconnect: true
reconnectBackoffStrategy: 'exponential'
reconnectBackoffTime: 1000
reconnectExponentialLimit: 120000
generator:
# id update-handler'а в базе.
sequencerHandlerId: 'sequence/increment'
# Префикс, нужен на будущее, если будет нужно больше одного генератора на тип.
prefix: '0'
# Разделитель в id.
delimiter: '_'
# Основание системы счисления в в которой будет представлена числовая часть id (для компактности).
base: 32
sockjs:
heartbeat_delay: 25000
contacts:
updateThreshold: 24 * 60 * 60 # Автоматически обновляем список контактов не чаще 24 часов
maxContactsCount: 1000 # Количество контактов, запрашиваемое у стороннего сервиса
redirectUri: /\/auth\/(google|facebook)\/contacts\/callback/
updateUrl: /\/contacts\/(google|facebook)\/update/
avatarsPath: path.join(__dirname, '/../../data/avatars/')
internalAvatarsUrl: '/avatars/'
sources:
google:
apiUrl: 'https://www.google.com/m8/feeds'
codeUrl: 'https://accounts.google.com/o/oauth2/auth'
tokenUrl: 'https://accounts.google.com/o/oauth2/token'
scope: 'https://www.google.com/m8/feeds'
redirectUri: '/auth/google/contacts/callback'
avatarsFetchingCount: 1
facebook:
apiUrl: 'https://graph.facebook.com'
codeUrl: 'https://facebook.com/dialog/oauth'
tokenUrl: 'https://graph.facebook.com/oauth/access_token'
scope: 'friends_about_me,xmpp_login'
redirectUri: '/auth/facebook/contacts/callback'
#Реквизиты приложения в соответствующем провайдере для авторизации.
auth:
authUrl: /\/auth\/(google|facebook)\/$/
embeddedAuthUrl: '/auth/embedded/'
callbackUrl: /\/auth\/(google|facebook)\/callback/
googleAuthByTokenUrl: '/auth/google-by-token/'
logoutUrl: '/logout'
ajaxLogoutUrl: '/ajax-logout'
facebook:
# application id and secret for http://localhost:8000
# (can be obtained at https://developers.facebook.com/apps/ "Create a New App", App ID and App Secret values)
clientID: '123'
clientSecret: 'facebook-app-secret'
callbackURL: '/auth/facebook/callback'
scope: ['email']
# override profileURL instead of profileFields because updated_time and verified can be specified in it.
profileURL: 'https://graph.facebook.com/me?fields=id,email,first_name,gender,last_name,link,locale,name,picture,timezone,updated_time,verified'
google:
# application id and secret for http://localhost:8000
# (can be obtained at https://console.developers.google.com/project "APIS&AUTH">"Credentials")
clientID: '123'
clientSecret: 'google-client-secret'
callbackURL: '/auth/google/callback'
scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
googleByToken:
scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
password: {}
# refresh_token для Google Drive
gDriveConf:
gDriveRefreshToken: ''
updateInterval: 60 # 1 minute
cachePath: '/tmp' # directory for googleapis discovery documents
# топики которые создаются для только что зарегистрировавшегося пользователя
welcomeWaves: [
{
# welcome topic source and owner
sourceWaveUrl: null
ownerUserEmail: 'support@rizzoma.com'
}
]
supportEmail: 'support@rizzoma.com'
notification:
# notification settings (transports settings and rules)
transport: {}
# smtp:
# host: 'smtp.gmail.com'
# port: 587
# ssl: false
# use_authentication: true
# user: 'username@gmail.com'
# pass: ''
# from: 'username@gmail.com'
# fromName: 'Notificator'
# xmpp:
# jid: 'username@gmail.com'
# password: ''
# idleTimeout: 30 # проверять связь (слать пинг) после такого времени неактивности соединения
# pingTimeout: 10 # время, которое ожидаем ответа на пинг
# connectTimeout: 30 # если за это время не удалось установить соединение,то попробуем еще раз
# switching: {} # automatically uses smtp or facebook-xmpp (for Facebook users)
# 'facebook-xmpp': {}
rules:
message: ["xmpp", "switching"]
task: ["xmpp", "switching"]
add_participant: ["switching"]
'import': ["smtp"]
weekly_changes_digest: ["smtp"]
daily_changes_digest: ["smtp"]
announce: ["smtp"]
first_visit: ["smtp"]
new_comment: ["smtp"]
merge: ["smtp"]
access_request: ["smtp"]
register_confirm: ["smtp"]
forgot_password: ["smtp"]
enterprise_request: ["smtp"]
payment_blocked: ["smtp"]
payment_fault: ["smtp"]
payment_no_card: ["smtp"]
payment_success: ["smtp"]
# генерация хэша в письме добавления, для предупреждения, что добавили по одному email, а заходит с другого
referalEmailSalt: 'xt5IzUDyZbPPzNaxmZNxBKEz5gG8mmFniVlY59HWCcnCowuG'
# получение ответов на письма о меншенах, тасках и реплаях
replyEmail: "username@gmail.com" # подставлять этот адрес вместе с id и хэшами в заголовок Reply-To
emailReplyFetcher:
imap:
username: 'username@gmail.com'
password: ''
host: 'imap.gmail.com',
port: 993,
secure: true
siteAnalytics:
googleAnalytics:
id: 'UA-22635528-4'
# domainName: 'rizzoma.com'
rtbm:
id: '35A57B3B'
loggedInId: '45CF8FB4'
# mixpanel:
# id: ''
# yandexMetrika:
# id: ''
files:
type: 'local'
uploadSizeLimit: 50 * 1024 * 1024
# Настройки файлового хранилища Amazon S3:
# accessKeyId = Публичный ключ доступа
# secretAccessKey = Приватный ключ доступа
# awsAccountId = Идентификатор аккаунта (обязателен для awssum, но можно отдать любую непустую строку)
# region = регион для амазона. Взять соответствующую константу из файла ./node_modules/awssum/lib/amazon/amazon.js
# ('us-east-1' | 'us-west-1' | 'us-west-2' | 'eu-west-1' | 'ap-southeast-1' | 'ap-northeast-1' | 'sa-east-1' | 'us-gov-west-1)
# buckets = названия корзин, в которые будут складываться файлы
# linkExpiration = время жизни раздаваемых ссылок в секундах
# type - тип процессора файлов: 'local' | 's3'
# uploadSizeLimit - квота на закачку файлов
#s3:
# accessKeyId: ''
# secretAccessKey: ''
# awsAccountId: ''
# region: ''
# buckets: {
# files: ''
# avatars: ''
# store: ''
# }
# linkExpiration: 60
#local: {}
cache: {} # not using by default
# user:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
# blip:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
# wave:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 200}
# op:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 500}
# tipList:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 1}
# urlAlias:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
hangout:
title: 'Rizzoma'
appId: '286807350752'
devMode: true
api:
rest:
version: 1
'export':
version: 1
'export':
# Путь к директории, где хранятся сформированные архивы
# Не забудьте поправить скрипт для очистки и конфигурацию веб-сервера
archivePath: path.join(__dirname, '/../../lib/export/')
sitemap:
destinationPath: path.join(__dirname, '/../../lib/sitemap/')
staticSrcPath: path.join(__dirname, '/../static/')
payment:
getMonthTeamTopicTax: 500 #центы
apiPublicKey: ''
teamTopicTemplate: #Шаблон для командных топиков
url: null
accountsMerge:
emailConfirmCallbackUrl: /accounts_merge/
oauthConfirmUrl: /\/accounts_merge\/(google|facebook)\/$/
oauthConfirmCallbackUrl: /\/accounts_merge\/(google|facebook)\/callback/
google:
callbackURL: '/accounts_merge/google/callback'
facebook:
callbackURL: '/accounts_merge/facebook/callback'
store:
itemsInstalledByDefault: []
| 156137 | ###
Settings module. Contains all available settings with default or best fit for development values.
First level is environment name (for running code in 'dev', 'prod'; when starting node
this value will be got from NODE_ENV system environment variable).
Second level is block name.
If you want to change some settings locally you should use 'settings_local.coffee' file
(it's added to '.gitignore'; example at file 'settings_local.coffee.template').
###
path = require('path')
redisCacheBackendOptions =
host: 'localhost'
port: 6379
db: 1
module.exports.dev =
app:
listenPort: 8000
waveUrl: '/topic/'
waveEmbeddedUrl: '/embedded/'
waveDriveUrl: '/drive/'
# название cookie для флага о том, что пользователь согласился, что его браузер не поддерживается
showIfUnsupportedBrowserCookie: "show_if_unsupported_browser"
# какую подпись с версией и др. информацией о приложении показывать пользователям: 'dev', 'prod'
signatureType: 'dev'
# url of this site (for links in emails, XMPP, auth, ...)
baseUrl: 'http://localhost:8000'
db:
main:
type: 'cradle'
protocol: 'http'
hostname: '127.0.0.1'
port: 5984
options:
cache: false
#auth:
# username: 'root'
# password: '<PASSWORD>'
db: 'project_rizzoma'
designsDir: '/src/server/couch_views/main/'
operations:
type: 'cradle'
protocol: 'http'
hostname: '127.0.0.1'
port: 5984
options:
cache: false
#auth:
# username: 'root'
# password: '<PASSWORD>'
db: 'project_rizzoma'
designsDir: '/src/server/couch_views/operations/'
sharejs:
opsBeforeCommit: 1
numCachedOps: 1
reapTime: 1
forceReaping: true
maximumAge: 1000000
ot:
amqpOptions: {} # use default from the "amqpConnect"
search:
searchType: 'local' # default
sphinxPort: 9306
sphinxHost: 'localhost'
# searchType: 'amqp'
# amqpOptions: {} # use default options
# searchTimeout: 15 # default search timeout is 15 s
# sphinxTimeout: 10 # default SphinxSearch connect timeout is 10 s
searchIndexer:
indexes: [
# выполнять каждые threshold секунд, если попадает в between: [starthour, endhour], starthour included in server offset
{threshold: 24 * 60 * 60, between: [2, 4]},
{threshold: 15 * 60},
{threshold: 3}
]
indexesPath: '/var/lib/sphinxsearch/data'
docsAtOnce: 10000
indexCommand: path.join(__dirname, '/../../bin/run_sphinx_indexer.sh')
mergeCommand: path.join(__dirname, '/../../bin/merge_sphinx_indexes.sh')
# backup:
# # выполнять каждые threshold секунд, если попадает в between: [starthour, endhour], starthour included in server offset
# threshold: 24 * 60 * 60
# between: [3, 4]
# command: path.join(__dirname, '/../../bin/run_sphinx_backup.sh')
indexPrefix: 'dev' # prefix for indexes directory - allow to use one sphinxsearch with many nodes
indexerType: 'local' # default
# indexerType: 'amqp'
# amqpOptions: {} # use default
ui:
# Константы для интерфейса пользователя
search:
refreshInterval:
# Интервал обновления для списков в панели поиска
visible: 240
hidden: 800
# Время обновления невидимого таба, берется на клиенте случайно из интервала
hiddenTab:
lbound: 900 # Нижняя граница интервала
ubound: 1000 # Верхняя граница интервала
session:
secret: '<KEY>'
key: '<KEY>'
cookie:
maxAge: null # session cookie
# /ping/ request interval, in seconds
refreshInterval: 1200
storeType: 'memory' # (default session store)
# storeType: 'redis'
storeOptions: # for redis:
ttl: 90 * 60 # 90 minutes
# host: 'localhost'
# port: 6379
# db: 1
# pass: ''
# prefix: 'sess:'
socialSharing:
url: '/!/'
signSalt: 'MvRfesxTEVTn+uWT'
signLength: 6
timeout: 120
gadget:
enabled: true
# URL контейнера для гаджетов Shindig без завершающего слеша.
# (возможно, должен быть на совсем отдельном от основного домене)
shindigUrl: 'https://d1twizu1s7sme1.cloudfront.net'
logger:
# настройки логгера
defaultLoggerName: 'system'
logLevel: 'DEBUG'
# Число означает глубину логирования. По умолчанию в ноде 2.
logRequest: 3
logResponse: 3
# Использовать X-Forwarded-For для определения ip клиента
useXForwardedFor: true
# Список адресов от которых принимать X-Forwarded-For
trustedAddress: ['127.0.0.1',]
transports:
"*":[
{
transportClass: require('./common/logger/categoried_stdout').CategoriedStdoutLogger
colorize: true
loggerNameField: 'category'
}
# {
# transportClass: require('./common/logger/graylog2').Graylog2Logger
# loggerNameField: 'graylogFacility'
# graylogHost: '127.0.0.1'
# graylogPort: 12201
# appInfo: true
# }
],
"http": [
{
transportClass: require('./common/logger/categoried_stdout').CategoriedStdoutLogger
colorize: true
loggerNameField: 'category'
#писать али нет meta, по умолчанию писать
meta: false
}
# {
# transportClass: require('./common/logger/graylog2').Graylog2Logger
# loggerNameField: 'graylogFacility'
# graylogHost: '127.0.0.1'
# graylogPort: 12201
# appInfo: true
# }
]
amqpConnect:
# Подключение к AMQP брокеру (RabbitMQ) для отправки логов, запросов на индексацию и поиск и т.д.
# Здесь находятся настройки соединения по умолчанию. Они могут быть переписаны в search.amqpOptions и пр.
port: 5672
host: '127.0.0.1'
login: 'guest'
password: '<PASSWORD>'
vhost: '/'
implOptions:
reconnect: true
reconnectBackoffStrategy: 'exponential'
reconnectBackoffTime: 1000
reconnectExponentialLimit: 120000
generator:
# id update-handler'а в базе.
sequencerHandlerId: 'sequence/increment'
# Префикс, нужен на будущее, если будет нужно больше одного генератора на тип.
prefix: '0'
# Разделитель в id.
delimiter: '_'
# Основание системы счисления в в которой будет представлена числовая часть id (для компактности).
base: 32
sockjs:
heartbeat_delay: 25000
contacts:
updateThreshold: 24 * 60 * 60 # Автоматически обновляем список контактов не чаще 24 часов
maxContactsCount: 1000 # Количество контактов, запрашиваемое у стороннего сервиса
redirectUri: /\/auth\/(google|facebook)\/contacts\/callback/
updateUrl: /\/contacts\/(google|facebook)\/update/
avatarsPath: path.join(__dirname, '/../../data/avatars/')
internalAvatarsUrl: '/avatars/'
sources:
google:
apiUrl: 'https://www.google.com/m8/feeds'
codeUrl: 'https://accounts.google.com/o/oauth2/auth'
tokenUrl: 'https://accounts.google.com/o/oauth2/token'
scope: 'https://www.google.com/m8/feeds'
redirectUri: '/auth/google/contacts/callback'
avatarsFetchingCount: 1
facebook:
apiUrl: 'https://graph.facebook.com'
codeUrl: 'https://facebook.com/dialog/oauth'
tokenUrl: 'https://graph.facebook.com/oauth/access_token'
scope: 'friends_about_me,xmpp_login'
redirectUri: '/auth/facebook/contacts/callback'
#Реквизиты приложения в соответствующем провайдере для авторизации.
auth:
authUrl: /\/auth\/(google|facebook)\/$/
embeddedAuthUrl: '/auth/embedded/'
callbackUrl: /\/auth\/(google|facebook)\/callback/
googleAuthByTokenUrl: '/auth/google-by-token/'
logoutUrl: '/logout'
ajaxLogoutUrl: '/ajax-logout'
facebook:
# application id and secret for http://localhost:8000
# (can be obtained at https://developers.facebook.com/apps/ "Create a New App", App ID and App Secret values)
clientID: '123'
clientSecret: 'facebook-app-secret'
callbackURL: '/auth/facebook/callback'
scope: ['email']
# override profileURL instead of profileFields because updated_time and verified can be specified in it.
profileURL: 'https://graph.facebook.com/me?fields=id,email,first_name,gender,last_name,link,locale,name,picture,timezone,updated_time,verified'
google:
# application id and secret for http://localhost:8000
# (can be obtained at https://console.developers.google.com/project "APIS&AUTH">"Credentials")
clientID: '123'
clientSecret: 'google-client-secret'
callbackURL: '/auth/google/callback'
scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
googleByToken:
scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
password: {}
# refresh_token для Google Drive
gDriveConf:
gDriveRefreshToken: ''
updateInterval: 60 # 1 minute
cachePath: '/tmp' # directory for googleapis discovery documents
# топики которые создаются для только что зарегистрировавшегося пользователя
welcomeWaves: [
{
# welcome topic source and owner
sourceWaveUrl: null
ownerUserEmail: '<EMAIL>'
}
]
supportEmail: '<EMAIL>'
notification:
# notification settings (transports settings and rules)
transport: {}
# smtp:
# host: 'smtp.gmail.com'
# port: 587
# ssl: false
# use_authentication: true
# user: '<EMAIL>'
# pass: ''
# from: '<EMAIL>'
# fromName: 'Notificator'
# xmpp:
# jid: '<EMAIL>'
# password: ''
# idleTimeout: 30 # проверять связь (слать пинг) после такого времени неактивности соединения
# pingTimeout: 10 # время, которое ожидаем ответа на пинг
# connectTimeout: 30 # если за это время не удалось установить соединение,то попробуем еще раз
# switching: {} # automatically uses smtp or facebook-xmpp (for Facebook users)
# 'facebook-xmpp': {}
rules:
message: ["xmpp", "switching"]
task: ["xmpp", "switching"]
add_participant: ["switching"]
'import': ["smtp"]
weekly_changes_digest: ["smtp"]
daily_changes_digest: ["smtp"]
announce: ["smtp"]
first_visit: ["smtp"]
new_comment: ["smtp"]
merge: ["smtp"]
access_request: ["smtp"]
register_confirm: ["smtp"]
forgot_password: ["<PASSWORD>"]
enterprise_request: ["smtp"]
payment_blocked: ["smtp"]
payment_fault: ["smtp"]
payment_no_card: ["smtp"]
payment_success: ["smtp"]
# генерация хэша в письме добавления, для предупреждения, что добавили по одному email, а заходит с другого
referalEmailSalt: '<KEY>Zb<KEY>'
# получение ответов на письма о меншенах, тасках и реплаях
replyEmail: "<EMAIL>" # подставлять этот адрес вместе с id и хэшами в заголовок Reply-To
emailReplyFetcher:
imap:
username: '<EMAIL>'
password:<PASSWORD> ''
host: 'imap.gmail.com',
port: 993,
secure: true
siteAnalytics:
googleAnalytics:
id: 'UA-22635528-4'
# domainName: 'rizzoma.com'
rtbm:
id: '35A57B3B'
loggedInId: '45CF8FB4'
# mixpanel:
# id: ''
# yandexMetrika:
# id: ''
files:
type: 'local'
uploadSizeLimit: 50 * 1024 * 1024
# Настройки файлового хранилища Amazon S3:
# accessKeyId = Публичный ключ доступа
# secretAccessKey = Приватный ключ доступа
# awsAccountId = Идентификатор аккаунта (обязателен для awssum, но можно отдать любую непустую строку)
# region = регион для амазона. Взять соответствующую константу из файла ./node_modules/awssum/lib/amazon/amazon.js
# ('us-east-1' | 'us-west-1' | 'us-west-2' | 'eu-west-1' | 'ap-southeast-1' | 'ap-northeast-1' | 'sa-east-1' | 'us-gov-west-1)
# buckets = названия корзин, в которые будут складываться файлы
# linkExpiration = время жизни раздаваемых ссылок в секундах
# type - тип процессора файлов: 'local' | 's3'
# uploadSizeLimit - квота на закачку файлов
#s3:
# accessKeyId: ''
# secretAccessKey: ''
# awsAccountId: ''
# region: ''
# buckets: {
# files: ''
# avatars: ''
# store: ''
# }
# linkExpiration: 60
#local: {}
cache: {} # not using by default
# user:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
# blip:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
# wave:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 200}
# op:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 500}
# tipList:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 1}
# urlAlias:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
hangout:
title: 'Rizzoma'
appId: '286807350752'
devMode: true
api:
rest:
version: 1
'export':
version: 1
'export':
# Путь к директории, где хранятся сформированные архивы
# Не забудьте поправить скрипт для очистки и конфигурацию веб-сервера
archivePath: path.join(__dirname, '/../../lib/export/')
sitemap:
destinationPath: path.join(__dirname, '/../../lib/sitemap/')
staticSrcPath: path.join(__dirname, '/../static/')
payment:
getMonthTeamTopicTax: 500 #центы
apiPublicKey: ''
teamTopicTemplate: #Шаблон для командных топиков
url: null
accountsMerge:
emailConfirmCallbackUrl: /accounts_merge/
oauthConfirmUrl: /\/accounts_merge\/(google|facebook)\/$/
oauthConfirmCallbackUrl: /\/accounts_merge\/(google|facebook)\/callback/
google:
callbackURL: '/accounts_merge/google/callback'
facebook:
callbackURL: '/accounts_merge/facebook/callback'
store:
itemsInstalledByDefault: []
| true | ###
Settings module. Contains all available settings with default or best fit for development values.
First level is environment name (for running code in 'dev', 'prod'; when starting node
this value will be got from NODE_ENV system environment variable).
Second level is block name.
If you want to change some settings locally you should use 'settings_local.coffee' file
(it's added to '.gitignore'; example at file 'settings_local.coffee.template').
###
path = require('path')
redisCacheBackendOptions =
host: 'localhost'
port: 6379
db: 1
module.exports.dev =
app:
listenPort: 8000
waveUrl: '/topic/'
waveEmbeddedUrl: '/embedded/'
waveDriveUrl: '/drive/'
# название cookie для флага о том, что пользователь согласился, что его браузер не поддерживается
showIfUnsupportedBrowserCookie: "show_if_unsupported_browser"
# какую подпись с версией и др. информацией о приложении показывать пользователям: 'dev', 'prod'
signatureType: 'dev'
# url of this site (for links in emails, XMPP, auth, ...)
baseUrl: 'http://localhost:8000'
db:
main:
type: 'cradle'
protocol: 'http'
hostname: '127.0.0.1'
port: 5984
options:
cache: false
#auth:
# username: 'root'
# password: 'PI:PASSWORD:<PASSWORD>END_PI'
db: 'project_rizzoma'
designsDir: '/src/server/couch_views/main/'
operations:
type: 'cradle'
protocol: 'http'
hostname: '127.0.0.1'
port: 5984
options:
cache: false
#auth:
# username: 'root'
# password: 'PI:PASSWORD:<PASSWORD>END_PI'
db: 'project_rizzoma'
designsDir: '/src/server/couch_views/operations/'
sharejs:
opsBeforeCommit: 1
numCachedOps: 1
reapTime: 1
forceReaping: true
maximumAge: 1000000
ot:
amqpOptions: {} # use default from the "amqpConnect"
search:
searchType: 'local' # default
sphinxPort: 9306
sphinxHost: 'localhost'
# searchType: 'amqp'
# amqpOptions: {} # use default options
# searchTimeout: 15 # default search timeout is 15 s
# sphinxTimeout: 10 # default SphinxSearch connect timeout is 10 s
searchIndexer:
indexes: [
# выполнять каждые threshold секунд, если попадает в between: [starthour, endhour], starthour included in server offset
{threshold: 24 * 60 * 60, between: [2, 4]},
{threshold: 15 * 60},
{threshold: 3}
]
indexesPath: '/var/lib/sphinxsearch/data'
docsAtOnce: 10000
indexCommand: path.join(__dirname, '/../../bin/run_sphinx_indexer.sh')
mergeCommand: path.join(__dirname, '/../../bin/merge_sphinx_indexes.sh')
# backup:
# # выполнять каждые threshold секунд, если попадает в between: [starthour, endhour], starthour included in server offset
# threshold: 24 * 60 * 60
# between: [3, 4]
# command: path.join(__dirname, '/../../bin/run_sphinx_backup.sh')
indexPrefix: 'dev' # prefix for indexes directory - allow to use one sphinxsearch with many nodes
indexerType: 'local' # default
# indexerType: 'amqp'
# amqpOptions: {} # use default
ui:
# Константы для интерфейса пользователя
search:
refreshInterval:
# Интервал обновления для списков в панели поиска
visible: 240
hidden: 800
# Время обновления невидимого таба, берется на клиенте случайно из интервала
hiddenTab:
lbound: 900 # Нижняя граница интервала
ubound: 1000 # Верхняя граница интервала
session:
secret: 'PI:KEY:<KEY>END_PI'
key: 'PI:KEY:<KEY>END_PI'
cookie:
maxAge: null # session cookie
# /ping/ request interval, in seconds
refreshInterval: 1200
storeType: 'memory' # (default session store)
# storeType: 'redis'
storeOptions: # for redis:
ttl: 90 * 60 # 90 minutes
# host: 'localhost'
# port: 6379
# db: 1
# pass: ''
# prefix: 'sess:'
socialSharing:
url: '/!/'
signSalt: 'MvRfesxTEVTn+uWT'
signLength: 6
timeout: 120
gadget:
enabled: true
# URL контейнера для гаджетов Shindig без завершающего слеша.
# (возможно, должен быть на совсем отдельном от основного домене)
shindigUrl: 'https://d1twizu1s7sme1.cloudfront.net'
logger:
# настройки логгера
defaultLoggerName: 'system'
logLevel: 'DEBUG'
# Число означает глубину логирования. По умолчанию в ноде 2.
logRequest: 3
logResponse: 3
# Использовать X-Forwarded-For для определения ip клиента
useXForwardedFor: true
# Список адресов от которых принимать X-Forwarded-For
trustedAddress: ['127.0.0.1',]
transports:
"*":[
{
transportClass: require('./common/logger/categoried_stdout').CategoriedStdoutLogger
colorize: true
loggerNameField: 'category'
}
# {
# transportClass: require('./common/logger/graylog2').Graylog2Logger
# loggerNameField: 'graylogFacility'
# graylogHost: '127.0.0.1'
# graylogPort: 12201
# appInfo: true
# }
],
"http": [
{
transportClass: require('./common/logger/categoried_stdout').CategoriedStdoutLogger
colorize: true
loggerNameField: 'category'
#писать али нет meta, по умолчанию писать
meta: false
}
# {
# transportClass: require('./common/logger/graylog2').Graylog2Logger
# loggerNameField: 'graylogFacility'
# graylogHost: '127.0.0.1'
# graylogPort: 12201
# appInfo: true
# }
]
amqpConnect:
# Подключение к AMQP брокеру (RabbitMQ) для отправки логов, запросов на индексацию и поиск и т.д.
# Здесь находятся настройки соединения по умолчанию. Они могут быть переписаны в search.amqpOptions и пр.
port: 5672
host: '127.0.0.1'
login: 'guest'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
vhost: '/'
implOptions:
reconnect: true
reconnectBackoffStrategy: 'exponential'
reconnectBackoffTime: 1000
reconnectExponentialLimit: 120000
generator:
# id update-handler'а в базе.
sequencerHandlerId: 'sequence/increment'
# Префикс, нужен на будущее, если будет нужно больше одного генератора на тип.
prefix: '0'
# Разделитель в id.
delimiter: '_'
# Основание системы счисления в в которой будет представлена числовая часть id (для компактности).
base: 32
sockjs:
heartbeat_delay: 25000
contacts:
updateThreshold: 24 * 60 * 60 # Автоматически обновляем список контактов не чаще 24 часов
maxContactsCount: 1000 # Количество контактов, запрашиваемое у стороннего сервиса
redirectUri: /\/auth\/(google|facebook)\/contacts\/callback/
updateUrl: /\/contacts\/(google|facebook)\/update/
avatarsPath: path.join(__dirname, '/../../data/avatars/')
internalAvatarsUrl: '/avatars/'
sources:
google:
apiUrl: 'https://www.google.com/m8/feeds'
codeUrl: 'https://accounts.google.com/o/oauth2/auth'
tokenUrl: 'https://accounts.google.com/o/oauth2/token'
scope: 'https://www.google.com/m8/feeds'
redirectUri: '/auth/google/contacts/callback'
avatarsFetchingCount: 1
facebook:
apiUrl: 'https://graph.facebook.com'
codeUrl: 'https://facebook.com/dialog/oauth'
tokenUrl: 'https://graph.facebook.com/oauth/access_token'
scope: 'friends_about_me,xmpp_login'
redirectUri: '/auth/facebook/contacts/callback'
#Реквизиты приложения в соответствующем провайдере для авторизации.
auth:
authUrl: /\/auth\/(google|facebook)\/$/
embeddedAuthUrl: '/auth/embedded/'
callbackUrl: /\/auth\/(google|facebook)\/callback/
googleAuthByTokenUrl: '/auth/google-by-token/'
logoutUrl: '/logout'
ajaxLogoutUrl: '/ajax-logout'
facebook:
# application id and secret for http://localhost:8000
# (can be obtained at https://developers.facebook.com/apps/ "Create a New App", App ID and App Secret values)
clientID: '123'
clientSecret: 'facebook-app-secret'
callbackURL: '/auth/facebook/callback'
scope: ['email']
# override profileURL instead of profileFields because updated_time and verified can be specified in it.
profileURL: 'https://graph.facebook.com/me?fields=id,email,first_name,gender,last_name,link,locale,name,picture,timezone,updated_time,verified'
google:
# application id and secret for http://localhost:8000
# (can be obtained at https://console.developers.google.com/project "APIS&AUTH">"Credentials")
clientID: '123'
clientSecret: 'google-client-secret'
callbackURL: '/auth/google/callback'
scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
googleByToken:
scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
password: {}
# refresh_token для Google Drive
gDriveConf:
gDriveRefreshToken: ''
updateInterval: 60 # 1 minute
cachePath: '/tmp' # directory for googleapis discovery documents
# топики которые создаются для только что зарегистрировавшегося пользователя
welcomeWaves: [
{
# welcome topic source and owner
sourceWaveUrl: null
ownerUserEmail: 'PI:EMAIL:<EMAIL>END_PI'
}
]
supportEmail: 'PI:EMAIL:<EMAIL>END_PI'
notification:
# notification settings (transports settings and rules)
transport: {}
# smtp:
# host: 'smtp.gmail.com'
# port: 587
# ssl: false
# use_authentication: true
# user: 'PI:EMAIL:<EMAIL>END_PI'
# pass: ''
# from: 'PI:EMAIL:<EMAIL>END_PI'
# fromName: 'Notificator'
# xmpp:
# jid: 'PI:EMAIL:<EMAIL>END_PI'
# password: ''
# idleTimeout: 30 # проверять связь (слать пинг) после такого времени неактивности соединения
# pingTimeout: 10 # время, которое ожидаем ответа на пинг
# connectTimeout: 30 # если за это время не удалось установить соединение,то попробуем еще раз
# switching: {} # automatically uses smtp or facebook-xmpp (for Facebook users)
# 'facebook-xmpp': {}
rules:
message: ["xmpp", "switching"]
task: ["xmpp", "switching"]
add_participant: ["switching"]
'import': ["smtp"]
weekly_changes_digest: ["smtp"]
daily_changes_digest: ["smtp"]
announce: ["smtp"]
first_visit: ["smtp"]
new_comment: ["smtp"]
merge: ["smtp"]
access_request: ["smtp"]
register_confirm: ["smtp"]
forgot_password: ["PI:PASSWORD:<PASSWORD>END_PI"]
enterprise_request: ["smtp"]
payment_blocked: ["smtp"]
payment_fault: ["smtp"]
payment_no_card: ["smtp"]
payment_success: ["smtp"]
# генерация хэша в письме добавления, для предупреждения, что добавили по одному email, а заходит с другого
referalEmailSalt: 'PI:KEY:<KEY>END_PIZbPI:KEY:<KEY>END_PI'
# получение ответов на письма о меншенах, тасках и реплаях
replyEmail: "PI:EMAIL:<EMAIL>END_PI" # подставлять этот адрес вместе с id и хэшами в заголовок Reply-To
emailReplyFetcher:
imap:
username: 'PI:EMAIL:<EMAIL>END_PI'
password:PI:PASSWORD:<PASSWORD>END_PI ''
host: 'imap.gmail.com',
port: 993,
secure: true
siteAnalytics:
googleAnalytics:
id: 'UA-22635528-4'
# domainName: 'rizzoma.com'
rtbm:
id: '35A57B3B'
loggedInId: '45CF8FB4'
# mixpanel:
# id: ''
# yandexMetrika:
# id: ''
files:
type: 'local'
uploadSizeLimit: 50 * 1024 * 1024
# Настройки файлового хранилища Amazon S3:
# accessKeyId = Публичный ключ доступа
# secretAccessKey = Приватный ключ доступа
# awsAccountId = Идентификатор аккаунта (обязателен для awssum, но можно отдать любую непустую строку)
# region = регион для амазона. Взять соответствующую константу из файла ./node_modules/awssum/lib/amazon/amazon.js
# ('us-east-1' | 'us-west-1' | 'us-west-2' | 'eu-west-1' | 'ap-southeast-1' | 'ap-northeast-1' | 'sa-east-1' | 'us-gov-west-1)
# buckets = названия корзин, в которые будут складываться файлы
# linkExpiration = время жизни раздаваемых ссылок в секундах
# type - тип процессора файлов: 'local' | 's3'
# uploadSizeLimit - квота на закачку файлов
#s3:
# accessKeyId: ''
# secretAccessKey: ''
# awsAccountId: ''
# region: ''
# buckets: {
# files: ''
# avatars: ''
# store: ''
# }
# linkExpiration: 60
#local: {}
cache: {} # not using by default
# user:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
# blip:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
# wave:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 200}
# op:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 500}
# tipList:
# backend: 'MemoryLruBackend'
# backendOptions: {cacheSize: 1}
# urlAlias:
# backend: 'RedisLruBackend'
# backendOptions: redisCacheBackendOptions
hangout:
title: 'Rizzoma'
appId: '286807350752'
devMode: true
api:
rest:
version: 1
'export':
version: 1
'export':
# Путь к директории, где хранятся сформированные архивы
# Не забудьте поправить скрипт для очистки и конфигурацию веб-сервера
archivePath: path.join(__dirname, '/../../lib/export/')
sitemap:
destinationPath: path.join(__dirname, '/../../lib/sitemap/')
staticSrcPath: path.join(__dirname, '/../static/')
payment:
getMonthTeamTopicTax: 500 #центы
apiPublicKey: ''
teamTopicTemplate: #Шаблон для командных топиков
url: null
accountsMerge:
emailConfirmCallbackUrl: /accounts_merge/
oauthConfirmUrl: /\/accounts_merge\/(google|facebook)\/$/
oauthConfirmCallbackUrl: /\/accounts_merge\/(google|facebook)\/callback/
google:
callbackURL: '/accounts_merge/google/callback'
facebook:
callbackURL: '/accounts_merge/facebook/callback'
store:
itemsInstalledByDefault: []
|
[
{
"context": "#\n# Form CoffeeScript file.\n#\n# @author Matthew Casey\n#\n# (c) University of Surrey 2019\n#\n\nVMV.Form ||=",
"end": 53,
"score": 0.9998666048049927,
"start": 40,
"tag": "NAME",
"value": "Matthew Casey"
}
] | app/assets/javascripts/shared/form.coffee | saschneider/VMVLedger | 0 | #
# Form CoffeeScript file.
#
# @author Matthew Casey
#
# (c) University of Surrey 2019
#
VMV.Form ||= {}
#
# Initialise whenever the page is loaded.
#
$ ->
# Make all selects into bootstrap-select controls with the required defaults. Here we ignore datetime select elements because they don't work well.
$.fn.selectpicker.Constructor.DEFAULTS.container = 'body'
$.fn.selectpicker.Constructor.DEFAULTS.style = 'btn-form-control'
$('select').not('.rails-bootstrap-forms-datetime-select select').selectpicker()
# Display the file name when selected by the user.
$('.custom-file-input').on('change', (event) -> VMV.Form.updateFileSelection(event.target))
# Show direct uploads.
$(document).on('direct-upload:initialize', (event) ->
target = $(event.target)
html = $('.custom-file-label[for=' + target.attr('id') + ']').html()
$('.custom-file-label[for=' + target.attr('id') + ']').html(html + '<span id="direct-upload-progress-' + event.detail.id + '" class="direct-upload-progress" style="width: 0%"></span>')
return
)
$(document).on('direct-upload:progress', (event) ->
element = $('#direct-upload-progress-' + event.detail.id)
element.css('width', event.detail.progress + '%')
return
)
#
# Updates the correspinding file name display for a form file input when a file has been selected.
#
# @param target The target file control.
#
VMV.Form.updateFileSelection = (target) ->
# Remove the "fakepath" from a file input and update the display.
fileName = $(target).val().replace(/^.*[\\\/]/, '')
$(target).next('.custom-file-label').html(fileName)
return
| 123819 | #
# Form CoffeeScript file.
#
# @author <NAME>
#
# (c) University of Surrey 2019
#
VMV.Form ||= {}
#
# Initialise whenever the page is loaded.
#
$ ->
# Make all selects into bootstrap-select controls with the required defaults. Here we ignore datetime select elements because they don't work well.
$.fn.selectpicker.Constructor.DEFAULTS.container = 'body'
$.fn.selectpicker.Constructor.DEFAULTS.style = 'btn-form-control'
$('select').not('.rails-bootstrap-forms-datetime-select select').selectpicker()
# Display the file name when selected by the user.
$('.custom-file-input').on('change', (event) -> VMV.Form.updateFileSelection(event.target))
# Show direct uploads.
$(document).on('direct-upload:initialize', (event) ->
target = $(event.target)
html = $('.custom-file-label[for=' + target.attr('id') + ']').html()
$('.custom-file-label[for=' + target.attr('id') + ']').html(html + '<span id="direct-upload-progress-' + event.detail.id + '" class="direct-upload-progress" style="width: 0%"></span>')
return
)
$(document).on('direct-upload:progress', (event) ->
element = $('#direct-upload-progress-' + event.detail.id)
element.css('width', event.detail.progress + '%')
return
)
#
# Updates the correspinding file name display for a form file input when a file has been selected.
#
# @param target The target file control.
#
VMV.Form.updateFileSelection = (target) ->
# Remove the "fakepath" from a file input and update the display.
fileName = $(target).val().replace(/^.*[\\\/]/, '')
$(target).next('.custom-file-label').html(fileName)
return
| true | #
# Form CoffeeScript file.
#
# @author PI:NAME:<NAME>END_PI
#
# (c) University of Surrey 2019
#
VMV.Form ||= {}
#
# Initialise whenever the page is loaded.
#
$ ->
# Make all selects into bootstrap-select controls with the required defaults. Here we ignore datetime select elements because they don't work well.
$.fn.selectpicker.Constructor.DEFAULTS.container = 'body'
$.fn.selectpicker.Constructor.DEFAULTS.style = 'btn-form-control'
$('select').not('.rails-bootstrap-forms-datetime-select select').selectpicker()
# Display the file name when selected by the user.
$('.custom-file-input').on('change', (event) -> VMV.Form.updateFileSelection(event.target))
# Show direct uploads.
$(document).on('direct-upload:initialize', (event) ->
target = $(event.target)
html = $('.custom-file-label[for=' + target.attr('id') + ']').html()
$('.custom-file-label[for=' + target.attr('id') + ']').html(html + '<span id="direct-upload-progress-' + event.detail.id + '" class="direct-upload-progress" style="width: 0%"></span>')
return
)
$(document).on('direct-upload:progress', (event) ->
element = $('#direct-upload-progress-' + event.detail.id)
element.css('width', event.detail.progress + '%')
return
)
#
# Updates the correspinding file name display for a form file input when a file has been selected.
#
# @param target The target file control.
#
VMV.Form.updateFileSelection = (target) ->
# Remove the "fakepath" from a file input and update the display.
fileName = $(target).val().replace(/^.*[\\\/]/, '')
$(target).next('.custom-file-label').html(fileName)
return
|
[
{
"context": "ock scoped\" variables by binding context\n# @author Matt DuVall <http://www.mattduvall.com>\n###\n'use strict'\n\n#--",
"end": 104,
"score": 0.9997347593307495,
"start": 93,
"tag": "NAME",
"value": "Matt DuVall"
}
] | src/rules/block-scoped-var.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Rule to check for "block scoped" variables by binding context
# @author Matt DuVall <http://www.mattduvall.com>
###
'use strict'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'enforce the use of variables within the scope they are defined'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/block-scoped-var'
schema: []
messages:
outOfScope: "'{{name}}' used outside of binding context."
create: (context) ->
stack = []
###*
# Makes a block scope.
# @param {ASTNode} node - A node of a scope.
# @returns {void}
###
enterScope = (node) -> stack.push node.range
###*
# Pops the last block scope.
# @returns {void}
###
exitScope = -> stack.pop()
###*
# Reports a given reference.
# @param {eslint-scope.Reference} reference - A reference to report.
# @returns {void}
###
report = (reference) ->
{identifier} = reference
context.report
node: identifier, messageId: 'outOfScope', data: name: identifier.name
###*
# Finds and reports references which are outside of valid scopes.
# @param {ASTNode} node - A node to get variables.
# @returns {void}
###
checkForVariables = (node) ->
return unless node.declaration
# Defines a predicate to check whether or not a given reference is outside of valid scope.
scopeRange = stack[stack.length - 1]
###*
# Check if a reference is out of scope
# @param {ASTNode} reference node to examine
# @returns {boolean} True is its outside the scope
# @private
###
isOutsideOfScope = (reference) ->
idRange = reference.identifier.range
idRange[0] < scopeRange[0] or idRange[1] > scopeRange[1]
# Gets declared variables, and checks its references.
variables = context.getDeclaredVariables node
# Reports.
for variable in variables
variable.references.filter(isOutsideOfScope).forEach report
Program: (node) -> stack ###:### = [node.range]
# Manages scopes.
BlockStatement: enterScope
'BlockStatement:exit': exitScope
For: enterScope
'For:exit': exitScope
ForStatement: enterScope
'ForStatement:exit': exitScope
ForInStatement: enterScope
'ForInStatement:exit': exitScope
ForOfStatement: enterScope
'ForOfStatement:exit': exitScope
SwitchStatement: enterScope
'SwitchStatement:exit': exitScope
CatchClause: enterScope
'CatchClause:exit': exitScope
# Finds and reports references which are outside of valid scope.
Identifier: checkForVariables
| 160609 | ###*
# @fileoverview Rule to check for "block scoped" variables by binding context
# @author <NAME> <http://www.mattduvall.com>
###
'use strict'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'enforce the use of variables within the scope they are defined'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/block-scoped-var'
schema: []
messages:
outOfScope: "'{{name}}' used outside of binding context."
create: (context) ->
stack = []
###*
# Makes a block scope.
# @param {ASTNode} node - A node of a scope.
# @returns {void}
###
enterScope = (node) -> stack.push node.range
###*
# Pops the last block scope.
# @returns {void}
###
exitScope = -> stack.pop()
###*
# Reports a given reference.
# @param {eslint-scope.Reference} reference - A reference to report.
# @returns {void}
###
report = (reference) ->
{identifier} = reference
context.report
node: identifier, messageId: 'outOfScope', data: name: identifier.name
###*
# Finds and reports references which are outside of valid scopes.
# @param {ASTNode} node - A node to get variables.
# @returns {void}
###
checkForVariables = (node) ->
return unless node.declaration
# Defines a predicate to check whether or not a given reference is outside of valid scope.
scopeRange = stack[stack.length - 1]
###*
# Check if a reference is out of scope
# @param {ASTNode} reference node to examine
# @returns {boolean} True is its outside the scope
# @private
###
isOutsideOfScope = (reference) ->
idRange = reference.identifier.range
idRange[0] < scopeRange[0] or idRange[1] > scopeRange[1]
# Gets declared variables, and checks its references.
variables = context.getDeclaredVariables node
# Reports.
for variable in variables
variable.references.filter(isOutsideOfScope).forEach report
Program: (node) -> stack ###:### = [node.range]
# Manages scopes.
BlockStatement: enterScope
'BlockStatement:exit': exitScope
For: enterScope
'For:exit': exitScope
ForStatement: enterScope
'ForStatement:exit': exitScope
ForInStatement: enterScope
'ForInStatement:exit': exitScope
ForOfStatement: enterScope
'ForOfStatement:exit': exitScope
SwitchStatement: enterScope
'SwitchStatement:exit': exitScope
CatchClause: enterScope
'CatchClause:exit': exitScope
# Finds and reports references which are outside of valid scope.
Identifier: checkForVariables
| true | ###*
# @fileoverview Rule to check for "block scoped" variables by binding context
# @author PI:NAME:<NAME>END_PI <http://www.mattduvall.com>
###
'use strict'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'enforce the use of variables within the scope they are defined'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/block-scoped-var'
schema: []
messages:
outOfScope: "'{{name}}' used outside of binding context."
create: (context) ->
stack = []
###*
# Makes a block scope.
# @param {ASTNode} node - A node of a scope.
# @returns {void}
###
enterScope = (node) -> stack.push node.range
###*
# Pops the last block scope.
# @returns {void}
###
exitScope = -> stack.pop()
###*
# Reports a given reference.
# @param {eslint-scope.Reference} reference - A reference to report.
# @returns {void}
###
report = (reference) ->
{identifier} = reference
context.report
node: identifier, messageId: 'outOfScope', data: name: identifier.name
###*
# Finds and reports references which are outside of valid scopes.
# @param {ASTNode} node - A node to get variables.
# @returns {void}
###
checkForVariables = (node) ->
return unless node.declaration
# Defines a predicate to check whether or not a given reference is outside of valid scope.
scopeRange = stack[stack.length - 1]
###*
# Check if a reference is out of scope
# @param {ASTNode} reference node to examine
# @returns {boolean} True is its outside the scope
# @private
###
isOutsideOfScope = (reference) ->
idRange = reference.identifier.range
idRange[0] < scopeRange[0] or idRange[1] > scopeRange[1]
# Gets declared variables, and checks its references.
variables = context.getDeclaredVariables node
# Reports.
for variable in variables
variable.references.filter(isOutsideOfScope).forEach report
Program: (node) -> stack ###:### = [node.range]
# Manages scopes.
BlockStatement: enterScope
'BlockStatement:exit': exitScope
For: enterScope
'For:exit': exitScope
ForStatement: enterScope
'ForStatement:exit': exitScope
ForInStatement: enterScope
'ForInStatement:exit': exitScope
ForOfStatement: enterScope
'ForOfStatement:exit': exitScope
SwitchStatement: enterScope
'SwitchStatement:exit': exitScope
CatchClause: enterScope
'CatchClause:exit': exitScope
# Finds and reports references which are outside of valid scope.
Identifier: checkForVariables
|
[
{
"context": "in the skies.\n\n\n\"\"\",\n keys : {\n passphrase : \"urnsrock\",\n ids : [ \"69B0017B1C3D9917\", \"F4317C265F08C",
"end": 2287,
"score": 0.997698962688446,
"start": 2279,
"tag": "PASSWORD",
"value": "urnsrock"
},
{
"context": "ys : {\n passphrase : \"urnsro... | test/files/msg_roundtrip.iced | thinq4yourself/kbpgp | 1 |
{parse} = require '../../lib/openpgp/parser'
armor = require '../../lib/openpgp/armor'
C = require '../../lib/const'
{do_message,Message} = require '../../lib/openpgp/processor'
util = require 'util'
{unix_time,katch,ASP} = require '../../lib/util'
{KeyManager} = require '../../lib/keymanager'
{import_key_pgp} = require '../../lib/symmetric'
{decrypt} = require '../../lib/openpgp/ocfb'
{PgpKeyRing} = require '../../lib/keyring'
{Literal} = require '../../lib/openpgp/packet/literal'
{burn} = require '../../lib/openpgp/burner'
clearsign = require '../../lib/openpgp/clearsign'
detachsign = require '../../lib/openpgp/detachsign'
hashmod = require '../../lib/hash'
#===============================================================================
data = {
msg : """
Season of mists and mellow fruitfulness
Close bosom-friend of the maturing sun
Conspiring with him how to load and bless
With fruit the vines that round the thatch-eaves run;
To bend with apples the moss'd cottage-trees,
And fill all fruit with ripeness to the core;
To swell the gourd, and plump the hazel shells
With a sweet kernel; to set budding more,
And still more, later flowers for the bees,
Until they think warm days will never cease,
For Summer has o'er-brimm'd their clammy cells.
Who hath not seen thee oft amid thy store?
Sometimes whoever seeks abroad may find
Thee sitting careless on a granary floor,
Thy hair soft-lifted by the winnowing wind;
Or on a half-reap'd furrow sound asleep,
Drows'd with the fume of poppies, while thy hook
Spares the next swath and all its twined flowers:
And sometimes like a gleaner thou dost keep
Steady thy laden head across a brook;
Or by a cider-press, with patient look,
Thou watchest the last oozings hours by hours.
Where are the songs of Spring? Ay, where are they?
Think not of them, thou hast thy music too,-
While barred clouds bloom the soft-dying day,
And touch the stubble-plains with rosy hue;
Then in a wailful choir the small gnats mourn
Among the river sallows, borne aloft
Or sinking as the light wind lives or dies;
And full-grown lambs loud bleat from hilly bourn;
Hedge-crickets sing; and now with treble soft
The red-breast whistles from a garden-croft;
And gathering swallows twitter in the skies.
""",
keys : {
passphrase : "urnsrock",
ids : [ "69B0017B1C3D9917", "F4317C265F08C3A2" ],
blocks : [ """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.14 (GNU/Linux)
lQH+BFJ3A/QBBACukT3BRAH6vVn3KULmWutg5BMzutXyNfd1N53BA+7qAow8HW9Q
IVt1QD4Lw0X31PNvnlr01QinzJ0vK5TiZQtlJIOnjJ3iJ3vlMiPQwe26UkN7g4WZ
kmD/ceGioJa6iM9B2cN6IM/cGO33g7zi+f20I8z7lcvJp2Zt2hHQysSoDQARAQAB
/gMDAtVo6Z0kF21hYDSYOTEoTzS0U9hphymRV5qzfYMyM1cT+Swtj2uUR/chfoH5
m9C3sUb9ykwW7LAsbD2AGgjuGQJRQbvudQR+CApk85uNutq8soLTUNqs7hjE6s7y
qOBBYzubuq2JNc1Dl4wJz5CUV6j8ZTa1qLHVVbFeVLOMbXKygjpGZPtNSImmrB5d
MwcsaeWV8YHlhHzdWllKYzcz9jb7sVOMFxlZiTlOhFAbp675OxHl0qKUFdvSA4m1
dYxacp8x7cwrWvQo6WpWHbdGlDYngTmziAf2MjzL1JNRkUTg738Ya8UC7Gzmwbku
DIdswHfpQk3FsickwE06c/lm4EBK180fAxn0h7Pb5JsANW04w5szVIiD9/t9GyK5
8VWdpix3m9V79pqT00GM5qjjr6Al20ygoC9NWoi50mj99vf8NxoYdHjwcSD3l50w
9c60ULBPXjq099IijQWtVkQc14KcOiFze/3SE6Zo+f5DtCpKb2huIEtlYXRzIChK
b2hubnkpIDxqb2huLmtlYXRzQGdtYWlsLmNvbT6IvgQTAQIAKAUCUncD9AIbAwUJ
OGQJAAYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQabABexw9mReQHAP+MyeG
OHi+qRHmCOejziLBl4DZwBeOiEGo/5SJUfl0E76kkG5TAD15pWZbpd4LcBYH2xQe
cAXbdZEdUYpuGCuBeHTQxD+cVAmq1aguCMT54K13V/h8VNGkF4suWrpeMomWcD7O
5Dtqw9OvW/EV10Bk4KnjtnK7HIVP3f9ddbp+NjadAf4EUncD9AEEAK4xy2sVg5mq
QY5gDBYy1uPtFCd3+8iZGVChRApwLnba2kJx1buO9MqgXZ0wmoEJzLgfN5dVDfAQ
ypfltBJgzCXsxbUFCW6m5NV+GjKhIsF4AtMfCfymIsughig55ySw9rurpKmHKEDe
FDsMzKAcpEnaK/4+VY27nlwNFboW5MbFABEBAAH+AwMC1WjpnSQXbWFgPHyfO1zk
Pt44CsEJ5DlvoMIwg8x662EN+zYY3e8MTKcKgtT5qTsZer3puBe7WHDc6YzzCGJq
9XsBNkwnvxt9s3PCHqCkDll0JV6JLP32R7hGnYVFDj9HbMsc6HC9Rp29Rm2nX7L3
NUrSZEMS6Za7KSURMffxIUJqjm9aq9spRUmL9i2IM3ah5XQdrl7s9dJmDdEKG9un
hcgab9DVEMSoL60QH+YpZf2SKFMq0TJIA7g9WYn6MgHuzNhlFlMC2GBPgu+tvY2z
FUNpL44gmk1wPXpcO2BKlnUNsmfc/5Mz1nsWfFcFaUiipmM75niodXOKZxtmlee7
1Vz4enh7m7SKUHiFsQKnPU3egT49OdPonaAaTW7GyhOEj6onW297A7oaZE3zExcB
Rdv2SxgLLMhK6SxADNnTgKwaUcbXX7lWeGtoabtTn6BR9Z/Ljof1JNQc3MqYqLaw
maHbIcP9kZHPAf2ouO2IpQQYAQIADwUCUncD9AIbDAUJOGQJAAAKCRBpsAF7HD2Z
F7ncA/40tgyKkAQLVO6CUiIqq6Vrmyn/sEEqv7CoaH2lRosAaOVBR0AuUUos26ZB
adQQqGYWu6c8XPIOMUVpeMFbBKBkiJvHh2DNlCE3XhbUF0guwZ/GYihFzf4iRZA8
g75kAuLgjcwnMyBzzDLAjyUlgKLBFT9dubmmVk9YWdvEOiB+2g==
=kzrG
-----END PGP PRIVATE KEY BLOCK-----
""", """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.14 (GNU/Linux)
lQH+BFJ3BJYBBADi3DxAFfX6ZNfwHmzgdrgwwaBdYQ1hWusclf2lVRu7Pj0Ha2OK
vsNBH/yRIRNjv+YAGXGoNwEmYVyFllNUbXYZwJKA+GmoC4ZMtCAT6sS2/6AM1syi
/FxGGvJYzBU0HAgb8KeUmNCBe9WBQQsmWrygjfig1RvhT55Ca0Y+fpcnfwARAQAB
/gMDAkqm8JYGqR9OYAnwLxwv9rH0YKH6JY68Kvy+cnGiDqi2LRW1GC8IyVa/qbxt
Ak2RVlTRy2fYfTMxpZpawpSUOkIi5t8ZOJVcc8lGOYWPF0M2G3Xad7zERBPzNy1Y
aE6UiFNkQSVF04eBBw6AbVAvkMlLewbrznoHhmaWWBtNrig0AD+AnmAkvbZKzBjU
5QmAmlQgjegdWjmt1mS6/uLWXZ2vCir54LidFVeO3Tn7ZZNLoKlQfHz0lkH6NRfe
QIRnSOqwzLqWePqNGKyjn16bBSRQC8sdmLanIC7om86DDoFxu66nGvC5WbixC4La
Mdu4WqKF3yyGYmfdFsHDQF9t2D3BxThe5y+BrUv2SK6cRxcrTCWITibsDU51SBcW
hw5EYlP/HrrS702urC7IJLQ5k39KpA1S+G0KzEm50glJuW8OkbNUIDnWsRIYAEuF
/6wW35HGrGOOqYQBei81Bu5gBKDdNpsQLIuD8X++nY3WtCtXaWxsaWFtIFdvcmRz
d29ydGggKEJpbGx5KSA8d3c3MEBnbWFpbC5jb20+iL4EEwECACgFAlJ3BJYCGwMF
CThkCQAGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEPQxfCZfCMOiWhYD/2Fi
CaiDG1qcNt/dOQ9uRkRuKJUX260PhWB+QzVSLy5v8HaAaefGEQhnCWu/OLSjQyku
fVTjydTqn9kNtPAumXOd20x5i3dRzvTUAEEtjVtR1wAfSdcKgpGYwakbMTGAe/QS
gfjTyV2aMfSrrSRLI4hSYqmbkk2zixmKNa4cFuzGnQH9BFJ3BJYBBADFc2tuLtBX
FgOXQgarKa3EMM3I6QdmbDde15Tr7Cm8jb+JGl5lT3gMqLb5gRkbdIrDEFblhTbG
A+ZeUyWe2bf3EeV6h7v4uYVmXDI+B4ej4qy1kyb2hezheDgkd94qG5/ccjBqT/rg
gZwGn+nSB3tOxVxJwvQUVJ2icRfaDUnqSQARAQAB/gMDAkqm8JYGqR9OYAO4FR6T
RpHtyJWgE8ycC50IdlKTJBnnj1IcSbbFZfHg/z97NIATq+UMZrd+kuvb9o4DiV5h
f4XKB+WAe8ZWT48XNyw2JZnuE65JINkpkBFgZPPBmkaR9bz3Zk94sagaVPo2z3FU
uwrr1KPcKQU/hslGyr9yu/B06UQ419ZaeXyrTUaVmi4fBDAYTT6+gdH+Ae8GCgMF
+p+AZM22vi4bSlTg88YCEZ/g5F9D0Uatz1XxpcAE88CwbWZJ2kPyVa23bQqJdttf
RDXUk3EBmO8rbvHSGaubjexCALsR7ve9qYIkUGgMo2c8akvIrNai8v/fEU+hKUbY
7MDSvfDzLziwONHo9FmZNKWaunFiN0xr6TIV//u+nPQH5FXZGVlGV+oJIRCBQNKa
yr6vUm5Y6CGDazMH9roPCFfKASyJhgXNsnWiFmd0qcR9fDoOzM/ytM7j+NFPw0io
+zvizUF/LjaUfPhh2tuIpQQYAQIADwUCUncElgIbDAUJOGQJAAAKCRD0MXwmXwjD
olo1A/9gvmuwrKuqepO9/5gRei8vM1S3JmWjtRIRaf+RL3BNVQC9YUBDQ1Q/kLTO
bgfb9tUj1ukZ/e5y5hIC0y9zKJmJ7yFPucnRwQ9fTdx3vibCm86sv9PPs2aA2SwP
puPX3hq9W6Ojdj8mG9DksKH5C9f2bCeNL8aa0gHa6ZrzMof5uQ==
=ieHK
-----END PGP PRIVATE KEY BLOCK-----
"""] } }
#===============================================================
load_keyring = (T,cb) ->
ring = new PgpKeyRing()
asp = new ASP {}
for b in data.keys.blocks
await KeyManager.import_from_armored_pgp { raw : b, asp }, defer err, km
T.no_error err
T.waypoint "imported decryption key"
await km.unlock_pgp { passphrase : data.keys.passphrase }, defer err
T.no_error err
T.waypoint "unlocked decryption key"
ring.add_key_manager km
cb ring
#===============================================================
ring = literals = null
exports.init = (T,cb) ->
await load_keyring T, defer tmp
ring = tmp
literals = [ new Literal {
data : new Buffer(data.msg)
format : C.openpgp.literal_formats.utf8
date : unix_time()
}]
cb()
#===============================================================
# Also test various new-line scenarios.
exports.clear_sign_1 = (T,cb) -> clear_sign data.msg, T, cb
exports.clear_sign_2 = (T,cb) -> clear_sign "foo\nbar", T, cb
exports.clear_sign_3 = (T,cb) -> clear_sign "foo\nbar\n\n\n", T, cb
exports.clear_sign_4 = (T,cb) -> clear_sign "foo", T, cb
exports.clear_sign_5 = (T,cb) -> clear_sign "foo\n\n\n\nbar", T, cb
# And dash-encoding
exports.clear_sign_6 = (T,cb) -> clear_sign "-what\n-is\n---up?", T, cb
exports.clear_sign_7 = (T,cb) -> clear_sign "- what\n- is\n- up?", T, cb
exports.clear_sign_8 = (T,cb) -> clear_sign "-----------------word", T, cb
clear_sign = (msg, T,cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer msg, 'utf8'
await clearsign.sign { signing_key, msg }, defer err, outmsg
T.no_error err
await do_message { keyfetch : ring, armored : outmsg }, defer err, literals
T.no_error err
cb()
#===============================================================
exports.detached_sign_wholesale = (T, cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer data.msg, 'utf8'
await detachsign.sign { signing_key, data : msg }, defer err, outmsg
throw err if err?
T.no_error err
await do_message { data : msg, keyfetch : ring, armored : outmsg }, defer err
throw err if err?
T.no_error err
cb()
#===============================================================
exports.detached_sign_streaming = (T, cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer data.msg, 'utf8'
hash_streamer = hashmod.streamers.SHA384()
hash_streamer.update(msg)
await detachsign.sign { hash_streamer, signing_key }, defer err, outmsg
throw err if err?
T.no_error err
await do_message { data : msg, keyfetch : ring, armored : outmsg }, defer err
throw err if err?
T.no_error err
cb()
#===============================================================
exports.encrypt = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
T.no_error err
await burn { literals, encryption_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext }, defer err, out
T.no_error err
T.assert (not out[0].get_data_signer()?), "wasn't signed"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.sign = (T,cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, signing_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext}, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.encrypt_and_sign = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, encryption_key, signing_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext}, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.encrypt_and_sign_armor = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, encryption_key, signing_key }, defer err, actext, ctext
T.no_error err
[err,msg] = armor.decode actext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process msg, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
| 28393 |
{parse} = require '../../lib/openpgp/parser'
armor = require '../../lib/openpgp/armor'
C = require '../../lib/const'
{do_message,Message} = require '../../lib/openpgp/processor'
util = require 'util'
{unix_time,katch,ASP} = require '../../lib/util'
{KeyManager} = require '../../lib/keymanager'
{import_key_pgp} = require '../../lib/symmetric'
{decrypt} = require '../../lib/openpgp/ocfb'
{PgpKeyRing} = require '../../lib/keyring'
{Literal} = require '../../lib/openpgp/packet/literal'
{burn} = require '../../lib/openpgp/burner'
clearsign = require '../../lib/openpgp/clearsign'
detachsign = require '../../lib/openpgp/detachsign'
hashmod = require '../../lib/hash'
#===============================================================================
data = {
msg : """
Season of mists and mellow fruitfulness
Close bosom-friend of the maturing sun
Conspiring with him how to load and bless
With fruit the vines that round the thatch-eaves run;
To bend with apples the moss'd cottage-trees,
And fill all fruit with ripeness to the core;
To swell the gourd, and plump the hazel shells
With a sweet kernel; to set budding more,
And still more, later flowers for the bees,
Until they think warm days will never cease,
For Summer has o'er-brimm'd their clammy cells.
Who hath not seen thee oft amid thy store?
Sometimes whoever seeks abroad may find
Thee sitting careless on a granary floor,
Thy hair soft-lifted by the winnowing wind;
Or on a half-reap'd furrow sound asleep,
Drows'd with the fume of poppies, while thy hook
Spares the next swath and all its twined flowers:
And sometimes like a gleaner thou dost keep
Steady thy laden head across a brook;
Or by a cider-press, with patient look,
Thou watchest the last oozings hours by hours.
Where are the songs of Spring? Ay, where are they?
Think not of them, thou hast thy music too,-
While barred clouds bloom the soft-dying day,
And touch the stubble-plains with rosy hue;
Then in a wailful choir the small gnats mourn
Among the river sallows, borne aloft
Or sinking as the light wind lives or dies;
And full-grown lambs loud bleat from hilly bourn;
Hedge-crickets sing; and now with treble soft
The red-breast whistles from a garden-croft;
And gathering swallows twitter in the skies.
""",
keys : {
passphrase : "<PASSWORD>",
ids : [ "<KEY>", "F4317C265F08C3A2" ],
blocks : [ """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.14 (GNU/Linux)
<KEY>
<KEY>
<KEY>
-----END PGP PRIVATE KEY BLOCK-----
""", """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.14 (GNU/Linux)
<KEY>
-----END PGP PRIVATE KEY BLOCK-----
"""] } }
#===============================================================
load_keyring = (T,cb) ->
ring = new PgpKeyRing()
asp = new ASP {}
for b in data.keys.blocks
await KeyManager.import_from_armored_pgp { raw : b, asp }, defer err, km
T.no_error err
T.waypoint "imported decryption key"
await km.unlock_pgp { passphrase : data.keys.passphrase }, defer err
T.no_error err
T.waypoint "unlocked decryption key"
ring.add_key_manager km
cb ring
#===============================================================
ring = literals = null
exports.init = (T,cb) ->
await load_keyring T, defer tmp
ring = tmp
literals = [ new Literal {
data : new Buffer(data.msg)
format : C.openpgp.literal_formats.utf8
date : unix_time()
}]
cb()
#===============================================================
# Also test various new-line scenarios.
exports.clear_sign_1 = (T,cb) -> clear_sign data.msg, T, cb
exports.clear_sign_2 = (T,cb) -> clear_sign "foo\nbar", T, cb
exports.clear_sign_3 = (T,cb) -> clear_sign "foo\nbar\n\n\n", T, cb
exports.clear_sign_4 = (T,cb) -> clear_sign "foo", T, cb
exports.clear_sign_5 = (T,cb) -> clear_sign "foo\n\n\n\nbar", T, cb
# And dash-encoding
exports.clear_sign_6 = (T,cb) -> clear_sign "-what\n-is\n---up?", T, cb
exports.clear_sign_7 = (T,cb) -> clear_sign "- what\n- is\n- up?", T, cb
exports.clear_sign_8 = (T,cb) -> clear_sign "-----------------word", T, cb
clear_sign = (msg, T,cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer msg, 'utf8'
await clearsign.sign { signing_key, msg }, defer err, outmsg
T.no_error err
await do_message { keyfetch : ring, armored : outmsg }, defer err, literals
T.no_error err
cb()
#===============================================================
exports.detached_sign_wholesale = (T, cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer data.msg, 'utf8'
await detachsign.sign { signing_key, data : msg }, defer err, outmsg
throw err if err?
T.no_error err
await do_message { data : msg, keyfetch : ring, armored : outmsg }, defer err
throw err if err?
T.no_error err
cb()
#===============================================================
exports.detached_sign_streaming = (T, cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer data.msg, 'utf8'
hash_streamer = hashmod.streamers.SHA384()
hash_streamer.update(msg)
await detachsign.sign { hash_streamer, signing_key }, defer err, outmsg
throw err if err?
T.no_error err
await do_message { data : msg, keyfetch : ring, armored : outmsg }, defer err
throw err if err?
T.no_error err
cb()
#===============================================================
exports.encrypt = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
T.no_error err
await burn { literals, encryption_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext }, defer err, out
T.no_error err
T.assert (not out[0].get_data_signer()?), "wasn't signed"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.sign = (T,cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, signing_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext}, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.encrypt_and_sign = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, encryption_key, signing_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext}, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.encrypt_and_sign_armor = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, encryption_key, signing_key }, defer err, actext, ctext
T.no_error err
[err,msg] = armor.decode actext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process msg, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
| true |
{parse} = require '../../lib/openpgp/parser'
armor = require '../../lib/openpgp/armor'
C = require '../../lib/const'
{do_message,Message} = require '../../lib/openpgp/processor'
util = require 'util'
{unix_time,katch,ASP} = require '../../lib/util'
{KeyManager} = require '../../lib/keymanager'
{import_key_pgp} = require '../../lib/symmetric'
{decrypt} = require '../../lib/openpgp/ocfb'
{PgpKeyRing} = require '../../lib/keyring'
{Literal} = require '../../lib/openpgp/packet/literal'
{burn} = require '../../lib/openpgp/burner'
clearsign = require '../../lib/openpgp/clearsign'
detachsign = require '../../lib/openpgp/detachsign'
hashmod = require '../../lib/hash'
#===============================================================================
data = {
msg : """
Season of mists and mellow fruitfulness
Close bosom-friend of the maturing sun
Conspiring with him how to load and bless
With fruit the vines that round the thatch-eaves run;
To bend with apples the moss'd cottage-trees,
And fill all fruit with ripeness to the core;
To swell the gourd, and plump the hazel shells
With a sweet kernel; to set budding more,
And still more, later flowers for the bees,
Until they think warm days will never cease,
For Summer has o'er-brimm'd their clammy cells.
Who hath not seen thee oft amid thy store?
Sometimes whoever seeks abroad may find
Thee sitting careless on a granary floor,
Thy hair soft-lifted by the winnowing wind;
Or on a half-reap'd furrow sound asleep,
Drows'd with the fume of poppies, while thy hook
Spares the next swath and all its twined flowers:
And sometimes like a gleaner thou dost keep
Steady thy laden head across a brook;
Or by a cider-press, with patient look,
Thou watchest the last oozings hours by hours.
Where are the songs of Spring? Ay, where are they?
Think not of them, thou hast thy music too,-
While barred clouds bloom the soft-dying day,
And touch the stubble-plains with rosy hue;
Then in a wailful choir the small gnats mourn
Among the river sallows, borne aloft
Or sinking as the light wind lives or dies;
And full-grown lambs loud bleat from hilly bourn;
Hedge-crickets sing; and now with treble soft
The red-breast whistles from a garden-croft;
And gathering swallows twitter in the skies.
""",
keys : {
passphrase : "PI:PASSWORD:<PASSWORD>END_PI",
ids : [ "PI:KEY:<KEY>END_PI", "F4317C265F08C3A2" ],
blocks : [ """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.14 (GNU/Linux)
PI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PI
-----END PGP PRIVATE KEY BLOCK-----
""", """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.14 (GNU/Linux)
PI:KEY:<KEY>END_PI
-----END PGP PRIVATE KEY BLOCK-----
"""] } }
#===============================================================
load_keyring = (T,cb) ->
ring = new PgpKeyRing()
asp = new ASP {}
for b in data.keys.blocks
await KeyManager.import_from_armored_pgp { raw : b, asp }, defer err, km
T.no_error err
T.waypoint "imported decryption key"
await km.unlock_pgp { passphrase : data.keys.passphrase }, defer err
T.no_error err
T.waypoint "unlocked decryption key"
ring.add_key_manager km
cb ring
#===============================================================
ring = literals = null
exports.init = (T,cb) ->
await load_keyring T, defer tmp
ring = tmp
literals = [ new Literal {
data : new Buffer(data.msg)
format : C.openpgp.literal_formats.utf8
date : unix_time()
}]
cb()
#===============================================================
# Also test various new-line scenarios.
exports.clear_sign_1 = (T,cb) -> clear_sign data.msg, T, cb
exports.clear_sign_2 = (T,cb) -> clear_sign "foo\nbar", T, cb
exports.clear_sign_3 = (T,cb) -> clear_sign "foo\nbar\n\n\n", T, cb
exports.clear_sign_4 = (T,cb) -> clear_sign "foo", T, cb
exports.clear_sign_5 = (T,cb) -> clear_sign "foo\n\n\n\nbar", T, cb
# And dash-encoding
exports.clear_sign_6 = (T,cb) -> clear_sign "-what\n-is\n---up?", T, cb
exports.clear_sign_7 = (T,cb) -> clear_sign "- what\n- is\n- up?", T, cb
exports.clear_sign_8 = (T,cb) -> clear_sign "-----------------word", T, cb
clear_sign = (msg, T,cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer msg, 'utf8'
await clearsign.sign { signing_key, msg }, defer err, outmsg
T.no_error err
await do_message { keyfetch : ring, armored : outmsg }, defer err, literals
T.no_error err
cb()
#===============================================================
exports.detached_sign_wholesale = (T, cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer data.msg, 'utf8'
await detachsign.sign { signing_key, data : msg }, defer err, outmsg
throw err if err?
T.no_error err
await do_message { data : msg, keyfetch : ring, armored : outmsg }, defer err
throw err if err?
T.no_error err
cb()
#===============================================================
exports.detached_sign_streaming = (T, cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags }, defer err, signing_key
T.no_error err
msg = new Buffer data.msg, 'utf8'
hash_streamer = hashmod.streamers.SHA384()
hash_streamer.update(msg)
await detachsign.sign { hash_streamer, signing_key }, defer err, outmsg
throw err if err?
T.no_error err
await do_message { data : msg, keyfetch : ring, armored : outmsg }, defer err
throw err if err?
T.no_error err
cb()
#===============================================================
exports.encrypt = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
T.no_error err
await burn { literals, encryption_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext }, defer err, out
T.no_error err
T.assert (not out[0].get_data_signer()?), "wasn't signed"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.sign = (T,cb) ->
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, signing_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext}, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.encrypt_and_sign = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, encryption_key, signing_key }, defer err, armored, ctext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext}, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
exports.encrypt_and_sign_armor = (T,cb) ->
key_id = new Buffer data.keys.ids[0], 'hex'
flags = C.openpgp.key_flags.encrypt_comm
await ring.find_best_key { key_id, flags}, defer err, encryption_key
key_id = new Buffer data.keys.ids[1], 'hex'
flags = C.openpgp.key_flags.sign_data
await ring.find_best_key { key_id, flags}, defer err, signing_key
T.no_error err
await burn { literals, encryption_key, signing_key }, defer err, actext, ctext
T.no_error err
[err,msg] = armor.decode actext
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process msg, defer err, out
T.no_error err
T.assert (out[0].get_data_signer()?), "was signed!"
T.equal data.msg, out[0].toString(), "message came back right"
cb()
#===============================================================
|
[
{
"context": ": 'It'\n\t\tgross: 327481748\n\t},\n\t{\n\t\tmovie: 'Thor: Ragnarok'\n\t\tgross: 315034967\n\t},\n\t{\n\t\tmovie: 'Despicable M",
"end": 806,
"score": 0.6389152407646179,
"start": 799,
"tag": "NAME",
"value": "agnarok"
},
{
"context": "ovie: 'Coco'\n\t\tgross: 208840284... | prototypes/barChartD3.framer/app.coffee | davo/FramerMunichMeetup | 2 | # Import D3.js
appendScript = (src) ->
parseScr = Utils.domLoadDataSync src
script = document.createElement 'script'
script.type = 'text/javascript'
script.innerHTML = parseScr
document.head.appendChild script
appendScript 'https://cdnjs.cloudflare.com/ajax/libs/d3/4.13.0/d3.min.js'
# Data
# Top 20 Movies in 2017 by Box Office, in millions of dollars
data = [
{
movie: 'Star Wars: The Last Jedi'
gross: 619483244
},
{
movie: 'Beauty and the Beast'
gross: 504014165
},
{
movie: 'Wonder Woman'
gross: 412563408
},
{
movie: 'Jumanji: Welcome to the Jungle'
gross: 397250264
},
{
movie: 'Guardians of the Galaxy Vol. 2'
gross: 389813101
},
{
movie: 'Spider-Man: Homecoming'
gross: 334201140
},
{
movie: 'It'
gross: 327481748
},
{
movie: 'Thor: Ragnarok'
gross: 315034967
},
{
movie: 'Despicable Me 3'
gross: 264624300
},
{
movie: 'Justice League'
gross: 229007315
},
{
movie: 'Logan'
gross: 226277068
},
{
movie: 'The Fate of the Furious'
gross: 226008385
},
{
movie: 'Coco'
gross: 208840284
},
{
movie: 'Dunkirk'
gross: 188045546
},
{
movie: 'Get Out'
gross: 176040665
},
{
movie: 'The LEGO Batman Movie'
gross: 175750384
},
{
movie: 'The Boss Baby'
gross: 175003033
},
{
movie: 'Pirates of the Caribbean: Dead Men Tell No Tales'
gross: 172558876
},
{
movie: 'Kong: Skull Island'
gross: 168052812
},
{
movie: 'The Greatest Showman'
gross: 167614558
}
]
chart.borderColor = null
barTemplate.visible = false
tag = tooltipTemplate.selectChild '.tag'
tag.visible = false
tooltipTemplate.opacity = 0
movieTitle = tooltipTemplate.selectChild 'movieTitle'
totalGross = tooltipTemplate.selectChild 'totalGross'
# Array to store the bars
bars = []
# Setting up D3.js
# 1. Scales
# We need to set up some ranges for our data.
# Using d3.scaleBand() to distribute the
# collection of movie titles between 0 and the width of our chart.
x = d3.scaleBand()
.rangeRound([0, chart.width])
# Keep working
y = d3.scaleLinear()
.range([0, chart.height])
formatNumber = d3.format('.1f')
tooltipTemplate.midX = 1
data.forEach (d, i) ->
x.domain data.map((d) -> d.movie)
y.domain [0, d3.max data, (d) -> d.gross]
bar = new SliderComponent
name: '.bar'
backgroundColor: null
parent: chart
rotation: 180
min: 0
max: chart.height
width: 20
value: 20
height: chart.height
x: x d.movie
custom: [d.movie, d.gross]
bars.push bar
bar.animate
value: y d.gross
options:
time: 0.5
delay: 0.02 * i
bar.fill.gradient = barTemplate.gradient
bar.fill.borderRadius = barTemplate.borderRadius
bar.sliderOverlay.off Events.TapStart
bar.knob.draggable = false
bar.knob.visible = false
bar.knob.name = '.'
bar.fill.name = '.'
bar.sliderOverlay.name = '.'
bar.on 'mouseover', () ->
bars.map (b) ->
b.opacity = 0.48
@.opacity = 1
number = formatNumber(bar.custom[1] / 1e6)
movieTitle.template =
movieTitle: bar.custom[0]
totalGross.template =
totalGross: "$#{ number }M"
tooltipTemplate.animate
opacity: 1
x: bar.x + tooltipTemplate.width / 2
y: bar.height - bar.value
rotationX: 0
options:
time: 0.25
bar.on 'mouseout', () ->
bars.map (b) ->
b.animate
opacity: 1
options:
time: 0.05
tooltipTemplate.animate
opacity: 0
y: tooltipTemplate.y + 12
options:
time: 0.25
| 10006 | # Import D3.js
appendScript = (src) ->
parseScr = Utils.domLoadDataSync src
script = document.createElement 'script'
script.type = 'text/javascript'
script.innerHTML = parseScr
document.head.appendChild script
appendScript 'https://cdnjs.cloudflare.com/ajax/libs/d3/4.13.0/d3.min.js'
# Data
# Top 20 Movies in 2017 by Box Office, in millions of dollars
data = [
{
movie: 'Star Wars: The Last Jedi'
gross: 619483244
},
{
movie: 'Beauty and the Beast'
gross: 504014165
},
{
movie: 'Wonder Woman'
gross: 412563408
},
{
movie: 'Jumanji: Welcome to the Jungle'
gross: 397250264
},
{
movie: 'Guardians of the Galaxy Vol. 2'
gross: 389813101
},
{
movie: 'Spider-Man: Homecoming'
gross: 334201140
},
{
movie: 'It'
gross: 327481748
},
{
movie: 'Thor: R<NAME>'
gross: 315034967
},
{
movie: 'Despicable Me 3'
gross: 264624300
},
{
movie: 'Justice League'
gross: 229007315
},
{
movie: 'Logan'
gross: 226277068
},
{
movie: 'The Fate of the Furious'
gross: 226008385
},
{
movie: 'Coco'
gross: 208840284
},
{
movie: 'D<NAME>irk'
gross: 188045546
},
{
movie: 'Get Out'
gross: 176040665
},
{
movie: 'The LEGO Batman Movie'
gross: 175750384
},
{
movie: 'The Boss Baby'
gross: 175003033
},
{
movie: 'Pirates of the Caribbean: Dead Men Tell No Tales'
gross: 172558876
},
{
movie: 'Kong: Skull Island'
gross: 168052812
},
{
movie: 'The Greatest Showman'
gross: 167614558
}
]
chart.borderColor = null
barTemplate.visible = false
tag = tooltipTemplate.selectChild '.tag'
tag.visible = false
tooltipTemplate.opacity = 0
movieTitle = tooltipTemplate.selectChild 'movieTitle'
totalGross = tooltipTemplate.selectChild 'totalGross'
# Array to store the bars
bars = []
# Setting up D3.js
# 1. Scales
# We need to set up some ranges for our data.
# Using d3.scaleBand() to distribute the
# collection of movie titles between 0 and the width of our chart.
x = d3.scaleBand()
.rangeRound([0, chart.width])
# Keep working
y = d3.scaleLinear()
.range([0, chart.height])
formatNumber = d3.format('.1f')
tooltipTemplate.midX = 1
data.forEach (d, i) ->
x.domain data.map((d) -> d.movie)
y.domain [0, d3.max data, (d) -> d.gross]
bar = new SliderComponent
name: '.bar'
backgroundColor: null
parent: chart
rotation: 180
min: 0
max: chart.height
width: 20
value: 20
height: chart.height
x: x d.movie
custom: [d.movie, d.gross]
bars.push bar
bar.animate
value: y d.gross
options:
time: 0.5
delay: 0.02 * i
bar.fill.gradient = barTemplate.gradient
bar.fill.borderRadius = barTemplate.borderRadius
bar.sliderOverlay.off Events.TapStart
bar.knob.draggable = false
bar.knob.visible = false
bar.knob.name = '.'
bar.fill.name = '.'
bar.sliderOverlay.name = '.'
bar.on 'mouseover', () ->
bars.map (b) ->
b.opacity = 0.48
@.opacity = 1
number = formatNumber(bar.custom[1] / 1e6)
movieTitle.template =
movieTitle: bar.custom[0]
totalGross.template =
totalGross: "$#{ number }M"
tooltipTemplate.animate
opacity: 1
x: bar.x + tooltipTemplate.width / 2
y: bar.height - bar.value
rotationX: 0
options:
time: 0.25
bar.on 'mouseout', () ->
bars.map (b) ->
b.animate
opacity: 1
options:
time: 0.05
tooltipTemplate.animate
opacity: 0
y: tooltipTemplate.y + 12
options:
time: 0.25
| true | # Import D3.js
appendScript = (src) ->
parseScr = Utils.domLoadDataSync src
script = document.createElement 'script'
script.type = 'text/javascript'
script.innerHTML = parseScr
document.head.appendChild script
appendScript 'https://cdnjs.cloudflare.com/ajax/libs/d3/4.13.0/d3.min.js'
# Data
# Top 20 Movies in 2017 by Box Office, in millions of dollars
data = [
{
movie: 'Star Wars: The Last Jedi'
gross: 619483244
},
{
movie: 'Beauty and the Beast'
gross: 504014165
},
{
movie: 'Wonder Woman'
gross: 412563408
},
{
movie: 'Jumanji: Welcome to the Jungle'
gross: 397250264
},
{
movie: 'Guardians of the Galaxy Vol. 2'
gross: 389813101
},
{
movie: 'Spider-Man: Homecoming'
gross: 334201140
},
{
movie: 'It'
gross: 327481748
},
{
movie: 'Thor: RPI:NAME:<NAME>END_PI'
gross: 315034967
},
{
movie: 'Despicable Me 3'
gross: 264624300
},
{
movie: 'Justice League'
gross: 229007315
},
{
movie: 'Logan'
gross: 226277068
},
{
movie: 'The Fate of the Furious'
gross: 226008385
},
{
movie: 'Coco'
gross: 208840284
},
{
movie: 'DPI:NAME:<NAME>END_PIirk'
gross: 188045546
},
{
movie: 'Get Out'
gross: 176040665
},
{
movie: 'The LEGO Batman Movie'
gross: 175750384
},
{
movie: 'The Boss Baby'
gross: 175003033
},
{
movie: 'Pirates of the Caribbean: Dead Men Tell No Tales'
gross: 172558876
},
{
movie: 'Kong: Skull Island'
gross: 168052812
},
{
movie: 'The Greatest Showman'
gross: 167614558
}
]
chart.borderColor = null
barTemplate.visible = false
tag = tooltipTemplate.selectChild '.tag'
tag.visible = false
tooltipTemplate.opacity = 0
movieTitle = tooltipTemplate.selectChild 'movieTitle'
totalGross = tooltipTemplate.selectChild 'totalGross'
# Array to store the bars
bars = []
# Setting up D3.js
# 1. Scales
# We need to set up some ranges for our data.
# Using d3.scaleBand() to distribute the
# collection of movie titles between 0 and the width of our chart.
x = d3.scaleBand()
.rangeRound([0, chart.width])
# Keep working
y = d3.scaleLinear()
.range([0, chart.height])
formatNumber = d3.format('.1f')
tooltipTemplate.midX = 1
data.forEach (d, i) ->
x.domain data.map((d) -> d.movie)
y.domain [0, d3.max data, (d) -> d.gross]
bar = new SliderComponent
name: '.bar'
backgroundColor: null
parent: chart
rotation: 180
min: 0
max: chart.height
width: 20
value: 20
height: chart.height
x: x d.movie
custom: [d.movie, d.gross]
bars.push bar
bar.animate
value: y d.gross
options:
time: 0.5
delay: 0.02 * i
bar.fill.gradient = barTemplate.gradient
bar.fill.borderRadius = barTemplate.borderRadius
bar.sliderOverlay.off Events.TapStart
bar.knob.draggable = false
bar.knob.visible = false
bar.knob.name = '.'
bar.fill.name = '.'
bar.sliderOverlay.name = '.'
bar.on 'mouseover', () ->
bars.map (b) ->
b.opacity = 0.48
@.opacity = 1
number = formatNumber(bar.custom[1] / 1e6)
movieTitle.template =
movieTitle: bar.custom[0]
totalGross.template =
totalGross: "$#{ number }M"
tooltipTemplate.animate
opacity: 1
x: bar.x + tooltipTemplate.width / 2
y: bar.height - bar.value
rotationX: 0
options:
time: 0.25
bar.on 'mouseout', () ->
bars.map (b) ->
b.animate
opacity: 1
options:
time: 0.05
tooltipTemplate.animate
opacity: 0
y: tooltipTemplate.y + 12
options:
time: 0.25
|
[
{
"context": "###\n backbone-orm.js 0.5.12\n Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm\n Lice",
"end": 58,
"score": 0.9987055063247681,
"start": 50,
"tag": "NAME",
"value": "Vidigami"
},
{
"context": " Copyright (c) 2013 Vidigami - https://github.com/v... | src/cache/cursor.coffee | michaelBenin/backbone-orm | 1 | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 Vidigami - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
_ = require 'underscore'
# @private
module.exports = class CacheCursor extends require('../cursor')
toJSON: (callback) -> @wrapped_sync_fn('cursor', _.extend({}, @_find, @_cursor)).toJSON callback
| 171348 | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 <NAME> - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
_ = require 'underscore'
# @private
module.exports = class CacheCursor extends require('../cursor')
toJSON: (callback) -> @wrapped_sync_fn('cursor', _.extend({}, @_find, @_cursor)).toJSON callback
| true | ###
backbone-orm.js 0.5.12
Copyright (c) 2013 PI:NAME:<NAME>END_PI - https://github.com/vidigami/backbone-orm
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Dependencies: Backbone.js, Underscore.js, and Moment.js.
###
_ = require 'underscore'
# @private
module.exports = class CacheCursor extends require('../cursor')
toJSON: (callback) -> @wrapped_sync_fn('cursor', _.extend({}, @_find, @_cursor)).toJSON callback
|
[
{
"context": "in@HADOOP.RYBA'\n kadmin_password: 'test'\n kdc: ['a.fqdn']\n ",
"end": 1073,
"score": 0.9994955062866211,
"start": 1069,
"tag": "PASSWORD",
"value": "test"
},
{
"context": "\n krb5: user:\n password: ... | packages/metal/hadoop/yarn_ts/test.coffee | ryba-io/ryba | 24 |
normalize = require 'masson/lib/config/normalize'
store = require 'masson/lib/config/store'
nikita = require 'nikita'
fs = require 'fs'
describe 'hadoop.yarn_nm', ->
tmp = '/tmp/masson-test'
beforeEach ->
require('module')._cache = {}
nikita
.system.mkdir target: tmp
.promise()
afterEach ->
nikita
.system.remove tmp
.promise()
it 'validate heapsize and newsize', ->
services = []
store normalize
clusters: 'ryba': services:
'java':
module: 'masson/commons/java'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
'cgroups':
module: 'masson/core/cgroups'
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
'krb5_client':
module: 'masson/core/krb5_client'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
admin:
'HADOOP.RYBA':
kadmin_principal: 'admin/admin@HADOOP.RYBA'
kadmin_password: 'test'
kdc: ['a.fqdn']
admin_server: ['a.fqdn']
kpasswd_server: 'a.fqdn'
principals: []
etc_krb5_conf:
libdefaults: 'default_realm': 'HADOOP.RYBA'
realms:
'HADOOP.RYBA':
kdc: ['a.fqdn']
admin_server: ['a.fqdn']
kpasswd_server: 'a.fqdn'
'test_user':
module: '@rybajs/metal/commons/test_user'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
krb5: user:
password: 'test123'
password_sync: true
'zookeeper':
module: '@rybajs/metal/zookeeper/server'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
'core':
module: "@rybajs/metal/hadoop/core"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options: hdfs:
user: {}
group: {}
krb5_user:
password: 'test123'
'namenode':
module: "@rybajs/metal/hadoop/hdfs_nn"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn']
options:
nameservice: 'rybak', hdfs_site: {}
hdfs: user: {}, group: {}, krb5_user: password: 'test123'
'journalnode':
module: '@rybajs/metal/hadoop/hdfs_jn'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
hdfs_site: 'dfs.journalnode.edits.dir': '/var/hdfs/jn'
'datanode':
module: "@rybajs/metal/hadoop/hdfs_dn"
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
'timelineserver':
module: "@rybajs/metal/hadoop/yarn_ts"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn']
options:
heapsize: '1024m'
newsize: '200m'
'historyserver':
module: "@rybajs/metal/hadoop/mapred_jhs"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn']
'resourcemanager':
module: "@rybajs/metal/hadoop/yarn_rm"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn']
'nodemanager':
module: "@rybajs/metal/hadoop/yarn_nm"
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
nodes:
'a.fqdn': ip: '10.10.10.1'
'b.fqdn': ip: '10.10.10.2'
'c.fqdn': ip: '10.10.10.3'
.chain()
.service 'ryba', "timelineserver", (service) ->
service.options.heapsize.should.match /([0-9]*)([mMgGkK])/
service.options.newsize.should.match /([0-9]*)([mMgGkK])/
| 158487 |
normalize = require 'masson/lib/config/normalize'
store = require 'masson/lib/config/store'
nikita = require 'nikita'
fs = require 'fs'
describe 'hadoop.yarn_nm', ->
tmp = '/tmp/masson-test'
beforeEach ->
require('module')._cache = {}
nikita
.system.mkdir target: tmp
.promise()
afterEach ->
nikita
.system.remove tmp
.promise()
it 'validate heapsize and newsize', ->
services = []
store normalize
clusters: 'ryba': services:
'java':
module: 'masson/commons/java'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
'cgroups':
module: 'masson/core/cgroups'
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
'krb5_client':
module: 'masson/core/krb5_client'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
admin:
'HADOOP.RYBA':
kadmin_principal: 'admin/admin@HADOOP.RYBA'
kadmin_password: '<PASSWORD>'
kdc: ['a.fqdn']
admin_server: ['a.fqdn']
kpasswd_server: 'a.fqdn'
principals: []
etc_krb5_conf:
libdefaults: 'default_realm': 'HADOOP.RYBA'
realms:
'HADOOP.RYBA':
kdc: ['a.fqdn']
admin_server: ['a.fqdn']
kpasswd_server: 'a.fqdn'
'test_user':
module: '@rybajs/metal/commons/test_user'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
krb5: user:
password: '<PASSWORD>'
password_sync: true
'zookeeper':
module: '@rybajs/metal/zookeeper/server'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
'core':
module: "@rybajs/metal/hadoop/core"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options: hdfs:
user: {}
group: {}
krb5_user:
password: '<PASSWORD>'
'namenode':
module: "@rybajs/metal/hadoop/hdfs_nn"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn']
options:
nameservice: 'rybak', hdfs_site: {}
hdfs: user: {}, group: {}, krb5_user: password: '<PASSWORD>'
'journalnode':
module: '@rybajs/metal/hadoop/hdfs_jn'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
hdfs_site: 'dfs.journalnode.edits.dir': '/var/hdfs/jn'
'datanode':
module: "@rybajs/metal/hadoop/hdfs_dn"
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
'timelineserver':
module: "@rybajs/metal/hadoop/yarn_ts"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn']
options:
heapsize: '1024m'
newsize: '200m'
'historyserver':
module: "@rybajs/metal/hadoop/mapred_jhs"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn']
'resourcemanager':
module: "@rybajs/metal/hadoop/yarn_rm"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn']
'nodemanager':
module: "@rybajs/metal/hadoop/yarn_nm"
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
nodes:
'a.fqdn': ip: '10.10.10.1'
'b.fqdn': ip: '10.10.10.2'
'c.fqdn': ip: '10.10.10.3'
.chain()
.service 'ryba', "timelineserver", (service) ->
service.options.heapsize.should.match /([0-9]*)([mMgGkK])/
service.options.newsize.should.match /([0-9]*)([mMgGkK])/
| true |
normalize = require 'masson/lib/config/normalize'
store = require 'masson/lib/config/store'
nikita = require 'nikita'
fs = require 'fs'
describe 'hadoop.yarn_nm', ->
tmp = '/tmp/masson-test'
beforeEach ->
require('module')._cache = {}
nikita
.system.mkdir target: tmp
.promise()
afterEach ->
nikita
.system.remove tmp
.promise()
it 'validate heapsize and newsize', ->
services = []
store normalize
clusters: 'ryba': services:
'java':
module: 'masson/commons/java'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
'cgroups':
module: 'masson/core/cgroups'
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
'krb5_client':
module: 'masson/core/krb5_client'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
admin:
'HADOOP.RYBA':
kadmin_principal: 'admin/admin@HADOOP.RYBA'
kadmin_password: 'PI:PASSWORD:<PASSWORD>END_PI'
kdc: ['a.fqdn']
admin_server: ['a.fqdn']
kpasswd_server: 'a.fqdn'
principals: []
etc_krb5_conf:
libdefaults: 'default_realm': 'HADOOP.RYBA'
realms:
'HADOOP.RYBA':
kdc: ['a.fqdn']
admin_server: ['a.fqdn']
kpasswd_server: 'a.fqdn'
'test_user':
module: '@rybajs/metal/commons/test_user'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
krb5: user:
password: 'PI:PASSWORD:<PASSWORD>END_PI'
password_sync: true
'zookeeper':
module: '@rybajs/metal/zookeeper/server'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
'core':
module: "@rybajs/metal/hadoop/core"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options: hdfs:
user: {}
group: {}
krb5_user:
password: 'PI:PASSWORD:<PASSWORD>END_PI'
'namenode':
module: "@rybajs/metal/hadoop/hdfs_nn"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn']
options:
nameservice: 'rybak', hdfs_site: {}
hdfs: user: {}, group: {}, krb5_user: password: 'PI:PASSWORD:<PASSWORD>END_PI'
'journalnode':
module: '@rybajs/metal/hadoop/hdfs_jn'
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn', 'c.fqdn']
options:
hdfs_site: 'dfs.journalnode.edits.dir': '/var/hdfs/jn'
'datanode':
module: "@rybajs/metal/hadoop/hdfs_dn"
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
'timelineserver':
module: "@rybajs/metal/hadoop/yarn_ts"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn']
options:
heapsize: '1024m'
newsize: '200m'
'historyserver':
module: "@rybajs/metal/hadoop/mapred_jhs"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn']
'resourcemanager':
module: "@rybajs/metal/hadoop/yarn_rm"
affinity: type: 'nodes', match: 'any', values: ['a.fqdn', 'b.fqdn']
'nodemanager':
module: "@rybajs/metal/hadoop/yarn_nm"
affinity: type: 'nodes', match: 'any', values: ['c.fqdn']
nodes:
'a.fqdn': ip: '10.10.10.1'
'b.fqdn': ip: '10.10.10.2'
'c.fqdn': ip: '10.10.10.3'
.chain()
.service 'ryba', "timelineserver", (service) ->
service.options.heapsize.should.match /([0-9]*)([mMgGkK])/
service.options.newsize.should.match /([0-9]*)([mMgGkK])/
|
[
{
"context": "',\n user: 'username',\n passwor",
"end": 852,
"score": 0.6913262605667114,
"start": 844,
"tag": "USERNAME",
"value": "username"
},
{
"context": " passwor... | spec/adapters/mysql_spec.coffee | monokrome/modelingclay | 0 | MySqlAdapter = require('../../lib/adapters/mysql').MySqlAdapter
Query = require('../../lib/query').Query
fields = require('../../lib/fields')
mysql = require 'mysql'
mysqlMOCK = {
createClient: ->
return {
end: ->
return true
execute: (sql, params, callback) ->
# lol do nothing.
}
}
describe 'MySqlAdapter', ->
describe '#connect', ->
it 'should create a client', ->
spyOn(mysqlMOCK, 'createClient').andCallThrough()
adapter = new MySqlAdapter(mysqlMOCK)
adapter.connect('hostname', 'username', 'password', 'database')
expect(mysqlMOCK.createClient).toHaveBeenCalledWith({
host: 'hostname',
user: 'username',
password: 'password',
})
expect(adapter.client).toBeDefined()
describe '#disconnect', ->
it 'should call through to mysql lib', ->
adapter = new MySqlAdapter(mysqlMOCK)
adapter.connect('hostname', 'username', 'password', 'database')
spyOn(adapter.client, 'end')
adapter.disconnect()
expect(adapter.client.end).toHaveBeenCalled()
describe '#query', ->
it 'should return a new instance of Query', ->
adapter = new MySqlAdapter()
expect(adapter.query()).toBeInstanceOf(Query)
describe '#execute', ->
it 'should exec the query', ->
spyOn(mysql.Client.prototype, '_connect');
spyOn(mysql.Client.prototype, 'query').andCallThrough()
adapter = new MySqlAdapter(mysql)
adapter.connect('hostname', 'username', 'password', 'database')
testCallback = ->
adapter.execute('SELECT * FROM foo WHERE(x = ?)', [1], testCallback)
expect(mysql.Client.prototype.query).toHaveBeenCalledWith('SELECT * FROM foo WHERE(x = ?)', [1], testCallback)
describe '#fieldToSql', ->
it 'should use AUTO_INCREMENT with auto integer fields', ->
adapter = new MySqlAdapter(mysql)
autoInt = new fields.AutoIntegerField()
result = adapter.fieldToSql(autoInt)
expect(result).toEqual("`id` int(11) NOT NULL AUTO_INCREMENT")
| 200704 | MySqlAdapter = require('../../lib/adapters/mysql').MySqlAdapter
Query = require('../../lib/query').Query
fields = require('../../lib/fields')
mysql = require 'mysql'
mysqlMOCK = {
createClient: ->
return {
end: ->
return true
execute: (sql, params, callback) ->
# lol do nothing.
}
}
describe 'MySqlAdapter', ->
describe '#connect', ->
it 'should create a client', ->
spyOn(mysqlMOCK, 'createClient').andCallThrough()
adapter = new MySqlAdapter(mysqlMOCK)
adapter.connect('hostname', 'username', 'password', 'database')
expect(mysqlMOCK.createClient).toHaveBeenCalledWith({
host: 'hostname',
user: 'username',
password: '<PASSWORD>',
})
expect(adapter.client).toBeDefined()
describe '#disconnect', ->
it 'should call through to mysql lib', ->
adapter = new MySqlAdapter(mysqlMOCK)
adapter.connect('hostname', 'username', 'password', 'database')
spyOn(adapter.client, 'end')
adapter.disconnect()
expect(adapter.client.end).toHaveBeenCalled()
describe '#query', ->
it 'should return a new instance of Query', ->
adapter = new MySqlAdapter()
expect(adapter.query()).toBeInstanceOf(Query)
describe '#execute', ->
it 'should exec the query', ->
spyOn(mysql.Client.prototype, '_connect');
spyOn(mysql.Client.prototype, 'query').andCallThrough()
adapter = new MySqlAdapter(mysql)
adapter.connect('hostname', 'username', 'password', 'database')
testCallback = ->
adapter.execute('SELECT * FROM foo WHERE(x = ?)', [1], testCallback)
expect(mysql.Client.prototype.query).toHaveBeenCalledWith('SELECT * FROM foo WHERE(x = ?)', [1], testCallback)
describe '#fieldToSql', ->
it 'should use AUTO_INCREMENT with auto integer fields', ->
adapter = new MySqlAdapter(mysql)
autoInt = new fields.AutoIntegerField()
result = adapter.fieldToSql(autoInt)
expect(result).toEqual("`id` int(11) NOT NULL AUTO_INCREMENT")
| true | MySqlAdapter = require('../../lib/adapters/mysql').MySqlAdapter
Query = require('../../lib/query').Query
fields = require('../../lib/fields')
mysql = require 'mysql'
mysqlMOCK = {
createClient: ->
return {
end: ->
return true
execute: (sql, params, callback) ->
# lol do nothing.
}
}
describe 'MySqlAdapter', ->
describe '#connect', ->
it 'should create a client', ->
spyOn(mysqlMOCK, 'createClient').andCallThrough()
adapter = new MySqlAdapter(mysqlMOCK)
adapter.connect('hostname', 'username', 'password', 'database')
expect(mysqlMOCK.createClient).toHaveBeenCalledWith({
host: 'hostname',
user: 'username',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
})
expect(adapter.client).toBeDefined()
describe '#disconnect', ->
it 'should call through to mysql lib', ->
adapter = new MySqlAdapter(mysqlMOCK)
adapter.connect('hostname', 'username', 'password', 'database')
spyOn(adapter.client, 'end')
adapter.disconnect()
expect(adapter.client.end).toHaveBeenCalled()
describe '#query', ->
it 'should return a new instance of Query', ->
adapter = new MySqlAdapter()
expect(adapter.query()).toBeInstanceOf(Query)
describe '#execute', ->
it 'should exec the query', ->
spyOn(mysql.Client.prototype, '_connect');
spyOn(mysql.Client.prototype, 'query').andCallThrough()
adapter = new MySqlAdapter(mysql)
adapter.connect('hostname', 'username', 'password', 'database')
testCallback = ->
adapter.execute('SELECT * FROM foo WHERE(x = ?)', [1], testCallback)
expect(mysql.Client.prototype.query).toHaveBeenCalledWith('SELECT * FROM foo WHERE(x = ?)', [1], testCallback)
describe '#fieldToSql', ->
it 'should use AUTO_INCREMENT with auto integer fields', ->
adapter = new MySqlAdapter(mysql)
autoInt = new fields.AutoIntegerField()
result = adapter.fieldToSql(autoInt)
expect(result).toEqual("`id` int(11) NOT NULL AUTO_INCREMENT")
|
[
{
"context": "@getAvatarUrlFromUsername = (username) ->\n\tkey = \"avatar_random_#{username}\"\n\trandom = Session?.keys[key] or 0\n\tif not usernam",
"end": 76,
"score": 0.9973134398460388,
"start": 50,
"tag": "KEY",
"value": "avatar_random_#{username}\""
}
] | packages/rocketchat-ui/lib/getAvatarUrlFromUsername.coffee | amaapp/ama | 0 | @getAvatarUrlFromUsername = (username) ->
key = "avatar_random_#{username}"
random = Session?.keys[key] or 0
if not username?
return
if Meteor.isCordova
path = Meteor.absoluteUrl()
else
path = '/'
"#{path}avatar/#{encodeURIComponent(username)}.jpg?_dc=#{random}"
| 19761 | @getAvatarUrlFromUsername = (username) ->
key = "<KEY>
random = Session?.keys[key] or 0
if not username?
return
if Meteor.isCordova
path = Meteor.absoluteUrl()
else
path = '/'
"#{path}avatar/#{encodeURIComponent(username)}.jpg?_dc=#{random}"
| true | @getAvatarUrlFromUsername = (username) ->
key = "PI:KEY:<KEY>END_PI
random = Session?.keys[key] or 0
if not username?
return
if Meteor.isCordova
path = Meteor.absoluteUrl()
else
path = '/'
"#{path}avatar/#{encodeURIComponent(username)}.jpg?_dc=#{random}"
|
[
{
"context": " The MIT License (MIT)\r\n\r\n Copyright (c) 2014 SSSamuel\r\n\r\n Permission is hereby granted, free of char",
"end": 106,
"score": 0.9988061785697937,
"start": 98,
"tag": "USERNAME",
"value": "SSSamuel"
},
{
"context": "ll\r\n kill ninja!\r\n\r\n @ 2... | src/jQuery.ninjakey.coffee | samuel1112/jQuery.Ninjakey | 0 | ###
A Simple jQuery shortcuts library
The MIT License (MIT)
Copyright (c) 2014 SSSamuel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
- NinjaKey->constructor
@param
:key shortcuts String/Array
now support:
a-z,A-Z,0-9
special keys:
alt,ctrl/command,shift
home,left,up,right,down,enter,esc,space,backspace,del
example:
ctrl+alt+a, ctrl+enter, Ctrl+Shift+A
!! NOT SUPPORT
:selector String
support jquery selector string
:callback
selector will build a jQuery object,which will be callback `this`
@ return
obj -> NinjaKey
- NinjaKey::kill
kill ninja!
@ 2014-05-16
@ samuel - samuel.m.shen@gmail.com
@ version 0.0.1
###
$ = jQuery
NinjaKey_id_pool = {}
macCheck = (->
return {} unless navigator?
if /Mac/.test navigator.appVersion
mac: true
else
{}
)()
class NinjaKey
idGenerator = ->
loop
_r = 'ninja' + 'xxxxxxxxx'.replace(/[x]/g, ->
num = Math.random() * 16 | 0;
num.toString(16);
);
break if not NinjaKey_id_pool.hasOwnProperty(_r)
_r
parseKey = (keys)->
keycodeMap =
'HOME': 36
'LEFT': 37
'UP': 38
'RIGHT': 39
'DOWN': 40
'ENTER': 13
'ESC': 27
'SPACE': 32
'BACKSPACE': 8
'DEL': 46
conditions = []
for key in keys
if key
key = key.split('+')
key_condition = []
fnKey = key
fnKeyStatus = [0, 0, 0]
for _key in fnKey
_key = _key.toLowerCase()
if _key is 'ctrl' or _key is 'command' or _key is 'cmd'
fnKeyStatus[0] = 1
else if _key is 'alt'
fnKeyStatus[1] = 1
else if _key is 'shift'
fnKeyStatus[2] = 1
else
_key = _key.toUpperCase()
if not keycodeMap.hasOwnProperty(_key)
key_condition.push('e.which===' + _key.charCodeAt(0))
else
key_condition.push('e.which===' + keycodeMap[_key])
# use util function to check os
# on mac command is normal use as command+c mean 'copy'
# command is a metaKey
if not macCheck.mac
if fnKeyStatus[0] is 1
key_condition.push('e.ctrlKey')
else key_condition.push('!e.ctrlKey')
else
if fnKeyStatus[0] is 1
key_condition.push('e.metaKey')
else key_condition.push('!e.metaKey')
if fnKeyStatus[1] is 1
key_condition.push('e.altKey')
else key_condition.push('!e.altKey')
if fnKeyStatus[2] is 1
key_condition.push('e.shiftKey')
else key_condition.push('!e.shiftKey')
conditions.push('(' + key_condition.join(' && ') + ')')
gFunc = 'return ' + (conditions.join(' || ') or false) + ';';
new Function('e', gFunc)
constructor: (key, elem, callback)->
key = if {}.toString.call(key) is '[object Array]' then key else [key]
@__id__ = idGenerator()
_passion = parseKey(key)
if typeof elem isnt 'string'
callback = elem
elem = document
$(document).on('keydown.' + @__id__, (e)->
if _passion(e)
if callback?
callback.call($(elem))
false
)
kill: ()->
$(document).off('keydown.' + @__id__)
window.NinjaKey = (key, elem, callback)->
new NinjaKey(key, elem, callback) | 141836 | ###
A Simple jQuery shortcuts library
The MIT License (MIT)
Copyright (c) 2014 SSSamuel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
- NinjaKey->constructor
@param
:key shortcuts String/Array
now support:
a-z,A-Z,0-9
special keys:
alt,ctrl/command,shift
home,left,up,right,down,enter,esc,space,backspace,del
example:
ctrl+alt+a, ctrl+enter, Ctrl+Shift+A
!! NOT SUPPORT
:selector String
support jquery selector string
:callback
selector will build a jQuery object,which will be callback `this`
@ return
obj -> NinjaKey
- NinjaKey::kill
kill ninja!
@ 2014-05-16
@ samuel - <EMAIL>
@ version 0.0.1
###
$ = jQuery
NinjaKey_id_pool = {}
macCheck = (->
return {} unless navigator?
if /Mac/.test navigator.appVersion
mac: true
else
{}
)()
class NinjaKey
  # Produce a unique event-namespace id of the form "ninja" + 9 random hex
  # digits, and reserve it in NinjaKey_id_pool so it cannot be handed out
  # twice while an instance is alive (released again in `kill`).
  idGenerator = ->
    loop
      _r = 'ninja' + 'xxxxxxxxx'.replace(/[x]/g, ->
        num = Math.random() * 16 | 0
        num.toString(16)
      )
      break unless NinjaKey_id_pool.hasOwnProperty(_r)
    # Fix: the pool was never populated, so the uniqueness loop above was a
    # no-op and colliding ids were possible. Register the id on allocation.
    NinjaKey_id_pool[_r] = true
    _r

  # Compile an array of shortcut strings (e.g. ['ctrl+a', 'esc']) into a
  # predicate `(e) -> Boolean` that tests a jQuery keydown event object.
  parseKey = (keys) ->
    # Keycodes for the supported named special keys.
    keycodeMap =
      'HOME': 36
      'LEFT': 37
      'UP': 38
      'RIGHT': 39
      'DOWN': 40
      'ENTER': 13
      'ESC': 27
      'SPACE': 32
      'BACKSPACE': 8
      'DEL': 46
    conditions = []
    for key in keys
      if key
        key = key.split('+')
        key_condition = []
        fnKey = key
        # Pressed flags for this shortcut: [ctrl/cmd, alt, shift].
        fnKeyStatus = [0, 0, 0]
        for _key in fnKey
          _key = _key.toLowerCase()
          if _key is 'ctrl' or _key is 'command' or _key is 'cmd'
            fnKeyStatus[0] = 1
          else if _key is 'alt'
            fnKeyStatus[1] = 1
          else if _key is 'shift'
            fnKeyStatus[2] = 1
          else
            _key = _key.toUpperCase()
            if not keycodeMap.hasOwnProperty(_key)
              # Single printable character: compare against its keycode
              # (uppercase char code matches keydown's e.which for letters).
              key_condition.push('e.which===' + _key.charCodeAt(0))
            else
              key_condition.push('e.which===' + keycodeMap[_key])
        # use util function to check os
        # On a Mac, "ctrl/command/cmd" maps to the metaKey (⌘) so that e.g.
        # command+c matches the native 'copy' chord; elsewhere it is ctrlKey.
        if not macCheck.mac
          if fnKeyStatus[0] is 1
            key_condition.push('e.ctrlKey')
          else key_condition.push('!e.ctrlKey')
        else
          if fnKeyStatus[0] is 1
            key_condition.push('e.metaKey')
          else key_condition.push('!e.metaKey')
        if fnKeyStatus[1] is 1
          key_condition.push('e.altKey')
        else key_condition.push('!e.altKey')
        if fnKeyStatus[2] is 1
          key_condition.push('e.shiftKey')
        else key_condition.push('!e.shiftKey')
        conditions.push('(' + key_condition.join(' && ') + ')')
    # NOTE(review): this builds code from strings via `new Function`. The
    # input is developer-supplied shortcut names, not end-user data, but a
    # data-driven predicate would avoid eval-like construction entirely.
    gFunc = 'return ' + (conditions.join(' || ') or false) + ';'
    new Function('e', gFunc)

  # @param key       String or Array of shortcut strings, e.g. 'ctrl+alt+a'.
  # @param elem      Optional jQuery selector string; `$(elem)` becomes the
  #                  callback's `this`. Defaults to `document` when omitted.
  # @param callback  Invoked when one of the shortcuts matches.
  constructor: (key, elem, callback) ->
    key = if {}.toString.call(key) is '[object Array]' then key else [key]
    @__id__ = idGenerator()
    _passion = parseKey(key)
    # `elem` is optional: shift the arguments when it was omitted.
    if typeof elem isnt 'string'
      callback = elem
      elem = document
    # Namespaced handler so `kill` unbinds only this instance's binding.
    $(document).on('keydown.' + @__id__, (e) ->
      if _passion(e)
        if callback?
          callback.call($(elem))
        false  # suppress the browser default for a matched shortcut
    )

  # Unbind this instance's keydown handler and release its namespace id
  # back to the pool (pairs with the reservation made in idGenerator).
  kill: () ->
    $(document).off('keydown.' + @__id__)
    delete NinjaKey_id_pool[@__id__]
# Public factory: `NinjaKey(keys, selector, fn)` binds the shortcut(s) and
# returns the instance; call `.kill()` on it to unbind.
window.NinjaKey = (key, elem, callback) ->
  new NinjaKey key, elem, callback
A Simple jQuery shortcuts library
The MIT License (MIT)
Copyright (c) 2014 SSSamuel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
- NinjaKey->constructor
@param
:key shortcuts String/Array
now support:
a-z,A-Z,0-9
special keys:
alt,ctrl/command,shift
home,left,up,right,down,enter,esc,space,backspace,del
example:
ctrl+alt+a, ctrl+enter, Ctrl+Shift+A
!! NOT SUPPORT
:selector String
support jquery selector string
:callback
selector will build a jQuery object,which will be callback `this`
@ return
obj -> NinjaKey
- NinjaKey::kill
kill ninja!
@ 2014-05-16
@ samuel - PI:EMAIL:<EMAIL>END_PI
@ version 0.0.1
###
$ = jQuery
NinjaKey_id_pool = {}
macCheck = (->
return {} unless navigator?
if /Mac/.test navigator.appVersion
mac: true
else
{}
)()
class NinjaKey
idGenerator = ->
loop
_r = 'ninja' + 'xxxxxxxxx'.replace(/[x]/g, ->
num = Math.random() * 16 | 0;
num.toString(16);
);
break if not NinjaKey_id_pool.hasOwnProperty(_r)
_r
parseKey = (keys)->
keycodeMap =
'HOME': 36
'LEFT': 37
'UP': 38
'RIGHT': 39
'DOWN': 40
'ENTER': 13
'ESC': 27
'SPACE': 32
'BACKSPACE': 8
'DEL': 46
conditions = []
for key in keys
if key
key = key.split('+')
key_condition = []
fnKey = key
fnKeyStatus = [0, 0, 0]
for _key in fnKey
_key = _key.toLowerCase()
if _key is 'ctrl' or _key is 'command' or _key is 'cmd'
fnKeyStatus[0] = 1
else if _key is 'alt'
fnKeyStatus[1] = 1
else if _key is 'shift'
fnKeyStatus[2] = 1
else
_key = _key.toUpperCase()
if not keycodeMap.hasOwnProperty(_key)
key_condition.push('e.which===' + _key.charCodeAt(0))
else
key_condition.push('e.which===' + keycodeMap[_key])
# use util function to check os
# on mac command is normal use as command+c mean 'copy'
# command is a metaKey
if not macCheck.mac
if fnKeyStatus[0] is 1
key_condition.push('e.ctrlKey')
else key_condition.push('!e.ctrlKey')
else
if fnKeyStatus[0] is 1
key_condition.push('e.metaKey')
else key_condition.push('!e.metaKey')
if fnKeyStatus[1] is 1
key_condition.push('e.altKey')
else key_condition.push('!e.altKey')
if fnKeyStatus[2] is 1
key_condition.push('e.shiftKey')
else key_condition.push('!e.shiftKey')
conditions.push('(' + key_condition.join(' && ') + ')')
gFunc = 'return ' + (conditions.join(' || ') or false) + ';';
new Function('e', gFunc)
constructor: (key, elem, callback)->
key = if {}.toString.call(key) is '[object Array]' then key else [key]
@__id__ = idGenerator()
_passion = parseKey(key)
if typeof elem isnt 'string'
callback = elem
elem = document
$(document).on('keydown.' + @__id__, (e)->
if _passion(e)
if callback?
callback.call($(elem))
false
)
kill: ()->
$(document).off('keydown.' + @__id__)
window.NinjaKey = (key, elem, callback)->
new NinjaKey(key, elem, callback) |
[
{
"context": "be der Kleidung des Zauberers\"\n new_password: \"Neues Passwort\"\n new_password_verify: \"Verifizieren\"\n emai",
"end": 4182,
"score": 0.9986779093742371,
"start": 4168,
"tag": "PASSWORD",
"value": "Neues Passwort"
},
{
"context": "sword: \"Neues Passwort\"\... | app/locale/de.coffee | cochee/codecombat | 1 | module.exports = nativeDescription: "Deutsch", englishDescription: "German", translation:
common:
loading: "Laden..."
modal:
close: "Schliessen"
okay: "Okay"
not_found:
page_not_found: "Seite nicht gefunden"
nav:
sign_up: "Anmelden"
log_in: "Einloggen"
log_out: "Ausloggen"
play: "Start"
editor: "Editor"
blog: "Blog"
forum: "Forum"
admin: "Admin"
home: "Home"
contribute: "Helfen"
legal: "Rechtliches"
about: "Über"
contact: "Kontakt"
twitter_follow: "Twitter"
forms:
name: "Name"
email: "Email"
message: "Nachricht"
cancel: "Abbrechen"
login:
log_in: "Einloggen"
sign_up: "Neuen Account anlegen"
or: ", oder "
recover: "Account wiederherstellen"
signup:
description: "Es ist kostenlos. Nur noch ein paar Dinge, dann kannst Du loslegen."
email_announcements: "Erhalte Benachrichtigungen per Email"
coppa: "Älter als 13 oder nicht in den USA"
coppa_why: "(Warum?)"
creating: "Erzeuge Account..."
sign_up: "Neuen Account anlegen"
or: "oder "
log_in: "mit Passwort einloggen"
home:
slogan: "Lern spielend JavaScript"
no_ie: "CodeCombat läuft nicht im Internet Explorer 9 oder älteren Browsern. Tut uns Leid!"
no_mobile: "CodeCombat ist nicht für Mobilgeräte optimiert und funktioniert möglicherweise nicht."
play: "Play"
play:
choose_your_level: "Wähle Deinen Level"
adventurer_prefix: "Du kannst zu jedem Level springen, oder die Levels diskutieren "
adventurer_forum: "im Abenteurerforum"
adventurer_suffix: "."
campaign_beginner: "Anfängerkampagne"
campaign_beginner_description: "... in der Du die Zauberei der Programmierung lernst."
campaign_dev: "Beliebiges schwierigeres Level"
campaign_dev_description: "... in welchem Du die Bedienung erlernst, indem Du etwas schwierigeres machst."
campaign_multiplayer: "Multiplayerarena"
campaign_multiplayer_description: "... in der Du in gegen einen anderen Spieler programmierst."
campaign_player_created: "Von Spielern erstellt"
campaign_player_created_description: "... in welchem du gegen die Kreativität eines <a href=\"/contribute#artisan\">Artisan Zauberers</a> kämpfst."
level_difficulty: "Schwierigkeit:"
contact:
contact_us: "Kontaktiere CodeCombat"
welcome: "Schön von Dir zu hören! Benutze dieses Formular um uns eine Email zu schicken."
contribute_prefix: "Wenn Du Interesse hast, uns zu unterstützen dann sieh Dir die "
contribute_page: "Unterstützer Seite"
contribute_suffix: " an!"
forum_prefix: "Für alle öffentlichen Themen, benutze "
forum_page: "unser Forum"
forum_suffix: " stattdessen."
sending: "Senden..."
send: "Sende Feedback"
diplomat_suggestion:
title: "Hilf CodeCombat zu übersetzen!"
sub_heading: "Wir brauchen Deine Sprachfähigkeiten."
pitch_body: "Wir entwickeln CodeCombat in Englisch, aber wir haben Spieler in der ganzen Welt. Viele von ihnen wollen in Deutsch spielen, sprechen aber kein Englisch. Wenn Du also beide Sprachen beherrscht, melde Dich an um ein Diplomat zu werden und hilf die Website und die Levels zu Deutsch zu übersetzen."
missing_translations: "Solange wir nicht alles zu Deutsch übesetzt haben siehst Du Englisch, wo Deutsch noch nicht zur Verfügung steht."
learn_more: "Finde heraus wie Du Diplomat werden kannst"
subscribe_as_diplomat: "Schreibe Dich als Diplomat ein"
account_settings:
title: "Accounteinstellungen"
not_logged_in: "Logge Dich ein um einen Account anzulegen oder zu ändern."
autosave: "Sichere Änderungen automatisch"
me_tab: "Ich"
picture_tab: "Bild"
wizard_tab: "Zauberer"
password_tab: "Passwort"
emails_tab: "Emails"
language_tab: "Sprache"
gravatar_select: "Wähle ein Gravatar Photo aus"
gravatar_add_photos: "Füge Vorschaubilder und Photos zu Deinem Gravatar Account hinzu für Deine email Bilder"
gravatar_add_more_photos: "Füge mehr Photos für Deinen Gravatar Account hinzu, um hier mehr Bilder wählen zu können"
wizard_color: "Die Farbe der Kleidung des Zauberers"
new_password: "Neues Passwort"
new_password_verify: "Verifizieren"
email_subscriptions: "Email Abonements"
email_announcements: "Ankündigungen"
email_announcements_description: "Bekomme die aktuellesten Nachrichten und Entwicklungen bei CodeCombat."
contributor_emails: "Untersützer Emails"
contribute_prefix: "Wir suchen nach Leuten, die mitmachen! Schau Dir die"
contribute_page: "Unterstützer Seite"
contribute_suffix: " an um mehr zu erfahren."
email_toggle: "Alles wählen"
language: "Sprache"
saving: "Saving..."
error_saving: "Fehler beim Speichern"
saved: "Änderungen gespeichert"
password_mismatch: "Passwörter stimmen nicht überein."
account_profile:
edit_settings: "Einstellungen ändern"
profile_for_prefix: "Profil von "
profile_for_suffix: ""
profile: "Profil"
user_not_found: "Kein Nutzer gefunden. URL überprüfen?"
gravatar_not_found_mine: "Wir konnten das Profil nicht finden:"
gravatar_signup_prefix: "Melde Dich an unter "
gravatar_signup_suffix: " um los zu legen!"
gravatar_not_found_other: "Leider ist kein Profil mit der Email Adresse verknüpft."
gravatar_contact: "Kontakt"
gravatar_websites: "Websites"
gravatar_accounts: "Gesehen in"
gravatar_profile_link: "Gravatar Profil"
play_level:
level_load_error: "Level konnte nicht geladen werden."
done: "Fertig"
grid: "Raster"
customize_wizard: "Bearbeite den Zauberer"
home: "Home"
guide: "Führung"
multiplayer: "Multiplayer"
restart: "Neustart"
goals: "Ziele"
action_timeline: "Aktionszeitstrahl"
click_to_select: "Klicke auf eine Einheit, um sie auszuwählen."
reload_title: "Gesamten Code neu laden?"
reload_really: "Bist Du sicher, dass Du das Level zum Neuanfang neu laden willst?"
reload_confirm: "Alles neu laden"
victory_title_prefix: ""
victory_title_suffix: " Abgeschlossen"
victory_sign_up: "Melde Dich an, um Updates zu erhalten"
victory_sign_up_poke: "Möchtest Du Neuigkeiten per Mail erhalten? Erzeuge einen kostenlosen Account und wir halten Dich auf dem Laufenden."
victory_rate_the_level: "Bewerte das Level:"
victory_play_next_level: "Spiel das nächste Level"
victory_go_home: "Geh auf die Startseite"
victory_review: "Erzähl uns davon!"
victory_hour_of_code_done: "Bist Du fertig?"
victory_hour_of_code_done_yes: "Ja, ich bin mit meiner Code-Stunde fertig!"
multiplayer_title: "Multiplayer Einstellungen"
multiplayer_link_description: "Gib diesen Link jedem der mitmachen will."
multiplayer_hint_label: "Hinweis:"
multiplayer_hint: " Klick den Link um alles auszuwählen, dann drück ⌘-C oder Strg-C um den Link zu kopieren."
multiplayer_coming_soon: "Mehr Multiplayerfeatures werden kommen!"
guide_title: "Führung"
tome_minion_spells: "Die Zaubersprüche Deiner Knechte"
tome_read_only_spells: "Nur-lesen Zauberspüche"
tome_other_units: "Andere Einheiten"
tome_cast_button_castable: "Führe aus"
tome_cast_button_casting: "Ausführen"
tome_cast_button_cast: "Zauberspuch ausführen"
tome_autocast_delay: "Verzögerung der automatischen Ausführung"
tome_autocast_1: "1 Sekunde"
tome_autocast_3: "3 Sekunden"
tome_autocast_5: "5 Sekunden"
tome_autocast_manual: "Manuell"
tome_select_spell: "Wähle einen Zauber"
tome_select_a_thang: "Wähle jemanden aus um "
tome_available_spells: "Verfügbare Zauber"
hud_continue: "Weiter (drücke Shift+Space)"
| 38455 | module.exports = nativeDescription: "Deutsch", englishDescription: "German", translation:
common:
loading: "Laden..."
modal:
close: "Schliessen"
okay: "Okay"
not_found:
page_not_found: "Seite nicht gefunden"
nav:
sign_up: "Anmelden"
log_in: "Einloggen"
log_out: "Ausloggen"
play: "Start"
editor: "Editor"
blog: "Blog"
forum: "Forum"
admin: "Admin"
home: "Home"
contribute: "Helfen"
legal: "Rechtliches"
about: "Über"
contact: "Kontakt"
twitter_follow: "Twitter"
forms:
name: "Name"
email: "Email"
message: "Nachricht"
cancel: "Abbrechen"
login:
log_in: "Einloggen"
sign_up: "Neuen Account anlegen"
or: ", oder "
recover: "Account wiederherstellen"
signup:
description: "Es ist kostenlos. Nur noch ein paar Dinge, dann kannst Du loslegen."
email_announcements: "Erhalte Benachrichtigungen per Email"
coppa: "Älter als 13 oder nicht in den USA"
coppa_why: "(Warum?)"
creating: "Erzeuge Account..."
sign_up: "Neuen Account anlegen"
or: "oder "
log_in: "mit Passwort einloggen"
home:
slogan: "Lern spielend JavaScript"
no_ie: "CodeCombat läuft nicht im Internet Explorer 9 oder älteren Browsern. Tut uns Leid!"
no_mobile: "CodeCombat ist nicht für Mobilgeräte optimiert und funktioniert möglicherweise nicht."
play: "Play"
play:
choose_your_level: "Wähle Deinen Level"
adventurer_prefix: "Du kannst zu jedem Level springen, oder die Levels diskutieren "
adventurer_forum: "im Abenteurerforum"
adventurer_suffix: "."
campaign_beginner: "Anfängerkampagne"
campaign_beginner_description: "... in der Du die Zauberei der Programmierung lernst."
campaign_dev: "Beliebiges schwierigeres Level"
campaign_dev_description: "... in welchem Du die Bedienung erlernst, indem Du etwas schwierigeres machst."
campaign_multiplayer: "Multiplayerarena"
campaign_multiplayer_description: "... in der Du in gegen einen anderen Spieler programmierst."
campaign_player_created: "Von Spielern erstellt"
campaign_player_created_description: "... in welchem du gegen die Kreativität eines <a href=\"/contribute#artisan\">Artisan Zauberers</a> kämpfst."
level_difficulty: "Schwierigkeit:"
contact:
contact_us: "Kontaktiere CodeCombat"
welcome: "Schön von Dir zu hören! Benutze dieses Formular um uns eine Email zu schicken."
contribute_prefix: "Wenn Du Interesse hast, uns zu unterstützen dann sieh Dir die "
contribute_page: "Unterstützer Seite"
contribute_suffix: " an!"
forum_prefix: "Für alle öffentlichen Themen, benutze "
forum_page: "unser Forum"
forum_suffix: " stattdessen."
sending: "Senden..."
send: "Sende Feedback"
diplomat_suggestion:
title: "Hilf CodeCombat zu übersetzen!"
sub_heading: "Wir brauchen Deine Sprachfähigkeiten."
pitch_body: "Wir entwickeln CodeCombat in Englisch, aber wir haben Spieler in der ganzen Welt. Viele von ihnen wollen in Deutsch spielen, sprechen aber kein Englisch. Wenn Du also beide Sprachen beherrscht, melde Dich an um ein Diplomat zu werden und hilf die Website und die Levels zu Deutsch zu übersetzen."
missing_translations: "Solange wir nicht alles zu Deutsch übesetzt haben siehst Du Englisch, wo Deutsch noch nicht zur Verfügung steht."
learn_more: "Finde heraus wie Du Diplomat werden kannst"
subscribe_as_diplomat: "Schreibe Dich als Diplomat ein"
account_settings:
title: "Accounteinstellungen"
not_logged_in: "Logge Dich ein um einen Account anzulegen oder zu ändern."
autosave: "Sichere Änderungen automatisch"
me_tab: "Ich"
picture_tab: "Bild"
wizard_tab: "Zauberer"
password_tab: "Passwort"
emails_tab: "Emails"
language_tab: "Sprache"
gravatar_select: "Wähle ein Gravatar Photo aus"
gravatar_add_photos: "Füge Vorschaubilder und Photos zu Deinem Gravatar Account hinzu für Deine email Bilder"
gravatar_add_more_photos: "Füge mehr Photos für Deinen Gravatar Account hinzu, um hier mehr Bilder wählen zu können"
wizard_color: "Die Farbe der Kleidung des Zauberers"
new_password: "<PASSWORD>"
new_password_verify: "<PASSWORD>"
email_subscriptions: "Email Abonements"
email_announcements: "Ankündigungen"
email_announcements_description: "Bekomme die aktuellesten Nachrichten und Entwicklungen bei CodeCombat."
contributor_emails: "Untersützer Emails"
contribute_prefix: "Wir suchen nach Leuten, die mitmachen! Schau Dir die"
contribute_page: "Unterstützer Seite"
contribute_suffix: " an um mehr zu erfahren."
email_toggle: "Alles wählen"
language: "Sprache"
saving: "Saving..."
error_saving: "Fehler beim Speichern"
saved: "Änderungen gespeichert"
password_mismatch: "<PASSWORD> stimmen nicht überein."
account_profile:
edit_settings: "Einstellungen ändern"
profile_for_prefix: "Profil von "
profile_for_suffix: ""
profile: "Profil"
user_not_found: "Kein Nutzer gefunden. URL überprüfen?"
gravatar_not_found_mine: "Wir konnten das Profil nicht finden:"
gravatar_signup_prefix: "Melde Dich an unter "
gravatar_signup_suffix: " um los zu legen!"
gravatar_not_found_other: "Leider ist kein Profil mit der Email Adresse verknüpft."
gravatar_contact: "Kontakt"
gravatar_websites: "Websites"
gravatar_accounts: "Gesehen in"
gravatar_profile_link: "Gravatar Profil"
play_level:
level_load_error: "Level konnte nicht geladen werden."
done: "Fertig"
grid: "Raster"
customize_wizard: "Bearbeite den Zauberer"
home: "Home"
guide: "Führung"
multiplayer: "Multiplayer"
restart: "Neustart"
goals: "Ziele"
action_timeline: "Aktionszeitstrahl"
click_to_select: "Klicke auf eine Einheit, um sie auszuwählen."
reload_title: "Gesamten Code neu laden?"
reload_really: "Bist Du sicher, dass Du das Level zum Neuanfang neu laden willst?"
reload_confirm: "Alles neu laden"
victory_title_prefix: ""
victory_title_suffix: " Abgeschlossen"
victory_sign_up: "Melde Dich an, um Updates zu erhalten"
victory_sign_up_poke: "Möchtest Du Neuigkeiten per Mail erhalten? Erzeuge einen kostenlosen Account und wir halten Dich auf dem Laufenden."
victory_rate_the_level: "Bewerte das Level:"
victory_play_next_level: "Spiel das nächste Level"
victory_go_home: "Geh auf die Startseite"
victory_review: "Erzähl uns davon!"
victory_hour_of_code_done: "Bist Du fertig?"
victory_hour_of_code_done_yes: "Ja, ich bin mit meiner Code-Stunde fertig!"
multiplayer_title: "Multiplayer Einstellungen"
multiplayer_link_description: "Gib diesen Link jedem der mitmachen will."
multiplayer_hint_label: "Hinweis:"
multiplayer_hint: " Klick den Link um alles auszuwählen, dann drück ⌘-C oder Strg-C um den Link zu kopieren."
multiplayer_coming_soon: "Mehr Multiplayerfeatures werden kommen!"
guide_title: "Führung"
tome_minion_spells: "Die Zaubersprüche Deiner Knechte"
tome_read_only_spells: "Nur-lesen Zauberspüche"
tome_other_units: "Andere Einheiten"
tome_cast_button_castable: "Führe aus"
tome_cast_button_casting: "Ausführen"
tome_cast_button_cast: "Zauberspuch ausführen"
tome_autocast_delay: "Verzögerung der automatischen Ausführung"
tome_autocast_1: "1 Sekunde"
tome_autocast_3: "3 Sekunden"
tome_autocast_5: "5 Sekunden"
tome_autocast_manual: "Manuell"
tome_select_spell: "Wähle einen Zauber"
tome_select_a_thang: "Wähle jemanden aus um "
tome_available_spells: "Verfügbare Zauber"
hud_continue: "Weiter (drücke Shift+Space)"
| true | module.exports = nativeDescription: "Deutsch", englishDescription: "German", translation:
common:
loading: "Laden..."
modal:
close: "Schliessen"
okay: "Okay"
not_found:
page_not_found: "Seite nicht gefunden"
nav:
sign_up: "Anmelden"
log_in: "Einloggen"
log_out: "Ausloggen"
play: "Start"
editor: "Editor"
blog: "Blog"
forum: "Forum"
admin: "Admin"
home: "Home"
contribute: "Helfen"
legal: "Rechtliches"
about: "Über"
contact: "Kontakt"
twitter_follow: "Twitter"
forms:
name: "Name"
email: "Email"
message: "Nachricht"
cancel: "Abbrechen"
login:
log_in: "Einloggen"
sign_up: "Neuen Account anlegen"
or: ", oder "
recover: "Account wiederherstellen"
signup:
description: "Es ist kostenlos. Nur noch ein paar Dinge, dann kannst Du loslegen."
email_announcements: "Erhalte Benachrichtigungen per Email"
coppa: "Älter als 13 oder nicht in den USA"
coppa_why: "(Warum?)"
creating: "Erzeuge Account..."
sign_up: "Neuen Account anlegen"
or: "oder "
log_in: "mit Passwort einloggen"
home:
slogan: "Lern spielend JavaScript"
no_ie: "CodeCombat läuft nicht im Internet Explorer 9 oder älteren Browsern. Tut uns Leid!"
no_mobile: "CodeCombat ist nicht für Mobilgeräte optimiert und funktioniert möglicherweise nicht."
play: "Play"
play:
choose_your_level: "Wähle Deinen Level"
adventurer_prefix: "Du kannst zu jedem Level springen, oder die Levels diskutieren "
adventurer_forum: "im Abenteurerforum"
adventurer_suffix: "."
campaign_beginner: "Anfängerkampagne"
campaign_beginner_description: "... in der Du die Zauberei der Programmierung lernst."
campaign_dev: "Beliebiges schwierigeres Level"
campaign_dev_description: "... in welchem Du die Bedienung erlernst, indem Du etwas schwierigeres machst."
campaign_multiplayer: "Multiplayerarena"
campaign_multiplayer_description: "... in der Du in gegen einen anderen Spieler programmierst."
campaign_player_created: "Von Spielern erstellt"
campaign_player_created_description: "... in welchem du gegen die Kreativität eines <a href=\"/contribute#artisan\">Artisan Zauberers</a> kämpfst."
level_difficulty: "Schwierigkeit:"
contact:
contact_us: "Kontaktiere CodeCombat"
welcome: "Schön von Dir zu hören! Benutze dieses Formular um uns eine Email zu schicken."
contribute_prefix: "Wenn Du Interesse hast, uns zu unterstützen dann sieh Dir die "
contribute_page: "Unterstützer Seite"
contribute_suffix: " an!"
forum_prefix: "Für alle öffentlichen Themen, benutze "
forum_page: "unser Forum"
forum_suffix: " stattdessen."
sending: "Senden..."
send: "Sende Feedback"
diplomat_suggestion:
title: "Hilf CodeCombat zu übersetzen!"
sub_heading: "Wir brauchen Deine Sprachfähigkeiten."
pitch_body: "Wir entwickeln CodeCombat in Englisch, aber wir haben Spieler in der ganzen Welt. Viele von ihnen wollen in Deutsch spielen, sprechen aber kein Englisch. Wenn Du also beide Sprachen beherrscht, melde Dich an um ein Diplomat zu werden und hilf die Website und die Levels zu Deutsch zu übersetzen."
missing_translations: "Solange wir nicht alles zu Deutsch übesetzt haben siehst Du Englisch, wo Deutsch noch nicht zur Verfügung steht."
learn_more: "Finde heraus wie Du Diplomat werden kannst"
subscribe_as_diplomat: "Schreibe Dich als Diplomat ein"
account_settings:
title: "Accounteinstellungen"
not_logged_in: "Logge Dich ein um einen Account anzulegen oder zu ändern."
autosave: "Sichere Änderungen automatisch"
me_tab: "Ich"
picture_tab: "Bild"
wizard_tab: "Zauberer"
password_tab: "Passwort"
emails_tab: "Emails"
language_tab: "Sprache"
gravatar_select: "Wähle ein Gravatar Photo aus"
gravatar_add_photos: "Füge Vorschaubilder und Photos zu Deinem Gravatar Account hinzu für Deine email Bilder"
gravatar_add_more_photos: "Füge mehr Photos für Deinen Gravatar Account hinzu, um hier mehr Bilder wählen zu können"
wizard_color: "Die Farbe der Kleidung des Zauberers"
new_password: "PI:PASSWORD:<PASSWORD>END_PI"
new_password_verify: "PI:PASSWORD:<PASSWORD>END_PI"
email_subscriptions: "Email Abonements"
email_announcements: "Ankündigungen"
email_announcements_description: "Bekomme die aktuellesten Nachrichten und Entwicklungen bei CodeCombat."
contributor_emails: "Untersützer Emails"
contribute_prefix: "Wir suchen nach Leuten, die mitmachen! Schau Dir die"
contribute_page: "Unterstützer Seite"
contribute_suffix: " an um mehr zu erfahren."
email_toggle: "Alles wählen"
language: "Sprache"
saving: "Saving..."
error_saving: "Fehler beim Speichern"
saved: "Änderungen gespeichert"
password_mismatch: "PI:PASSWORD:<PASSWORD>END_PI stimmen nicht überein."
account_profile:
edit_settings: "Einstellungen ändern"
profile_for_prefix: "Profil von "
profile_for_suffix: ""
profile: "Profil"
user_not_found: "Kein Nutzer gefunden. URL überprüfen?"
gravatar_not_found_mine: "Wir konnten das Profil nicht finden:"
gravatar_signup_prefix: "Melde Dich an unter "
gravatar_signup_suffix: " um los zu legen!"
gravatar_not_found_other: "Leider ist kein Profil mit der Email Adresse verknüpft."
gravatar_contact: "Kontakt"
gravatar_websites: "Websites"
gravatar_accounts: "Gesehen in"
gravatar_profile_link: "Gravatar Profil"
play_level:
level_load_error: "Level konnte nicht geladen werden."
done: "Fertig"
grid: "Raster"
customize_wizard: "Bearbeite den Zauberer"
home: "Home"
guide: "Führung"
multiplayer: "Multiplayer"
restart: "Neustart"
goals: "Ziele"
action_timeline: "Aktionszeitstrahl"
click_to_select: "Klicke auf eine Einheit, um sie auszuwählen."
reload_title: "Gesamten Code neu laden?"
reload_really: "Bist Du sicher, dass Du das Level zum Neuanfang neu laden willst?"
reload_confirm: "Alles neu laden"
victory_title_prefix: ""
victory_title_suffix: " Abgeschlossen"
victory_sign_up: "Melde Dich an, um Updates zu erhalten"
victory_sign_up_poke: "Möchtest Du Neuigkeiten per Mail erhalten? Erzeuge einen kostenlosen Account und wir halten Dich auf dem Laufenden."
victory_rate_the_level: "Bewerte das Level:"
victory_play_next_level: "Spiel das nächste Level"
victory_go_home: "Geh auf die Startseite"
victory_review: "Erzähl uns davon!"
victory_hour_of_code_done: "Bist Du fertig?"
victory_hour_of_code_done_yes: "Ja, ich bin mit meiner Code-Stunde fertig!"
multiplayer_title: "Multiplayer Einstellungen"
multiplayer_link_description: "Gib diesen Link jedem der mitmachen will."
multiplayer_hint_label: "Hinweis:"
multiplayer_hint: " Klick den Link um alles auszuwählen, dann drück ⌘-C oder Strg-C um den Link zu kopieren."
multiplayer_coming_soon: "Mehr Multiplayerfeatures werden kommen!"
guide_title: "Führung"
tome_minion_spells: "Die Zaubersprüche Deiner Knechte"
tome_read_only_spells: "Nur-lesen Zauberspüche"
tome_other_units: "Andere Einheiten"
tome_cast_button_castable: "Führe aus"
tome_cast_button_casting: "Ausführen"
tome_cast_button_cast: "Zauberspuch ausführen"
tome_autocast_delay: "Verzögerung der automatischen Ausführung"
tome_autocast_1: "1 Sekunde"
tome_autocast_3: "3 Sekunden"
tome_autocast_5: "5 Sekunden"
tome_autocast_manual: "Manuell"
tome_select_spell: "Wähle einen Zauber"
tome_select_a_thang: "Wähle jemanden aus um "
tome_available_spells: "Verfügbare Zauber"
hud_continue: "Weiter (drücke Shift+Space)"
|
[
{
"context": "ld_key\n\n <ToolComponent\n key={index}\n task={@props.task}\n tool_",
"end": 5003,
"score": 0.7253356575965881,
"start": 4998,
"tag": "KEY",
"value": "index"
}
] | app/assets/javascripts/components/transcribe/tools/composite-tool/index.cjsx | johnscancella/scribeAPI | 0 | React = require 'react'
{Navigation} = require 'react-router'
DraggableModal = require 'components/draggable-modal'
DoneButton = require './done-button'
SmallButton = require 'components/buttons/small-button'
PrevButton = require './prev-button'
HelpButton = require 'components/buttons/help-button'
BadSubjectButton = require 'components/buttons/bad-subject-button'
IllegibleSubjectButton = require 'components/buttons/illegible-subject-button'
SkippableToolMixin = require 'lib/skippable-tool-mixin'
CompositeTool = React.createClass
displayName: 'CompositeTool'
mixins: [Navigation, SkippableToolMixin]
getInitialState: ->
initAnnotation = @props.annotation ? {}
if @props.subject.data.ocrText
# first annotation
annotation_key = @props.task.tool_config.options[0].value
initAnnotation[annotation_key] = @props.subject.data.ocrText
return {
annotation: initAnnotation,
viewerSize: @props.viewerSize,
active_field_key: (c.value for c in @props.task.tool_config.options)[0]
}
getDefaultProps: ->
annotation: {}
task: null
subject: null
# this can go into a mixin? (common across all transcribe tools)
getPosition: (data) ->
return x: null, y: null if ! data.x?
yPad = 20
switch data.toolName
when 'rectangleTool'
x = data.x
y = parseFloat(data.y) + parseFloat(data.height) + yPad
when 'textRowTool'
x = data.x
y = data.yLower + yPad
else # default for pointTool
x = data.x
y = data.y + yPad if data.y?
x = @props.subject.width / 2 if ! x?
y = @props.subject.height / 2 if ! y?
return {x,y}
onViewerResize: (size) ->
@setState
viewerSize: size
handleChange: (annotation) ->
@setState annotation: annotation
@props.onChange annotation # forward annotation to parent
# Fires when user hits <enter> in an input
# If there are more inputs, move focus to next input
# Otherwise commit annotation (which is default behavior when there's only one input
handleCompletedField: ->
field_keys = (c.value for c in @props.task.tool_config.options)
next_field_key = field_keys[ field_keys.indexOf(@state.active_field_key) + 1 ]
if next_field_key?
@setState active_field_key: next_field_key
, =>
@forceUpdate()
else
@commitAnnotation()
# User moved focus to an input:
handleFieldFocus: (annotation_key) ->
@setState active_field_key: annotation_key
# this can go into a mixin? (common across all transcribe tools)
commitAnnotation: ->
@props.onComplete @state.annotation
if @props.transcribeMode is 'page' or @props.transcribeMode is 'single'
if @props.isLastSubject and not @props.task.next_task?
@props.returnToMarking()
else if @props.transcribeMode == 'verify'
@transitionTo 'verify'
# this can go into a mixin? (common across all transcribe tools)
returnToMarking: ->
@commitAnnotation()
# transition back to mark
@transitionTo 'mark', {},
subject_set_id: @props.subject.subject_set_id
selected_subject_id: @props.subject.parent_subject_id
page: @props.subjectCurrentPage
render: ->
buttons = []
# TK: buttons.push <PrevButton onClick={=> console.log "Prev button clicked!"} />
if @props.onShowHelp?
buttons.push <HelpButton onClick={@props.onShowHelp} key="help-button"/>
buttons.push <SmallButton label='Skip' key="skip-button" className="secondary floated-left" onClick={@skipToNext} />
if @props.onBadSubject?
buttons.push <BadSubjectButton key="bad-subject-button" label={"Bad #{@props.project.term('mark')}"} active={@props.badSubject} onClick={@props.onBadSubject} />
if @props.onIllegibleSubject?
buttons.push <IllegibleSubjectButton active={@props.illegibleSubject} onClick={@props.onIllegibleSubject} key="illegible-subject-button"/>
buttonLabel =
if @props.task.next_task?
'Continue'
else
if @props.isLastSubject and ( @props.transcribeMode is 'page' or @props.transcribeMode is 'single' )
'Save and Return to Marking'
else if @props.transcribeMode is 'verify'
'Save and Return to Verify'
else 'Save'
buttons.push <SmallButton label={buttonLabel} key="done-button" onClick={@commitAnnotation} />
{x,y} = @getPosition @props.subject.region
<DraggableModal
x={x*@props.scale.horizontal + @props.scale.offsetX}
y={y*@props.scale.vertical + @props.scale.offsetY}
buttons={buttons}
classes="transcribe-tool composite"
>
<label>{@props.task.instruction}</label>
{
for sub_tool, index in @props.task.tool_config.options
ToolComponent = @props.transcribeTools[sub_tool.tool]
annotation_key = sub_tool.value
focus = annotation_key is @state.active_field_key
<ToolComponent
key={index}
task={@props.task}
tool_config={sub_tool.tool_config}
subject={@props.subject}
workflow={@props.workflow}
standalone={false}
viewerSize={@props.viewerSize}
onChange={@handleChange}
onComplete={@handleCompletedField}
onInputFocus={@handleFieldFocus}
label={sub_tool.label ? ''}
focus={focus}
scale={@props.scale}
annotation_key={annotation_key}
annotation={@state.annotation}
/>
}
</DraggableModal>
module.exports = CompositeTool
| 126707 | React = require 'react'
{Navigation} = require 'react-router'
DraggableModal = require 'components/draggable-modal'
DoneButton = require './done-button'
SmallButton = require 'components/buttons/small-button'
PrevButton = require './prev-button'
HelpButton = require 'components/buttons/help-button'
BadSubjectButton = require 'components/buttons/bad-subject-button'
IllegibleSubjectButton = require 'components/buttons/illegible-subject-button'
SkippableToolMixin = require 'lib/skippable-tool-mixin'
CompositeTool = React.createClass
displayName: 'CompositeTool'
mixins: [Navigation, SkippableToolMixin]
getInitialState: ->
initAnnotation = @props.annotation ? {}
if @props.subject.data.ocrText
# first annotation
annotation_key = @props.task.tool_config.options[0].value
initAnnotation[annotation_key] = @props.subject.data.ocrText
return {
annotation: initAnnotation,
viewerSize: @props.viewerSize,
active_field_key: (c.value for c in @props.task.tool_config.options)[0]
}
getDefaultProps: ->
annotation: {}
task: null
subject: null
# this can go into a mixin? (common across all transcribe tools)
getPosition: (data) ->
return x: null, y: null if ! data.x?
yPad = 20
switch data.toolName
when 'rectangleTool'
x = data.x
y = parseFloat(data.y) + parseFloat(data.height) + yPad
when 'textRowTool'
x = data.x
y = data.yLower + yPad
else # default for pointTool
x = data.x
y = data.y + yPad if data.y?
x = @props.subject.width / 2 if ! x?
y = @props.subject.height / 2 if ! y?
return {x,y}
onViewerResize: (size) ->
@setState
viewerSize: size
handleChange: (annotation) ->
@setState annotation: annotation
@props.onChange annotation # forward annotation to parent
# Fires when user hits <enter> in an input
# If there are more inputs, move focus to next input
# Otherwise commit annotation (which is default behavior when there's only one input
handleCompletedField: ->
field_keys = (c.value for c in @props.task.tool_config.options)
next_field_key = field_keys[ field_keys.indexOf(@state.active_field_key) + 1 ]
if next_field_key?
@setState active_field_key: next_field_key
, =>
@forceUpdate()
else
@commitAnnotation()
# User moved focus to an input:
handleFieldFocus: (annotation_key) ->
@setState active_field_key: annotation_key
# this can go into a mixin? (common across all transcribe tools)
commitAnnotation: ->
@props.onComplete @state.annotation
if @props.transcribeMode is 'page' or @props.transcribeMode is 'single'
if @props.isLastSubject and not @props.task.next_task?
@props.returnToMarking()
else if @props.transcribeMode == 'verify'
@transitionTo 'verify'
# this can go into a mixin? (common across all transcribe tools)
returnToMarking: ->
@commitAnnotation()
# transition back to mark
@transitionTo 'mark', {},
subject_set_id: @props.subject.subject_set_id
selected_subject_id: @props.subject.parent_subject_id
page: @props.subjectCurrentPage
render: ->
buttons = []
# TK: buttons.push <PrevButton onClick={=> console.log "Prev button clicked!"} />
if @props.onShowHelp?
buttons.push <HelpButton onClick={@props.onShowHelp} key="help-button"/>
buttons.push <SmallButton label='Skip' key="skip-button" className="secondary floated-left" onClick={@skipToNext} />
if @props.onBadSubject?
buttons.push <BadSubjectButton key="bad-subject-button" label={"Bad #{@props.project.term('mark')}"} active={@props.badSubject} onClick={@props.onBadSubject} />
if @props.onIllegibleSubject?
buttons.push <IllegibleSubjectButton active={@props.illegibleSubject} onClick={@props.onIllegibleSubject} key="illegible-subject-button"/>
buttonLabel =
if @props.task.next_task?
'Continue'
else
if @props.isLastSubject and ( @props.transcribeMode is 'page' or @props.transcribeMode is 'single' )
'Save and Return to Marking'
else if @props.transcribeMode is 'verify'
'Save and Return to Verify'
else 'Save'
buttons.push <SmallButton label={buttonLabel} key="done-button" onClick={@commitAnnotation} />
{x,y} = @getPosition @props.subject.region
<DraggableModal
x={x*@props.scale.horizontal + @props.scale.offsetX}
y={y*@props.scale.vertical + @props.scale.offsetY}
buttons={buttons}
classes="transcribe-tool composite"
>
<label>{@props.task.instruction}</label>
{
for sub_tool, index in @props.task.tool_config.options
ToolComponent = @props.transcribeTools[sub_tool.tool]
annotation_key = sub_tool.value
focus = annotation_key is @state.active_field_key
<ToolComponent
key={<KEY>}
task={@props.task}
tool_config={sub_tool.tool_config}
subject={@props.subject}
workflow={@props.workflow}
standalone={false}
viewerSize={@props.viewerSize}
onChange={@handleChange}
onComplete={@handleCompletedField}
onInputFocus={@handleFieldFocus}
label={sub_tool.label ? ''}
focus={focus}
scale={@props.scale}
annotation_key={annotation_key}
annotation={@state.annotation}
/>
}
</DraggableModal>
module.exports = CompositeTool
| true | React = require 'react'
{Navigation} = require 'react-router'
DraggableModal = require 'components/draggable-modal'
DoneButton = require './done-button'
SmallButton = require 'components/buttons/small-button'
PrevButton = require './prev-button'
HelpButton = require 'components/buttons/help-button'
BadSubjectButton = require 'components/buttons/bad-subject-button'
IllegibleSubjectButton = require 'components/buttons/illegible-subject-button'
SkippableToolMixin = require 'lib/skippable-tool-mixin'
CompositeTool = React.createClass
displayName: 'CompositeTool'
mixins: [Navigation, SkippableToolMixin]
getInitialState: ->
initAnnotation = @props.annotation ? {}
if @props.subject.data.ocrText
# first annotation
annotation_key = @props.task.tool_config.options[0].value
initAnnotation[annotation_key] = @props.subject.data.ocrText
return {
annotation: initAnnotation,
viewerSize: @props.viewerSize,
active_field_key: (c.value for c in @props.task.tool_config.options)[0]
}
getDefaultProps: ->
annotation: {}
task: null
subject: null
# this can go into a mixin? (common across all transcribe tools)
getPosition: (data) ->
return x: null, y: null if ! data.x?
yPad = 20
switch data.toolName
when 'rectangleTool'
x = data.x
y = parseFloat(data.y) + parseFloat(data.height) + yPad
when 'textRowTool'
x = data.x
y = data.yLower + yPad
else # default for pointTool
x = data.x
y = data.y + yPad if data.y?
x = @props.subject.width / 2 if ! x?
y = @props.subject.height / 2 if ! y?
return {x,y}
onViewerResize: (size) ->
@setState
viewerSize: size
handleChange: (annotation) ->
@setState annotation: annotation
@props.onChange annotation # forward annotation to parent
# Fires when user hits <enter> in an input
# If there are more inputs, move focus to next input
# Otherwise commit annotation (which is default behavior when there's only one input
handleCompletedField: ->
field_keys = (c.value for c in @props.task.tool_config.options)
next_field_key = field_keys[ field_keys.indexOf(@state.active_field_key) + 1 ]
if next_field_key?
@setState active_field_key: next_field_key
, =>
@forceUpdate()
else
@commitAnnotation()
# User moved focus to an input:
handleFieldFocus: (annotation_key) ->
@setState active_field_key: annotation_key
# this can go into a mixin? (common across all transcribe tools)
commitAnnotation: ->
@props.onComplete @state.annotation
if @props.transcribeMode is 'page' or @props.transcribeMode is 'single'
if @props.isLastSubject and not @props.task.next_task?
@props.returnToMarking()
else if @props.transcribeMode == 'verify'
@transitionTo 'verify'
# this can go into a mixin? (common across all transcribe tools)
returnToMarking: ->
@commitAnnotation()
# transition back to mark
@transitionTo 'mark', {},
subject_set_id: @props.subject.subject_set_id
selected_subject_id: @props.subject.parent_subject_id
page: @props.subjectCurrentPage
render: ->
buttons = []
# TK: buttons.push <PrevButton onClick={=> console.log "Prev button clicked!"} />
if @props.onShowHelp?
buttons.push <HelpButton onClick={@props.onShowHelp} key="help-button"/>
buttons.push <SmallButton label='Skip' key="skip-button" className="secondary floated-left" onClick={@skipToNext} />
if @props.onBadSubject?
buttons.push <BadSubjectButton key="bad-subject-button" label={"Bad #{@props.project.term('mark')}"} active={@props.badSubject} onClick={@props.onBadSubject} />
if @props.onIllegibleSubject?
buttons.push <IllegibleSubjectButton active={@props.illegibleSubject} onClick={@props.onIllegibleSubject} key="illegible-subject-button"/>
buttonLabel =
if @props.task.next_task?
'Continue'
else
if @props.isLastSubject and ( @props.transcribeMode is 'page' or @props.transcribeMode is 'single' )
'Save and Return to Marking'
else if @props.transcribeMode is 'verify'
'Save and Return to Verify'
else 'Save'
buttons.push <SmallButton label={buttonLabel} key="done-button" onClick={@commitAnnotation} />
{x,y} = @getPosition @props.subject.region
<DraggableModal
x={x*@props.scale.horizontal + @props.scale.offsetX}
y={y*@props.scale.vertical + @props.scale.offsetY}
buttons={buttons}
classes="transcribe-tool composite"
>
<label>{@props.task.instruction}</label>
{
for sub_tool, index in @props.task.tool_config.options
ToolComponent = @props.transcribeTools[sub_tool.tool]
annotation_key = sub_tool.value
focus = annotation_key is @state.active_field_key
<ToolComponent
key={PI:KEY:<KEY>END_PI}
task={@props.task}
tool_config={sub_tool.tool_config}
subject={@props.subject}
workflow={@props.workflow}
standalone={false}
viewerSize={@props.viewerSize}
onChange={@handleChange}
onComplete={@handleCompletedField}
onInputFocus={@handleFieldFocus}
label={sub_tool.label ? ''}
focus={focus}
scale={@props.scale}
annotation_key={annotation_key}
annotation={@state.annotation}
/>
}
</DraggableModal>
module.exports = CompositeTool
|
[
{
"context": "- - - - - - - - - - - - - - - #\n# Copyright © 2015 Denis Luchkin-Zhou #\n# See L",
"end": 277,
"score": 0.9998669028282166,
"start": 259,
"tag": "NAME",
"value": "Denis Luchkin-Zhou"
}
] | gulp/client-clean.coffee | jluchiji/tranzit | 0 | # --------------------------------------------------------------------------- #
# wyvernzora.ninja build script. #
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
# Copyright © 2015 Denis Luchkin-Zhou #
# See LICENSE.md for terms of distribution. #
# --------------------------------------------------------------------------- #
module.exports = (gulp, config) ->
gulp.task 'client:clean', ->
del = require 'del'
vinyl = require 'vinyl-paths'
gulp.src './dist/app', read: no
.pipe vinyl del
| 97910 | # --------------------------------------------------------------------------- #
# wyvernzora.ninja build script. #
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
# Copyright © 2015 <NAME> #
# See LICENSE.md for terms of distribution. #
# --------------------------------------------------------------------------- #
module.exports = (gulp, config) ->
gulp.task 'client:clean', ->
del = require 'del'
vinyl = require 'vinyl-paths'
gulp.src './dist/app', read: no
.pipe vinyl del
| true | # --------------------------------------------------------------------------- #
# wyvernzora.ninja build script. #
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
# Copyright © 2015 PI:NAME:<NAME>END_PI #
# See LICENSE.md for terms of distribution. #
# --------------------------------------------------------------------------- #
module.exports = (gulp, config) ->
gulp.task 'client:clean', ->
del = require 'del'
vinyl = require 'vinyl-paths'
gulp.src './dist/app', read: no
.pipe vinyl del
|
[
{
"context": ".get()\nPlaylist = require './playlist'\n\nKEY_F12 = 123\n\nmodule.exports = (angular, document, Notificatio",
"end": 188,
"score": 0.7704259753227234,
"start": 185,
"tag": "KEY",
"value": "123"
}
] | src/player.coffee | hendricha/tube-sound | 0 | fs = require 'fs'
$ = require 'jQuery'
youtubeVideo = require 'youtube-video'
window = global.window.nwDispatcher.requireNwGui().Window.get()
Playlist = require './playlist'
KEY_F12 = 123
module.exports = (angular, document, Notification) ->
global.document = document
$(document).on 'keyup', (event) ->
return unless event.keyCode is 123
event.stopPropagation()
window.showDevTools()
player = angular.module 'player', []
player.controller 'controlsController', ($scope, $rootScope) ->
setState = (state) ->
$scope.state = state
$scope.$evalAsync()
videoPlayer = null
durationInterval = null
videoPlayerOptions =
selector: true
elementId: 'videoPlayer'
width: 1
height: 1
autoplay: true
onPlay: -> setState 'playing'
onPause: -> setState 'paused'
onEnd: -> $scope.next()
$scope.state = 'paused'
$scope.shuffle = no
$scope.repeat = no
$scope.videoLength = 0
$scope.timeElapsed = 0
$scope.progress = 0
$scope.filter = ''
$scope.toggleAdd = ->
$rootScope.$broadcast 'requestToggleAdd'
$scope.toggleShuffle = ->
$scope.shuffle = not $scope.shuffle
$rootScope.$broadcast 'requestShuffleChange', $scope.shuffle
$scope.toggleRepeat = ->
$scope.repeat = not $scope.repeat
$rootScope.$broadcast 'requestRepeatChange', $scope.repeat
$scope.calculateTime = require './calculateTime'
setProgress = (progress) ->
$('.progress .bar').css width: "#{progress * 100}%"
$rootScope.$on 'selectedSong', (event, song) ->
window.title = song.title
setState 'loading'
$('#videoPlayer').remove()
$('#controls').append '<div id="videoPlayer"></div>'
youtubeVideo song.YTId, videoPlayerOptions, (err, player) ->
return console.log err if err
notification = new Notification "Tube Sound",
icon: "http://img.youtube.com/vi/#{ song.YTId }/default.jpg"
body: song.title
$scope.videoLength = player.getDuration()
durationInterval = setInterval ->
$scope.timeElapsed = videoPlayer.getCurrentTime()
setProgress $scope.timeElapsed / $scope.videoLength
$scope.$apply()
, 100
videoPlayer = player
$scope.jump = ($event) ->
videoPlayer?.seekTo parseInt $event.pageX / $(document).width() * $scope.videoLength
$scope.playPause = ->
switch $scope.state
when 'paused' then videoPlayer?.playVideo()
when 'playing' then videoPlayer?.pauseVideo()
$scope.add = require './add'
$scope.previous = -> $rootScope.$broadcast 'requestPreviousSong'
$scope.next = -> $rootScope.$broadcast 'requestNextSong'
$scope.setFilter = -> $rootScope.$broadcast 'requestFilter', $scope.filter
player.controller 'playlistController', ($scope, $rootScope) ->
$scope.playlist = new Playlist $rootScope
$rootScope.$on 'playlistReloaded', -> $scope.$evalAsync()
$rootScope.$on 'requestPreviousSong', -> $scope.playlist.playPreviousSong()
$rootScope.$on 'requestNextSong', -> $scope.playlist.playNextSong()
$rootScope.$on 'requestRepeatChange', (event, repeat) ->
$scope.playlist.repeat = repeat
$rootScope.$on 'requestShuffleChange', (event, shuffle) ->
$scope.playlist.shuffle = shuffle
$rootScope.$on 'requestFilter', (event, filter) ->
$scope.playlist.setFilter filter
player.controller 'addController', require './add'
player.controller 'confirmController', require('./confirm').controller
| 51677 | fs = require 'fs'
$ = require 'jQuery'
youtubeVideo = require 'youtube-video'
window = global.window.nwDispatcher.requireNwGui().Window.get()
Playlist = require './playlist'
KEY_F12 = <KEY>
module.exports = (angular, document, Notification) ->
global.document = document
$(document).on 'keyup', (event) ->
return unless event.keyCode is 123
event.stopPropagation()
window.showDevTools()
player = angular.module 'player', []
player.controller 'controlsController', ($scope, $rootScope) ->
setState = (state) ->
$scope.state = state
$scope.$evalAsync()
videoPlayer = null
durationInterval = null
videoPlayerOptions =
selector: true
elementId: 'videoPlayer'
width: 1
height: 1
autoplay: true
onPlay: -> setState 'playing'
onPause: -> setState 'paused'
onEnd: -> $scope.next()
$scope.state = 'paused'
$scope.shuffle = no
$scope.repeat = no
$scope.videoLength = 0
$scope.timeElapsed = 0
$scope.progress = 0
$scope.filter = ''
$scope.toggleAdd = ->
$rootScope.$broadcast 'requestToggleAdd'
$scope.toggleShuffle = ->
$scope.shuffle = not $scope.shuffle
$rootScope.$broadcast 'requestShuffleChange', $scope.shuffle
$scope.toggleRepeat = ->
$scope.repeat = not $scope.repeat
$rootScope.$broadcast 'requestRepeatChange', $scope.repeat
$scope.calculateTime = require './calculateTime'
setProgress = (progress) ->
$('.progress .bar').css width: "#{progress * 100}%"
$rootScope.$on 'selectedSong', (event, song) ->
window.title = song.title
setState 'loading'
$('#videoPlayer').remove()
$('#controls').append '<div id="videoPlayer"></div>'
youtubeVideo song.YTId, videoPlayerOptions, (err, player) ->
return console.log err if err
notification = new Notification "Tube Sound",
icon: "http://img.youtube.com/vi/#{ song.YTId }/default.jpg"
body: song.title
$scope.videoLength = player.getDuration()
durationInterval = setInterval ->
$scope.timeElapsed = videoPlayer.getCurrentTime()
setProgress $scope.timeElapsed / $scope.videoLength
$scope.$apply()
, 100
videoPlayer = player
$scope.jump = ($event) ->
videoPlayer?.seekTo parseInt $event.pageX / $(document).width() * $scope.videoLength
$scope.playPause = ->
switch $scope.state
when 'paused' then videoPlayer?.playVideo()
when 'playing' then videoPlayer?.pauseVideo()
$scope.add = require './add'
$scope.previous = -> $rootScope.$broadcast 'requestPreviousSong'
$scope.next = -> $rootScope.$broadcast 'requestNextSong'
$scope.setFilter = -> $rootScope.$broadcast 'requestFilter', $scope.filter
player.controller 'playlistController', ($scope, $rootScope) ->
$scope.playlist = new Playlist $rootScope
$rootScope.$on 'playlistReloaded', -> $scope.$evalAsync()
$rootScope.$on 'requestPreviousSong', -> $scope.playlist.playPreviousSong()
$rootScope.$on 'requestNextSong', -> $scope.playlist.playNextSong()
$rootScope.$on 'requestRepeatChange', (event, repeat) ->
$scope.playlist.repeat = repeat
$rootScope.$on 'requestShuffleChange', (event, shuffle) ->
$scope.playlist.shuffle = shuffle
$rootScope.$on 'requestFilter', (event, filter) ->
$scope.playlist.setFilter filter
player.controller 'addController', require './add'
player.controller 'confirmController', require('./confirm').controller
| true | fs = require 'fs'
$ = require 'jQuery'
youtubeVideo = require 'youtube-video'
window = global.window.nwDispatcher.requireNwGui().Window.get()
Playlist = require './playlist'
KEY_F12 = PI:KEY:<KEY>END_PI
module.exports = (angular, document, Notification) ->
global.document = document
$(document).on 'keyup', (event) ->
return unless event.keyCode is 123
event.stopPropagation()
window.showDevTools()
player = angular.module 'player', []
player.controller 'controlsController', ($scope, $rootScope) ->
setState = (state) ->
$scope.state = state
$scope.$evalAsync()
videoPlayer = null
durationInterval = null
videoPlayerOptions =
selector: true
elementId: 'videoPlayer'
width: 1
height: 1
autoplay: true
onPlay: -> setState 'playing'
onPause: -> setState 'paused'
onEnd: -> $scope.next()
$scope.state = 'paused'
$scope.shuffle = no
$scope.repeat = no
$scope.videoLength = 0
$scope.timeElapsed = 0
$scope.progress = 0
$scope.filter = ''
$scope.toggleAdd = ->
$rootScope.$broadcast 'requestToggleAdd'
$scope.toggleShuffle = ->
$scope.shuffle = not $scope.shuffle
$rootScope.$broadcast 'requestShuffleChange', $scope.shuffle
$scope.toggleRepeat = ->
$scope.repeat = not $scope.repeat
$rootScope.$broadcast 'requestRepeatChange', $scope.repeat
$scope.calculateTime = require './calculateTime'
setProgress = (progress) ->
$('.progress .bar').css width: "#{progress * 100}%"
$rootScope.$on 'selectedSong', (event, song) ->
window.title = song.title
setState 'loading'
$('#videoPlayer').remove()
$('#controls').append '<div id="videoPlayer"></div>'
youtubeVideo song.YTId, videoPlayerOptions, (err, player) ->
return console.log err if err
notification = new Notification "Tube Sound",
icon: "http://img.youtube.com/vi/#{ song.YTId }/default.jpg"
body: song.title
$scope.videoLength = player.getDuration()
durationInterval = setInterval ->
$scope.timeElapsed = videoPlayer.getCurrentTime()
setProgress $scope.timeElapsed / $scope.videoLength
$scope.$apply()
, 100
videoPlayer = player
$scope.jump = ($event) ->
videoPlayer?.seekTo parseInt $event.pageX / $(document).width() * $scope.videoLength
$scope.playPause = ->
switch $scope.state
when 'paused' then videoPlayer?.playVideo()
when 'playing' then videoPlayer?.pauseVideo()
$scope.add = require './add'
$scope.previous = -> $rootScope.$broadcast 'requestPreviousSong'
$scope.next = -> $rootScope.$broadcast 'requestNextSong'
$scope.setFilter = -> $rootScope.$broadcast 'requestFilter', $scope.filter
player.controller 'playlistController', ($scope, $rootScope) ->
$scope.playlist = new Playlist $rootScope
$rootScope.$on 'playlistReloaded', -> $scope.$evalAsync()
$rootScope.$on 'requestPreviousSong', -> $scope.playlist.playPreviousSong()
$rootScope.$on 'requestNextSong', -> $scope.playlist.playNextSong()
$rootScope.$on 'requestRepeatChange', (event, repeat) ->
$scope.playlist.repeat = repeat
$rootScope.$on 'requestShuffleChange', (event, shuffle) ->
$scope.playlist.shuffle = shuffle
$rootScope.$on 'requestFilter', (event, filter) ->
$scope.playlist.setFilter filter
player.controller 'addController', require './add'
player.controller 'confirmController', require('./confirm').controller
|
[
{
"context": "# Copyright (c) 2013 Rod Vagg, MIT License\n# Copyright (c) 2014 Riceball LEE, M",
"end": 29,
"score": 0.9998617172241211,
"start": 21,
"tag": "NAME",
"value": "Rod Vagg"
},
{
"context": "c) 2013 Rod Vagg, MIT License\n# Copyright (c) 2014 Riceball LEE, MIT License\nxte... | src/abstract-iterator.coffee | snowyu/node-abstract-iterator | 0 | # Copyright (c) 2013 Rod Vagg, MIT License
# Copyright (c) 2014 Riceball LEE, MIT License
xtend = require("xtend")
minimatch = require('minimatch')
Errors = require('abstract-error')
consts = require('./consts')
inherits = require("inherits-ex")
isArray = require("util-ex/lib/is/type/array")
isString = require("util-ex/lib/is/type/string")
isFunction = require("util-ex/lib/is/type/function")
isBuffer = require("util-ex/lib/is/type/buffer")
AbstractError = Errors.AbstractError
NotImplementedError = Errors.NotImplementedError
NotFoundError = Errors.NotFoundError
InvalidArgumentError = Errors.InvalidArgumentError
createError = Errors.createError
AlreadyEndError = createError("AlreadyEnd", 0x53)
AlreadyRunError = createError("AlreadyRun", 0x54)
FILTER_INCLUDED = consts.FILTER_INCLUDED
FILTER_EXCLUDED = consts.FILTER_EXCLUDED
FILTER_STOPPED = consts.FILTER_STOPPED
Errors.AlreadyEndError = AlreadyEndError
Errors.AlreadyRunError = AlreadyRunError
module.exports = class AbstractIterator
@AlreadyEndError: AlreadyEndError
@AlreadyRunError: AlreadyRunError
constructor: (@db, options) ->
@_ended = false
@_nexting = false
@options = @initOptions(options)
options = @options
isKeysIterator = options and isArray options.range
if isKeysIterator
@_resultOfKeys = options.range
@_indexOfKeys = -1
return not isKeysIterator
initOptions: (options)->
options = xtend(options)
options.reverse = !!options.reverse
range = options.range
if isString(range)
range = range.trim()
if range.length >= 2
skipStart = if !options.reverse then range[0] is "(" else range[range.length-1] is ")"
skipEnd = if !options.reverse then range[range.length-1] is ")" else range[0] is "("
range = range.substring(1, range.length-1)
range = range.split(",").map (item)->
item = item.trim()
item = null if item is ""
return item
if !options.reverse
[start,end] = range
startOp = 'gt'
endOp = 'lt'
else
[end, start] = range
startOp = 'lt'
endOp = 'gt'
startOp = startOp + 'e' unless skipStart
endOp = endOp + 'e' unless skipEnd
options[startOp] = start
options[endOp] = end
options.keys = options.keys isnt false
options.values = options.values isnt false
options.limit = (if "limit" of options then options.limit else -1)
options.keyAsBuffer = options.keyAsBuffer is true
options.valueAsBuffer = options.valueAsBuffer is true
if options.next
if options.reverse isnt true
options.gt = options.next
options.gte= options.next
else
options.lt = options.next
options.lte= options.next
["start", "end", "gt", "gte", "lt", "lte"].forEach (o) ->
if options[o] and isBuffer(options[o]) and options[o].length is 0
delete options[o]
if options.keys and isString(options.match) and options.match.length > 0
@match = (item)->
minimatch(item[0], options.match)
if isFunction(options.filter)
@filter = (item)->
options.filter item[0], item[1]
@encodeOptions options
options
encodeOptions: (options)->
decodeResult: (result)->
_next: (callback) ->
self = this
if @_nextSync
setImmediate ->
try
result = self._nextSync()
self._nexting = false
catch e
self._nexting = false
callback e
return
if result
callback null, result[0], result[1]
else
callback()
return
else
setImmediate ->
self._nexting = false
callback()
return
_end: (callback) ->
self = this
if @_endSync
setImmediate ->
try
result = self._endSync()
callback null, result
catch e
callback e
else
setImmediate ->
callback()
nextKeysSync: ->
@_nexting = true
if @_indexOfKeys is -1
@_resultOfKeys = @db._mGetSync @_resultOfKeys, @options
@_indexOfKeys++
result = @_indexOfKeys >= 0 and @_indexOfKeys < @_resultOfKeys.length
if result
result = @_resultOfKeys.slice(@_indexOfKeys, @_indexOfKeys+=2)
@decodeResult result
result =
key: result[0]
value: result[1]
@_nexting = false
return result
nextSync: ->
return throw new AlreadyEndError("cannot call next() after end()") if @_ended
return throw new AlreadyRunError("cannot call next() before previous next() has completed") if @_nexting
return false if @_filterStopped
if @_indexOfKeys?
return @nextKeysSync()
else if @_nextSync
@_nexting = true
result = @_nextSync()
if result isnt false
@decodeResult result
if @filter then switch @filter(result)
when FILTER_EXCLUDED
# skip this and read the next.
@_nexting = false
@nextSync()
return
when FILTER_STOPPED #halt
@_filterStopped = true
if @match and not @match(result)
@_nexting = false
@nextSync()
return
result =
key: result[0]
value: result[1]
@last = result[0]
@_nexting = false
return result
else
throw new NotImplementedError()
_endKeys: ->
delete @_resultOfKeys
@_indexOfKeys = -2
# @_ended = true
freeSync: ->
if @_indexOfKeys?
@_endKeys()
if @_endSync
@_ended = true
return @_endSync()
else
throw new NotImplementedError()
endSync: @::freeSync
nextKeys: (callback) ->
@_nexting = true
if @_indexOfKeys is -1
self = this
@db._mGet @_resultOfKeys, @options, (err, arr)->
self._nexting = false
return callback(err) if err
self._resultOfKeys = arr
self._indexOfKeys++
self.next(callback)
return @
else if @_indexOfKeys >= 0 and @_indexOfKeys < @_resultOfKeys.length
result = @_resultOfKeys.slice(@_indexOfKeys, @_indexOfKeys+=2)
@decodeResult result
@_nexting = false
else
result = false
@_nexting = false
if result is false
callback()
else
callback(undefined, result[0], result[1])
@
next: (callback) ->
throw new InvalidArgumentError("next() requires a callback argument") unless typeof callback is "function"
return callback(new AlreadyEndError("cannot call next() after end()")) if @_ended
return callback(new AlreadyRunError("cannot call next() before previous next() has completed")) if @_nexting
return callback() if @_filterStopped
if @_indexOfKeys?
@nextKeys callback
else
@_nexting = true
self = this
@_next (err, key, value)->
self._nexting = false
if !err and (key? or value?)
result = [key, value]
self.decodeResult result
if self.filter then switch self.filter(result)
when FILTER_EXCLUDED
# skip this and read the next.
self.next callback
return
when FILTER_STOPPED #halt
self._filterStopped = true
if self.match and not self.match(result)
self.next callback
return
key = result[0]
value = result[1]
self.last = result[0]
callback.apply null, arguments
@
free: (callback) ->
throw new InvalidArgumentError("end() requires a callback argument") unless typeof callback is "function"
return callback(new AlreadyEndError("end() already called on iterator")) if @_ended
if @_indexOfKeys?
@_endKeys()
@_ended = true
@_end callback
end: @::free
| 73178 | # Copyright (c) 2013 <NAME>, MIT License
# Copyright (c) 2014 <NAME>, MIT License
xtend = require("xtend")
minimatch = require('minimatch')
Errors = require('abstract-error')
consts = require('./consts')
inherits = require("inherits-ex")
isArray = require("util-ex/lib/is/type/array")
isString = require("util-ex/lib/is/type/string")
isFunction = require("util-ex/lib/is/type/function")
isBuffer = require("util-ex/lib/is/type/buffer")
AbstractError = Errors.AbstractError
NotImplementedError = Errors.NotImplementedError
NotFoundError = Errors.NotFoundError
InvalidArgumentError = Errors.InvalidArgumentError
createError = Errors.createError
AlreadyEndError = createError("AlreadyEnd", 0x53)
AlreadyRunError = createError("AlreadyRun", 0x54)
FILTER_INCLUDED = consts.FILTER_INCLUDED
FILTER_EXCLUDED = consts.FILTER_EXCLUDED
FILTER_STOPPED = consts.FILTER_STOPPED
Errors.AlreadyEndError = AlreadyEndError
Errors.AlreadyRunError = AlreadyRunError
module.exports = class AbstractIterator
@AlreadyEndError: AlreadyEndError
@AlreadyRunError: AlreadyRunError
constructor: (@db, options) ->
@_ended = false
@_nexting = false
@options = @initOptions(options)
options = @options
isKeysIterator = options and isArray options.range
if isKeysIterator
@_resultOfKeys = options.range
@_indexOfKeys = -1
return not isKeysIterator
initOptions: (options)->
options = xtend(options)
options.reverse = !!options.reverse
range = options.range
if isString(range)
range = range.trim()
if range.length >= 2
skipStart = if !options.reverse then range[0] is "(" else range[range.length-1] is ")"
skipEnd = if !options.reverse then range[range.length-1] is ")" else range[0] is "("
range = range.substring(1, range.length-1)
range = range.split(",").map (item)->
item = item.trim()
item = null if item is ""
return item
if !options.reverse
[start,end] = range
startOp = 'gt'
endOp = 'lt'
else
[end, start] = range
startOp = 'lt'
endOp = 'gt'
startOp = startOp + 'e' unless skipStart
endOp = endOp + 'e' unless skipEnd
options[startOp] = start
options[endOp] = end
options.keys = options.keys isnt false
options.values = options.values isnt false
options.limit = (if "limit" of options then options.limit else -1)
options.keyAsBuffer = options.keyAsBuffer is true
options.valueAsBuffer = options.valueAsBuffer is true
if options.next
if options.reverse isnt true
options.gt = options.next
options.gte= options.next
else
options.lt = options.next
options.lte= options.next
["start", "end", "gt", "gte", "lt", "lte"].forEach (o) ->
if options[o] and isBuffer(options[o]) and options[o].length is 0
delete options[o]
if options.keys and isString(options.match) and options.match.length > 0
@match = (item)->
minimatch(item[0], options.match)
if isFunction(options.filter)
@filter = (item)->
options.filter item[0], item[1]
@encodeOptions options
options
encodeOptions: (options)->
decodeResult: (result)->
_next: (callback) ->
self = this
if @_nextSync
setImmediate ->
try
result = self._nextSync()
self._nexting = false
catch e
self._nexting = false
callback e
return
if result
callback null, result[0], result[1]
else
callback()
return
else
setImmediate ->
self._nexting = false
callback()
return
_end: (callback) ->
self = this
if @_endSync
setImmediate ->
try
result = self._endSync()
callback null, result
catch e
callback e
else
setImmediate ->
callback()
nextKeysSync: ->
@_nexting = true
if @_indexOfKeys is -1
@_resultOfKeys = @db._mGetSync @_resultOfKeys, @options
@_indexOfKeys++
result = @_indexOfKeys >= 0 and @_indexOfKeys < @_resultOfKeys.length
if result
result = @_resultOfKeys.slice(@_indexOfKeys, @_indexOfKeys+=2)
@decodeResult result
result =
key: result[0]
value: result[1]
@_nexting = false
return result
nextSync: ->
return throw new AlreadyEndError("cannot call next() after end()") if @_ended
return throw new AlreadyRunError("cannot call next() before previous next() has completed") if @_nexting
return false if @_filterStopped
if @_indexOfKeys?
return @nextKeysSync()
else if @_nextSync
@_nexting = true
result = @_nextSync()
if result isnt false
@decodeResult result
if @filter then switch @filter(result)
when FILTER_EXCLUDED
# skip this and read the next.
@_nexting = false
@nextSync()
return
when FILTER_STOPPED #halt
@_filterStopped = true
if @match and not @match(result)
@_nexting = false
@nextSync()
return
result =
key: result[0]
value: result[1]
@last = result[0]
@_nexting = false
return result
else
throw new NotImplementedError()
_endKeys: ->
delete @_resultOfKeys
@_indexOfKeys = -2
# @_ended = true
freeSync: ->
if @_indexOfKeys?
@_endKeys()
if @_endSync
@_ended = true
return @_endSync()
else
throw new NotImplementedError()
endSync: @::freeSync
nextKeys: (callback) ->
@_nexting = true
if @_indexOfKeys is -1
self = this
@db._mGet @_resultOfKeys, @options, (err, arr)->
self._nexting = false
return callback(err) if err
self._resultOfKeys = arr
self._indexOfKeys++
self.next(callback)
return @
else if @_indexOfKeys >= 0 and @_indexOfKeys < @_resultOfKeys.length
result = @_resultOfKeys.slice(@_indexOfKeys, @_indexOfKeys+=2)
@decodeResult result
@_nexting = false
else
result = false
@_nexting = false
if result is false
callback()
else
callback(undefined, result[0], result[1])
@
next: (callback) ->
throw new InvalidArgumentError("next() requires a callback argument") unless typeof callback is "function"
return callback(new AlreadyEndError("cannot call next() after end()")) if @_ended
return callback(new AlreadyRunError("cannot call next() before previous next() has completed")) if @_nexting
return callback() if @_filterStopped
if @_indexOfKeys?
@nextKeys callback
else
@_nexting = true
self = this
@_next (err, key, value)->
self._nexting = false
if !err and (key? or value?)
result = [key, value]
self.decodeResult result
if self.filter then switch self.filter(result)
when FILTER_EXCLUDED
# skip this and read the next.
self.next callback
return
when FILTER_STOPPED #halt
self._filterStopped = true
if self.match and not self.match(result)
self.next callback
return
key = result[0]
value = result[1]
self.last = result[0]
callback.apply null, arguments
@
free: (callback) ->
throw new InvalidArgumentError("end() requires a callback argument") unless typeof callback is "function"
return callback(new AlreadyEndError("end() already called on iterator")) if @_ended
if @_indexOfKeys?
@_endKeys()
@_ended = true
@_end callback
end: @::free
| true | # Copyright (c) 2013 PI:NAME:<NAME>END_PI, MIT License
# Copyright (c) 2014 PI:NAME:<NAME>END_PI, MIT License
xtend = require("xtend")
minimatch = require('minimatch')
Errors = require('abstract-error')
consts = require('./consts')
inherits = require("inherits-ex")
isArray = require("util-ex/lib/is/type/array")
isString = require("util-ex/lib/is/type/string")
isFunction = require("util-ex/lib/is/type/function")
isBuffer = require("util-ex/lib/is/type/buffer")
AbstractError = Errors.AbstractError
NotImplementedError = Errors.NotImplementedError
NotFoundError = Errors.NotFoundError
InvalidArgumentError = Errors.InvalidArgumentError
createError = Errors.createError
AlreadyEndError = createError("AlreadyEnd", 0x53)
AlreadyRunError = createError("AlreadyRun", 0x54)
FILTER_INCLUDED = consts.FILTER_INCLUDED
FILTER_EXCLUDED = consts.FILTER_EXCLUDED
FILTER_STOPPED = consts.FILTER_STOPPED
Errors.AlreadyEndError = AlreadyEndError
Errors.AlreadyRunError = AlreadyRunError
module.exports = class AbstractIterator
@AlreadyEndError: AlreadyEndError
@AlreadyRunError: AlreadyRunError
constructor: (@db, options) ->
@_ended = false
@_nexting = false
@options = @initOptions(options)
options = @options
isKeysIterator = options and isArray options.range
if isKeysIterator
@_resultOfKeys = options.range
@_indexOfKeys = -1
return not isKeysIterator
initOptions: (options)->
options = xtend(options)
options.reverse = !!options.reverse
range = options.range
if isString(range)
range = range.trim()
if range.length >= 2
skipStart = if !options.reverse then range[0] is "(" else range[range.length-1] is ")"
skipEnd = if !options.reverse then range[range.length-1] is ")" else range[0] is "("
range = range.substring(1, range.length-1)
range = range.split(",").map (item)->
item = item.trim()
item = null if item is ""
return item
if !options.reverse
[start,end] = range
startOp = 'gt'
endOp = 'lt'
else
[end, start] = range
startOp = 'lt'
endOp = 'gt'
startOp = startOp + 'e' unless skipStart
endOp = endOp + 'e' unless skipEnd
options[startOp] = start
options[endOp] = end
options.keys = options.keys isnt false
options.values = options.values isnt false
options.limit = (if "limit" of options then options.limit else -1)
options.keyAsBuffer = options.keyAsBuffer is true
options.valueAsBuffer = options.valueAsBuffer is true
if options.next
if options.reverse isnt true
options.gt = options.next
options.gte= options.next
else
options.lt = options.next
options.lte= options.next
["start", "end", "gt", "gte", "lt", "lte"].forEach (o) ->
if options[o] and isBuffer(options[o]) and options[o].length is 0
delete options[o]
if options.keys and isString(options.match) and options.match.length > 0
@match = (item)->
minimatch(item[0], options.match)
if isFunction(options.filter)
@filter = (item)->
options.filter item[0], item[1]
@encodeOptions options
options
encodeOptions: (options)->
decodeResult: (result)->
_next: (callback) ->
self = this
if @_nextSync
setImmediate ->
try
result = self._nextSync()
self._nexting = false
catch e
self._nexting = false
callback e
return
if result
callback null, result[0], result[1]
else
callback()
return
else
setImmediate ->
self._nexting = false
callback()
return
_end: (callback) ->
self = this
if @_endSync
setImmediate ->
try
result = self._endSync()
callback null, result
catch e
callback e
else
setImmediate ->
callback()
nextKeysSync: ->
@_nexting = true
if @_indexOfKeys is -1
@_resultOfKeys = @db._mGetSync @_resultOfKeys, @options
@_indexOfKeys++
result = @_indexOfKeys >= 0 and @_indexOfKeys < @_resultOfKeys.length
if result
result = @_resultOfKeys.slice(@_indexOfKeys, @_indexOfKeys+=2)
@decodeResult result
result =
key: result[0]
value: result[1]
@_nexting = false
return result
nextSync: ->
return throw new AlreadyEndError("cannot call next() after end()") if @_ended
return throw new AlreadyRunError("cannot call next() before previous next() has completed") if @_nexting
return false if @_filterStopped
if @_indexOfKeys?
return @nextKeysSync()
else if @_nextSync
@_nexting = true
result = @_nextSync()
if result isnt false
@decodeResult result
if @filter then switch @filter(result)
when FILTER_EXCLUDED
# skip this and read the next.
@_nexting = false
@nextSync()
return
when FILTER_STOPPED #halt
@_filterStopped = true
if @match and not @match(result)
@_nexting = false
@nextSync()
return
result =
key: result[0]
value: result[1]
@last = result[0]
@_nexting = false
return result
else
throw new NotImplementedError()
_endKeys: ->
delete @_resultOfKeys
@_indexOfKeys = -2
# @_ended = true
freeSync: ->
if @_indexOfKeys?
@_endKeys()
if @_endSync
@_ended = true
return @_endSync()
else
throw new NotImplementedError()
endSync: @::freeSync
nextKeys: (callback) ->
@_nexting = true
if @_indexOfKeys is -1
self = this
@db._mGet @_resultOfKeys, @options, (err, arr)->
self._nexting = false
return callback(err) if err
self._resultOfKeys = arr
self._indexOfKeys++
self.next(callback)
return @
else if @_indexOfKeys >= 0 and @_indexOfKeys < @_resultOfKeys.length
result = @_resultOfKeys.slice(@_indexOfKeys, @_indexOfKeys+=2)
@decodeResult result
@_nexting = false
else
result = false
@_nexting = false
if result is false
callback()
else
callback(undefined, result[0], result[1])
@
next: (callback) ->
throw new InvalidArgumentError("next() requires a callback argument") unless typeof callback is "function"
return callback(new AlreadyEndError("cannot call next() after end()")) if @_ended
return callback(new AlreadyRunError("cannot call next() before previous next() has completed")) if @_nexting
return callback() if @_filterStopped
if @_indexOfKeys?
@nextKeys callback
else
@_nexting = true
self = this
@_next (err, key, value)->
self._nexting = false
if !err and (key? or value?)
result = [key, value]
self.decodeResult result
if self.filter then switch self.filter(result)
when FILTER_EXCLUDED
# skip this and read the next.
self.next callback
return
when FILTER_STOPPED #halt
self._filterStopped = true
if self.match and not self.match(result)
self.next callback
return
key = result[0]
value = result[1]
self.last = result[0]
callback.apply null, arguments
@
free: (callback) ->
throw new InvalidArgumentError("end() requires a callback argument") unless typeof callback is "function"
return callback(new AlreadyEndError("end() already called on iterator")) if @_ended
if @_indexOfKeys?
@_endKeys()
@_ended = true
@_end callback
end: @::free
|
[
{
"context": " expect(error).to.be.eql({\n key: 'private-space-members',\n values: {\n max_membe",
"end": 4351,
"score": 0.9986604452133179,
"start": 4330,
"tag": "KEY",
"value": "private-space-members"
},
{
"context": " expect(error).to... | app/modules/projects/create/import/import-project.service.spec.coffee | threefoldtech/Threefold-Circles-front | 0 | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: projects/create/import/import-project.service.spec.coffee
###
describe "tgImportProjectService", ->
$provide = null
importProjectService = null
mocks = {}
_mockCurrentUserService = ->
mocks.currentUserService = {
loadProjects: sinon.stub(),
getUser: sinon.stub(),
canCreatePrivateProjects: sinon.stub(),
canCreatePublicProjects: sinon.stub()
}
$provide.value("tgCurrentUserService", mocks.currentUserService)
_mockAuth = ->
mocks.auth = {
refresh: sinon.stub()
}
$provide.value("$tgAuth", mocks.auth)
_mockLightboxFactory = ->
mocks.lightboxFactory = {
create: sinon.stub()
}
$provide.value("tgLightboxFactory", mocks.lightboxFactory)
_mockTranslate = ->
mocks.translate = {
instant: sinon.stub()
}
$provide.value("$translate", mocks.translate)
_mockConfirm = ->
mocks.confirm = {
success: sinon.stub(),
notify: sinon.stub()
}
$provide.value("$tgConfirm", mocks.confirm)
_mockLocation = ->
mocks.location = {
path: sinon.stub()
}
$provide.value("$location", mocks.location)
_mockNavUrls = ->
mocks.navUrls = {
resolve: sinon.stub()
}
$provide.value("$tgNavUrls", mocks.navUrls)
_mocks = ->
module (_$provide_) ->
$provide = _$provide_
_mockCurrentUserService()
_mockAuth()
_mockLightboxFactory()
_mockTranslate()
_mockConfirm()
_mockLocation()
_mockNavUrls()
return null
_inject = ->
inject (_tgImportProjectService_) ->
importProjectService = _tgImportProjectService_
_setup = ->
_mocks()
_inject()
beforeEach ->
module "taigaProjects"
_setup()
it "import success async mode", (done) ->
result = {
status: 202,
data: {
slug: 'project-slug'
}
}
mocks.translate.instant.returns('xxx')
mocks.currentUserService.loadProjects.promise().resolve()
importProjectService.importSuccess(result).then () ->
expect(mocks.confirm.success).have.been.calledOnce
done()
it "import success sync mode", (done) ->
result = {
status: 201,
data: {
slug: 'project-slug'
}
}
mocks.translate.instant.returns('msg')
mocks.navUrls.resolve.withArgs('project-admin-project-profile-details', {project: 'project-slug'}).returns('url')
mocks.currentUserService.loadProjects.promise().resolve()
importProjectService.importSuccess(result).then () ->
expect(mocks.location.path).have.been.calledWith('url')
expect(mocks.confirm.notify).have.been.calledWith('success', 'msg')
done()
it "private get restriction errors, private & member error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 1
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'private-space-members',
values: {
max_memberships: 1,
members: 10
}
})
it "private get restriction errors, private limit error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 20
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'private-space',
values: {
max_memberships: null,
members: 10
}
})
it "private get restriction errors, members error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 1
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: true
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'private-members',
values: {
max_memberships: 1,
members: 10
}
})
it "public get restriction errors, public & member error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 1
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'public-space-members',
values: {
max_memberships: 1,
members: 10
}
})
it "public get restriction errors, public limit error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 20
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'public-space',
values: {
max_memberships: null,
members: 10
}
})
it "public get restriction errors, members error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 1
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: true
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'public-members',
values: {
max_memberships: 1,
members: 10
}
})
| 52362 | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: projects/create/import/import-project.service.spec.coffee
###
describe "tgImportProjectService", ->
$provide = null
importProjectService = null
mocks = {}
_mockCurrentUserService = ->
mocks.currentUserService = {
loadProjects: sinon.stub(),
getUser: sinon.stub(),
canCreatePrivateProjects: sinon.stub(),
canCreatePublicProjects: sinon.stub()
}
$provide.value("tgCurrentUserService", mocks.currentUserService)
_mockAuth = ->
mocks.auth = {
refresh: sinon.stub()
}
$provide.value("$tgAuth", mocks.auth)
_mockLightboxFactory = ->
mocks.lightboxFactory = {
create: sinon.stub()
}
$provide.value("tgLightboxFactory", mocks.lightboxFactory)
_mockTranslate = ->
mocks.translate = {
instant: sinon.stub()
}
$provide.value("$translate", mocks.translate)
_mockConfirm = ->
mocks.confirm = {
success: sinon.stub(),
notify: sinon.stub()
}
$provide.value("$tgConfirm", mocks.confirm)
_mockLocation = ->
mocks.location = {
path: sinon.stub()
}
$provide.value("$location", mocks.location)
_mockNavUrls = ->
mocks.navUrls = {
resolve: sinon.stub()
}
$provide.value("$tgNavUrls", mocks.navUrls)
_mocks = ->
module (_$provide_) ->
$provide = _$provide_
_mockCurrentUserService()
_mockAuth()
_mockLightboxFactory()
_mockTranslate()
_mockConfirm()
_mockLocation()
_mockNavUrls()
return null
_inject = ->
inject (_tgImportProjectService_) ->
importProjectService = _tgImportProjectService_
_setup = ->
_mocks()
_inject()
beforeEach ->
module "taigaProjects"
_setup()
it "import success async mode", (done) ->
result = {
status: 202,
data: {
slug: 'project-slug'
}
}
mocks.translate.instant.returns('xxx')
mocks.currentUserService.loadProjects.promise().resolve()
importProjectService.importSuccess(result).then () ->
expect(mocks.confirm.success).have.been.calledOnce
done()
it "import success sync mode", (done) ->
result = {
status: 201,
data: {
slug: 'project-slug'
}
}
mocks.translate.instant.returns('msg')
mocks.navUrls.resolve.withArgs('project-admin-project-profile-details', {project: 'project-slug'}).returns('url')
mocks.currentUserService.loadProjects.promise().resolve()
importProjectService.importSuccess(result).then () ->
expect(mocks.location.path).have.been.calledWith('url')
expect(mocks.confirm.notify).have.been.calledWith('success', 'msg')
done()
it "private get restriction errors, private & member error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 1
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: '<KEY>',
values: {
max_memberships: 1,
members: 10
}
})
it "private get restriction errors, private limit error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 20
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: '<KEY>',
values: {
max_memberships: null,
members: 10
}
})
it "private get restriction errors, members error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 1
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: true
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: '<KEY>',
values: {
max_memberships: 1,
members: 10
}
})
it "public get restriction errors, public & member error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 1
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: '<KEY>',
values: {
max_memberships: 1,
members: 10
}
})
it "public get restriction errors, public limit error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 20
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: '<KEY>',
values: {
max_memberships: null,
members: 10
}
})
it "public get restriction errors, members error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 1
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: true
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: '<KEY>',
values: {
max_memberships: 1,
members: 10
}
})
| true | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: projects/create/import/import-project.service.spec.coffee
###
describe "tgImportProjectService", ->
$provide = null
importProjectService = null
mocks = {}
_mockCurrentUserService = ->
mocks.currentUserService = {
loadProjects: sinon.stub(),
getUser: sinon.stub(),
canCreatePrivateProjects: sinon.stub(),
canCreatePublicProjects: sinon.stub()
}
$provide.value("tgCurrentUserService", mocks.currentUserService)
_mockAuth = ->
mocks.auth = {
refresh: sinon.stub()
}
$provide.value("$tgAuth", mocks.auth)
_mockLightboxFactory = ->
mocks.lightboxFactory = {
create: sinon.stub()
}
$provide.value("tgLightboxFactory", mocks.lightboxFactory)
_mockTranslate = ->
mocks.translate = {
instant: sinon.stub()
}
$provide.value("$translate", mocks.translate)
_mockConfirm = ->
mocks.confirm = {
success: sinon.stub(),
notify: sinon.stub()
}
$provide.value("$tgConfirm", mocks.confirm)
_mockLocation = ->
mocks.location = {
path: sinon.stub()
}
$provide.value("$location", mocks.location)
_mockNavUrls = ->
mocks.navUrls = {
resolve: sinon.stub()
}
$provide.value("$tgNavUrls", mocks.navUrls)
_mocks = ->
module (_$provide_) ->
$provide = _$provide_
_mockCurrentUserService()
_mockAuth()
_mockLightboxFactory()
_mockTranslate()
_mockConfirm()
_mockLocation()
_mockNavUrls()
return null
_inject = ->
inject (_tgImportProjectService_) ->
importProjectService = _tgImportProjectService_
_setup = ->
_mocks()
_inject()
beforeEach ->
module "taigaProjects"
_setup()
it "import success async mode", (done) ->
result = {
status: 202,
data: {
slug: 'project-slug'
}
}
mocks.translate.instant.returns('xxx')
mocks.currentUserService.loadProjects.promise().resolve()
importProjectService.importSuccess(result).then () ->
expect(mocks.confirm.success).have.been.calledOnce
done()
it "import success sync mode", (done) ->
result = {
status: 201,
data: {
slug: 'project-slug'
}
}
mocks.translate.instant.returns('msg')
mocks.navUrls.resolve.withArgs('project-admin-project-profile-details', {project: 'project-slug'}).returns('url')
mocks.currentUserService.loadProjects.promise().resolve()
importProjectService.importSuccess(result).then () ->
expect(mocks.location.path).have.been.calledWith('url')
expect(mocks.confirm.notify).have.been.calledWith('success', 'msg')
done()
it "private get restriction errors, private & member error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 1
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'PI:KEY:<KEY>END_PI',
values: {
max_memberships: 1,
members: 10
}
})
it "private get restriction errors, private limit error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 20
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'PI:KEY:<KEY>END_PI',
values: {
max_memberships: null,
members: 10
}
})
it "private get restriction errors, members error", () ->
result = {
headers: {
isPrivate: true,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_private_projects: 1
}))
mocks.currentUserService.canCreatePrivateProjects.returns({
valid: true
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'PI:KEY:<KEY>END_PI',
values: {
max_memberships: 1,
members: 10
}
})
it "public get restriction errors, public & member error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 1
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'PI:KEY:<KEY>END_PI',
values: {
max_memberships: 1,
members: 10
}
})
it "public get restriction errors, public limit error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 20
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: false
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'PI:KEY:<KEY>END_PI',
values: {
max_memberships: null,
members: 10
}
})
it "public get restriction errors, members error", () ->
result = {
headers: {
isPrivate: false,
memberships: 10
}
}
mocks.currentUserService.getUser.returns(Immutable.fromJS({
max_memberships_public_projects: 1
}))
mocks.currentUserService.canCreatePublicProjects.returns({
valid: true
})
error = importProjectService.getRestrictionError(result)
expect(error).to.be.eql({
key: 'PI:KEY:<KEY>END_PI',
values: {
max_memberships: 1,
members: 10
}
})
|
[
{
"context": "export default\n name: 'Neptune'\n type: 'planet'\n radius: 24624\n mass: 102.41e",
"end": 31,
"score": 0.9988413453102112,
"start": 24,
"tag": "NAME",
"value": "Neptune"
},
{
"context": ".00508664}\n satellites:\n triton:\n name: 'Triton'\n type: 'moo... | src/data/bodies/neptune.coffee | skepticalimagination/solaris-model | 5 | export default
name: 'Neptune'
type: 'planet'
radius: 24624
mass: 102.41e24
tilt: 29.56
elements:
format: 'jpl-1800-2050'
base: {a: 30.06992276, e: 0.00859048, i: 1.77004347, L: -55.12002969, lp: 44.96476227, node: 131.78422574}
cy: {a: 0.00026291, e: 0.00005105, i: 0.00035372, L: 218.45945325, lp: -0.32241464, node: -0.00508664}
satellites:
triton:
name: 'Triton'
type: 'moon'
radius: 1352.6
mass: 214.7e20
tilt: 0.010 # http://ssd.jpl.nasa.gov/?sat_elem
elements:
format: 'jpl-satellites-table'
base: {a: 354759, e: 0, i: 156.865, L: 596.007, lp: 243.75, node: 177.608}
day: {M: 61.2572638}
| 116374 | export default
name: '<NAME>'
type: 'planet'
radius: 24624
mass: 102.41e24
tilt: 29.56
elements:
format: 'jpl-1800-2050'
base: {a: 30.06992276, e: 0.00859048, i: 1.77004347, L: -55.12002969, lp: 44.96476227, node: 131.78422574}
cy: {a: 0.00026291, e: 0.00005105, i: 0.00035372, L: 218.45945325, lp: -0.32241464, node: -0.00508664}
satellites:
triton:
name: '<NAME>'
type: 'moon'
radius: 1352.6
mass: 214.7e20
tilt: 0.010 # http://ssd.jpl.nasa.gov/?sat_elem
elements:
format: 'jpl-satellites-table'
base: {a: 354759, e: 0, i: 156.865, L: 596.007, lp: 243.75, node: 177.608}
day: {M: 61.2572638}
| true | export default
name: 'PI:NAME:<NAME>END_PI'
type: 'planet'
radius: 24624
mass: 102.41e24
tilt: 29.56
elements:
format: 'jpl-1800-2050'
base: {a: 30.06992276, e: 0.00859048, i: 1.77004347, L: -55.12002969, lp: 44.96476227, node: 131.78422574}
cy: {a: 0.00026291, e: 0.00005105, i: 0.00035372, L: 218.45945325, lp: -0.32241464, node: -0.00508664}
satellites:
triton:
name: 'PI:NAME:<NAME>END_PI'
type: 'moon'
radius: 1352.6
mass: 214.7e20
tilt: 0.010 # http://ssd.jpl.nasa.gov/?sat_elem
elements:
format: 'jpl-satellites-table'
base: {a: 354759, e: 0, i: 156.865, L: 596.007, lp: 243.75, node: 177.608}
day: {M: 61.2572638}
|
[
{
"context": " + credentials.provider,\n\t\t\t\t\tform: {\n\t\t\t\t\t\ttoken: credentials.refresh_token,\n\t\t\t\t\t\tkey: cache.public_key,\n\t\t\t\t\t\tsecret:",
"end": 497,
"score": 0.719798743724823,
"start": 478,
"tag": "PASSWORD",
"value": "credentials.refresh"
}
] | coffee/lib/authentication.coffee | danhstevens/sdk-node | 0 | request = require 'request'
Q = require 'q'
module.exports = (csrf_generator, cache, requestio) ->
a = {
refresh_tokens: (credentials, session, force) ->
defer = Q.defer()
credentials.refreshed = false
now = new Date()
if credentials.refresh_token and ((credentials.expires and now.getTime() > credentials.expires) or force)
request.post {
url: cache.oauthd_url + cache.oauthd_base + '/refresh_token/' + credentials.provider,
form: {
token: credentials.refresh_token,
key: cache.public_key,
secret: cache.secret_key
}
}, (e, r, body) ->
if (e)
defer.reject e
return defer.promise
else
if typeof body is "string"
try
body = JSON.parse body
catch e
defer.reject e
if typeof body == "object" and body.access_token and body.expires_in
credentials.expires = new Date().getTime() + body.expires_in * 1000
for k of body
credentials[k] = body[k]
if (session?)
session.oauth = session.oauth || {}
session.oauth[credentials.provider] = credentials
credentials.refreshed = true
credentials.last_refresh = new Date().getTime()
defer.resolve credentials
else
defer.resolve credentials
else
defer.resolve credentials
return defer.promise
redirect: (provider, urlToRedirect, req, res, next) ->
if cache.logging
cache.log "[oauthio] Redirect to " + cache.oauthd_url + cache.oauthd_base + '/' + provider + " with k=" + cache.public_key + " and redirect_uri=" + urlToRedirect + " from " + (req.get && req.get('Host'))
csrf_token = csrf_generator(req.session)
res.writeHead 302, Location: cache.oauthd_url + cache.oauthd_base + '/' + provider + '?k=' + cache.public_key + '&opts=' + encodeURIComponent(JSON.stringify({state: csrf_token})) + '&redirect_type=server&redirect_uri=' + encodeURIComponent(urlToRedirect)
res.end()
next()
auth: (provider, session, opts) ->
defer = Q.defer()
if typeof session == "function"
return a.redirect provider, session
if opts?.code
return a.authenticate(opts.code, session)
if opts?.credentials
a.refresh_tokens(opts.credentials, session, opts?.force_refresh)
.then (credentials) ->
defer.resolve(a.construct_request_object(credentials))
return defer.promise
if (not opts?.credentials) and (not opts?.code)
if session.oauth[provider]
a.refresh_tokens(session.oauth[provider], session, opts?.force_refresh)
.then (credentials) ->
defer.resolve(a.construct_request_object(credentials))
else
defer.reject new Error('Cannot authenticate from session for provider \'' + provider + '\'')
return defer.promise
defer.reject new Error('Could not authenticate, parameters are missing or wrong')
return defer.promise
construct_request_object: (credentials) ->
request_object = {}
for k of credentials
request_object[k] = credentials[k]
request_object.get = (url, options) ->
return requestio.make_request(request_object, 'GET', url, options)
request_object.post = (url, options) ->
return requestio.make_request(request_object, 'POST',url, options)
request_object.patch = (url, options) ->
return requestio.make_request(request_object, 'PATCH', url, options)
request_object.put = (url, options) ->
return requestio.make_request(request_object, 'PUT', url, options)
request_object.del = (url, options) ->
return requestio.make_request(request_object, 'DELETE', url, options)
request_object.me = (options) ->
return requestio.make_me_request(request_object, options)
request_object.getCredentials = () ->
return credentials
request_object.wasRefreshed = () ->
return credentials.refreshed
return request_object
authenticate: (code, session) ->
defer = Q.defer()
request.post {
url: cache.oauthd_url + cache.oauthd_base + '/access_token',
form: {
code: code,
key: cache.public_key,
secret: cache.secret_key
}
}, (e, r, body) ->
doNext = ->
if e
defer.reject e
return
try
response = JSON.parse body
catch e
defer.reject new Error 'OAuth.io response could not be parsed'
return
if cache.logging
cache.hideInLog response.access_token if response.access_token
cache.hideInLog response.id_token if response.id_token
cache.hideInLog response.oauth_token if response.oauth_token
cache.hideInLog response.oauth_token_secret if response.oauth_token_secret
cache.hideInLog response.code if response.code
cache.hideInLog response.state if response.state
cache.log "[oauthio] From POST " + cache.oauthd_url + cache.oauthd_base + '/access_token (' + r.statusCode + '): ', body
if (response.status? and response.status == 'error' and response.message?)
defer.reject new Error 'OAuth.io / oauthd responded with : ' + response.message
if (not response.state?)
defer.reject new Error 'State is missing from response'
return
if (not session?.csrf_tokens? or response.state not in session.csrf_tokens)
if cache.logging
cache.log '[oauthio] State is not matching: "' + response.state + '" not in session (' + session?.oauthio_logging + '):', session?.csrf_tokens
defer.reject new Error 'State is not matching'
if response.expires_in
response.expires = new Date().getTime() + response.expires_in * 1000
response = a.construct_request_object response
if (session?)
session.oauth = session.oauth || {}
session.oauth[response.provider] = response
defer.resolve response
return
if (typeof session.reload) == 'function'
session.reload doNext
else
cache.log '[oauthio] [warn] req.session should have a "reload" method'
doNext()
return defer.promise
}
return a
| 133054 | request = require 'request'
Q = require 'q'
module.exports = (csrf_generator, cache, requestio) ->
a = {
refresh_tokens: (credentials, session, force) ->
defer = Q.defer()
credentials.refreshed = false
now = new Date()
if credentials.refresh_token and ((credentials.expires and now.getTime() > credentials.expires) or force)
request.post {
url: cache.oauthd_url + cache.oauthd_base + '/refresh_token/' + credentials.provider,
form: {
token: <PASSWORD>_token,
key: cache.public_key,
secret: cache.secret_key
}
}, (e, r, body) ->
if (e)
defer.reject e
return defer.promise
else
if typeof body is "string"
try
body = JSON.parse body
catch e
defer.reject e
if typeof body == "object" and body.access_token and body.expires_in
credentials.expires = new Date().getTime() + body.expires_in * 1000
for k of body
credentials[k] = body[k]
if (session?)
session.oauth = session.oauth || {}
session.oauth[credentials.provider] = credentials
credentials.refreshed = true
credentials.last_refresh = new Date().getTime()
defer.resolve credentials
else
defer.resolve credentials
else
defer.resolve credentials
return defer.promise
redirect: (provider, urlToRedirect, req, res, next) ->
if cache.logging
cache.log "[oauthio] Redirect to " + cache.oauthd_url + cache.oauthd_base + '/' + provider + " with k=" + cache.public_key + " and redirect_uri=" + urlToRedirect + " from " + (req.get && req.get('Host'))
csrf_token = csrf_generator(req.session)
res.writeHead 302, Location: cache.oauthd_url + cache.oauthd_base + '/' + provider + '?k=' + cache.public_key + '&opts=' + encodeURIComponent(JSON.stringify({state: csrf_token})) + '&redirect_type=server&redirect_uri=' + encodeURIComponent(urlToRedirect)
res.end()
next()
auth: (provider, session, opts) ->
defer = Q.defer()
if typeof session == "function"
return a.redirect provider, session
if opts?.code
return a.authenticate(opts.code, session)
if opts?.credentials
a.refresh_tokens(opts.credentials, session, opts?.force_refresh)
.then (credentials) ->
defer.resolve(a.construct_request_object(credentials))
return defer.promise
if (not opts?.credentials) and (not opts?.code)
if session.oauth[provider]
a.refresh_tokens(session.oauth[provider], session, opts?.force_refresh)
.then (credentials) ->
defer.resolve(a.construct_request_object(credentials))
else
defer.reject new Error('Cannot authenticate from session for provider \'' + provider + '\'')
return defer.promise
defer.reject new Error('Could not authenticate, parameters are missing or wrong')
return defer.promise
construct_request_object: (credentials) ->
request_object = {}
for k of credentials
request_object[k] = credentials[k]
request_object.get = (url, options) ->
return requestio.make_request(request_object, 'GET', url, options)
request_object.post = (url, options) ->
return requestio.make_request(request_object, 'POST',url, options)
request_object.patch = (url, options) ->
return requestio.make_request(request_object, 'PATCH', url, options)
request_object.put = (url, options) ->
return requestio.make_request(request_object, 'PUT', url, options)
request_object.del = (url, options) ->
return requestio.make_request(request_object, 'DELETE', url, options)
request_object.me = (options) ->
return requestio.make_me_request(request_object, options)
request_object.getCredentials = () ->
return credentials
request_object.wasRefreshed = () ->
return credentials.refreshed
return request_object
authenticate: (code, session) ->
defer = Q.defer()
request.post {
url: cache.oauthd_url + cache.oauthd_base + '/access_token',
form: {
code: code,
key: cache.public_key,
secret: cache.secret_key
}
}, (e, r, body) ->
doNext = ->
if e
defer.reject e
return
try
response = JSON.parse body
catch e
defer.reject new Error 'OAuth.io response could not be parsed'
return
if cache.logging
cache.hideInLog response.access_token if response.access_token
cache.hideInLog response.id_token if response.id_token
cache.hideInLog response.oauth_token if response.oauth_token
cache.hideInLog response.oauth_token_secret if response.oauth_token_secret
cache.hideInLog response.code if response.code
cache.hideInLog response.state if response.state
cache.log "[oauthio] From POST " + cache.oauthd_url + cache.oauthd_base + '/access_token (' + r.statusCode + '): ', body
if (response.status? and response.status == 'error' and response.message?)
defer.reject new Error 'OAuth.io / oauthd responded with : ' + response.message
if (not response.state?)
defer.reject new Error 'State is missing from response'
return
if (not session?.csrf_tokens? or response.state not in session.csrf_tokens)
if cache.logging
cache.log '[oauthio] State is not matching: "' + response.state + '" not in session (' + session?.oauthio_logging + '):', session?.csrf_tokens
defer.reject new Error 'State is not matching'
if response.expires_in
response.expires = new Date().getTime() + response.expires_in * 1000
response = a.construct_request_object response
if (session?)
session.oauth = session.oauth || {}
session.oauth[response.provider] = response
defer.resolve response
return
if (typeof session.reload) == 'function'
session.reload doNext
else
cache.log '[oauthio] [warn] req.session should have a "reload" method'
doNext()
return defer.promise
}
return a
| true | request = require 'request'
Q = require 'q'
module.exports = (csrf_generator, cache, requestio) ->
a = {
refresh_tokens: (credentials, session, force) ->
defer = Q.defer()
credentials.refreshed = false
now = new Date()
if credentials.refresh_token and ((credentials.expires and now.getTime() > credentials.expires) or force)
request.post {
url: cache.oauthd_url + cache.oauthd_base + '/refresh_token/' + credentials.provider,
form: {
token: PI:PASSWORD:<PASSWORD>END_PI_token,
key: cache.public_key,
secret: cache.secret_key
}
}, (e, r, body) ->
if (e)
defer.reject e
return defer.promise
else
if typeof body is "string"
try
body = JSON.parse body
catch e
defer.reject e
if typeof body == "object" and body.access_token and body.expires_in
credentials.expires = new Date().getTime() + body.expires_in * 1000
for k of body
credentials[k] = body[k]
if (session?)
session.oauth = session.oauth || {}
session.oauth[credentials.provider] = credentials
credentials.refreshed = true
credentials.last_refresh = new Date().getTime()
defer.resolve credentials
else
defer.resolve credentials
else
defer.resolve credentials
return defer.promise
redirect: (provider, urlToRedirect, req, res, next) ->
if cache.logging
cache.log "[oauthio] Redirect to " + cache.oauthd_url + cache.oauthd_base + '/' + provider + " with k=" + cache.public_key + " and redirect_uri=" + urlToRedirect + " from " + (req.get && req.get('Host'))
csrf_token = csrf_generator(req.session)
res.writeHead 302, Location: cache.oauthd_url + cache.oauthd_base + '/' + provider + '?k=' + cache.public_key + '&opts=' + encodeURIComponent(JSON.stringify({state: csrf_token})) + '&redirect_type=server&redirect_uri=' + encodeURIComponent(urlToRedirect)
res.end()
next()
auth: (provider, session, opts) ->
defer = Q.defer()
if typeof session == "function"
return a.redirect provider, session
if opts?.code
return a.authenticate(opts.code, session)
if opts?.credentials
a.refresh_tokens(opts.credentials, session, opts?.force_refresh)
.then (credentials) ->
defer.resolve(a.construct_request_object(credentials))
return defer.promise
if (not opts?.credentials) and (not opts?.code)
if session.oauth[provider]
a.refresh_tokens(session.oauth[provider], session, opts?.force_refresh)
.then (credentials) ->
defer.resolve(a.construct_request_object(credentials))
else
defer.reject new Error('Cannot authenticate from session for provider \'' + provider + '\'')
return defer.promise
defer.reject new Error('Could not authenticate, parameters are missing or wrong')
return defer.promise
construct_request_object: (credentials) ->
request_object = {}
for k of credentials
request_object[k] = credentials[k]
request_object.get = (url, options) ->
return requestio.make_request(request_object, 'GET', url, options)
request_object.post = (url, options) ->
return requestio.make_request(request_object, 'POST',url, options)
request_object.patch = (url, options) ->
return requestio.make_request(request_object, 'PATCH', url, options)
request_object.put = (url, options) ->
return requestio.make_request(request_object, 'PUT', url, options)
request_object.del = (url, options) ->
return requestio.make_request(request_object, 'DELETE', url, options)
request_object.me = (options) ->
return requestio.make_me_request(request_object, options)
request_object.getCredentials = () ->
return credentials
request_object.wasRefreshed = () ->
return credentials.refreshed
return request_object
authenticate: (code, session) ->
defer = Q.defer()
request.post {
url: cache.oauthd_url + cache.oauthd_base + '/access_token',
form: {
code: code,
key: cache.public_key,
secret: cache.secret_key
}
}, (e, r, body) ->
doNext = ->
if e
defer.reject e
return
try
response = JSON.parse body
catch e
defer.reject new Error 'OAuth.io response could not be parsed'
return
if cache.logging
cache.hideInLog response.access_token if response.access_token
cache.hideInLog response.id_token if response.id_token
cache.hideInLog response.oauth_token if response.oauth_token
cache.hideInLog response.oauth_token_secret if response.oauth_token_secret
cache.hideInLog response.code if response.code
cache.hideInLog response.state if response.state
cache.log "[oauthio] From POST " + cache.oauthd_url + cache.oauthd_base + '/access_token (' + r.statusCode + '): ', body
if (response.status? and response.status == 'error' and response.message?)
defer.reject new Error 'OAuth.io / oauthd responded with : ' + response.message
if (not response.state?)
defer.reject new Error 'State is missing from response'
return
if (not session?.csrf_tokens? or response.state not in session.csrf_tokens)
if cache.logging
cache.log '[oauthio] State is not matching: "' + response.state + '" not in session (' + session?.oauthio_logging + '):', session?.csrf_tokens
defer.reject new Error 'State is not matching'
if response.expires_in
response.expires = new Date().getTime() + response.expires_in * 1000
response = a.construct_request_object response
if (session?)
session.oauth = session.oauth || {}
session.oauth[response.provider] = response
defer.resolve response
return
if (typeof session.reload) == 'function'
session.reload doNext
else
cache.log '[oauthio] [warn] req.session should have a "reload" method'
doNext()
return defer.promise
}
return a
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9947676062583923,
"start": 12,
"tag": "NAME",
"value": "Joyent"
},
{
"context": "00000000000000000000\", \"hex\")\n key = new Buffer(\"0123456789abcdef0123456789abcdef\" + \"... | test/simple/test-crypto-padding-aes256.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
aes256 = (decipherFinal) ->
encrypt = (val, pad) ->
c = crypto.createCipheriv("aes256", key, iv)
c.setAutoPadding pad
c.update(val, "utf8", "binary") + c.final("binary")
decrypt = (val, pad) ->
c = crypto.createDecipheriv("aes256", key, iv)
c.setAutoPadding pad
c.update(val, "binary", "utf8") + c[decipherFinal]("utf8")
iv = new Buffer("00000000000000000000000000000000", "hex")
key = new Buffer("0123456789abcdef0123456789abcdef" + "0123456789abcdef0123456789abcdef", "hex")
# echo 0123456789abcdef0123456789abcdef \
# | openssl enc -e -aes256 -nopad -K <key> -iv <iv> \
# | openssl enc -d -aes256 -nopad -K <key> -iv <iv>
plaintext = "0123456789abcdef0123456789abcdef" # multiple of block size
encrypted = encrypt(plaintext, false)
decrypted = decrypt(encrypted, false)
assert.equal decrypted, plaintext
# echo 0123456789abcdef0123456789abcde \
# | openssl enc -e -aes256 -K <key> -iv <iv> \
# | openssl enc -d -aes256 -K <key> -iv <iv>
plaintext = "0123456789abcdef0123456789abcde" # not a multiple
encrypted = encrypt(plaintext, true)
decrypted = decrypt(encrypted, true)
assert.equal decrypted, plaintext
return
common = require("../common")
assert = require("assert")
try
crypto = require("crypto")
catch e
console.log "Not compiled with OpenSSL support."
process.exit()
crypto.DEFAULT_ENCODING = "buffer"
aes256 "final"
aes256 "finaltol"
| 29901 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
aes256 = (decipherFinal) ->
encrypt = (val, pad) ->
c = crypto.createCipheriv("aes256", key, iv)
c.setAutoPadding pad
c.update(val, "utf8", "binary") + c.final("binary")
decrypt = (val, pad) ->
c = crypto.createDecipheriv("aes256", key, iv)
c.setAutoPadding pad
c.update(val, "binary", "utf8") + c[decipherFinal]("utf8")
iv = new Buffer("00000000000000000000000000000000", "hex")
key = new Buffer("<KEY>" + "<KEY>", "hex")
# echo <KEY> \
# | openssl enc -e -aes256 -nopad -K <key> -iv <iv> \
# | openssl enc -d -aes256 -nopad -K <key> -iv <iv>
plaintext = "<KEY>" # multiple of block size
encrypted = encrypt(plaintext, false)
decrypted = decrypt(encrypted, false)
assert.equal decrypted, plaintext
# echo <KEY> \
# | openssl enc -e -aes256 -K <key> -iv <iv> \
# | openssl enc -d -aes256 -K <key> -iv <iv>
plaintext = "<KEY>" # not a multiple
encrypted = encrypt(plaintext, true)
decrypted = decrypt(encrypted, true)
assert.equal decrypted, plaintext
return
common = require("../common")
assert = require("assert")
try
crypto = require("crypto")
catch e
console.log "Not compiled with OpenSSL support."
process.exit()
crypto.DEFAULT_ENCODING = "buffer"
aes256 "final"
aes256 "finaltol"
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
aes256 = (decipherFinal) ->
encrypt = (val, pad) ->
c = crypto.createCipheriv("aes256", key, iv)
c.setAutoPadding pad
c.update(val, "utf8", "binary") + c.final("binary")
decrypt = (val, pad) ->
c = crypto.createDecipheriv("aes256", key, iv)
c.setAutoPadding pad
c.update(val, "binary", "utf8") + c[decipherFinal]("utf8")
iv = new Buffer("00000000000000000000000000000000", "hex")
key = new Buffer("PI:KEY:<KEY>END_PI" + "PI:KEY:<KEY>END_PI", "hex")
# echo PI:KEY:<KEY>END_PI \
# | openssl enc -e -aes256 -nopad -K <key> -iv <iv> \
# | openssl enc -d -aes256 -nopad -K <key> -iv <iv>
plaintext = "PI:KEY:<KEY>END_PI" # multiple of block size
encrypted = encrypt(plaintext, false)
decrypted = decrypt(encrypted, false)
assert.equal decrypted, plaintext
# echo PI:KEY:<KEY>END_PI \
# | openssl enc -e -aes256 -K <key> -iv <iv> \
# | openssl enc -d -aes256 -K <key> -iv <iv>
plaintext = "PI:KEY:<KEY>END_PI" # not a multiple
encrypted = encrypt(plaintext, true)
decrypted = decrypt(encrypted, true)
assert.equal decrypted, plaintext
return
common = require("../common")
assert = require("assert")
try
crypto = require("crypto")
catch e
console.log "Not compiled with OpenSSL support."
process.exit()
crypto.DEFAULT_ENCODING = "buffer"
aes256 "final"
aes256 "finaltol"
|
[
{
"context": " created on 25/12/2016 All rights reserved by @NeZha\n# Today is chrismas :) but Dan Shen Gou st",
"end": 439,
"score": 0.9996554255485535,
"start": 433,
"tag": "USERNAME",
"value": "@NeZha"
},
{
"context": "served by @NeZha\n# Today is chrismas :) bu... | src/examples/kinect-one-texture.coffee | CallmeNezha/Crystal | 0 | # ______ _____ _________ _____ _____
# / /_ / / \___ / / /__/ /
# / \/ / ___ / / / / ___
# / / \ / /\__\ / /___ / ___ / / \
# _/____ / \___ / _\___ _/_______ / _/___ / _/___ / _\___/\_
# created on 25/12/2016 All rights reserved by @NeZha
# Today is chrismas :) but Dan Shen Gou still keep finding source code of the world instead of meeting
# a girl to make little human(in biology aspect of source code), so the saying: Ge Si Qi Zhi
fs = require 'fs'
env = require '../../env'
THREE = require "#{env.PATH.THREE}build/three"
Kinect = require "./Crystal_Geo.node"
console.log(Kinect)
do ->
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/STLLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/3MFLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/OBJLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/controls/OrbitControls.js", 'utf-8'); eval file
vertexShader =
"""
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = 3.0;
gl_Position = projectionMatrix * mvPosition;
}
"""
fragmentShader =
"""
uniform sampler2D map;
varying vec2 vUv;
void main() {
vec4 color = texture2D(map, vUv);
gl_FragColor = vec4(color.x , color.y, color.z, 1.0);
}
"""
class View
constructor: ->
@scene = new THREE.Scene()
@camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 0.01, 10000)
@clock = new THREE.Clock()
@objects = {}
@_control = null
init: (renderer) ->
renderer.setClearColor(0xDBDBDB)
renderer.gammaInput = on
renderer.gammaOutput = on
# renderer.shadowMap.enabled = true
@_control = new THREE.OrbitControls(@camera, renderer.domElement)
@_control.enableZoom = yes
@camera.position.set(20, 20, 0)
@camera.lookAt(new THREE.Vector3())
# Lights
@scene.add(new THREE.HemisphereLight(0xffffbb, 0x080820, 1))
@_addShadowedLight(1, 1, 1)
Kinect.CreateKinect()
width = 512
height = 424
@_depthframe = new Float32Array(width * height)
# geometry = new THREE.PlaneGeometry(1, 1)
geometry = new THREE.BufferGeometry()
vertices = new Float32Array( width * height * 3)
```
for ( var i = 0, j = 0, l = vertices.length; i < l; i += 3, j ++ ) {
vertices[ i ] = j % width;
vertices[ i + 1 ] = Math.floor( j / width );
}
```
geometry.addAttribute( 'position', new THREE.BufferAttribute( vertices, 3 ) )
@_texture = new THREE.Texture(@_generateTexture())
@_texture.needsUpdate = true
uniforms =
"map": { value: @_texture }
"width": {value: width}
"height": {value: height}
material = new THREE.ShaderMaterial(uniforms: uniforms
, vertexShader: vertexShader
, fragmentShader: fragmentShader)
#material = new THREE.MeshBasicMaterial( {color: 0x102000, side: THREE.DoubleSide} )
mesh = new THREE.Points(geometry, material)
mesh.scale.set(0.01, 0.01, 0.01)
@scene.add(mesh)
return
onRender: ->
dt = this.clock.getDelta()
@_control.update()
success = Kinect.GetDepthBuffer(@_depthframe)
console.log("#{success} GetDepthBuffer returns")
@_putImageData()
@_texture.needsUpdate = true
return
onExit: ->
Kinect.DestroyKinect()
return
##
# Add some direct light from sky
#
_addShadowedLight: (x, y, z, d = 10000, color = 0xffffbb, intensity = 0.2) ->
directLight = new THREE.DirectionalLight(color, intensity)
directLight.position.set(x, y, z)
@scene.add(directLight)
return
_generateTexture: ->
canvas = document.createElement('canvas')
canvas.width = 512
canvas.height = 424
@_context = canvas.getContext('2d')
image = @_context.getImageData(0, 0, 512, 424)
for i in [0...512 * 424]
image.data[i] = Math.random() * 255
@_context.putImageData(image, 0, 0)
return canvas
_putImageData: ->
image = @_context.getImageData(0, 0, 512, 424)
for ipxl in [0...512 * 424 * 4] by 4
intensity = @_depthframe[ipxl / 4] % 255
image.data[ipxl ] = intensity
image.data[ipxl + 1] = intensity
image.data[ipxl + 2] = intensity
image.data[ipxl + 3] = 255
@_context.putImageData(image, 0, 0)
return
module.exports = View if module?
| 83932 | # ______ _____ _________ _____ _____
# / /_ / / \___ / / /__/ /
# / \/ / ___ / / / / ___
# / / \ / /\__\ / /___ / ___ / / \
# _/____ / \___ / _\___ _/_______ / _/___ / _/___ / _\___/\_
# created on 25/12/2016 All rights reserved by @NeZha
# Today is chrismas :) but <NAME> still keep finding source code of the world instead of meeting
# a girl to make little human(in biology aspect of source code), so the saying: Ge Si <NAME>hi
fs = require 'fs'
env = require '../../env'
THREE = require "#{env.PATH.THREE}build/three"
Kinect = require "./Crystal_Geo.node"
console.log(Kinect)
do ->
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/STLLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/3MFLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/OBJLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/controls/OrbitControls.js", 'utf-8'); eval file
vertexShader =
"""
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = 3.0;
gl_Position = projectionMatrix * mvPosition;
}
"""
fragmentShader =
"""
uniform sampler2D map;
varying vec2 vUv;
void main() {
vec4 color = texture2D(map, vUv);
gl_FragColor = vec4(color.x , color.y, color.z, 1.0);
}
"""
class View
constructor: ->
@scene = new THREE.Scene()
@camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 0.01, 10000)
@clock = new THREE.Clock()
@objects = {}
@_control = null
init: (renderer) ->
renderer.setClearColor(0xDBDBDB)
renderer.gammaInput = on
renderer.gammaOutput = on
# renderer.shadowMap.enabled = true
@_control = new THREE.OrbitControls(@camera, renderer.domElement)
@_control.enableZoom = yes
@camera.position.set(20, 20, 0)
@camera.lookAt(new THREE.Vector3())
# Lights
@scene.add(new THREE.HemisphereLight(0xffffbb, 0x080820, 1))
@_addShadowedLight(1, 1, 1)
Kinect.CreateKinect()
width = 512
height = 424
@_depthframe = new Float32Array(width * height)
# geometry = new THREE.PlaneGeometry(1, 1)
geometry = new THREE.BufferGeometry()
vertices = new Float32Array( width * height * 3)
```
for ( var i = 0, j = 0, l = vertices.length; i < l; i += 3, j ++ ) {
vertices[ i ] = j % width;
vertices[ i + 1 ] = Math.floor( j / width );
}
```
geometry.addAttribute( 'position', new THREE.BufferAttribute( vertices, 3 ) )
@_texture = new THREE.Texture(@_generateTexture())
@_texture.needsUpdate = true
uniforms =
"map": { value: @_texture }
"width": {value: width}
"height": {value: height}
material = new THREE.ShaderMaterial(uniforms: uniforms
, vertexShader: vertexShader
, fragmentShader: fragmentShader)
#material = new THREE.MeshBasicMaterial( {color: 0x102000, side: THREE.DoubleSide} )
mesh = new THREE.Points(geometry, material)
mesh.scale.set(0.01, 0.01, 0.01)
@scene.add(mesh)
return
onRender: ->
dt = this.clock.getDelta()
@_control.update()
success = Kinect.GetDepthBuffer(@_depthframe)
console.log("#{success} GetDepthBuffer returns")
@_putImageData()
@_texture.needsUpdate = true
return
onExit: ->
Kinect.DestroyKinect()
return
##
# Add some direct light from sky
#
_addShadowedLight: (x, y, z, d = 10000, color = 0xffffbb, intensity = 0.2) ->
directLight = new THREE.DirectionalLight(color, intensity)
directLight.position.set(x, y, z)
@scene.add(directLight)
return
_generateTexture: ->
canvas = document.createElement('canvas')
canvas.width = 512
canvas.height = 424
@_context = canvas.getContext('2d')
image = @_context.getImageData(0, 0, 512, 424)
for i in [0...512 * 424]
image.data[i] = Math.random() * 255
@_context.putImageData(image, 0, 0)
return canvas
_putImageData: ->
image = @_context.getImageData(0, 0, 512, 424)
for ipxl in [0...512 * 424 * 4] by 4
intensity = @_depthframe[ipxl / 4] % 255
image.data[ipxl ] = intensity
image.data[ipxl + 1] = intensity
image.data[ipxl + 2] = intensity
image.data[ipxl + 3] = 255
@_context.putImageData(image, 0, 0)
return
module.exports = View if module?
| true | # ______ _____ _________ _____ _____
# / /_ / / \___ / / /__/ /
# / \/ / ___ / / / / ___
# / / \ / /\__\ / /___ / ___ / / \
# _/____ / \___ / _\___ _/_______ / _/___ / _/___ / _\___/\_
# created on 25/12/2016 All rights reserved by @NeZha
# Today is chrismas :) but PI:NAME:<NAME>END_PI still keep finding source code of the world instead of meeting
# a girl to make little human(in biology aspect of source code), so the saying: Ge Si PI:NAME:<NAME>END_PIhi
fs = require 'fs'
env = require '../../env'
THREE = require "#{env.PATH.THREE}build/three"
Kinect = require "./Crystal_Geo.node"
console.log(Kinect)
do ->
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/STLLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/3MFLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/loaders/OBJLoader.js", 'utf-8'); eval file
file = fs.readFileSync("#{env.PATH.THREE}examples/js/controls/OrbitControls.js", 'utf-8'); eval file
vertexShader =
"""
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_PointSize = 3.0;
gl_Position = projectionMatrix * mvPosition;
}
"""
fragmentShader =
"""
uniform sampler2D map;
varying vec2 vUv;
void main() {
vec4 color = texture2D(map, vUv);
gl_FragColor = vec4(color.x , color.y, color.z, 1.0);
}
"""
class View
constructor: ->
@scene = new THREE.Scene()
@camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight, 0.01, 10000)
@clock = new THREE.Clock()
@objects = {}
@_control = null
init: (renderer) ->
renderer.setClearColor(0xDBDBDB)
renderer.gammaInput = on
renderer.gammaOutput = on
# renderer.shadowMap.enabled = true
@_control = new THREE.OrbitControls(@camera, renderer.domElement)
@_control.enableZoom = yes
@camera.position.set(20, 20, 0)
@camera.lookAt(new THREE.Vector3())
# Lights
@scene.add(new THREE.HemisphereLight(0xffffbb, 0x080820, 1))
@_addShadowedLight(1, 1, 1)
Kinect.CreateKinect()
width = 512
height = 424
@_depthframe = new Float32Array(width * height)
# geometry = new THREE.PlaneGeometry(1, 1)
geometry = new THREE.BufferGeometry()
vertices = new Float32Array( width * height * 3)
```
for ( var i = 0, j = 0, l = vertices.length; i < l; i += 3, j ++ ) {
vertices[ i ] = j % width;
vertices[ i + 1 ] = Math.floor( j / width );
}
```
geometry.addAttribute( 'position', new THREE.BufferAttribute( vertices, 3 ) )
@_texture = new THREE.Texture(@_generateTexture())
@_texture.needsUpdate = true
uniforms =
"map": { value: @_texture }
"width": {value: width}
"height": {value: height}
material = new THREE.ShaderMaterial(uniforms: uniforms
, vertexShader: vertexShader
, fragmentShader: fragmentShader)
#material = new THREE.MeshBasicMaterial( {color: 0x102000, side: THREE.DoubleSide} )
mesh = new THREE.Points(geometry, material)
mesh.scale.set(0.01, 0.01, 0.01)
@scene.add(mesh)
return
onRender: ->
dt = this.clock.getDelta()
@_control.update()
success = Kinect.GetDepthBuffer(@_depthframe)
console.log("#{success} GetDepthBuffer returns")
@_putImageData()
@_texture.needsUpdate = true
return
onExit: ->
Kinect.DestroyKinect()
return
##
# Add some direct light from sky
#
_addShadowedLight: (x, y, z, d = 10000, color = 0xffffbb, intensity = 0.2) ->
directLight = new THREE.DirectionalLight(color, intensity)
directLight.position.set(x, y, z)
@scene.add(directLight)
return
_generateTexture: ->
canvas = document.createElement('canvas')
canvas.width = 512
canvas.height = 424
@_context = canvas.getContext('2d')
image = @_context.getImageData(0, 0, 512, 424)
for i in [0...512 * 424]
image.data[i] = Math.random() * 255
@_context.putImageData(image, 0, 0)
return canvas
_putImageData: ->
image = @_context.getImageData(0, 0, 512, 424)
for ipxl in [0...512 * 424 * 4] by 4
intensity = @_depthframe[ipxl / 4] % 255
image.data[ipxl ] = intensity
image.data[ipxl + 1] = intensity
image.data[ipxl + 2] = intensity
image.data[ipxl + 3] = 255
@_context.putImageData(image, 0, 0)
return
module.exports = View if module?
|
[
{
"context": "b.com/tween.js/examples/05_spline.html\n#\n# @author mrdoob / http://mrdoob.com/\n# @author alteredq / http://",
"end": 133,
"score": 0.9991781115531921,
"start": 127,
"tag": "USERNAME",
"value": "mrdoob"
},
{
"context": "\n#\n# @author mrdoob / http://mrdoob.com/\n# @a... | source/javascripts/new_src/core/spline.coffee | andrew-aladev/three.js | 0 | # Spline from Tween.js, slightly optimized (and trashed)
# http://sole.github.com/tween.js/examples/05_spline.html
#
# @author mrdoob / http://mrdoob.com/
# @author alteredq / http://alteredqualia.com/
# @author aladjev.andrew@gmail.com
#= require new_src/core/vector_3
# Catmull-Rom spline over an array of {x, y, z} control points.
class Spline
  # Fix: this method was spelled `contructor`, so it never ran as the
  # real constructor — `new Spline(points)` left @points, @c and @v3
  # undefined, and getPoint() crashed because nothing else creates them.
  constructor: (points) ->
    @points = points
    @c = []
    # scratch vector reused by every getPoint call; copy it to keep a value
    @v3 =
      x: 0
      y: 0
      z: 0

  # Catmull-Rom basis function; t2 and t3 are t^2 and t^3, precomputed
  # by the caller. Class-body local, shared by the methods via closure.
  interpolate = (p0, p1, p2, p3, t, t2, t3) ->
    v0 = (p2 - p0) * 0.5
    v1 = (p3 - p1) * 0.5
    (2 * (p1 - p2) + v0 + v1) * t3 + (-3 * (p1 - p2) - 2 * v0 - v1) * t2 + v0 * t + p1

  # load control points from an array of [x, y, z] triples
  initFromArray: (a) ->
    @points = []
    i = 0
    while i < a.length
      @points[i] =
        x: a[i][0]
        y: a[i][1]
        z: a[i][2]
      i++

  # evaluate the spline at k in [0, 1]; returns the shared @v3 scratch object
  getPoint: (k) ->
    point = (@points.length - 1) * k
    intPoint = Math.floor(point)
    weight = point - intPoint
    # clamp the four support indices at both ends of the point list
    @c[0] = (if intPoint is 0 then intPoint else intPoint - 1)
    @c[1] = intPoint
    @c[2] = (if intPoint > @points.length - 2 then @points.length - 1 else intPoint + 1)
    @c[3] = (if intPoint > @points.length - 3 then @points.length - 1 else intPoint + 2)
    @pa = @points[@c[0]]
    @pb = @points[@c[1]]
    @pc = @points[@c[2]]
    @pd = @points[@c[3]]
    @w2 = weight * weight
    @w3 = weight * @w2
    @v3.x = interpolate(@pa.x, @pb.x, @pc.x, @pd.x, weight, @w2, @w3)
    @v3.y = interpolate(@pa.y, @pb.y, @pc.y, @pd.y, weight, @w2, @w3)
    @v3.z = interpolate(@pa.z, @pb.z, @pc.z, @pd.z, weight, @w2, @w3)
    @v3

  # control points as an array of [x, y, z] triples
  getControlPointsArray: ->
    i = undefined
    p = undefined
    l = @points.length
    coords = []
    i = 0
    while i < l
      p = @points[i]
      coords[i] = [ p.x, p.y, p.z ]
      i++
    coords

  # approximate length by summing linear segments;
  # returns { chunks, total }: cumulative length per control point and overall length
  getLength: (nSubDivisions) ->
    i = undefined
    index = undefined
    nSamples = undefined
    position = undefined
    point = 0
    intPoint = 0
    oldIntPoint = 0
    oldPosition = new THREE.Vector3()
    tmpVec = new THREE.Vector3()
    chunkLengths = []
    totalLength = 0
    # first point has 0 length
    chunkLengths[0] = 0
    nSubDivisions = 100 unless nSubDivisions
    nSamples = @points.length * nSubDivisions
    oldPosition.copy @points[0]
    i = 1
    while i < nSamples
      index = i / nSamples
      position = @getPoint(index)
      tmpVec.copy position
      totalLength += tmpVec.distanceTo(oldPosition)
      oldPosition.copy position
      point = (@points.length - 1) * index
      intPoint = Math.floor(point)
      unless intPoint is oldIntPoint
        chunkLengths[intPoint] = totalLength
        oldIntPoint = intPoint
      i++
    # last point ends with total length
    chunkLengths[chunkLengths.length] = totalLength
    chunks: chunkLengths
    total: totalLength

  # resample so points are roughly evenly spaced by arc length;
  # samplingCoef scales how many intermediate points are generated
  reparametrizeByArcLength: (samplingCoef) ->
    i = undefined
    j = undefined
    index = undefined
    indexCurrent = undefined
    indexNext = undefined
    linearDistance = undefined
    realDistance = undefined
    sampling = undefined
    position = undefined
    newpoints = []
    tmpVec = new THREE.Vector3()
    sl = @getLength()
    newpoints.push tmpVec.copy(@points[0]).clone()
    i = 1
    while i < @points.length
      # tmpVec.copy( this.points[ i - 1 ] );
      # linearDistance = tmpVec.distanceTo( this.points[ i ] );
      realDistance = sl.chunks[i] - sl.chunks[i - 1]
      sampling = Math.ceil(samplingCoef * realDistance / sl.total)
      indexCurrent = (i - 1) / (@points.length - 1)
      indexNext = i / (@points.length - 1)
      j = 1
      while j < sampling - 1
        index = indexCurrent + j * (1 / sampling) * (indexNext - indexCurrent)
        position = @getPoint(index)
        newpoints.push tmpVec.copy(position).clone()
        j++
      newpoints.push tmpVec.copy(@points[i]).clone()
      i++
    @points = newpoints
namespace "THREE", (exports) ->
exports.Spline = Spline | 192157 | # Spline from Tween.js, slightly optimized (and trashed)
# http://sole.github.com/tween.js/examples/05_spline.html
#
# @author mrdoob / http://mrdoob.com/
# @author alteredq / http://alteredqualia.com/
# @author <EMAIL>
#= require new_src/core/vector_3
# Catmull-Rom spline over an array of {x, y, z} control points.
class Spline
  # Fix: this method was spelled `contructor`, so it never ran as the
  # real constructor — `new Spline(points)` left @points, @c and @v3
  # undefined, and getPoint() crashed because nothing else creates them.
  constructor: (points) ->
    @points = points
    @c = []
    # scratch vector reused by every getPoint call; copy it to keep a value
    @v3 =
      x: 0
      y: 0
      z: 0

  # Catmull-Rom basis function; t2 and t3 are t^2 and t^3, precomputed
  # by the caller. Class-body local, shared by the methods via closure.
  interpolate = (p0, p1, p2, p3, t, t2, t3) ->
    v0 = (p2 - p0) * 0.5
    v1 = (p3 - p1) * 0.5
    (2 * (p1 - p2) + v0 + v1) * t3 + (-3 * (p1 - p2) - 2 * v0 - v1) * t2 + v0 * t + p1

  # load control points from an array of [x, y, z] triples
  initFromArray: (a) ->
    @points = []
    i = 0
    while i < a.length
      @points[i] =
        x: a[i][0]
        y: a[i][1]
        z: a[i][2]
      i++

  # evaluate the spline at k in [0, 1]; returns the shared @v3 scratch object
  getPoint: (k) ->
    point = (@points.length - 1) * k
    intPoint = Math.floor(point)
    weight = point - intPoint
    # clamp the four support indices at both ends of the point list
    @c[0] = (if intPoint is 0 then intPoint else intPoint - 1)
    @c[1] = intPoint
    @c[2] = (if intPoint > @points.length - 2 then @points.length - 1 else intPoint + 1)
    @c[3] = (if intPoint > @points.length - 3 then @points.length - 1 else intPoint + 2)
    @pa = @points[@c[0]]
    @pb = @points[@c[1]]
    @pc = @points[@c[2]]
    @pd = @points[@c[3]]
    @w2 = weight * weight
    @w3 = weight * @w2
    @v3.x = interpolate(@pa.x, @pb.x, @pc.x, @pd.x, weight, @w2, @w3)
    @v3.y = interpolate(@pa.y, @pb.y, @pc.y, @pd.y, weight, @w2, @w3)
    @v3.z = interpolate(@pa.z, @pb.z, @pc.z, @pd.z, weight, @w2, @w3)
    @v3

  # control points as an array of [x, y, z] triples
  getControlPointsArray: ->
    i = undefined
    p = undefined
    l = @points.length
    coords = []
    i = 0
    while i < l
      p = @points[i]
      coords[i] = [ p.x, p.y, p.z ]
      i++
    coords

  # approximate length by summing linear segments;
  # returns { chunks, total }: cumulative length per control point and overall length
  getLength: (nSubDivisions) ->
    i = undefined
    index = undefined
    nSamples = undefined
    position = undefined
    point = 0
    intPoint = 0
    oldIntPoint = 0
    oldPosition = new THREE.Vector3()
    tmpVec = new THREE.Vector3()
    chunkLengths = []
    totalLength = 0
    # first point has 0 length
    chunkLengths[0] = 0
    nSubDivisions = 100 unless nSubDivisions
    nSamples = @points.length * nSubDivisions
    oldPosition.copy @points[0]
    i = 1
    while i < nSamples
      index = i / nSamples
      position = @getPoint(index)
      tmpVec.copy position
      totalLength += tmpVec.distanceTo(oldPosition)
      oldPosition.copy position
      point = (@points.length - 1) * index
      intPoint = Math.floor(point)
      unless intPoint is oldIntPoint
        chunkLengths[intPoint] = totalLength
        oldIntPoint = intPoint
      i++
    # last point ends with total length
    chunkLengths[chunkLengths.length] = totalLength
    chunks: chunkLengths
    total: totalLength

  # resample so points are roughly evenly spaced by arc length;
  # samplingCoef scales how many intermediate points are generated
  reparametrizeByArcLength: (samplingCoef) ->
    i = undefined
    j = undefined
    index = undefined
    indexCurrent = undefined
    indexNext = undefined
    linearDistance = undefined
    realDistance = undefined
    sampling = undefined
    position = undefined
    newpoints = []
    tmpVec = new THREE.Vector3()
    sl = @getLength()
    newpoints.push tmpVec.copy(@points[0]).clone()
    i = 1
    while i < @points.length
      # tmpVec.copy( this.points[ i - 1 ] );
      # linearDistance = tmpVec.distanceTo( this.points[ i ] );
      realDistance = sl.chunks[i] - sl.chunks[i - 1]
      sampling = Math.ceil(samplingCoef * realDistance / sl.total)
      indexCurrent = (i - 1) / (@points.length - 1)
      indexNext = i / (@points.length - 1)
      j = 1
      while j < sampling - 1
        index = indexCurrent + j * (1 / sampling) * (indexNext - indexCurrent)
        position = @getPoint(index)
        newpoints.push tmpVec.copy(position).clone()
        j++
      newpoints.push tmpVec.copy(@points[i]).clone()
      i++
    @points = newpoints
namespace "THREE", (exports) ->
exports.Spline = Spline | true | # Spline from Tween.js, slightly optimized (and trashed)
# http://sole.github.com/tween.js/examples/05_spline.html
#
# @author mrdoob / http://mrdoob.com/
# @author alteredq / http://alteredqualia.com/
# @author PI:EMAIL:<EMAIL>END_PI
#= require new_src/core/vector_3
class Spline
contructor: (points) ->
@points = points
@c = []
@v3 =
x: 0
y: 0
z: 0
# Catmull-Rom
interpolate = (p0, p1, p2, p3, t, t2, t3) ->
v0 = (p2 - p0) * 0.5
v1 = (p3 - p1) * 0.5
(2 * (p1 - p2) + v0 + v1) * t3 + (-3 * (p1 - p2) - 2 * v0 - v1) * t2 + v0 * t + p1
initFromArray: (a) ->
@points = []
i = 0
while i < a.length
@points[i] =
x: a[i][0]
y: a[i][1]
z: a[i][2]
i++
getPoint: (k) ->
point = (@points.length - 1) * k
intPoint = Math.floor(point)
weight = point - intPoint
@c[0] = (if intPoint is 0 then intPoint else intPoint - 1)
@c[1] = intPoint
@c[2] = (if intPoint > @points.length - 2 then @points.length - 1 else intPoint + 1)
@c[3] = (if intPoint > @points.length - 3 then @points.length - 1 else intPoint + 2)
@pa = @points[@c[0]]
@pb = @points[@c[1]]
@pc = @points[@c[2]]
@pd = @points[@c[3]]
@w2 = weight * weight
@w3 = weight * @w2
@v3.x = interpolate(@pa.x, @pb.x, @pc.x, @pd.x, weight, @w2, @w3)
@v3.y = interpolate(@pa.y, @pb.y, @pc.y, @pd.y, weight, @w2, @w3)
@v3.z = interpolate(@pa.z, @pb.z, @pc.z, @pd.z, weight, @w2, @w3)
@v3
getControlPointsArray: ->
i = undefined
p = undefined
l = @points.length
coords = []
i = 0
while i < l
p = @points[i]
coords[i] = [ p.x, p.y, p.z ]
i++
coords
# approximate length by summing linear segments
getLength: (nSubDivisions) ->
i = undefined
index = undefined
nSamples = undefined
position = undefined
point = 0
intPoint = 0
oldIntPoint = 0
oldPosition = new THREE.Vector3()
tmpVec = new THREE.Vector3()
chunkLengths = []
totalLength = 0
# first point has 0 length
chunkLengths[0] = 0
nSubDivisions = 100 unless nSubDivisions
nSamples = @points.length * nSubDivisions
oldPosition.copy @points[0]
i = 1
while i < nSamples
index = i / nSamples
position = @getPoint(index)
tmpVec.copy position
totalLength += tmpVec.distanceTo(oldPosition)
oldPosition.copy position
point = (@points.length - 1) * index
intPoint = Math.floor(point)
unless intPoint is oldIntPoint
chunkLengths[intPoint] = totalLength
oldIntPoint = intPoint
i++
# last point ends with total length
chunkLengths[chunkLengths.length] = totalLength
chunks: chunkLengths
total: totalLength
reparametrizeByArcLength: (samplingCoef) ->
i = undefined
j = undefined
index = undefined
indexCurrent = undefined
indexNext = undefined
linearDistance = undefined
realDistance = undefined
sampling = undefined
position = undefined
newpoints = []
tmpVec = new THREE.Vector3()
sl = @getLength()
newpoints.push tmpVec.copy(@points[0]).clone()
i = 1
while i < @points.length
# tmpVec.copy( this.points[ i - 1 ] );
# linearDistance = tmpVec.distanceTo( this.points[ i ] );
realDistance = sl.chunks[i] - sl.chunks[i - 1]
sampling = Math.ceil(samplingCoef * realDistance / sl.total)
indexCurrent = (i - 1) / (@points.length - 1)
indexNext = i / (@points.length - 1)
j = 1
while j < sampling - 1
index = indexCurrent + j * (1 / sampling) * (indexNext - indexCurrent)
position = @getPoint(index)
newpoints.push tmpVec.copy(position).clone()
j++
newpoints.push tmpVec.copy(@points[i]).clone()
i++
@points = newpoints
namespace "THREE", (exports) ->
exports.Spline = Spline |
[
{
"context": "opulation + @sw.population\n\n# ---\n#\n# **(c) 2012 [Reg Braithwaite](http://braythwayt.com)** ([@raganwald](http://tw",
"end": 3942,
"score": 0.9998764991760254,
"start": 3927,
"tag": "NAME",
"value": "Reg Braithwaite"
},
{
"context": "2012 [Reg Braithwaite](http://br... | lib/api.coffee | raganwald/recursiveuniverse | 4 | # This module is part of [recursiveuniver.se](http://recursiveuniver.se).
#
# ## API Module
#
# The API Module provides convenience methods for interacting with squares from the outside
# ### Baseline Setup
_ = require('underscore')
YouAreDaChef = require('YouAreDaChef').YouAreDaChef
exports ?= window or this
exports.mixInto = ({Square, Cell}) ->
# ### Mix functionality in. `to_json` and `toString` are simple methods for cells, but
# memoized for squares.
_.extend Cell.prototype,
to_json: ->
[@value]
toString: ->
'' + @value
YouAreDaChef(Square)
.after 'initialize', ->
@to_json = _.memoize( ->
a =
nw: @nw.to_json()
ne: @ne.to_json()
se: @se.to_json()
sw: @sw.to_json()
b =
top: _.map( _.zip(a.nw, a.ne), ([left, right]) ->
if _.isArray(left)
left.concat(right)
else
[left, right]
)
bottom: _.map( _.zip(a.sw, a.se), ([left, right]) ->
if _.isArray(left)
left.concat(right)
else
[left, right]
)
b.top.concat(b.bottom)
)
@toString = _.memoize( ->
(_.map @to_json(), (row) ->
([' ', '*'][c] for c in row).join('')
).join('\n')
)
_.extend Square,
from_string: (str) ->
strs = str.split('\n')
json = _.map strs, (ln) ->
{'.': 0, ' ': 0, 'O': 1, '+': 1, '*': 1}[c] for c in ln
@from_json(json)
from_json: (json) ->
dims = [json.length].concat json.map( (row) -> row.length )
sz = Math.pow(2, Math.ceil(Math.log(Math.max(dims...)) / Math.log(2)))
_.each [0..json.length - 1], (i) ->
if json[i].length < sz
json[i] = json[i].concat _.map( [1..(sz - json[i].length)], -> 0 )
if json.length < sz
json = json.concat _.map( [1..(sz - json.length)], ->
_.map [1..sz], -> 0
)
if json.length is 1
if json[0][0] instanceof Cell
json[0][0]
else if json[0][0] is 0
Cell.Dead
else if json[0][0] is 1
Cell.Alive
else
throw 'a 1x1 square must contain a zero, one, or Cell'
else
half_length = json.length / 2
Square.canonicalize
nw: @from_json(
json.slice(0, half_length).map (row) ->
row.slice(0, half_length)
)
ne: @from_json(
json.slice(0, half_length).map (row) ->
row.slice(half_length)
)
se: @from_json(
json.slice(half_length).map (row) ->
row.slice(half_length)
)
sw: @from_json(
json.slice(half_length).map (row) ->
row.slice(0, half_length)
)
# ### Padding and cropping squares
#
# When displaying squares, it is convenient to crop them to the smallest square that contains
# live cells.
_.extend Cell.prototype,
isEmpty: ->
@value is 0
YouAreDaChef(Square)
.after 'initialize', ->
@isEmpty = _.memoize( ->
(@nw is @ne is @se is @sw) and @nw.isEmpty()
)
_.extend Square.prototype,
trim: ->
if @nw?.sw?.isEmpty() and @nw.nw.isEmpty() and @nw.ne.isEmpty() and \
@ne.nw.isEmpty() and @ne.ne.isEmpty() and @ne.se.isEmpty() and \
@se.ne.isEmpty() and @se.se.isEmpty() and @se.sw.isEmpty() and \
@sw.se.isEmpty() and @sw.sw.isEmpty() and @sw.nw.isEmpty()
Square.canonicalize
nw: @nw.se
ne: @ne.sw
se: @se.nw
sw: @sw.ne
.trim()
else
this
# ### Querying squares
YouAreDaChef(Cell)
.after 'initialize', ->
@population = @value
YouAreDaChef(Square)
.after 'initialize', ->
@population = @nw.population + @ne.population + @se.population + @sw.population
# ---
#
# **(c) 2012 [Reg Braithwaite](http://braythwayt.com)** ([@raganwald](http://twitter.com/raganwald))
#
# Cafe au Life is freely distributable under the terms of the [MIT license](http://en.wikipedia.org/wiki/MIT_License).
#
# The annotated source code was generated directly from the [original source][source] using [Docco][docco].
#
# [source]: https://github.com/raganwald/cafeaulife/blob/master/lib
# [docco]: http://jashkenas.github.com/docco/ | 58342 | # This module is part of [recursiveuniver.se](http://recursiveuniver.se).
#
# ## API Module
#
# The API Module provides convenience methods for interacting with squares from the outside
# ### Baseline Setup
_ = require('underscore')
YouAreDaChef = require('YouAreDaChef').YouAreDaChef
exports ?= window or this
exports.mixInto = ({Square, Cell}) ->
# ### Mix functionality in. `to_json` and `toString` are simple methods for cells, but
# memoized for squares.
_.extend Cell.prototype,
to_json: ->
[@value]
toString: ->
'' + @value
YouAreDaChef(Square)
.after 'initialize', ->
@to_json = _.memoize( ->
a =
nw: @nw.to_json()
ne: @ne.to_json()
se: @se.to_json()
sw: @sw.to_json()
b =
top: _.map( _.zip(a.nw, a.ne), ([left, right]) ->
if _.isArray(left)
left.concat(right)
else
[left, right]
)
bottom: _.map( _.zip(a.sw, a.se), ([left, right]) ->
if _.isArray(left)
left.concat(right)
else
[left, right]
)
b.top.concat(b.bottom)
)
@toString = _.memoize( ->
(_.map @to_json(), (row) ->
([' ', '*'][c] for c in row).join('')
).join('\n')
)
_.extend Square,
from_string: (str) ->
strs = str.split('\n')
json = _.map strs, (ln) ->
{'.': 0, ' ': 0, 'O': 1, '+': 1, '*': 1}[c] for c in ln
@from_json(json)
from_json: (json) ->
dims = [json.length].concat json.map( (row) -> row.length )
sz = Math.pow(2, Math.ceil(Math.log(Math.max(dims...)) / Math.log(2)))
_.each [0..json.length - 1], (i) ->
if json[i].length < sz
json[i] = json[i].concat _.map( [1..(sz - json[i].length)], -> 0 )
if json.length < sz
json = json.concat _.map( [1..(sz - json.length)], ->
_.map [1..sz], -> 0
)
if json.length is 1
if json[0][0] instanceof Cell
json[0][0]
else if json[0][0] is 0
Cell.Dead
else if json[0][0] is 1
Cell.Alive
else
throw 'a 1x1 square must contain a zero, one, or Cell'
else
half_length = json.length / 2
Square.canonicalize
nw: @from_json(
json.slice(0, half_length).map (row) ->
row.slice(0, half_length)
)
ne: @from_json(
json.slice(0, half_length).map (row) ->
row.slice(half_length)
)
se: @from_json(
json.slice(half_length).map (row) ->
row.slice(half_length)
)
sw: @from_json(
json.slice(half_length).map (row) ->
row.slice(0, half_length)
)
# ### Padding and cropping squares
#
# When displaying squares, it is convenient to crop them to the smallest square that contains
# live cells.
_.extend Cell.prototype,
isEmpty: ->
@value is 0
YouAreDaChef(Square)
.after 'initialize', ->
@isEmpty = _.memoize( ->
(@nw is @ne is @se is @sw) and @nw.isEmpty()
)
_.extend Square.prototype,
trim: ->
if @nw?.sw?.isEmpty() and @nw.nw.isEmpty() and @nw.ne.isEmpty() and \
@ne.nw.isEmpty() and @ne.ne.isEmpty() and @ne.se.isEmpty() and \
@se.ne.isEmpty() and @se.se.isEmpty() and @se.sw.isEmpty() and \
@sw.se.isEmpty() and @sw.sw.isEmpty() and @sw.nw.isEmpty()
Square.canonicalize
nw: @nw.se
ne: @ne.sw
se: @se.nw
sw: @sw.ne
.trim()
else
this
# ### Querying squares
YouAreDaChef(Cell)
.after 'initialize', ->
@population = @value
YouAreDaChef(Square)
.after 'initialize', ->
@population = @nw.population + @ne.population + @se.population + @sw.population
# ---
#
# **(c) 2012 [<NAME>](http://braythwayt.com)** ([@raganwald](http://twitter.com/raganwald))
#
# Cafe au Life is freely distributable under the terms of the [MIT license](http://en.wikipedia.org/wiki/MIT_License).
#
# The annotated source code was generated directly from the [original source][source] using [Docco][docco].
#
# [source]: https://github.com/raganwald/cafeaulife/blob/master/lib
# [docco]: http://jashkenas.github.com/docco/ | true | # This module is part of [recursiveuniver.se](http://recursiveuniver.se).
#
# ## API Module
#
# The API Module provides convenience methods for interacting with squares from the outside
# ### Baseline Setup
_ = require('underscore')
YouAreDaChef = require('YouAreDaChef').YouAreDaChef
exports ?= window or this
exports.mixInto = ({Square, Cell}) ->
# ### Mix functionality in. `to_json` and `toString` are simple methods for cells, but
# memoized for squares.
_.extend Cell.prototype,
to_json: ->
[@value]
toString: ->
'' + @value
YouAreDaChef(Square)
.after 'initialize', ->
@to_json = _.memoize( ->
a =
nw: @nw.to_json()
ne: @ne.to_json()
se: @se.to_json()
sw: @sw.to_json()
b =
top: _.map( _.zip(a.nw, a.ne), ([left, right]) ->
if _.isArray(left)
left.concat(right)
else
[left, right]
)
bottom: _.map( _.zip(a.sw, a.se), ([left, right]) ->
if _.isArray(left)
left.concat(right)
else
[left, right]
)
b.top.concat(b.bottom)
)
@toString = _.memoize( ->
(_.map @to_json(), (row) ->
([' ', '*'][c] for c in row).join('')
).join('\n')
)
_.extend Square,
from_string: (str) ->
strs = str.split('\n')
json = _.map strs, (ln) ->
{'.': 0, ' ': 0, 'O': 1, '+': 1, '*': 1}[c] for c in ln
@from_json(json)
from_json: (json) ->
dims = [json.length].concat json.map( (row) -> row.length )
sz = Math.pow(2, Math.ceil(Math.log(Math.max(dims...)) / Math.log(2)))
_.each [0..json.length - 1], (i) ->
if json[i].length < sz
json[i] = json[i].concat _.map( [1..(sz - json[i].length)], -> 0 )
if json.length < sz
json = json.concat _.map( [1..(sz - json.length)], ->
_.map [1..sz], -> 0
)
if json.length is 1
if json[0][0] instanceof Cell
json[0][0]
else if json[0][0] is 0
Cell.Dead
else if json[0][0] is 1
Cell.Alive
else
throw 'a 1x1 square must contain a zero, one, or Cell'
else
half_length = json.length / 2
Square.canonicalize
nw: @from_json(
json.slice(0, half_length).map (row) ->
row.slice(0, half_length)
)
ne: @from_json(
json.slice(0, half_length).map (row) ->
row.slice(half_length)
)
se: @from_json(
json.slice(half_length).map (row) ->
row.slice(half_length)
)
sw: @from_json(
json.slice(half_length).map (row) ->
row.slice(0, half_length)
)
# ### Padding and cropping squares
#
# When displaying squares, it is convenient to crop them to the smallest square that contains
# live cells.
_.extend Cell.prototype,
isEmpty: ->
@value is 0
YouAreDaChef(Square)
.after 'initialize', ->
@isEmpty = _.memoize( ->
(@nw is @ne is @se is @sw) and @nw.isEmpty()
)
_.extend Square.prototype,
trim: ->
if @nw?.sw?.isEmpty() and @nw.nw.isEmpty() and @nw.ne.isEmpty() and \
@ne.nw.isEmpty() and @ne.ne.isEmpty() and @ne.se.isEmpty() and \
@se.ne.isEmpty() and @se.se.isEmpty() and @se.sw.isEmpty() and \
@sw.se.isEmpty() and @sw.sw.isEmpty() and @sw.nw.isEmpty()
Square.canonicalize
nw: @nw.se
ne: @ne.sw
se: @se.nw
sw: @sw.ne
.trim()
else
this
# ### Querying squares
YouAreDaChef(Cell)
.after 'initialize', ->
@population = @value
YouAreDaChef(Square)
.after 'initialize', ->
@population = @nw.population + @ne.population + @se.population + @sw.population
# ---
#
# **(c) 2012 [PI:NAME:<NAME>END_PI](http://braythwayt.com)** ([@raganwald](http://twitter.com/raganwald))
#
# Cafe au Life is freely distributable under the terms of the [MIT license](http://en.wikipedia.org/wiki/MIT_License).
#
# The annotated source code was generated directly from the [original source][source] using [Docco][docco].
#
# [source]: https://github.com/raganwald/cafeaulife/blob/master/lib
# [docco]: http://jashkenas.github.com/docco/ |
[
{
"context": "-javascript/21963136#21963136\n ##\n ## i took Jeff Ward's e6() from here(http://jsfiddle.net/jcward/7hyaC",
"end": 5514,
"score": 0.9998025894165039,
"start": 5505,
"tag": "NAME",
"value": "Jeff Ward"
},
{
"context": "ok Jeff Ward's e6() from here(http://jsfiddle... | tools/strings.coffee | nero-networks/floyd | 0 |
##
## sprintf() for JavaScript -> http://www.diveintojavascript.com/projects/javascript-sprintf
sprintf = require 'sprintf'
##
##
module.exports = strings =
##
sprintf: sprintf.sprintf
##
vsprintf: sprintf.vsprintf
##
format: (format, parts...)->
## some hacks to make sure the format string is really a string ;-)
if !format
format = ''
if format.message && format.stack
format = format.stack
if typeof format is 'object'
format = floyd.tools.objects.inspect format
if parts.length == 1 && floyd.tools.objects.isArray parts[0]
parts = parts[0]
if parts.length
for i in [0..parts.length-1]
if parts[i] && parts[i].message && parts[i].stack
parts[i] = parts[i].stack
if typeof parts[i] is 'object'
parts[i] = floyd.tools.objects.inspect parts[i]
if typeof format isnt 'string'
format = format.toString()
match = format.match /%[^%\s]/g
size = if match then match.length else 0
length = parts.length - size
if length
for i in [0..length-1]
format += ' %s'
parts.unshift format
strings.sprintf.apply null, parts
##
part: (str, split, idx)->
list = str.split(split)
if idx < 0
idx = list.length + idx
list[idx]
##
tail: (str, num=1)->
return if !str
if (size = str.length) > num
return str.substr size - num
else
return str
##
substr: (str, from, to)->
to ?= str.length - 1
if to < 0
to = (str.length - 1) + to
str.substr from, to
##
capitalize: (str)->
str.charAt(0).toUpperCase() + str.substr 1
##
shorten: (str, len, append='...')->
if str && str.length > len
str = str.substr(0, len) + append
return str
##
replaceAll: (str, regex, rep)->
while str.indexOf(regex) isnt -1
str = str.replace regex, rep
return str
##
begins: (str, begin)->
str && begin && str.length > begin.length && str.substr(0, begin.length) is begin
##
ends: (str, end)->
str && end && str.length > end.length && str.substr(str.length-end.length) is end
##
## simple string hashing function
##
## nice algorithm designed to implement Java's String.hashCode() method
## http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/
##
hash: (str)->
if typeof str isnt 'string'
str = str.toString()
hash = i = 0
len = str.length # cache for efficiency
while i < len
hash = ((hash << 5) - hash) + str.charCodeAt(i++)
hash = hash & hash
return hash
##
##
##
isEmail: (str)->
!!str.match /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$/
##
##
##
sanitize: (str)->
require('sanitizer').sanitize str
##
##
##
fromStream: (stream, fn)->
floyd.tools.objects.stream2Buffer stream, (err, data)=>
fn null, data.toString(), stream
##
##
##
table: (conf)->
rows = []
conf ?= {}
conf.cols ?= []
conf.delimiter ?= ' | '
conf.tab ?= 8
conf: conf
add: (row)->
if typeof row is 'string'
row = row.split ' | '
for i in [0..row.length-1]
col = conf.cols[i] ?= {}
col.width ?= 0
part = row[i]
if part && part.length > col.width
col.width = Math.ceil(part.length / conf.tab) * conf.tab
rows.push row
toString: ()->
out = ''
i=0
for row in rows
j=0
for part in row
out += part
if j < conf.cols.length-1
tabs = Math.ceil((conf.cols[j].width - part.length-1 + conf.tab) / conf.tab) - 1
out += '\t' while tabs-- > 0
out += conf.delimiter
j++
out += '\n' if ++i isnt rows.length
return out
###
## UUID generator
##
## nice hack from here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript#answer-2117523
##
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid_old()
console.log i, 'UUIDs per second'
##
## my firebug(acebug) console says:
## 23368 UUIDs per second
## 23486 UUIDs per second
## 23502 UUIDs per second
## 23338 UUIDs per second
###
uuid_old: ()->
'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace /[xy]/g, (c)->
r = Math.random()*16|0
(if c is 'x' then r else r&0x3|0x8).toString(16)
###
## optimized UUID generator
##
## improvement on the hack from here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript#answer-2117523
##
## look here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/21963136#21963136
##
## i took Jeff Ward's e6() from here(http://jsfiddle.net/jcward/7hyaC/1/) on 24/02/2014
## because it still had a slight increase on my desktop
##
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid_old()
console.log j=i, 'UUIDs per second with uuid_old'
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid()
console.log i, 'UUIDs per second with uuid'
console.log 'the new uuid is', (i / j).toFixed(2), 'times faster'
##
##
## my firebug(acebug) console says:
##
## 26727 UUIDs per second with uuid_old
## 138970 UUIDs per second with uuid
## the new uuid is 5.20 times faster
##
## 26257 UUIDs per second with uuid_old
## 133505 UUIDs per second with uuid
## the new uuid is 5.08 times faster
##
## 28365 UUIDs per second with uuid_old
## 135616 UUIDs per second with uuid
## the new uuid is 4.78 times faster
##
## 28167 UUIDs per second with uuid_old
## 138498 UUIDs per second with uuid
## the new uuid is 4.92 times faster
##
## 26155 UUIDs per second with uuid_old
## 137794 UUIDs per second with uuid
## the new uuid is 5.27 times faster
###
uuid: ()->
k = ['x','x','-','x','-','4','-','y','-','x','x','x']
u = ''; i = 0; rb = Math.random()*0xffffffff|0
while i++ < 12
c = k[i-1]; r = rb&0xffff
v = if c is 'x' then r else (if c is 'y' then (r&0x3fff|0x8000) else (r&0xfff|0x4000))
u += if c is '-' then c else uuid_LUT[v>>8]+uuid_LUT[v&0xff]
rb = if i&1 then rb>>16 else Math.random()*0xffffffff|0
return u
##
## helper table for uuid
##
uuid_LUT = []
for i in [0..256]
uuid_LUT[i] = (if i<16 then '0' else '')+i.toString(16)
| 182032 |
##
## sprintf() for JavaScript -> http://www.diveintojavascript.com/projects/javascript-sprintf
sprintf = require 'sprintf'
##
##
module.exports = strings =
##
sprintf: sprintf.sprintf
##
vsprintf: sprintf.vsprintf
##
format: (format, parts...)->
## some hacks to make sure the format string is really a string ;-)
if !format
format = ''
if format.message && format.stack
format = format.stack
if typeof format is 'object'
format = floyd.tools.objects.inspect format
if parts.length == 1 && floyd.tools.objects.isArray parts[0]
parts = parts[0]
if parts.length
for i in [0..parts.length-1]
if parts[i] && parts[i].message && parts[i].stack
parts[i] = parts[i].stack
if typeof parts[i] is 'object'
parts[i] = floyd.tools.objects.inspect parts[i]
if typeof format isnt 'string'
format = format.toString()
match = format.match /%[^%\s]/g
size = if match then match.length else 0
length = parts.length - size
if length
for i in [0..length-1]
format += ' %s'
parts.unshift format
strings.sprintf.apply null, parts
##
part: (str, split, idx)->
list = str.split(split)
if idx < 0
idx = list.length + idx
list[idx]
##
tail: (str, num=1)->
return if !str
if (size = str.length) > num
return str.substr size - num
else
return str
##
substr: (str, from, to)->
to ?= str.length - 1
if to < 0
to = (str.length - 1) + to
str.substr from, to
##
capitalize: (str)->
str.charAt(0).toUpperCase() + str.substr 1
##
shorten: (str, len, append='...')->
if str && str.length > len
str = str.substr(0, len) + append
return str
##
replaceAll: (str, regex, rep)->
while str.indexOf(regex) isnt -1
str = str.replace regex, rep
return str
##
begins: (str, begin)->
str && begin && str.length > begin.length && str.substr(0, begin.length) is begin
##
ends: (str, end)->
str && end && str.length > end.length && str.substr(str.length-end.length) is end
##
## simple string hashing function
##
## nice algorithm designed to implement Java's String.hashCode() method
## http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/
##
hash: (str)->
if typeof str isnt 'string'
str = str.toString()
hash = i = 0
len = str.length # cache for efficiency
while i < len
hash = ((hash << 5) - hash) + str.charCodeAt(i++)
hash = hash & hash
return hash
##
##
##
isEmail: (str)->
!!str.match /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$/
##
##
##
sanitize: (str)->
require('sanitizer').sanitize str
##
##
##
fromStream: (stream, fn)->
floyd.tools.objects.stream2Buffer stream, (err, data)=>
fn null, data.toString(), stream
##
##
##
table: (conf)->
rows = []
conf ?= {}
conf.cols ?= []
conf.delimiter ?= ' | '
conf.tab ?= 8
conf: conf
add: (row)->
if typeof row is 'string'
row = row.split ' | '
for i in [0..row.length-1]
col = conf.cols[i] ?= {}
col.width ?= 0
part = row[i]
if part && part.length > col.width
col.width = Math.ceil(part.length / conf.tab) * conf.tab
rows.push row
toString: ()->
out = ''
i=0
for row in rows
j=0
for part in row
out += part
if j < conf.cols.length-1
tabs = Math.ceil((conf.cols[j].width - part.length-1 + conf.tab) / conf.tab) - 1
out += '\t' while tabs-- > 0
out += conf.delimiter
j++
out += '\n' if ++i isnt rows.length
return out
###
## UUID generator
##
## nice hack from here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript#answer-2117523
##
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid_old()
console.log i, 'UUIDs per second'
##
## my firebug(acebug) console says:
## 23368 UUIDs per second
## 23486 UUIDs per second
## 23502 UUIDs per second
## 23338 UUIDs per second
###
uuid_old: ()->
'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace /[xy]/g, (c)->
r = Math.random()*16|0
(if c is 'x' then r else r&0x3|0x8).toString(16)
###
## optimized UUID generator
##
## improvement on the hack from here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript#answer-2117523
##
## look here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/21963136#21963136
##
## i took <NAME>'s e6() from here(http://jsfiddle.net/jcward/7hyaC/1/) on 24/02/2014
## because it still had a slight increase on my desktop
##
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid_old()
console.log j=i, 'UUIDs per second with uuid_old'
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid()
console.log i, 'UUIDs per second with uuid'
console.log 'the new uuid is', (i / j).toFixed(2), 'times faster'
##
##
## my firebug(acebug) console says:
##
## 26727 UUIDs per second with uuid_old
## 138970 UUIDs per second with uuid
## the new uuid is 5.20 times faster
##
## 26257 UUIDs per second with uuid_old
## 133505 UUIDs per second with uuid
## the new uuid is 5.08 times faster
##
## 28365 UUIDs per second with uuid_old
## 135616 UUIDs per second with uuid
## the new uuid is 4.78 times faster
##
## 28167 UUIDs per second with uuid_old
## 138498 UUIDs per second with uuid
## the new uuid is 4.92 times faster
##
## 26155 UUIDs per second with uuid_old
## 137794 UUIDs per second with uuid
## the new uuid is 5.27 times faster
###
uuid: ()->
k = ['x','x','-','x','-','4','-','y','-','x','x','x']
u = ''; i = 0; rb = Math.random()*0xffffffff|0
while i++ < 12
c = k[i-1]; r = rb&0xffff
v = if c is 'x' then r else (if c is 'y' then (r&0x3fff|0x8000) else (r&0xfff|0x4000))
u += if c is '-' then c else uuid_LUT[v>>8]+uuid_LUT[v&0xff]
rb = if i&1 then rb>>16 else Math.random()*0xffffffff|0
return u
##
## helper table for uuid
##
uuid_LUT = []
for i in [0..256]
uuid_LUT[i] = (if i<16 then '0' else '')+i.toString(16)
| true |
##
## sprintf() for JavaScript -> http://www.diveintojavascript.com/projects/javascript-sprintf
sprintf = require 'sprintf'
##
##
module.exports = strings =
##
sprintf: sprintf.sprintf
##
vsprintf: sprintf.vsprintf
##
format: (format, parts...)->
## some hacks to make sure the format string is really a string ;-)
if !format
format = ''
if format.message && format.stack
format = format.stack
if typeof format is 'object'
format = floyd.tools.objects.inspect format
if parts.length == 1 && floyd.tools.objects.isArray parts[0]
parts = parts[0]
if parts.length
for i in [0..parts.length-1]
if parts[i] && parts[i].message && parts[i].stack
parts[i] = parts[i].stack
if typeof parts[i] is 'object'
parts[i] = floyd.tools.objects.inspect parts[i]
if typeof format isnt 'string'
format = format.toString()
match = format.match /%[^%\s]/g
size = if match then match.length else 0
length = parts.length - size
if length
for i in [0..length-1]
format += ' %s'
parts.unshift format
strings.sprintf.apply null, parts
##
part: (str, split, idx)->
list = str.split(split)
if idx < 0
idx = list.length + idx
list[idx]
##
tail: (str, num=1)->
return if !str
if (size = str.length) > num
return str.substr size - num
else
return str
##
substr: (str, from, to)->
to ?= str.length - 1
if to < 0
to = (str.length - 1) + to
str.substr from, to
##
capitalize: (str)->
str.charAt(0).toUpperCase() + str.substr 1
##
shorten: (str, len, append='...')->
if str && str.length > len
str = str.substr(0, len) + append
return str
##
replaceAll: (str, regex, rep)->
while str.indexOf(regex) isnt -1
str = str.replace regex, rep
return str
##
begins: (str, begin)->
str && begin && str.length > begin.length && str.substr(0, begin.length) is begin
##
ends: (str, end)->
str && end && str.length > end.length && str.substr(str.length-end.length) is end
##
## simple string hashing function
##
## nice algorithm designed to implement Java's String.hashCode() method
## http://werxltd.com/wp/2010/05/13/javascript-implementation-of-javas-string-hashcode-method/
##
hash: (str)->
if typeof str isnt 'string'
str = str.toString()
hash = i = 0
len = str.length # cache for efficiency
while i < len
hash = ((hash << 5) - hash) + str.charCodeAt(i++)
hash = hash & hash
return hash
##
##
##
isEmail: (str)->
!!str.match /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$/
##
##
##
sanitize: (str)->
require('sanitizer').sanitize str
##
##
##
fromStream: (stream, fn)->
floyd.tools.objects.stream2Buffer stream, (err, data)=>
fn null, data.toString(), stream
##
##
##
table: (conf)->
rows = []
conf ?= {}
conf.cols ?= []
conf.delimiter ?= ' | '
conf.tab ?= 8
conf: conf
add: (row)->
if typeof row is 'string'
row = row.split ' | '
for i in [0..row.length-1]
col = conf.cols[i] ?= {}
col.width ?= 0
part = row[i]
if part && part.length > col.width
col.width = Math.ceil(part.length / conf.tab) * conf.tab
rows.push row
toString: ()->
out = ''
i=0
for row in rows
j=0
for part in row
out += part
if j < conf.cols.length-1
tabs = Math.ceil((conf.cols[j].width - part.length-1 + conf.tab) / conf.tab) - 1
out += '\t' while tabs-- > 0
out += conf.delimiter
j++
out += '\n' if ++i isnt rows.length
return out
###
## UUID generator
##
## nice hack from here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript#answer-2117523
##
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid_old()
console.log i, 'UUIDs per second'
##
## my firebug(acebug) console says:
## 23368 UUIDs per second
## 23486 UUIDs per second
## 23502 UUIDs per second
## 23338 UUIDs per second
###
uuid_old: ()->
'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace /[xy]/g, (c)->
r = Math.random()*16|0
(if c is 'x' then r else r&0x3|0x8).toString(16)
###
## optimized UUID generator
##
## improvement on the hack from here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript#answer-2117523
##
## look here
## http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/21963136#21963136
##
## i took PI:NAME:<NAME>END_PI's e6() from here(http://jsfiddle.net/jcward/7hyaC/1/) on 24/02/2014
## because it still had a slight increase on my desktop
##
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid_old()
console.log j=i, 'UUIDs per second with uuid_old'
i=0; start = +new Date()
while (+new Date() - start) < 1000 && ++i
floyd.tools.strings.uuid()
console.log i, 'UUIDs per second with uuid'
console.log 'the new uuid is', (i / j).toFixed(2), 'times faster'
##
##
## my firebug(acebug) console says:
##
## 26727 UUIDs per second with uuid_old
## 138970 UUIDs per second with uuid
## the new uuid is 5.20 times faster
##
## 26257 UUIDs per second with uuid_old
## 133505 UUIDs per second with uuid
## the new uuid is 5.08 times faster
##
## 28365 UUIDs per second with uuid_old
## 135616 UUIDs per second with uuid
## the new uuid is 4.78 times faster
##
## 28167 UUIDs per second with uuid_old
## 138498 UUIDs per second with uuid
## the new uuid is 4.92 times faster
##
## 26155 UUIDs per second with uuid_old
## 137794 UUIDs per second with uuid
## the new uuid is 5.27 times faster
###
uuid: ()->
k = ['x','x','-','x','-','4','-','y','-','x','x','x']
u = ''; i = 0; rb = Math.random()*0xffffffff|0
while i++ < 12
c = k[i-1]; r = rb&0xffff
v = if c is 'x' then r else (if c is 'y' then (r&0x3fff|0x8000) else (r&0xfff|0x4000))
u += if c is '-' then c else uuid_LUT[v>>8]+uuid_LUT[v&0xff]
rb = if i&1 then rb>>16 else Math.random()*0xffffffff|0
return u
##
## helper table for uuid
##
uuid_LUT = []
for i in [0..256]
uuid_LUT[i] = (if i<16 then '0' else '')+i.toString(16)
|
[
{
"context": "eEach ->\n meshbluConfig =\n hostname: '127.0.0.1'\n port: @meshblu.address().port\n pr",
"end": 535,
"score": 0.9994380474090576,
"start": 526,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ol: 'http'\n uuid: 'a-uuid'\n ... | test/resolve-spec.coffee | octoblu/meshblu-json-schema-ref-parser | 0 | {describe,beforeEach,afterEach,it} = global
{expect} = require 'chai'
shmock = require 'shmock'
enableDestroy = require 'server-destroy'
MeshbluJsonSchemaResolver = require '..'
describe 'MeshbluJsonSchemaResolver', ->
beforeEach 'start Meshblu', ->
@meshblu = shmock()
enableDestroy @meshblu
afterEach 'destroy Meshblu', (done) ->
@meshblu.destroy done
describe 'Created with a meshbluConfig', ->
beforeEach ->
meshbluConfig =
hostname: '127.0.0.1'
port: @meshblu.address().port
protocol: 'http'
uuid: 'a-uuid'
token: 'super-secret'
@sut = new MeshbluJsonSchemaResolver {meshbluConfig}
it 'should exist', ->
expect(@sut).to.exist
describe 'When resolving a schema', ->
beforeEach 'waiting to resolve', (done) ->
@whateverSchema =
type: 'object'
properties:
name:
type: 'string'
description:
type: 'string'
@sut.resolve @whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
expect(@resolvedSchema).to.deep.equal @whateverSchema
describe 'When resolving a schema with a file reference ', ->
beforeEach 'waiting to resolve', (done) ->
@whateverSchema =
type: 'object'
properties:
name:
type: 'string'
description:
type:
$ref: '/etc/passwd'
@sut.resolve @whateverSchema, (@error, @resolvedSchema) => done()
it 'should not give us back /etc/passwd', ->
expect(@error).to.exist
describe 'When resolving a schema with a reference', ->
beforeEach 'start static file server', ->
@ref1Schema =
type: 'number'
description: '?'
@staticFileServer = shmock()
@staticFileServer
.get '/schema/ref1'
.reply 200, @ref1Schema
enableDestroy @staticFileServer
afterEach 'destroy Meshblu', (done) ->
@staticFileServer.destroy done
beforeEach 'do the thing', (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "http://127.0.0.1:#{@staticFileServer.address().port}/schema/ref1"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
expect(@resolvedSchema.properties.name).to.deep.equal @ref1Schema
describe 'When resolving a schema with a reference to a meshblu device property', ->
beforeEach 'meshblu device', ->
aDevice =
some:
property:
type: 'object'
properties:
color:
type: 'string'
@meshblu
.get '/v2/devices/a-device-uuid'
.reply 200, aDevice
beforeEach (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/some/property"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
propertySchema =
type: 'object'
properties:
color:
type: 'string'
expect(@resolvedSchema.properties.name).to.deep.equal propertySchema
describe 'When resolving a schema with a reference to a meshblu device property and an "as" property', ->
beforeEach 'meshblu device', ->
aDevice =
some:
property:
type: 'object'
properties:
color:
type: 'string'
@meshblu
.get '/v2/devices/a-device-uuid'
.set 'x-meshblu-as', '5'
.reply 200, aDevice
beforeEach (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/some/property"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
propertySchema =
type: 'object'
properties:
color:
type: 'string'
expect(@resolvedSchema.properties.name).to.deep.equal propertySchema
describe 'When resolving a schema with a reference to a meshblu device property', ->
beforeEach 'meshblu device', ->
@meshblu
.get '/v2/devices/a-device-uuid'
.set 'x-meshblu-as', '5'
.reply 200, {
shouldNotEndUpOnOriginalDevice: true
}
beforeEach (done) ->
@whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid"
description:
type: 'string'
@sut.resolve @whateverSchema, (error, @resolvedSchema) =>
done(error)
it 'should not mutate whateverSchema', ->
expect(@whateverSchema).to.deep.equal
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid"
description:
type: 'string'
describe 'When resolving a schema with a reference to a meshblu device property that does not exist', ->
beforeEach 'meshblu device', ->
@meshblu
.get '/v2/devices/a-device-uuid'
.reply 200, {
yup: true
}
beforeEach (done) ->
schema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/doesNotExist"
description:
type: 'string'
@sut.resolve schema, (error, @resolvedSchema) =>
done error
it 'should return something to exist', ->
expect(@resolvedSchema.properties.name).to.not.exist
describe 'When resolving a schema with a reference to a meshblu device property but meshbludevice is disabled', ->
beforeEach 'sut', ->
@sut = new MeshbluJsonSchemaResolver skipInvalidMeshbluDevice: true
beforeEach (done) ->
schema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/doesNotExist"
description:
type: 'string'
@sut.resolve schema, (error, @resolvedSchema) =>
done error
it 'should return something to exist', ->
expect(@resolvedSchema.properties.name).to.not.exist
| 42587 | {describe,beforeEach,afterEach,it} = global
{expect} = require 'chai'
shmock = require 'shmock'
enableDestroy = require 'server-destroy'
MeshbluJsonSchemaResolver = require '..'
describe 'MeshbluJsonSchemaResolver', ->
beforeEach 'start Meshblu', ->
@meshblu = shmock()
enableDestroy @meshblu
afterEach 'destroy Meshblu', (done) ->
@meshblu.destroy done
describe 'Created with a meshbluConfig', ->
beforeEach ->
meshbluConfig =
hostname: '127.0.0.1'
port: @meshblu.address().port
protocol: 'http'
uuid: 'a-uuid'
token: '<PASSWORD>'
@sut = new MeshbluJsonSchemaResolver {meshbluConfig}
it 'should exist', ->
expect(@sut).to.exist
describe 'When resolving a schema', ->
beforeEach 'waiting to resolve', (done) ->
@whateverSchema =
type: 'object'
properties:
name:
type: 'string'
description:
type: 'string'
@sut.resolve @whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
expect(@resolvedSchema).to.deep.equal @whateverSchema
describe 'When resolving a schema with a file reference ', ->
beforeEach 'waiting to resolve', (done) ->
@whateverSchema =
type: 'object'
properties:
name:
type: 'string'
description:
type:
$ref: '/etc/passwd'
@sut.resolve @whateverSchema, (@error, @resolvedSchema) => done()
it 'should not give us back /etc/passwd', ->
expect(@error).to.exist
describe 'When resolving a schema with a reference', ->
beforeEach 'start static file server', ->
@ref1Schema =
type: 'number'
description: '?'
@staticFileServer = shmock()
@staticFileServer
.get '/schema/ref1'
.reply 200, @ref1Schema
enableDestroy @staticFileServer
afterEach 'destroy Meshblu', (done) ->
@staticFileServer.destroy done
beforeEach 'do the thing', (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "http://127.0.0.1:#{@staticFileServer.address().port}/schema/ref1"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
expect(@resolvedSchema.properties.name).to.deep.equal @ref1Schema
describe 'When resolving a schema with a reference to a meshblu device property', ->
beforeEach 'meshblu device', ->
aDevice =
some:
property:
type: 'object'
properties:
color:
type: 'string'
@meshblu
.get '/v2/devices/a-device-uuid'
.reply 200, aDevice
beforeEach (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/some/property"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
propertySchema =
type: 'object'
properties:
color:
type: 'string'
expect(@resolvedSchema.properties.name).to.deep.equal propertySchema
describe 'When resolving a schema with a reference to a meshblu device property and an "as" property', ->
beforeEach 'meshblu device', ->
aDevice =
some:
property:
type: 'object'
properties:
color:
type: 'string'
@meshblu
.get '/v2/devices/a-device-uuid'
.set 'x-meshblu-as', '5'
.reply 200, aDevice
beforeEach (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/some/property"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
propertySchema =
type: 'object'
properties:
color:
type: 'string'
expect(@resolvedSchema.properties.name).to.deep.equal propertySchema
describe 'When resolving a schema with a reference to a meshblu device property', ->
beforeEach 'meshblu device', ->
@meshblu
.get '/v2/devices/a-device-uuid'
.set 'x-meshblu-as', '5'
.reply 200, {
shouldNotEndUpOnOriginalDevice: true
}
beforeEach (done) ->
@whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid"
description:
type: 'string'
@sut.resolve @whateverSchema, (error, @resolvedSchema) =>
done(error)
it 'should not mutate whateverSchema', ->
expect(@whateverSchema).to.deep.equal
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid"
description:
type: 'string'
describe 'When resolving a schema with a reference to a meshblu device property that does not exist', ->
beforeEach 'meshblu device', ->
@meshblu
.get '/v2/devices/a-device-uuid'
.reply 200, {
yup: true
}
beforeEach (done) ->
schema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/doesNotExist"
description:
type: 'string'
@sut.resolve schema, (error, @resolvedSchema) =>
done error
it 'should return something to exist', ->
expect(@resolvedSchema.properties.name).to.not.exist
describe 'When resolving a schema with a reference to a meshblu device property but meshbludevice is disabled', ->
beforeEach 'sut', ->
@sut = new MeshbluJsonSchemaResolver skipInvalidMeshbluDevice: true
beforeEach (done) ->
schema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/doesNotExist"
description:
type: 'string'
@sut.resolve schema, (error, @resolvedSchema) =>
done error
it 'should return something to exist', ->
expect(@resolvedSchema.properties.name).to.not.exist
| true | {describe,beforeEach,afterEach,it} = global
{expect} = require 'chai'
shmock = require 'shmock'
enableDestroy = require 'server-destroy'
MeshbluJsonSchemaResolver = require '..'
describe 'MeshbluJsonSchemaResolver', ->
beforeEach 'start Meshblu', ->
@meshblu = shmock()
enableDestroy @meshblu
afterEach 'destroy Meshblu', (done) ->
@meshblu.destroy done
describe 'Created with a meshbluConfig', ->
beforeEach ->
meshbluConfig =
hostname: '127.0.0.1'
port: @meshblu.address().port
protocol: 'http'
uuid: 'a-uuid'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
@sut = new MeshbluJsonSchemaResolver {meshbluConfig}
it 'should exist', ->
expect(@sut).to.exist
describe 'When resolving a schema', ->
beforeEach 'waiting to resolve', (done) ->
@whateverSchema =
type: 'object'
properties:
name:
type: 'string'
description:
type: 'string'
@sut.resolve @whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
expect(@resolvedSchema).to.deep.equal @whateverSchema
describe 'When resolving a schema with a file reference ', ->
beforeEach 'waiting to resolve', (done) ->
@whateverSchema =
type: 'object'
properties:
name:
type: 'string'
description:
type:
$ref: '/etc/passwd'
@sut.resolve @whateverSchema, (@error, @resolvedSchema) => done()
it 'should not give us back /etc/passwd', ->
expect(@error).to.exist
describe 'When resolving a schema with a reference', ->
beforeEach 'start static file server', ->
@ref1Schema =
type: 'number'
description: '?'
@staticFileServer = shmock()
@staticFileServer
.get '/schema/ref1'
.reply 200, @ref1Schema
enableDestroy @staticFileServer
afterEach 'destroy Meshblu', (done) ->
@staticFileServer.destroy done
beforeEach 'do the thing', (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "http://127.0.0.1:#{@staticFileServer.address().port}/schema/ref1"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
expect(@resolvedSchema.properties.name).to.deep.equal @ref1Schema
describe 'When resolving a schema with a reference to a meshblu device property', ->
beforeEach 'meshblu device', ->
aDevice =
some:
property:
type: 'object'
properties:
color:
type: 'string'
@meshblu
.get '/v2/devices/a-device-uuid'
.reply 200, aDevice
beforeEach (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/some/property"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
propertySchema =
type: 'object'
properties:
color:
type: 'string'
expect(@resolvedSchema.properties.name).to.deep.equal propertySchema
describe 'When resolving a schema with a reference to a meshblu device property and an "as" property', ->
beforeEach 'meshblu device', ->
aDevice =
some:
property:
type: 'object'
properties:
color:
type: 'string'
@meshblu
.get '/v2/devices/a-device-uuid'
.set 'x-meshblu-as', '5'
.reply 200, aDevice
beforeEach (done) ->
whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/some/property"
description:
type: 'string'
@sut.resolve whateverSchema, (error, @resolvedSchema) => done(error)
it 'should give us back the schema', ->
propertySchema =
type: 'object'
properties:
color:
type: 'string'
expect(@resolvedSchema.properties.name).to.deep.equal propertySchema
describe 'When resolving a schema with a reference to a meshblu device property', ->
beforeEach 'meshblu device', ->
@meshblu
.get '/v2/devices/a-device-uuid'
.set 'x-meshblu-as', '5'
.reply 200, {
shouldNotEndUpOnOriginalDevice: true
}
beforeEach (done) ->
@whateverSchema =
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid"
description:
type: 'string'
@sut.resolve @whateverSchema, (error, @resolvedSchema) =>
done(error)
it 'should not mutate whateverSchema', ->
expect(@whateverSchema).to.deep.equal
type: 'object'
properties:
name:
$ref: "meshbludevice://5@127.0.0.1:#{@meshblu.address().port}/a-device-uuid"
description:
type: 'string'
describe 'When resolving a schema with a reference to a meshblu device property that does not exist', ->
beforeEach 'meshblu device', ->
@meshblu
.get '/v2/devices/a-device-uuid'
.reply 200, {
yup: true
}
beforeEach (done) ->
schema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/doesNotExist"
description:
type: 'string'
@sut.resolve schema, (error, @resolvedSchema) =>
done error
it 'should return something to exist', ->
expect(@resolvedSchema.properties.name).to.not.exist
describe 'When resolving a schema with a reference to a meshblu device property but meshbludevice is disabled', ->
beforeEach 'sut', ->
@sut = new MeshbluJsonSchemaResolver skipInvalidMeshbluDevice: true
beforeEach (done) ->
schema =
type: 'object'
properties:
name:
$ref: "meshbludevice://127.0.0.1:#{@meshblu.address().port}/a-device-uuid/#/doesNotExist"
description:
type: 'string'
@sut.resolve schema, (error, @resolvedSchema) =>
done error
it 'should return something to exist', ->
expect(@resolvedSchema.properties.name).to.not.exist
|
[
{
"context": "tions\n\n # private helpers\n key = (name, type) -> \"_#{type}_#{name}_callbacks\"\n key_compiled = (name, type) -> \"_#{type}_#{nam",
"end": 2632,
"score": 0.9822813272476196,
"start": 2605,
"tag": "KEY",
"value": "\"_#{type}_#{name}_callbacks"
},
{
"context": "{nam... | support/callbacks.coffee | printercu/costa | 1 | cs = require 'coffee-script'
_ = require 'underscore'
flow = require 'flow-coffee'
Args = require './args'
module.exports =
class Callbacks extends require('coffee_classkit').Module
@extendsWithProto().concern()
class @ClassMethods
defineCallbacks: (name) ->
for type in ['before', 'after']
@[key name, type] = []
@_compileCallbacks name, type
@
# TODO:
# On setting previously set callback new one is just prepending with
# new options. May be we should merge that callbacks into first one.
# But we woun't be able to declare duplicates...
#
# Find out how to extract skipped options. Maybe concat arrays with _or_.
setCallback: (name, type, args...) ->
[options, [filter]] = Args.findOptions args
item = [[filter, normalize_options options]]
origin = @[key name, type]
@[key name, type] = if options.prepend
item.concat origin
else
origin.concat item
@_compileCallbacks name, type
skipCallback: (name, type, args...) ->
[skip_options, [filter]] = Args.findOptions args
@[key name, type] = if filter
_.compact @[key name, type].map ([item, options]) ->
return arguments[0] if item != filter
if new_options = merge_skipped_options options, skip_options
[item, new_options]
else
[]
@_compileCallbacks name, type
runCallbacks: ->
(@prepareCallbacks arguments...) null
prepareCallbacks: (context, name, callback, options) ->
blocks = @[key_compiled name, 'before']
.concat [callback], @[key_compiled name, 'after']
flow_opts =
context: context
blocks: blocks
if options
if typeof options is 'object'
flow_opts.error = options.error if options.error
flow_opts.final = options.final if options.final
else
flow_opts.error = options
flow_opts.final = -> options.apply(context, [null].concat(Array::slice.call(arguments)))
new flow flow_opts
_compileCallbacks: (name, type) ->
@[key_compiled name, type] = _.flatten(
for [filter, options] in @[key name, type]
if options.if.length or options.unless.length
[compile_options(options), filter]
else
[filter]
)
@
# instance methods
runCallbacks: ->
(@prepareCallbacks arguments...) null
prepareCallbacks: (name, callback, options) ->
@constructor.prepareCallbacks @, name, callback, options
# private helpers
key = (name, type) -> "_#{type}_#{name}_callbacks"
key_compiled = (name, type) -> "_#{type}_#{name}_callbacks_compiled"
normalize_options = (options) ->
return options if typeof options is 'function'
if: _.compact _.flatten [options.if]
unless: _.compact _.flatten [options.unless]
merge_skipped_options = (options, skipOptions) ->
skip_opts = normalize_options skipOptions
return false unless skip_opts.if.length or skip_opts.unless.length
if: options.if.concat skip_opts.unless
unless: options.unless.concat skip_opts.if
compile_options = (options) ->
return options if typeof options is 'function'
return options.when if options.when
clauses = options.if.slice()
clauses.push "!(#{options.unless.join ' and '})" if options.unless.length
# OPTIMIZE: replace args... with err ?
eval cs.compile """
(args..., cb) ->
cb.skip() unless #{clauses.join ' and '}
cb.next args...
""", bare: true
| 136421 | cs = require 'coffee-script'
_ = require 'underscore'
flow = require 'flow-coffee'
Args = require './args'
module.exports =
class Callbacks extends require('coffee_classkit').Module
@extendsWithProto().concern()
class @ClassMethods
defineCallbacks: (name) ->
for type in ['before', 'after']
@[key name, type] = []
@_compileCallbacks name, type
@
# TODO:
# On setting previously set callback new one is just prepending with
# new options. May be we should merge that callbacks into first one.
# But we woun't be able to declare duplicates...
#
# Find out how to extract skipped options. Maybe concat arrays with _or_.
setCallback: (name, type, args...) ->
[options, [filter]] = Args.findOptions args
item = [[filter, normalize_options options]]
origin = @[key name, type]
@[key name, type] = if options.prepend
item.concat origin
else
origin.concat item
@_compileCallbacks name, type
skipCallback: (name, type, args...) ->
[skip_options, [filter]] = Args.findOptions args
@[key name, type] = if filter
_.compact @[key name, type].map ([item, options]) ->
return arguments[0] if item != filter
if new_options = merge_skipped_options options, skip_options
[item, new_options]
else
[]
@_compileCallbacks name, type
runCallbacks: ->
(@prepareCallbacks arguments...) null
prepareCallbacks: (context, name, callback, options) ->
blocks = @[key_compiled name, 'before']
.concat [callback], @[key_compiled name, 'after']
flow_opts =
context: context
blocks: blocks
if options
if typeof options is 'object'
flow_opts.error = options.error if options.error
flow_opts.final = options.final if options.final
else
flow_opts.error = options
flow_opts.final = -> options.apply(context, [null].concat(Array::slice.call(arguments)))
new flow flow_opts
_compileCallbacks: (name, type) ->
@[key_compiled name, type] = _.flatten(
for [filter, options] in @[key name, type]
if options.if.length or options.unless.length
[compile_options(options), filter]
else
[filter]
)
@
# instance methods
runCallbacks: ->
(@prepareCallbacks arguments...) null
prepareCallbacks: (name, callback, options) ->
@constructor.prepareCallbacks @, name, callback, options
# private helpers
key = (name, type) -> <KEY>"
key_compiled = (name, type) -> <KEY>"
normalize_options = (options) ->
return options if typeof options is 'function'
if: _.compact _.flatten [options.if]
unless: _.compact _.flatten [options.unless]
merge_skipped_options = (options, skipOptions) ->
skip_opts = normalize_options skipOptions
return false unless skip_opts.if.length or skip_opts.unless.length
if: options.if.concat skip_opts.unless
unless: options.unless.concat skip_opts.if
compile_options = (options) ->
return options if typeof options is 'function'
return options.when if options.when
clauses = options.if.slice()
clauses.push "!(#{options.unless.join ' and '})" if options.unless.length
# OPTIMIZE: replace args... with err ?
eval cs.compile """
(args..., cb) ->
cb.skip() unless #{clauses.join ' and '}
cb.next args...
""", bare: true
| true | cs = require 'coffee-script'
_ = require 'underscore'
flow = require 'flow-coffee'
Args = require './args'
module.exports =
class Callbacks extends require('coffee_classkit').Module
@extendsWithProto().concern()
class @ClassMethods
defineCallbacks: (name) ->
for type in ['before', 'after']
@[key name, type] = []
@_compileCallbacks name, type
@
# TODO:
# On setting previously set callback new one is just prepending with
# new options. May be we should merge that callbacks into first one.
# But we woun't be able to declare duplicates...
#
# Find out how to extract skipped options. Maybe concat arrays with _or_.
setCallback: (name, type, args...) ->
[options, [filter]] = Args.findOptions args
item = [[filter, normalize_options options]]
origin = @[key name, type]
@[key name, type] = if options.prepend
item.concat origin
else
origin.concat item
@_compileCallbacks name, type
skipCallback: (name, type, args...) ->
[skip_options, [filter]] = Args.findOptions args
@[key name, type] = if filter
_.compact @[key name, type].map ([item, options]) ->
return arguments[0] if item != filter
if new_options = merge_skipped_options options, skip_options
[item, new_options]
else
[]
@_compileCallbacks name, type
runCallbacks: ->
(@prepareCallbacks arguments...) null
prepareCallbacks: (context, name, callback, options) ->
blocks = @[key_compiled name, 'before']
.concat [callback], @[key_compiled name, 'after']
flow_opts =
context: context
blocks: blocks
if options
if typeof options is 'object'
flow_opts.error = options.error if options.error
flow_opts.final = options.final if options.final
else
flow_opts.error = options
flow_opts.final = -> options.apply(context, [null].concat(Array::slice.call(arguments)))
new flow flow_opts
_compileCallbacks: (name, type) ->
@[key_compiled name, type] = _.flatten(
for [filter, options] in @[key name, type]
if options.if.length or options.unless.length
[compile_options(options), filter]
else
[filter]
)
@
# instance methods
runCallbacks: ->
(@prepareCallbacks arguments...) null
prepareCallbacks: (name, callback, options) ->
@constructor.prepareCallbacks @, name, callback, options
# private helpers
key = (name, type) -> PI:KEY:<KEY>END_PI"
key_compiled = (name, type) -> PI:KEY:<KEY>END_PI"
normalize_options = (options) ->
return options if typeof options is 'function'
if: _.compact _.flatten [options.if]
unless: _.compact _.flatten [options.unless]
merge_skipped_options = (options, skipOptions) ->
skip_opts = normalize_options skipOptions
return false unless skip_opts.if.length or skip_opts.unless.length
if: options.if.concat skip_opts.unless
unless: options.unless.concat skip_opts.if
compile_options = (options) ->
return options if typeof options is 'function'
return options.when if options.when
clauses = options.if.slice()
clauses.push "!(#{options.unless.join ' and '})" if options.unless.length
# OPTIMIZE: replace args... with err ?
eval cs.compile """
(args..., cb) ->
cb.skip() unless #{clauses.join ' and '}
cb.next args...
""", bare: true
|
[
{
"context": " 'application/json'\n Authorization: 'Basic cm9iaTppYm9y'\n success: (response) =>\n fetchKade(",
"end": 1383,
"score": 0.621341347694397,
"start": 1371,
"tag": "KEY",
"value": "cm9iaTppYm9y"
}
] | design/html/coffee/admin.coffee | element-doo/ekade | 0 | #--- ADMIN STUFF ---#
galleryModel = ->
# objects & stuff
@images = ko.observableArray []
@maxPages = ko.observable 0
@currPage = ko.observable 1
# flags
@changes = ko.observable 0
@isWorking = ko.observable false
# actions
@actionApprove = ->
i = gallery.images.indexOf @
newItem = gallery.__cloneItem i
newItem.status = true
gallery.images.splice i, 1, newItem
gallery.changes gallery.changes()+1
return
@actionReject = ->
i = gallery.images.indexOf @
newItem = gallery.__cloneItem i
newItem.status = false
gallery.images.splice i, 1, newItem
gallery.changes gallery.changes()+1
return
@actionMarkAllConfirmed = =>
@__markAll true
return
@actionMarkAllRejected = =>
@__markAll false
return
@actionSaveChanges = =>
requestUrl = 'https://admin.emajliramokade.com/platform/Moderiraj.svc/MasovnaModeracija'
@isWorking true
data = []
@images().forEach (item) ->
kada =
kadaID: item.URI
odobrena: item.status
data.push kada if item.status isnt null
return
moderiraneKade =
moderacijeKada: data
jQuery.ajax
type: 'PUT'
url: requestUrl
data: JSON.stringify moderiraneKade
dataType: 'json'
headers:
'Content-Type': 'application/json'
Authorization: 'Basic cm9iaTppYm9y'
success: (response) =>
fetchKade()
gallery.isWorking false
return
error: (response) ->
console.warn 'Got error. ', response
return
return
@pagePrev = =>
@currPage (if @currPage() is 1 then 1 else @currPage() - 1)
fetchKade @currPage-1, 20
return
@pageNext = =>
max = @pages().length
gallery.currPage (if gallery.currPage() >= max then max else gallery.currPage() + 1)
fetchKade @currPage-1, 20
return
@pageNum = ->
max = gallery.pages().length
gallery.currPage (if @.page <= max or @.page >= 1 then @.page else 1 )
fetchKade gallery.currPage-1, 20
return
@pages = ko.computed =>
i = 0
pages = []
while i < @maxPages()
pages.push
page: i+1
i++
pages
# working class
@__cloneItem = (index) ->
item = gallery.images()[index]
newItem =
URI: item.URI
width: item.width
height: item.height
status: null
timestamp: item.timestamp
filename: item.filename
imgPath: item.imgPath
fullPath: item.fullPath
@__markAll = (status) ->
i = 0
while i < @images().length
newItem = @__cloneItem i
newItem.status = status
@images.splice i, 1, newItem
@changes gallery.changes()+1
i++
return
return
gallery = null
fetchKade = (offset = 0, limit = 100) ->
requestUrl = 'https://admin.emajliramokade.com/platform/Moderiraj.svc/KadaIzvorPodataka/NemoderiraneKade'
imageBase = 'https://static.emajliramokade.com/'
jQuery.ajax
type: 'GET'
url: requestUrl
data:
offset: offset
limit: limit
dataType: 'json'
headers:
'Content-Type': 'application/json'
'Authorization': 'Basic cm9iaTppYm9y'
success: (response) =>
gallery.images []
response.forEach (item) ->
if item.slikeKade? and item.slikeKade.length isnt 0
kada = item.slikeKade
img =
URI: item.URI
width: kada.thumbnail.width
height: kada.thumbnail.height
status: null
timestamp: item.dodana
filename: kada.web.filename
imgPath: imageBase+'thumbnail/'+kada.URI+'/'+kada.thumbnail.filename
fullPath: imageBase+'web/'+kada.URI+'/'+kada.web.filename
gallery.images.push img
return
return
error: (response) ->
console.warn 'Got error. ', response
return
return
$ ->
$(window).on 'beforeunload', ->
# browser is not really asking for following string, but at least it asks for confirmation...
# don't have time for debugging it now, will do later.
'Promjenjeno stanje '+gallery.changes()+' slike/a, da li sigurno želite napustiti stranicu?'
gallery = new galleryModel()
fetchKade()
gallery.maxPages 3
ko.applyBindings gallery
return
| 65182 | #--- ADMIN STUFF ---#
galleryModel = ->
# objects & stuff
@images = ko.observableArray []
@maxPages = ko.observable 0
@currPage = ko.observable 1
# flags
@changes = ko.observable 0
@isWorking = ko.observable false
# actions
@actionApprove = ->
i = gallery.images.indexOf @
newItem = gallery.__cloneItem i
newItem.status = true
gallery.images.splice i, 1, newItem
gallery.changes gallery.changes()+1
return
@actionReject = ->
i = gallery.images.indexOf @
newItem = gallery.__cloneItem i
newItem.status = false
gallery.images.splice i, 1, newItem
gallery.changes gallery.changes()+1
return
@actionMarkAllConfirmed = =>
@__markAll true
return
@actionMarkAllRejected = =>
@__markAll false
return
@actionSaveChanges = =>
requestUrl = 'https://admin.emajliramokade.com/platform/Moderiraj.svc/MasovnaModeracija'
@isWorking true
data = []
@images().forEach (item) ->
kada =
kadaID: item.URI
odobrena: item.status
data.push kada if item.status isnt null
return
moderiraneKade =
moderacijeKada: data
jQuery.ajax
type: 'PUT'
url: requestUrl
data: JSON.stringify moderiraneKade
dataType: 'json'
headers:
'Content-Type': 'application/json'
Authorization: 'Basic <KEY>'
success: (response) =>
fetchKade()
gallery.isWorking false
return
error: (response) ->
console.warn 'Got error. ', response
return
return
@pagePrev = =>
@currPage (if @currPage() is 1 then 1 else @currPage() - 1)
fetchKade @currPage-1, 20
return
@pageNext = =>
max = @pages().length
gallery.currPage (if gallery.currPage() >= max then max else gallery.currPage() + 1)
fetchKade @currPage-1, 20
return
@pageNum = ->
max = gallery.pages().length
gallery.currPage (if @.page <= max or @.page >= 1 then @.page else 1 )
fetchKade gallery.currPage-1, 20
return
@pages = ko.computed =>
i = 0
pages = []
while i < @maxPages()
pages.push
page: i+1
i++
pages
# working class
@__cloneItem = (index) ->
item = gallery.images()[index]
newItem =
URI: item.URI
width: item.width
height: item.height
status: null
timestamp: item.timestamp
filename: item.filename
imgPath: item.imgPath
fullPath: item.fullPath
@__markAll = (status) ->
i = 0
while i < @images().length
newItem = @__cloneItem i
newItem.status = status
@images.splice i, 1, newItem
@changes gallery.changes()+1
i++
return
return
gallery = null
fetchKade = (offset = 0, limit = 100) ->
requestUrl = 'https://admin.emajliramokade.com/platform/Moderiraj.svc/KadaIzvorPodataka/NemoderiraneKade'
imageBase = 'https://static.emajliramokade.com/'
jQuery.ajax
type: 'GET'
url: requestUrl
data:
offset: offset
limit: limit
dataType: 'json'
headers:
'Content-Type': 'application/json'
'Authorization': 'Basic cm9iaTppYm9y'
success: (response) =>
gallery.images []
response.forEach (item) ->
if item.slikeKade? and item.slikeKade.length isnt 0
kada = item.slikeKade
img =
URI: item.URI
width: kada.thumbnail.width
height: kada.thumbnail.height
status: null
timestamp: item.dodana
filename: kada.web.filename
imgPath: imageBase+'thumbnail/'+kada.URI+'/'+kada.thumbnail.filename
fullPath: imageBase+'web/'+kada.URI+'/'+kada.web.filename
gallery.images.push img
return
return
error: (response) ->
console.warn 'Got error. ', response
return
return
$ ->
$(window).on 'beforeunload', ->
# browser is not really asking for following string, but at least it asks for confirmation...
# don't have time for debugging it now, will do later.
'Promjenjeno stanje '+gallery.changes()+' slike/a, da li sigurno želite napustiti stranicu?'
gallery = new galleryModel()
fetchKade()
gallery.maxPages 3
ko.applyBindings gallery
return
| true | #--- ADMIN STUFF ---#
galleryModel = ->
# objects & stuff
@images = ko.observableArray []
@maxPages = ko.observable 0
@currPage = ko.observable 1
# flags
@changes = ko.observable 0
@isWorking = ko.observable false
# actions
@actionApprove = ->
i = gallery.images.indexOf @
newItem = gallery.__cloneItem i
newItem.status = true
gallery.images.splice i, 1, newItem
gallery.changes gallery.changes()+1
return
@actionReject = ->
i = gallery.images.indexOf @
newItem = gallery.__cloneItem i
newItem.status = false
gallery.images.splice i, 1, newItem
gallery.changes gallery.changes()+1
return
@actionMarkAllConfirmed = =>
@__markAll true
return
@actionMarkAllRejected = =>
@__markAll false
return
@actionSaveChanges = =>
requestUrl = 'https://admin.emajliramokade.com/platform/Moderiraj.svc/MasovnaModeracija'
@isWorking true
data = []
@images().forEach (item) ->
kada =
kadaID: item.URI
odobrena: item.status
data.push kada if item.status isnt null
return
moderiraneKade =
moderacijeKada: data
jQuery.ajax
type: 'PUT'
url: requestUrl
data: JSON.stringify moderiraneKade
dataType: 'json'
headers:
'Content-Type': 'application/json'
Authorization: 'Basic PI:KEY:<KEY>END_PI'
success: (response) =>
fetchKade()
gallery.isWorking false
return
error: (response) ->
console.warn 'Got error. ', response
return
return
@pagePrev = =>
@currPage (if @currPage() is 1 then 1 else @currPage() - 1)
fetchKade @currPage-1, 20
return
@pageNext = =>
max = @pages().length
gallery.currPage (if gallery.currPage() >= max then max else gallery.currPage() + 1)
fetchKade @currPage-1, 20
return
@pageNum = ->
max = gallery.pages().length
gallery.currPage (if @.page <= max or @.page >= 1 then @.page else 1 )
fetchKade gallery.currPage-1, 20
return
@pages = ko.computed =>
i = 0
pages = []
while i < @maxPages()
pages.push
page: i+1
i++
pages
# working class
@__cloneItem = (index) ->
item = gallery.images()[index]
newItem =
URI: item.URI
width: item.width
height: item.height
status: null
timestamp: item.timestamp
filename: item.filename
imgPath: item.imgPath
fullPath: item.fullPath
@__markAll = (status) ->
i = 0
while i < @images().length
newItem = @__cloneItem i
newItem.status = status
@images.splice i, 1, newItem
@changes gallery.changes()+1
i++
return
return
gallery = null
fetchKade = (offset = 0, limit = 100) ->
requestUrl = 'https://admin.emajliramokade.com/platform/Moderiraj.svc/KadaIzvorPodataka/NemoderiraneKade'
imageBase = 'https://static.emajliramokade.com/'
jQuery.ajax
type: 'GET'
url: requestUrl
data:
offset: offset
limit: limit
dataType: 'json'
headers:
'Content-Type': 'application/json'
'Authorization': 'Basic cm9iaTppYm9y'
success: (response) =>
gallery.images []
response.forEach (item) ->
if item.slikeKade? and item.slikeKade.length isnt 0
kada = item.slikeKade
img =
URI: item.URI
width: kada.thumbnail.width
height: kada.thumbnail.height
status: null
timestamp: item.dodana
filename: kada.web.filename
imgPath: imageBase+'thumbnail/'+kada.URI+'/'+kada.thumbnail.filename
fullPath: imageBase+'web/'+kada.URI+'/'+kada.web.filename
gallery.images.push img
return
return
error: (response) ->
console.warn 'Got error. ', response
return
return
$ ->
$(window).on 'beforeunload', ->
# browser is not really asking for following string, but at least it asks for confirmation...
# don't have time for debugging it now, will do later.
'Promjenjeno stanje '+gallery.changes()+' slike/a, da li sigurno želite napustiti stranicu?'
gallery = new galleryModel()
fetchKade()
gallery.maxPages 3
ko.applyBindings gallery
return
|
[
{
"context": "s.find_names = (opt={},cb) ->\n return cb null, ['John','James','Jose']\n\nUsersSchema.methods.change_name",
"end": 506,
"score": 0.9997608661651611,
"start": 502,
"tag": "NAME",
"value": "John"
},
{
"context": "names = (opt={},cb) ->\n return cb null, ['John','James'... | models/users.iced | punted/mkay | 7 | _ = require('wegweg')({
globals: off
})
if !module.parent
process.env.MONGOOSE_MODEL_DEVEL = module.filename
process.env.SILENCE = 1
require './../core/globals'
Schema = mongoose.Schema
models = require './../core/models'
UsersSchema = new Schema {
active: {
type: Boolean
default: yes
}
name: {
type: String
trim: yes
required: yes
}
}, {collection:'users'}
UsersSchema.plugin models.base
UsersSchema.statics.find_names = (opt={},cb) ->
return cb null, ['John','James','Jose']
UsersSchema.methods.change_name = (opt={},cb) ->
if !opt.name then return cb new Error "`opt.name` required"
@name = opt.name
@save cb
##
model = mongoose.model 'Users', UsersSchema
model.AUTO_EXPOSE = {
route: '/users'
methods: [
'change_name'
]
statics: [
'find_names'
]
}
module.exports = model
| 169660 | _ = require('wegweg')({
globals: off
})
if !module.parent
process.env.MONGOOSE_MODEL_DEVEL = module.filename
process.env.SILENCE = 1
require './../core/globals'
Schema = mongoose.Schema
models = require './../core/models'
UsersSchema = new Schema {
active: {
type: Boolean
default: yes
}
name: {
type: String
trim: yes
required: yes
}
}, {collection:'users'}
UsersSchema.plugin models.base
UsersSchema.statics.find_names = (opt={},cb) ->
return cb null, ['<NAME>','<NAME>','<NAME>']
UsersSchema.methods.change_name = (opt={},cb) ->
if !opt.name then return cb new Error "`opt.name` required"
@name = opt.name
@save cb
##
model = mongoose.model 'Users', UsersSchema
model.AUTO_EXPOSE = {
route: '/users'
methods: [
'change_name'
]
statics: [
'find_names'
]
}
module.exports = model
| true | _ = require('wegweg')({
globals: off
})
if !module.parent
process.env.MONGOOSE_MODEL_DEVEL = module.filename
process.env.SILENCE = 1
require './../core/globals'
Schema = mongoose.Schema
models = require './../core/models'
UsersSchema = new Schema {
active: {
type: Boolean
default: yes
}
name: {
type: String
trim: yes
required: yes
}
}, {collection:'users'}
UsersSchema.plugin models.base
UsersSchema.statics.find_names = (opt={},cb) ->
return cb null, ['PI:NAME:<NAME>END_PI','PI:NAME:<NAME>END_PI','PI:NAME:<NAME>END_PI']
UsersSchema.methods.change_name = (opt={},cb) ->
if !opt.name then return cb new Error "`opt.name` required"
@name = opt.name
@save cb
##
model = mongoose.model 'Users', UsersSchema
model.AUTO_EXPOSE = {
route: '/users'
methods: [
'change_name'
]
statics: [
'find_names'
]
}
module.exports = model
|
[
{
"context": "\n\n match =\n pattern: 'dictionary'\n token: 'token'\n rank: 10\n dictionary_name: 'passwords'\n\n ",
"end": 205,
"score": 0.8228845000267029,
"start": 200,
"tag": "KEY",
"value": "token"
},
{
"context": "\n\n custom_messages =\n top10_common_password:... | test/test-feedback-l10n.coffee | lpavlicek/zxcvbn-czech | 3 | test = require 'tape'
feedback = require '../src/feedback'
feedback_l10n = require '../src/feedback_l10n'
test 'localized feedback messages', (t) ->
match =
pattern: 'dictionary'
token: 'token'
rank: 10
dictionary_name: 'passwords'
custom_messages =
top10_common_password: 'custom#top10_common_password',
# Uses cs messages
f = feedback.get_feedback(1, [match], {}, 'cs')
t.equal f.warning, feedback_l10n.cs.top10_common_password
t.deepEqual f.suggestions, [feedback_l10n.cs.uncommon_words_are_better]
# Uses custom messages
f = feedback.get_feedback(1, [match], custom_messages, 'cs')
t.equal f.warning, custom_messages.top10_common_password
t.deepEqual f.suggestions, [feedback_l10n.cs.uncommon_words_are_better]
t.end()
| 140732 | test = require 'tape'
feedback = require '../src/feedback'
feedback_l10n = require '../src/feedback_l10n'
test 'localized feedback messages', (t) ->
match =
pattern: 'dictionary'
token: '<KEY>'
rank: 10
dictionary_name: 'passwords'
custom_messages =
top10_common_password: '<PASSWORD>',
# Uses cs messages
f = feedback.get_feedback(1, [match], {}, 'cs')
t.equal f.warning, feedback_l10n.cs.top10_common_password
t.deepEqual f.suggestions, [feedback_l10n.cs.uncommon_words_are_better]
# Uses custom messages
f = feedback.get_feedback(1, [match], custom_messages, 'cs')
t.equal f.warning, custom_messages.top10_common_password
t.deepEqual f.suggestions, [feedback_l10n.cs.uncommon_words_are_better]
t.end()
| true | test = require 'tape'
feedback = require '../src/feedback'
feedback_l10n = require '../src/feedback_l10n'
test 'localized feedback messages', (t) ->
match =
pattern: 'dictionary'
token: 'PI:KEY:<KEY>END_PI'
rank: 10
dictionary_name: 'passwords'
custom_messages =
top10_common_password: 'PI:PASSWORD:<PASSWORD>END_PI',
# Uses cs messages
f = feedback.get_feedback(1, [match], {}, 'cs')
t.equal f.warning, feedback_l10n.cs.top10_common_password
t.deepEqual f.suggestions, [feedback_l10n.cs.uncommon_words_are_better]
# Uses custom messages
f = feedback.get_feedback(1, [match], custom_messages, 'cs')
t.equal f.warning, custom_messages.top10_common_password
t.deepEqual f.suggestions, [feedback_l10n.cs.uncommon_words_are_better]
t.end()
|
[
{
"context": "xtends LayerInfo\n @shouldParse: (key) -> key is 'Txt2'\n\n constructor: (layer, length) ->\n super(lay",
"end": 187,
"score": 0.9801540970802307,
"start": 183,
"tag": "KEY",
"value": "Txt2"
}
] | src/psd/layer_info/text_engine_data.coffee | taofei-pro/psd.js | 0 | LayerInfo = require '../layer_info.coffee'
parseEngineData = require '../engine_data.coffee'
module.exports = class TextEngineData extends LayerInfo
@shouldParse: (key) -> key is 'Txt2'
constructor: (layer, length) ->
super(layer, length)
@textEngineData = null
parse: ->
data = @file.read(@length)
@textEngineData = parseEngineData(data)
export: ->
textEngineData: @textEngineData
| 90118 | LayerInfo = require '../layer_info.coffee'
parseEngineData = require '../engine_data.coffee'
module.exports = class TextEngineData extends LayerInfo
@shouldParse: (key) -> key is '<KEY>'
constructor: (layer, length) ->
super(layer, length)
@textEngineData = null
parse: ->
data = @file.read(@length)
@textEngineData = parseEngineData(data)
export: ->
textEngineData: @textEngineData
| true | LayerInfo = require '../layer_info.coffee'
parseEngineData = require '../engine_data.coffee'
module.exports = class TextEngineData extends LayerInfo
@shouldParse: (key) -> key is 'PI:KEY:<KEY>END_PI'
constructor: (layer, length) ->
super(layer, length)
@textEngineData = null
parse: ->
data = @file.read(@length)
@textEngineData = parseEngineData(data)
export: ->
textEngineData: @textEngineData
|
[
{
"context": "://coffeescript.org/\nprinter_local_storage_key = \"last-printer-used\"\ncid_local_storage_key = \"cid\"\n\n$ ->\n printer_na",
"end": 257,
"score": 0.9949323534965515,
"start": 240,
"tag": "KEY",
"value": "last-printer-used"
},
{
"context": "ey = \"last-printer-used\"... | app/assets/javascripts/print.js.coffee | project-kotinos/cthit___chalmersit-rails | 5 | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
printer_local_storage_key = "last-printer-used"
cid_local_storage_key = "cid"
$ ->
printer_name = localStorage.getItem(printer_local_storage_key)
cid = localStorage.getItem(cid_local_storage_key)
if (cid)
$('#print_username').val(cid)
if $('#print_printer').length
$.getJSON($('.printer-list').data('url'))
.success (printers) ->
option_tags = printers.map (printer) ->
$('<option/>')
.val(printer.name)
.html(printer.name)
.data('media', printer.media)
.data('location', printer.location)
.data('duplex', printer.duplex)
printer_suggestions = printers.slice(0, 10).map (printer) ->
$('<li/>').html($('<a/>').addClass('set-printer').attr('href', 'javascript:;').html(printer.name))
$('.printer-list').html(printer_suggestions)
$('#print_printer')
.html(option_tags)
.chosen
no_results_text: 'No matches'
search_contains: true
width: '91%'
if (printer_name)
$('#print_printer').val(printer_name).trigger('chosen:updated')
$('#print_printer').trigger 'change'
$('.get-pq-button').on 'click', ->
$that = $(this)
$that.prop 'disabled', true
$('#pq .alert-box').hide()
$('#pq .done').toggle()
$.ajax
url: $('#pq').data('url')
type: 'POST'
data:
username: $('#print_username').val()
password: $('#print_password').val()
.success (data) ->
$that.prop 'disabled', false
$('#pq .done').toggle()
if data.error
$('#pq .alert .error').text(data.error)
$('#pq .alert').show()
else
$('#pq .name').text(data.username)
$('#pq .pq').text(data.value)
$('#pq .success').show()
localStorage.setItem(cid_local_storage_key, data.username)
$('.printer-list').on 'click', '.set-printer', ->
$('#print_printer').val this.textContent
$('#print_printer').trigger 'chosen:updated'
$('#print_printer').trigger 'change'
$('#print_printer').on 'change', ->
selected = $ 'option:selected', this
setMedia selected.data('media').split(' ')
setDuplexEnabled selected.data('duplex')
printer_name = this.value
localStorage.setItem(printer_local_storage_key, printer_name)
$('#new_print').on 'submit', (e) ->
e.preventDefault()
form = new FormData(this)
newCid = $('#print_username').val()
$.ajax
url: this.action,
type: 'POST',
data: form,
processData: false
contentType: false
.success (data) ->
$('.printer-feedback-alert').hide()
$('.printer-feedback-success').show()
localStorage.setItem(cid_local_storage_key, newCid)
.error (err) ->
errors = err.responseJSON.errors
$('.printer-feedback-alert .msg').html($('<ul/>').html(errors.map (err) -> $('<li/>').text(err)))
$('.printer-feedback-alert').show()
$('.printer-feedback-success').hide()
console.error errors
.always ->
$.rails.enableFormElement($('[data-disable-with]'))
setMedia = (medias) ->
$media = $('#print_media')
$media.html('')
medias = medias.map (m) ->
$('<option>').val(m).html(m)
$media.html(medias)
setDuplexEnabled = (hasDuplexSupport) ->
$('#print_duplex')
.prop('checked', hasDuplexSupport)
.attr('disabled', !hasDuplexSupport)
| 76322 | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
printer_local_storage_key = "<KEY>"
cid_local_storage_key = "<KEY>"
$ ->
printer_name = localStorage.getItem(printer_local_storage_key)
cid = localStorage.getItem(cid_local_storage_key)
if (cid)
$('#print_username').val(cid)
if $('#print_printer').length
$.getJSON($('.printer-list').data('url'))
.success (printers) ->
option_tags = printers.map (printer) ->
$('<option/>')
.val(printer.name)
.html(printer.name)
.data('media', printer.media)
.data('location', printer.location)
.data('duplex', printer.duplex)
printer_suggestions = printers.slice(0, 10).map (printer) ->
$('<li/>').html($('<a/>').addClass('set-printer').attr('href', 'javascript:;').html(printer.name))
$('.printer-list').html(printer_suggestions)
$('#print_printer')
.html(option_tags)
.chosen
no_results_text: 'No matches'
search_contains: true
width: '91%'
if (printer_name)
$('#print_printer').val(printer_name).trigger('chosen:updated')
$('#print_printer').trigger 'change'
$('.get-pq-button').on 'click', ->
$that = $(this)
$that.prop 'disabled', true
$('#pq .alert-box').hide()
$('#pq .done').toggle()
$.ajax
url: $('#pq').data('url')
type: 'POST'
data:
username: $('#print_username').val()
password: $('#print_<PASSWORD>').val()
.success (data) ->
$that.prop 'disabled', false
$('#pq .done').toggle()
if data.error
$('#pq .alert .error').text(data.error)
$('#pq .alert').show()
else
$('#pq .name').text(data.username)
$('#pq .pq').text(data.value)
$('#pq .success').show()
localStorage.setItem(cid_local_storage_key, data.username)
$('.printer-list').on 'click', '.set-printer', ->
$('#print_printer').val this.textContent
$('#print_printer').trigger 'chosen:updated'
$('#print_printer').trigger 'change'
$('#print_printer').on 'change', ->
selected = $ 'option:selected', this
setMedia selected.data('media').split(' ')
setDuplexEnabled selected.data('duplex')
printer_name = this.value
localStorage.setItem(printer_local_storage_key, printer_name)
$('#new_print').on 'submit', (e) ->
e.preventDefault()
form = new FormData(this)
newCid = $('#print_username').val()
$.ajax
url: this.action,
type: 'POST',
data: form,
processData: false
contentType: false
.success (data) ->
$('.printer-feedback-alert').hide()
$('.printer-feedback-success').show()
localStorage.setItem(cid_local_storage_key, newCid)
.error (err) ->
errors = err.responseJSON.errors
$('.printer-feedback-alert .msg').html($('<ul/>').html(errors.map (err) -> $('<li/>').text(err)))
$('.printer-feedback-alert').show()
$('.printer-feedback-success').hide()
console.error errors
.always ->
$.rails.enableFormElement($('[data-disable-with]'))
setMedia = (medias) ->
$media = $('#print_media')
$media.html('')
medias = medias.map (m) ->
$('<option>').val(m).html(m)
$media.html(medias)
setDuplexEnabled = (hasDuplexSupport) ->
$('#print_duplex')
.prop('checked', hasDuplexSupport)
.attr('disabled', !hasDuplexSupport)
| true | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
printer_local_storage_key = "PI:KEY:<KEY>END_PI"
cid_local_storage_key = "PI:KEY:<KEY>END_PI"
$ ->
printer_name = localStorage.getItem(printer_local_storage_key)
cid = localStorage.getItem(cid_local_storage_key)
if (cid)
$('#print_username').val(cid)
if $('#print_printer').length
$.getJSON($('.printer-list').data('url'))
.success (printers) ->
option_tags = printers.map (printer) ->
$('<option/>')
.val(printer.name)
.html(printer.name)
.data('media', printer.media)
.data('location', printer.location)
.data('duplex', printer.duplex)
printer_suggestions = printers.slice(0, 10).map (printer) ->
$('<li/>').html($('<a/>').addClass('set-printer').attr('href', 'javascript:;').html(printer.name))
$('.printer-list').html(printer_suggestions)
$('#print_printer')
.html(option_tags)
.chosen
no_results_text: 'No matches'
search_contains: true
width: '91%'
if (printer_name)
$('#print_printer').val(printer_name).trigger('chosen:updated')
$('#print_printer').trigger 'change'
$('.get-pq-button').on 'click', ->
$that = $(this)
$that.prop 'disabled', true
$('#pq .alert-box').hide()
$('#pq .done').toggle()
$.ajax
url: $('#pq').data('url')
type: 'POST'
data:
username: $('#print_username').val()
password: $('#print_PI:PASSWORD:<PASSWORD>END_PI').val()
.success (data) ->
$that.prop 'disabled', false
$('#pq .done').toggle()
if data.error
$('#pq .alert .error').text(data.error)
$('#pq .alert').show()
else
$('#pq .name').text(data.username)
$('#pq .pq').text(data.value)
$('#pq .success').show()
localStorage.setItem(cid_local_storage_key, data.username)
$('.printer-list').on 'click', '.set-printer', ->
$('#print_printer').val this.textContent
$('#print_printer').trigger 'chosen:updated'
$('#print_printer').trigger 'change'
$('#print_printer').on 'change', ->
selected = $ 'option:selected', this
setMedia selected.data('media').split(' ')
setDuplexEnabled selected.data('duplex')
printer_name = this.value
localStorage.setItem(printer_local_storage_key, printer_name)
$('#new_print').on 'submit', (e) ->
e.preventDefault()
form = new FormData(this)
newCid = $('#print_username').val()
$.ajax
url: this.action,
type: 'POST',
data: form,
processData: false
contentType: false
.success (data) ->
$('.printer-feedback-alert').hide()
$('.printer-feedback-success').show()
localStorage.setItem(cid_local_storage_key, newCid)
.error (err) ->
errors = err.responseJSON.errors
$('.printer-feedback-alert .msg').html($('<ul/>').html(errors.map (err) -> $('<li/>').text(err)))
$('.printer-feedback-alert').show()
$('.printer-feedback-success').hide()
console.error errors
.always ->
$.rails.enableFormElement($('[data-disable-with]'))
setMedia = (medias) ->
$media = $('#print_media')
$media.html('')
medias = medias.map (m) ->
$('<option>').val(m).html(m)
$media.html(medias)
setDuplexEnabled = (hasDuplexSupport) ->
$('#print_duplex')
.prop('checked', hasDuplexSupport)
.attr('disabled', !hasDuplexSupport)
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.999913215637207,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/beatmap-discussions-chart.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
bn = 'beatmap-discussions-chart'
class @BeatmapDiscussionsChart
constructor: (area, @length) ->
@id = Math.floor(Math.random() * 1000)
@dimensions =
chartHeight: 120
totalHeight: 150
xAxisHeight: 2
barTop: 0
targetAreaWidth: 10
@dimensions.labelHeight = @dimensions.totalHeight - @dimensions.chartHeight
@dimensions.labelTop = @dimensions.totalHeight - @dimensions.labelHeight
@dimensions.iconTop = @dimensions.labelTop + (@dimensions.labelHeight / 2)
@dimensions.barHeight = @dimensions.chartHeight - @dimensions.barTop
@dimensions.xAxisTop = @dimensions.chartHeight - @dimensions.xAxisHeight
@dimensions.targetAreaHeight = @dimensions.barHeight + @dimensions.labelHeight
@margins =
top: 0
right: 40
bottom: 0
left: 40
@scaleX = d3.scaleLinear()
.domain [0, @length]
.nice()
@area = d3
.select(area)
.append 'div'
.classed bn, true
@svg = @area.append 'svg'
lineGradient = @svg.append 'defs'
.append 'linearGradient'
.attr 'id', "bar-gradient-#{@id}"
.attr 'gradientUnits', 'userSpaceOnUse'
.attr 'x1', 0
.attr 'x2', 0
.attr 'y1', 0
.attr 'y2', '100%'
lineGradient.append 'stop'
.classed "#{bn}__bar-gradient #{bn}__bar-gradient--start", true
.attr 'offset', '30%'
lineGradient.append 'stop'
.classed "#{bn}__bar-gradient", true
.attr 'offset', '80%'
@svgWrapper = @svg.append 'g'
.classed "#{bn}__wrapper", true
@svgChartArea = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', 0
.attr 'height', @dimensions.chartHeight
.classed "#{bn}__chart-area", true
@svgLabelArea = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', @dimensions.labelTop
.attr 'height', @dimensions.labelHeight
.classed "#{bn}__label-area", true
@svgXAxis = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', @dimensions.xAxisTop
.attr 'height', @dimensions.xAxisHeight
.classed "#{bn}__axis #{bn}__axis--x", true
@svgPointsContainer = @svgWrapper.append 'g'
@xAxis = d3.axisBottom()
.ticks 0
.tickSizeOuter 0
loadData: (data) =>
@data = _.orderBy data, 'timestamp'
@svgPoints = @svgPointsContainer
.selectAll ".#{bn}__point"
.data @data, (d) => d.id
svgPointsEnter = @svgPoints.enter()
.append 'a'
.classed "#{bn}__point", true
svgPointsEnter
.append 'line'
.classed "#{bn}__bar", true
.attr 'x1', 0
.attr 'x2', 0
.attr 'y1', @dimensions.barTop
.attr 'y2', @dimensions.barTop + @dimensions.barHeight
.attr 'stroke', "url(#bar-gradient-#{@id})"
svgPointsEnter
.append 'rect'
.classed "#{bn}__target-area", true
.attr 'x', -@dimensions.targetAreaWidth / 2
.attr 'width', @dimensions.targetAreaWidth
.attr 'y', @dimensions.barTop
.attr 'height', @dimensions.targetAreaHeight
svgPointsEnter
.append 'text'
.classed "#{bn}__icon", true
.style 'text-anchor', 'middle'
.attr 'y', @dimensions.iconTop
@svgPoints.exit().remove()
@svgPoints = svgPointsEnter.merge(@svgPoints)
@svgPoints
.attr 'xlink:href', (d) =>
BeatmapDiscussionHelper.url discussion: d
.attr 'class', (d) ->
type = if d.resolved then 'resolved' else _.kebabCase(d.message_type)
classes = "js-beatmap-discussion--jump #{bn}__point #{bn}__point--#{type}"
classes += " #{bn}__point--deleted" if d.deleted_at?
classes
.attr 'title', (d) ->
BeatmapDiscussionHelper.formatTimestamp d.timestamp
.attr 'data-tooltip-position', 'bottom center'
.attr 'data-tooltip-modifiers', 'extra-padding'
# refresh the icons
@svgPoints
.select(".#{bn}__icon > tspan").remove()
@svgPoints
.select ".#{bn}__icon"
.append 'tspan'
.attr 'class', (d) ->
type = if d.resolved then 'resolved' else _.camelCase(d.message_type)
BeatmapDiscussionHelper.messageType.iconText[type][0]
.html (d) ->
type = if d.resolved then 'resolved' else _.camelCase(d.message_type)
BeatmapDiscussionHelper.messageType.iconText[type][1]
@resize()
setDimensions: =>
areaDims = @area.node().getBoundingClientRect()
@width = areaDims.width - (@margins.left + @margins.right)
@height = areaDims.height - (@margins.top + @margins.bottom)
setScales: =>
@scaleX
.range [0, @width]
setAxisSize: =>
@xAxis
.scale @scaleX
setSvgSize: =>
@svg
.attr 'width', @width + (@margins.left + @margins.right)
.attr 'height', @height + (@margins.top + @margins.bottom)
setWrapperSize: =>
@svgWrapper
.attr 'transform', "translate(#{@margins.left}, #{@margins.top})"
drawAreas: =>
width = @width + (@margins.left + @margins.right)
@svgChartArea.attr 'width', width
@svgLabelArea.attr 'width', width
drawXAxis: =>
@svgXAxis.attr 'width', @width + (@margins.left + @margins.right)
positionPoints: =>
@svgPoints.attr 'transform', (d) =>
"translate(#{Math.round(@scaleX(d.timestamp))}, 0)"
resize: =>
@setDimensions()
@setScales()
@setSvgSize()
@setWrapperSize()
@setAxisSize()
@drawXAxis()
@drawAreas()
@positionPoints()
| 93102 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
bn = 'beatmap-discussions-chart'
class @BeatmapDiscussionsChart
constructor: (area, @length) ->
@id = Math.floor(Math.random() * 1000)
@dimensions =
chartHeight: 120
totalHeight: 150
xAxisHeight: 2
barTop: 0
targetAreaWidth: 10
@dimensions.labelHeight = @dimensions.totalHeight - @dimensions.chartHeight
@dimensions.labelTop = @dimensions.totalHeight - @dimensions.labelHeight
@dimensions.iconTop = @dimensions.labelTop + (@dimensions.labelHeight / 2)
@dimensions.barHeight = @dimensions.chartHeight - @dimensions.barTop
@dimensions.xAxisTop = @dimensions.chartHeight - @dimensions.xAxisHeight
@dimensions.targetAreaHeight = @dimensions.barHeight + @dimensions.labelHeight
@margins =
top: 0
right: 40
bottom: 0
left: 40
@scaleX = d3.scaleLinear()
.domain [0, @length]
.nice()
@area = d3
.select(area)
.append 'div'
.classed bn, true
@svg = @area.append 'svg'
lineGradient = @svg.append 'defs'
.append 'linearGradient'
.attr 'id', "bar-gradient-#{@id}"
.attr 'gradientUnits', 'userSpaceOnUse'
.attr 'x1', 0
.attr 'x2', 0
.attr 'y1', 0
.attr 'y2', '100%'
lineGradient.append 'stop'
.classed "#{bn}__bar-gradient #{bn}__bar-gradient--start", true
.attr 'offset', '30%'
lineGradient.append 'stop'
.classed "#{bn}__bar-gradient", true
.attr 'offset', '80%'
@svgWrapper = @svg.append 'g'
.classed "#{bn}__wrapper", true
@svgChartArea = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', 0
.attr 'height', @dimensions.chartHeight
.classed "#{bn}__chart-area", true
@svgLabelArea = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', @dimensions.labelTop
.attr 'height', @dimensions.labelHeight
.classed "#{bn}__label-area", true
@svgXAxis = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', @dimensions.xAxisTop
.attr 'height', @dimensions.xAxisHeight
.classed "#{bn}__axis #{bn}__axis--x", true
@svgPointsContainer = @svgWrapper.append 'g'
@xAxis = d3.axisBottom()
.ticks 0
.tickSizeOuter 0
loadData: (data) =>
@data = _.orderBy data, 'timestamp'
@svgPoints = @svgPointsContainer
.selectAll ".#{bn}__point"
.data @data, (d) => d.id
svgPointsEnter = @svgPoints.enter()
.append 'a'
.classed "#{bn}__point", true
svgPointsEnter
.append 'line'
.classed "#{bn}__bar", true
.attr 'x1', 0
.attr 'x2', 0
.attr 'y1', @dimensions.barTop
.attr 'y2', @dimensions.barTop + @dimensions.barHeight
.attr 'stroke', "url(#bar-gradient-#{@id})"
svgPointsEnter
.append 'rect'
.classed "#{bn}__target-area", true
.attr 'x', -@dimensions.targetAreaWidth / 2
.attr 'width', @dimensions.targetAreaWidth
.attr 'y', @dimensions.barTop
.attr 'height', @dimensions.targetAreaHeight
svgPointsEnter
.append 'text'
.classed "#{bn}__icon", true
.style 'text-anchor', 'middle'
.attr 'y', @dimensions.iconTop
@svgPoints.exit().remove()
@svgPoints = svgPointsEnter.merge(@svgPoints)
@svgPoints
.attr 'xlink:href', (d) =>
BeatmapDiscussionHelper.url discussion: d
.attr 'class', (d) ->
type = if d.resolved then 'resolved' else _.kebabCase(d.message_type)
classes = "js-beatmap-discussion--jump #{bn}__point #{bn}__point--#{type}"
classes += " #{bn}__point--deleted" if d.deleted_at?
classes
.attr 'title', (d) ->
BeatmapDiscussionHelper.formatTimestamp d.timestamp
.attr 'data-tooltip-position', 'bottom center'
.attr 'data-tooltip-modifiers', 'extra-padding'
# refresh the icons
@svgPoints
.select(".#{bn}__icon > tspan").remove()
@svgPoints
.select ".#{bn}__icon"
.append 'tspan'
.attr 'class', (d) ->
type = if d.resolved then 'resolved' else _.camelCase(d.message_type)
BeatmapDiscussionHelper.messageType.iconText[type][0]
.html (d) ->
type = if d.resolved then 'resolved' else _.camelCase(d.message_type)
BeatmapDiscussionHelper.messageType.iconText[type][1]
@resize()
setDimensions: =>
areaDims = @area.node().getBoundingClientRect()
@width = areaDims.width - (@margins.left + @margins.right)
@height = areaDims.height - (@margins.top + @margins.bottom)
setScales: =>
@scaleX
.range [0, @width]
setAxisSize: =>
@xAxis
.scale @scaleX
setSvgSize: =>
@svg
.attr 'width', @width + (@margins.left + @margins.right)
.attr 'height', @height + (@margins.top + @margins.bottom)
setWrapperSize: =>
@svgWrapper
.attr 'transform', "translate(#{@margins.left}, #{@margins.top})"
drawAreas: =>
width = @width + (@margins.left + @margins.right)
@svgChartArea.attr 'width', width
@svgLabelArea.attr 'width', width
drawXAxis: =>
@svgXAxis.attr 'width', @width + (@margins.left + @margins.right)
positionPoints: =>
@svgPoints.attr 'transform', (d) =>
"translate(#{Math.round(@scaleX(d.timestamp))}, 0)"
resize: =>
@setDimensions()
@setScales()
@setSvgSize()
@setWrapperSize()
@setAxisSize()
@drawXAxis()
@drawAreas()
@positionPoints()
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
bn = 'beatmap-discussions-chart'
class @BeatmapDiscussionsChart
constructor: (area, @length) ->
@id = Math.floor(Math.random() * 1000)
@dimensions =
chartHeight: 120
totalHeight: 150
xAxisHeight: 2
barTop: 0
targetAreaWidth: 10
@dimensions.labelHeight = @dimensions.totalHeight - @dimensions.chartHeight
@dimensions.labelTop = @dimensions.totalHeight - @dimensions.labelHeight
@dimensions.iconTop = @dimensions.labelTop + (@dimensions.labelHeight / 2)
@dimensions.barHeight = @dimensions.chartHeight - @dimensions.barTop
@dimensions.xAxisTop = @dimensions.chartHeight - @dimensions.xAxisHeight
@dimensions.targetAreaHeight = @dimensions.barHeight + @dimensions.labelHeight
@margins =
top: 0
right: 40
bottom: 0
left: 40
@scaleX = d3.scaleLinear()
.domain [0, @length]
.nice()
@area = d3
.select(area)
.append 'div'
.classed bn, true
@svg = @area.append 'svg'
lineGradient = @svg.append 'defs'
.append 'linearGradient'
.attr 'id', "bar-gradient-#{@id}"
.attr 'gradientUnits', 'userSpaceOnUse'
.attr 'x1', 0
.attr 'x2', 0
.attr 'y1', 0
.attr 'y2', '100%'
lineGradient.append 'stop'
.classed "#{bn}__bar-gradient #{bn}__bar-gradient--start", true
.attr 'offset', '30%'
lineGradient.append 'stop'
.classed "#{bn}__bar-gradient", true
.attr 'offset', '80%'
@svgWrapper = @svg.append 'g'
.classed "#{bn}__wrapper", true
@svgChartArea = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', 0
.attr 'height', @dimensions.chartHeight
.classed "#{bn}__chart-area", true
@svgLabelArea = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', @dimensions.labelTop
.attr 'height', @dimensions.labelHeight
.classed "#{bn}__label-area", true
@svgXAxis = @svgWrapper.append 'rect'
.attr 'x', -@margins.left
.attr 'y', @dimensions.xAxisTop
.attr 'height', @dimensions.xAxisHeight
.classed "#{bn}__axis #{bn}__axis--x", true
@svgPointsContainer = @svgWrapper.append 'g'
@xAxis = d3.axisBottom()
.ticks 0
.tickSizeOuter 0
loadData: (data) =>
@data = _.orderBy data, 'timestamp'
@svgPoints = @svgPointsContainer
.selectAll ".#{bn}__point"
.data @data, (d) => d.id
svgPointsEnter = @svgPoints.enter()
.append 'a'
.classed "#{bn}__point", true
svgPointsEnter
.append 'line'
.classed "#{bn}__bar", true
.attr 'x1', 0
.attr 'x2', 0
.attr 'y1', @dimensions.barTop
.attr 'y2', @dimensions.barTop + @dimensions.barHeight
.attr 'stroke', "url(#bar-gradient-#{@id})"
svgPointsEnter
.append 'rect'
.classed "#{bn}__target-area", true
.attr 'x', -@dimensions.targetAreaWidth / 2
.attr 'width', @dimensions.targetAreaWidth
.attr 'y', @dimensions.barTop
.attr 'height', @dimensions.targetAreaHeight
svgPointsEnter
.append 'text'
.classed "#{bn}__icon", true
.style 'text-anchor', 'middle'
.attr 'y', @dimensions.iconTop
@svgPoints.exit().remove()
@svgPoints = svgPointsEnter.merge(@svgPoints)
@svgPoints
.attr 'xlink:href', (d) =>
BeatmapDiscussionHelper.url discussion: d
.attr 'class', (d) ->
type = if d.resolved then 'resolved' else _.kebabCase(d.message_type)
classes = "js-beatmap-discussion--jump #{bn}__point #{bn}__point--#{type}"
classes += " #{bn}__point--deleted" if d.deleted_at?
classes
.attr 'title', (d) ->
BeatmapDiscussionHelper.formatTimestamp d.timestamp
.attr 'data-tooltip-position', 'bottom center'
.attr 'data-tooltip-modifiers', 'extra-padding'
# refresh the icons
@svgPoints
.select(".#{bn}__icon > tspan").remove()
@svgPoints
.select ".#{bn}__icon"
.append 'tspan'
.attr 'class', (d) ->
type = if d.resolved then 'resolved' else _.camelCase(d.message_type)
BeatmapDiscussionHelper.messageType.iconText[type][0]
.html (d) ->
type = if d.resolved then 'resolved' else _.camelCase(d.message_type)
BeatmapDiscussionHelper.messageType.iconText[type][1]
@resize()
setDimensions: =>
areaDims = @area.node().getBoundingClientRect()
@width = areaDims.width - (@margins.left + @margins.right)
@height = areaDims.height - (@margins.top + @margins.bottom)
setScales: =>
@scaleX
.range [0, @width]
setAxisSize: =>
@xAxis
.scale @scaleX
setSvgSize: =>
@svg
.attr 'width', @width + (@margins.left + @margins.right)
.attr 'height', @height + (@margins.top + @margins.bottom)
setWrapperSize: =>
@svgWrapper
.attr 'transform', "translate(#{@margins.left}, #{@margins.top})"
drawAreas: =>
width = @width + (@margins.left + @margins.right)
@svgChartArea.attr 'width', width
@svgLabelArea.attr 'width', width
drawXAxis: =>
@svgXAxis.attr 'width', @width + (@margins.left + @margins.right)
positionPoints: =>
@svgPoints.attr 'transform', (d) =>
"translate(#{Math.round(@scaleX(d.timestamp))}, 0)"
resize: =>
@setDimensions()
@setScales()
@setSvgSize()
@setWrapperSize()
@setAxisSize()
@drawXAxis()
@drawAreas()
@positionPoints()
|
[
{
"context": "tionDays: ${4:30}\r\n \\n\\toauthSecretKey: \"${5:wgporjigrpqgdfg}\"\r\n \\nAccounts.ui.config({\r\n \\n\\treques",
"end": 5518,
"score": 0.9371278285980225,
"start": 5503,
"tag": "KEY",
"value": "wgporjigrpqgdfg"
},
{
"context": ": '\r\n userObject ... | snippets/meteor-api-snippets-coffeescript.cson | VNEU/FLEXURIO-API | 82 | # Your snippets
#
# Atom snippets allow you to enter a simple prefix in the editor and hit tab to
# expand the prefix into a larger code block with templated values.
#
# You can create a new snippet in this file by typing "snip" and then hitting
# tab.
#
# An example CoffeeScript snippet to expand log to console.log:
#
# '.source.coffee':
# 'Console log':
# 'prefix': 'log'
# 'body': 'console.log $1'
#
#===================================================
# Coffeescript language snippets
'.coffee':
#===================================================
# Meteor Core API
'Meteor.isClient':
'prefix': 'isClient'
'body': 'if Meteor.isClient\n\t'
'Meteor.isServer':
'prefix': 'isServer'
'body': 'if Meteor.isServer\n\t'
'Meteor.isCordova':
'prefix': 'isCordova'
'body': 'if Meteor.isCordova\n\t'
'Meteor.startup':
'prefix': 'startup'
'body': 'Meteor.startup ->\n\t'
'Meteor.absoluteUrl':
'prefix': 'absolute'
'body': 'Meteor.absoluteUrl "$1"'
#===================================================
# Publish & Subscribe
'Meteor Publish':
'prefix': 'publish'
'body': 'Meteor.publish "${1:name}", (${2:args}) ->\n\t'
'Meteor Subscribe':
'prefix': 'subscribe'
'body': 'Meteor.subscribe "${1:name}", "${2:arg}"'
#===================================================
# Methods
'Meteor Methods':
'prefix': 'methods'
'body': 'Meteor.methods
\n\t${1:methodName}: ->
\n\t\t${2:}'
'Meteor.Error':
'prefix': 'error'
'body': 'Meteor.Error ${1:Number}, "${2:description}"'
'Meteor.call':
'prefix': 'call'
'body': 'Meteor.call "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
#===================================================
# Connections
'Meteor.status':
'prefix': 'status'
'body': 'Meteor.status()'
'Meteor.reconnect':
'prefix': 'reconnect'
'body': 'Meteor.reconnect()'
'Meteor.disconnect':
'prefix': 'disconnect'
'body': 'Meteor.disconnect()'
'Meteor.onConnection':
'prefix': 'onConnection'
'body': 'Meteor.onConnection ->
\n\t$1'
#===================================================
# Collections
'Mongo.Collection.ObjectID':
'prefix': 'object'
'body': 'new Mongo.Collection.ObjectID "${1:id}"'
'Collection.find':
'prefix': 'find'
'body': '${1:Collection}.find "${2:field}":${3:value}\n$4'
'Collection.findOne':
'prefix': 'findOne'
'body': '${1:Collection}.findOne _id:"${2:recordId}"$3)\n$4'
'Collection.insert':
'prefix': 'insert'
'body': '${1:Collection}.insert ${2:newRecord}\n$3'
'Collection.Update':
'prefix': 'update'
'body': '${1:Collection}.update _id:${2:idSelector}\n$set:{\n\t$3\n}\n$4'
'Collection.Upsert':
'prefix': 'upsert'
'body': '${1:Collection}.upsert _id:${2:idSelector}\n$set:{\n\t$3\n}\n$4'
'Collection.remove':
'prefix': 'remove'
'body': '${1:Collection}.remove _id:"${2:recordId}"$3\n$4'
'Collection.allow':
'prefix': 'allow'
'body': '${1:Collection}.allow
\n\tinsert: -> ${2:true}
\n\tupdate: -> ${3:true}
\n\tremove: -> ${4:true}
\n$5'
'Collection.deny':
'prefix': 'deny'
'body': '${1:Collection}.deny
\n\tinsert: -> ${2:false}
\n\tupdate: -> ${3:false}
\n\tremove: -> ${4:false}
\n$5'
'Collection':
'prefix': 'Collection'
'body': '${1:CollectionName} = new Mongo.Collection "${2:name}";
\n${3:CollectionName}.allow
\n\tinsert: -> ${4:true}
\n\tupdate: -> ${5:true}
\n\tremove: -> ${6:true}
\n$7'
#===================================================
# Session Snippets
'Session.set':
'prefix': 'set'
'body': 'Session.set "${1:variableName}", ${2:value}\n$3'
'Session.get':
'prefix': 'get'
'body': 'Session.get "${1:variableName}"\n$2'
'Session.equals':
'prefix': 'equals'
'body': 'Session.equals "${1:variableName}", ${2:value}\n$3'
'Session.setDefault':
'prefix': 'setDefault'
'body': 'Session.setDefault "${1:variableName}", ${2:value}\n$3'
#===================================================
# Accounts Snippets
'Meteor.user':
'prefix': 'user'
'body': 'Meteor.user()'
'Meteor.user':
'prefix': 'varuser'
'body': 'user = Meteor.user();'
'Meteor.userId':
'prefix': 'userId'
'body': 'Meteor.userId()'
'Meteor.users':
'prefix': 'users'
'body': 'Meteor.users'
'Meteor.loggingIn':
'prefix': 'loggingIn'
'body': 'Meteor.loggingIn()'
'Meteor.logout':
'prefix': 'logout'
'body': 'Meteor.logout ->
\n\t$1'
'Meteor.logoutOtherClients':
'prefix': 'logoutOtherClients'
'body': 'Meteor.logoutOtherClients ->
\n\t$1'
'Meteor.loginWithPassword':
'prefix': 'logoutOtherClients'
'body': 'Meteor.logoutOtherClients ${1:user}, ${2:password}, ->
\n\t$3'
'Accounts':
'prefix': 'Accounts'
'body': '
Accounts.config
\n\tsendVerificationEmail: ${1:true}
\n\tforbidClientAccountCreation: ${2:true}
\n\trestrictCreationByEmailDomain: "${3:school.edu}"
\n\tloginExpirationDays: ${4:30}
\n\toauthSecretKey: "${5:wgporjigrpqgdfg}"
\nAccounts.ui.config({
\n\trequestPermissions: ${1:{}}
\n\trequestOfflineToken: ${1:{}}
\n\tpasswordSignupFields: "${3:USERNAME_AND_OPTIONAL_EMAIL}"
\n$12'
'Accounts.validateNewUser':
'prefix': 'validateNewUser'
'body': 'Meteor.validateNewUser ->
\n\t$1'
'Accounts.onCreateUser':
'prefix': 'onCreateUser'
'body': 'Meteor.onCreateUser (options, user) ->
\n\t$1
\n\treturn ${1:user}'
'Accounts.onLogin':
'prefix': 'onLogin'
'body': 'Meteor.onLogin ->
\n\t$1'
'Accounts.onLoginFailure':
'prefix': 'onLoginFailure'
'body': 'Meteor.onLoginFailure ->
\n\t$1'
#===================================================
# Passwords Snippets
'Accounts.createUser':
'prefix': 'createUser'
'body': '
userObject =
\n\tusername: "${1:username}"
\n\tmail: "${1:email}"
\n\tpassword: "${1:password}"
\n
\nAccounts.createUser ${1:userObject}, ->
\n\t$1'
'Accounts.changePassword':
'prefix': 'changePassword'
'body': 'Accounts.changePassword ${1:oldPassword}, ${2:newPassword}, ->
\n\t$1'
'Accounts.forgotPassword':
'prefix': 'forgotPassword'
'body': 'Accounts.forgotPassword email: "${1:address}", ->
\n\t$2'
'Accounts.resetPassword':
'prefix': 'resetPassword'
'body': 'Accounts.resetPassword ${1:token}, ${2:newPassword}, ->
\n\t$3'
'Accounts.setPassword':
'prefix': 'setPassword'
'body': 'Accounts.setPassword ${1:userId}, ${2:newPassword}'
'Accounts.verifyEmail':
'prefix': 'verifyEmail'
'body': 'Accounts.verifyEmail ${1:token}, ->
\n\t$1'
'Accounts.sendResetPasswordEmail':
'prefix': 'sendResetPasswordEmail'
'body': 'Accounts.sendResetPasswordEmail ${1:userId}'
'Accounts.sendEnrollmentEmail':
'prefix': 'sendEnrollmentEmail'
'body': 'Accounts.sendEnrollmentEmail ${1:userId}'
'Accounts.sendVerificationEmail':
'prefix': 'sendVerificationEmail'
'body': 'Accounts.sendVerificationEmail ${1:userId};'
#===================================================
# Match Snippets
'Match.check':
'prefix': 'check'
'body': 'check(${1:variable}, ${2:String}\n$3'
'Match.test':
'prefix': 'test'
'body': 'Match.test(${1:variable}, ${2:String});\n${3:}'
#===================================================
# Timers Snippets
'Meteor.setTimeout':
'prefix': 'setTimeout'
'body': 'Meteor.setTimeout (->
\n\t$2
\n${1:milliseconds})'
'Meteor.setInterval':
'prefix': 'setInterval'
'body': 'Meteor.setInterval (->
\n\t$2
\n${1:milliseconds})'
'Meteor.clearTimeout':
'prefix': 'clearTimeout'
'body': 'Meteor.clearTimeout ${1:id}'
'Meteor.clearInterval':
'prefix': 'clearInterval'
'body': 'Meteor.clearInterval ${1:id}'
#===================================================
# Tracker
'Tracker.autorun':
'prefix': 'autorun'
'body': 'Tracker.autorun ->
\n\t$2'
'Tracker.flush':
'prefix': 'flush'
'body': 'Tracker.flush()'
'Tracker.nonreactive':
'prefix': 'nonreactive'
'body': 'Tracker.nonreactive ->
\n\t$2'
'Tracker.onInvalidate':
'prefix': 'onInvalidate'
'body': 'Tracker.onInvalidate ->
\n\t$2'
'Tracker.afterFlush':
'prefix': 'afterFlush'
'body': 'Tracker.afterFlush ->
\n\t$2'
'Tracker.active':
'prefix': 'active'
'body': 'Tracker.active'
'Tracker.currentComputation':
'prefix': 'currentComputation'
'body': 'Tracker.currentComputation'
#===================================================
# Templates Snippets
'Template Rendered':
'prefix': 'rendered'
'body': 'Template.${1:name}.rendered = -> \n\t${2}'
'Template Events':
'prefix': 'events'
'body': '
Template.${1:name}.events
\n\t"click ${2:#event}": (event, template) ->
\n\t\t$3'
'Template Created':
'prefix': 'created'
'body': 'Template.${1:name}.created = ->
\n\t$2'
'Template Destroyed':
'prefix': 'destroyed'
'body': '
Template.${1:name}.destroyed = ->
\n\t$2'
'Template':
'prefix': 'Template'
'body': '
\nTemplate.${1:name}.helpers
\n\tcreate: ->
\n\t\t$2
\n\trendered: ->
\n\t\t$3
\n\tdestroyed: ->
\n\t\t$4
\n\nTemplate.${9:name}.events
\n\t"${5:click #foo}": (event, template) ->
\n\t\t$6
\n$7'
'Template Helpers':
'prefix': 'helpers'
'body': 'Template.${1:name}.helpers \n\trendered: ->\n\t\t$2\n\t\n'
'Template.registerHelper':
'prefix': 'registerHelper'
'body': 'Template.registerHelper "${1:helperName}", (${2:argument}) ->\n\t${3}\n'
#===================================================
# Blaze Snippets
'Blaze.render':
'prefix': 'render'
'body': 'Blaze.render ${1:templateOrView}, ${2:parentNode}'
'Blaze.renderWithData':
'prefix': 'renderWithData'
'body': 'Blaze.renderWithData ${1:templateOrView}, ${2:data}, ${3:parentNode}'
'Blaze.remove':
'prefix': 'bremove'
'body': 'Blaze.remove ${1:renderedView}'
'Blaze.getData':
'prefix': 'getData'
'body': 'Blaze.getData ${1:elementOrView}'
'Blaze.toHTML':
'prefix': 'toHTML'
'body': 'Blaze.toHTML ${1:templateOrView}'
'Blaze.toHTMLWithData':
'prefix': 'toHTMLWithData'
'body': 'Blaze.toHTMLWithData ${1:templateOrView}, ${2:data}'
'Blaze.toHTMLWithData':
'prefix': 'toHTMLWithData'
'body': 'Blaze.toHTMLWithData ${1:templateOrView}, ${2:data}'
'Blaze.isTemplate':
'prefix': 'isTemplate'
'body': 'Blaze.isTemplate ${1:value}'
#===================================================
# EJSON Snippets
'EJSON.parse':
'prefix': 'parse'
'body': 'EJSON.parse ${1:string}'
'EJSON.stringify':
'prefix': 'stringify'
'body': 'EJSON.stringify ${1:string}, {indent: true}'
'EJSON.clone':
'prefix': 'clone'
'body': 'EJSON.clone ${1:object}'
'EJSON.equals':
'prefix': 'deeequals'
'body': 'EJSON.equals ${1:objectA}, ${2:objectB}'
'EJSON.toJSONValue':
'prefix': 'toJSON'
'body': 'EJSON.toJSONValue ${1:value}'
'EJSON.fromJSONValue':
'prefix': 'fromJSON'
'body': 'EJSON.fromJSONValue ${1:value}'
'EJSON.isBinary':
'prefix': 'isBinary'
'body': 'EJSON.isBinary ${1:value}'
#===================================================
# HTTP Snippets
'HTTP Call':
'prefix': 'httpcall'
'body': 'HTTP.call("${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'HTTP Get':
'prefix': 'httpget'
'body': 'HTTP.get "${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'HTTP.post':
'prefix': 'httppost'
'body': 'HTTP.post "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
'HTTP.put':
'prefix': 'http.put'
'body': 'HTTP.put "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
'HTTP.del':
'prefix': 'httpdel'
'body': 'HTTP.del("${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
#===================================================
# Email Snippets
'Email.send':
'prefix': 'Email'
'body': 'Email.send({
\n\tfrom: "${1:sender@somewhere.net}"
\nif result
\n\t$2'
#===================================================
# Email Snippets
'Email.send':
'prefix': 'Email'
'body': 'Email.send
\n\tfrom: "${1:sender@somewhere.net}"
\n\tto: "${2:receiver@elsewhere.io}"
\n\tcc: "${3:carboncopy@elsewhere.io}"
\n\tbcc: "${4:lurker@somewhere.io}"
\n\treplyTo: "${5:public@somewhere.net}"
\n\tsubject: "${6:Hello Email}"
\n\ttext: "${7:lorem ispum...}"
\n\thtml: "$8"
\n\theaders: "$9"'
#===================================================
# Assets Snippets
'Assets.getText':
'prefix': 'getText'
'body': 'Assets.getText "${1:assetPath}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'Assets.getBinary':
'prefix': 'getBinary'
'body': 'Assets.getBinary("${1:assetPath}", function(error, result)
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
#===================================================
# Router Snippets
'Router':
'prefix': 'Router'
'body': '
Router.map -> {
\n\t@route "${1:routeName}"
\n\t\t$2'
'route':
'prefix': 'route'
'body': '
@route "${1:routeName}"
\n\tpath:"$2"
\n\ttemplate:"$3"
\n\twaitOn: ->
\n\t\t$4
\n\tdata: ->
\n\t\t$5
\n\tonBeforeAction: ->
\n\t\tsetPageTitle "$6"
\n\tonAfterAction: ->
\n\t\t$7'
'Upsert Route':
'prefix': 'UpsertRoute'
'body': '
Router.map ->
\n\t@route "${1:routeName}"
\n\t\tpath:"/${2:route}/add"
\n\t\ttemplate:"${3:routeTemplate}"
\n\t\twaitOn: ->
\n\t\t\t$4
\n\t\tdata: ->
\n\t\t\t{}
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "$5"
\n\t\tonAfterAction: ->
\n\t\t\t$6
\n\t@route "${7:routeName}"
\n\t\tpath:"/$8/edit/${9:paramId}"
\n\t\ttemplate:"${10: routeTemplate}"
\n\t\twaitOn: ->
\n\t\t\tMeteor.subscribe("${11: subscription}")
\n\t\tdata: ->
\n\t\t\t${12: Collection}.findOne @params.${13: paramId}
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "${14}"
\n\t\tonAfterAction: ->
\n\t\t\t$15'
#===================================================
# Template + Router
'Page':
'prefix': 'page'
'body': '
Router.map ->
\n\t@route("${1:routeName}"
\n\t\tpath: "/${2:route}"
\n\t\ttemplate: "${3:pageTemplate}"
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "${4:Page Title}"
\n
\nTemplate.${5:pageTemplate}.helpers
\n\trendered: ->
\n\t\t$6
\n\nTemplate.${7:pageTemplate}.events
\n\t"${8:click #foo}": (event, template) ->
\n\t\t$9'
#===================================================
# Nightwatch
'verify.elementPresent':
'prefix': 'vep'
'body': '.verify.elementPresent("#${1}")${2}'
'verify.elementNotPresent':
'prefix': 'venp'
'body': '.verify.elementNotPresent("#${1}")${2}'
'waitForElementVisible':
'prefix': 'wfev'
'body': '.waitForElementVisible("#${1}", ${2})${3}'
'containsText':
'prefix': 'vct'
'body': '.verify.containsText("#${1}", "${2}")${3}'
'click':
'prefix': 'click'
'body': '.click("#${1}").pause(${2})${3}'
'attributeEquals':
'prefix': 'ae'
'body': '.verify.attributeEquals("#${1}", "value", "${2}")${3}'
| 149731 | # Your snippets
#
# Atom snippets allow you to enter a simple prefix in the editor and hit tab to
# expand the prefix into a larger code block with templated values.
#
# You can create a new snippet in this file by typing "snip" and then hitting
# tab.
#
# An example CoffeeScript snippet to expand log to console.log:
#
# '.source.coffee':
# 'Console log':
# 'prefix': 'log'
# 'body': 'console.log $1'
#
#===================================================
# Coffeescript language snippets
'.coffee':
#===================================================
# Meteor Core API
'Meteor.isClient':
'prefix': 'isClient'
'body': 'if Meteor.isClient\n\t'
'Meteor.isServer':
'prefix': 'isServer'
'body': 'if Meteor.isServer\n\t'
'Meteor.isCordova':
'prefix': 'isCordova'
'body': 'if Meteor.isCordova\n\t'
'Meteor.startup':
'prefix': 'startup'
'body': 'Meteor.startup ->\n\t'
'Meteor.absoluteUrl':
'prefix': 'absolute'
'body': 'Meteor.absoluteUrl "$1"'
#===================================================
# Publish & Subscribe
'Meteor Publish':
'prefix': 'publish'
'body': 'Meteor.publish "${1:name}", (${2:args}) ->\n\t'
'Meteor Subscribe':
'prefix': 'subscribe'
'body': 'Meteor.subscribe "${1:name}", "${2:arg}"'
#===================================================
# Methods
'Meteor Methods':
'prefix': 'methods'
'body': 'Meteor.methods
\n\t${1:methodName}: ->
\n\t\t${2:}'
'Meteor.Error':
'prefix': 'error'
'body': 'Meteor.Error ${1:Number}, "${2:description}"'
'Meteor.call':
'prefix': 'call'
'body': 'Meteor.call "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
#===================================================
# Connections
'Meteor.status':
'prefix': 'status'
'body': 'Meteor.status()'
'Meteor.reconnect':
'prefix': 'reconnect'
'body': 'Meteor.reconnect()'
'Meteor.disconnect':
'prefix': 'disconnect'
'body': 'Meteor.disconnect()'
'Meteor.onConnection':
'prefix': 'onConnection'
'body': 'Meteor.onConnection ->
\n\t$1'
#===================================================
# Collections
'Mongo.Collection.ObjectID':
'prefix': 'object'
'body': 'new Mongo.Collection.ObjectID "${1:id}"'
'Collection.find':
'prefix': 'find'
'body': '${1:Collection}.find "${2:field}":${3:value}\n$4'
'Collection.findOne':
'prefix': 'findOne'
'body': '${1:Collection}.findOne _id:"${2:recordId}"$3)\n$4'
'Collection.insert':
'prefix': 'insert'
'body': '${1:Collection}.insert ${2:newRecord}\n$3'
'Collection.Update':
'prefix': 'update'
'body': '${1:Collection}.update _id:${2:idSelector}\n$set:{\n\t$3\n}\n$4'
'Collection.Upsert':
'prefix': 'upsert'
'body': '${1:Collection}.upsert _id:${2:idSelector}\n$set:{\n\t$3\n}\n$4'
'Collection.remove':
'prefix': 'remove'
'body': '${1:Collection}.remove _id:"${2:recordId}"$3\n$4'
'Collection.allow':
'prefix': 'allow'
'body': '${1:Collection}.allow
\n\tinsert: -> ${2:true}
\n\tupdate: -> ${3:true}
\n\tremove: -> ${4:true}
\n$5'
'Collection.deny':
'prefix': 'deny'
'body': '${1:Collection}.deny
\n\tinsert: -> ${2:false}
\n\tupdate: -> ${3:false}
\n\tremove: -> ${4:false}
\n$5'
'Collection':
'prefix': 'Collection'
'body': '${1:CollectionName} = new Mongo.Collection "${2:name}";
\n${3:CollectionName}.allow
\n\tinsert: -> ${4:true}
\n\tupdate: -> ${5:true}
\n\tremove: -> ${6:true}
\n$7'
#===================================================
# Session Snippets
'Session.set':
'prefix': 'set'
'body': 'Session.set "${1:variableName}", ${2:value}\n$3'
'Session.get':
'prefix': 'get'
'body': 'Session.get "${1:variableName}"\n$2'
'Session.equals':
'prefix': 'equals'
'body': 'Session.equals "${1:variableName}", ${2:value}\n$3'
'Session.setDefault':
'prefix': 'setDefault'
'body': 'Session.setDefault "${1:variableName}", ${2:value}\n$3'
#===================================================
# Accounts Snippets
'Meteor.user':
'prefix': 'user'
'body': 'Meteor.user()'
'Meteor.user':
'prefix': 'varuser'
'body': 'user = Meteor.user();'
'Meteor.userId':
'prefix': 'userId'
'body': 'Meteor.userId()'
'Meteor.users':
'prefix': 'users'
'body': 'Meteor.users'
'Meteor.loggingIn':
'prefix': 'loggingIn'
'body': 'Meteor.loggingIn()'
'Meteor.logout':
'prefix': 'logout'
'body': 'Meteor.logout ->
\n\t$1'
'Meteor.logoutOtherClients':
'prefix': 'logoutOtherClients'
'body': 'Meteor.logoutOtherClients ->
\n\t$1'
'Meteor.loginWithPassword':
'prefix': 'logoutOtherClients'
'body': 'Meteor.logoutOtherClients ${1:user}, ${2:password}, ->
\n\t$3'
'Accounts':
'prefix': 'Accounts'
'body': '
Accounts.config
\n\tsendVerificationEmail: ${1:true}
\n\tforbidClientAccountCreation: ${2:true}
\n\trestrictCreationByEmailDomain: "${3:school.edu}"
\n\tloginExpirationDays: ${4:30}
\n\toauthSecretKey: "${5:<KEY>}"
\nAccounts.ui.config({
\n\trequestPermissions: ${1:{}}
\n\trequestOfflineToken: ${1:{}}
\n\tpasswordSignupFields: "${3:USERNAME_AND_OPTIONAL_EMAIL}"
\n$12'
'Accounts.validateNewUser':
'prefix': 'validateNewUser'
'body': 'Meteor.validateNewUser ->
\n\t$1'
'Accounts.onCreateUser':
'prefix': 'onCreateUser'
'body': 'Meteor.onCreateUser (options, user) ->
\n\t$1
\n\treturn ${1:user}'
'Accounts.onLogin':
'prefix': 'onLogin'
'body': 'Meteor.onLogin ->
\n\t$1'
'Accounts.onLoginFailure':
'prefix': 'onLoginFailure'
'body': 'Meteor.onLoginFailure ->
\n\t$1'
#===================================================
# Passwords Snippets
'Accounts.createUser':
'prefix': 'createUser'
'body': '
userObject =
\n\tusername: "${1:username}"
\n\tmail: "${1:email}"
\n\tpassword: "${1:password}"
\n
\nAccounts.createUser ${1:userObject}, ->
\n\t$1'
'Accounts.changePassword':
'prefix': 'changePassword'
'body': 'Accounts.changePassword ${1:oldPassword}, ${2:newPassword}, ->
\n\t$1'
'Accounts.forgotPassword':
'prefix': 'forgotPassword'
'body': 'Accounts.forgotPassword email: "${1:address}", ->
\n\t$2'
'Accounts.resetPassword':
'prefix': 'resetPassword'
'body': 'Accounts.resetPassword ${1:token}, ${2:newPassword}, ->
\n\t$3'
'Accounts.setPassword':
'prefix': 'setPassword'
'body': 'Accounts.setPassword ${1:userId}, ${2:newPassword}'
'Accounts.verifyEmail':
'prefix': 'verifyEmail'
'body': 'Accounts.verifyEmail ${1:token}, ->
\n\t$1'
'Accounts.sendResetPasswordEmail':
'prefix': 'sendResetPasswordEmail'
'body': 'Accounts.sendResetPasswordEmail ${1:userId}'
'Accounts.sendEnrollmentEmail':
'prefix': 'sendEnrollmentEmail'
'body': 'Accounts.sendEnrollmentEmail ${1:userId}'
'Accounts.sendVerificationEmail':
'prefix': 'sendVerificationEmail'
'body': 'Accounts.sendVerificationEmail ${1:userId};'
#===================================================
# Match Snippets
'Match.check':
'prefix': 'check'
'body': 'check(${1:variable}, ${2:String}\n$3'
'Match.test':
'prefix': 'test'
'body': 'Match.test(${1:variable}, ${2:String});\n${3:}'
#===================================================
# Timers Snippets
'Meteor.setTimeout':
'prefix': 'setTimeout'
'body': 'Meteor.setTimeout (->
\n\t$2
\n${1:milliseconds})'
'Meteor.setInterval':
'prefix': 'setInterval'
'body': 'Meteor.setInterval (->
\n\t$2
\n${1:milliseconds})'
'Meteor.clearTimeout':
'prefix': 'clearTimeout'
'body': 'Meteor.clearTimeout ${1:id}'
'Meteor.clearInterval':
'prefix': 'clearInterval'
'body': 'Meteor.clearInterval ${1:id}'
#===================================================
# Tracker
'Tracker.autorun':
'prefix': 'autorun'
'body': 'Tracker.autorun ->
\n\t$2'
'Tracker.flush':
'prefix': 'flush'
'body': 'Tracker.flush()'
'Tracker.nonreactive':
'prefix': 'nonreactive'
'body': 'Tracker.nonreactive ->
\n\t$2'
'Tracker.onInvalidate':
'prefix': 'onInvalidate'
'body': 'Tracker.onInvalidate ->
\n\t$2'
'Tracker.afterFlush':
'prefix': 'afterFlush'
'body': 'Tracker.afterFlush ->
\n\t$2'
'Tracker.active':
'prefix': 'active'
'body': 'Tracker.active'
'Tracker.currentComputation':
'prefix': 'currentComputation'
'body': 'Tracker.currentComputation'
#===================================================
# Templates Snippets
'Template Rendered':
'prefix': 'rendered'
'body': 'Template.${1:name}.rendered = -> \n\t${2}'
'Template Events':
'prefix': 'events'
'body': '
Template.${1:name}.events
\n\t"click ${2:#event}": (event, template) ->
\n\t\t$3'
'Template Created':
'prefix': 'created'
'body': 'Template.${1:name}.created = ->
\n\t$2'
'Template Destroyed':
'prefix': 'destroyed'
'body': '
Template.${1:name}.destroyed = ->
\n\t$2'
'Template':
'prefix': 'Template'
'body': '
\nTemplate.${1:name}.helpers
\n\tcreate: ->
\n\t\t$2
\n\trendered: ->
\n\t\t$3
\n\tdestroyed: ->
\n\t\t$4
\n\nTemplate.${9:name}.events
\n\t"${5:click #foo}": (event, template) ->
\n\t\t$6
\n$7'
'Template Helpers':
'prefix': 'helpers'
'body': 'Template.${1:name}.helpers \n\trendered: ->\n\t\t$2\n\t\n'
'Template.registerHelper':
'prefix': 'registerHelper'
'body': 'Template.registerHelper "${1:helperName}", (${2:argument}) ->\n\t${3}\n'
#===================================================
# Blaze Snippets
'Blaze.render':
'prefix': 'render'
'body': 'Blaze.render ${1:templateOrView}, ${2:parentNode}'
'Blaze.renderWithData':
'prefix': 'renderWithData'
'body': 'Blaze.renderWithData ${1:templateOrView}, ${2:data}, ${3:parentNode}'
'Blaze.remove':
'prefix': 'bremove'
'body': 'Blaze.remove ${1:renderedView}'
'Blaze.getData':
'prefix': 'getData'
'body': 'Blaze.getData ${1:elementOrView}'
'Blaze.toHTML':
'prefix': 'toHTML'
'body': 'Blaze.toHTML ${1:templateOrView}'
'Blaze.toHTMLWithData':
'prefix': 'toHTMLWithData'
'body': 'Blaze.toHTMLWithData ${1:templateOrView}, ${2:data}'
'Blaze.toHTMLWithData':
'prefix': 'toHTMLWithData'
'body': 'Blaze.toHTMLWithData ${1:templateOrView}, ${2:data}'
'Blaze.isTemplate':
'prefix': 'isTemplate'
'body': 'Blaze.isTemplate ${1:value}'
#===================================================
# EJSON Snippets
'EJSON.parse':
'prefix': 'parse'
'body': 'EJSON.parse ${1:string}'
'EJSON.stringify':
'prefix': 'stringify'
'body': 'EJSON.stringify ${1:string}, {indent: true}'
'EJSON.clone':
'prefix': 'clone'
'body': 'EJSON.clone ${1:object}'
'EJSON.equals':
'prefix': 'deeequals'
'body': 'EJSON.equals ${1:objectA}, ${2:objectB}'
'EJSON.toJSONValue':
'prefix': 'toJSON'
'body': 'EJSON.toJSONValue ${1:value}'
'EJSON.fromJSONValue':
'prefix': 'fromJSON'
'body': 'EJSON.fromJSONValue ${1:value}'
'EJSON.isBinary':
'prefix': 'isBinary'
'body': 'EJSON.isBinary ${1:value}'
#===================================================
# HTTP Snippets
'HTTP Call':
'prefix': 'httpcall'
'body': 'HTTP.call("${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'HTTP Get':
'prefix': 'httpget'
'body': 'HTTP.get "${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'HTTP.post':
'prefix': 'httppost'
'body': 'HTTP.post "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
'HTTP.put':
'prefix': 'http.put'
'body': 'HTTP.put "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
'HTTP.del':
'prefix': 'httpdel'
'body': 'HTTP.del("${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
#===================================================
# Email Snippets
'Email.send':
'prefix': 'Email'
'body': 'Email.send({
\n\tfrom: "${1:<EMAIL>}"
\nif result
\n\t$2'
#===================================================
# Email Snippets
'Email.send':
'prefix': 'Email'
'body': 'Email.send
\n\tfrom: "${1:<EMAIL>}"
\n\tto: "${2:<EMAIL>}"
\n\tcc: "${3:<EMAIL>}"
\n\tbcc: "${4:<EMAIL>}"
\n\treplyTo: "${5:<EMAIL>}"
\n\tsubject: "${6:Hello Email}"
\n\ttext: "${7:lorem ispum...}"
\n\thtml: "$8"
\n\theaders: "$9"'
#===================================================
# Assets Snippets
'Assets.getText':
'prefix': 'getText'
'body': 'Assets.getText "${1:assetPath}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'Assets.getBinary':
'prefix': 'getBinary'
'body': 'Assets.getBinary("${1:assetPath}", function(error, result)
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
#===================================================
# Router Snippets
'Router':
'prefix': 'Router'
'body': '
Router.map -> {
\n\t@route "${1:routeName}"
\n\t\t$2'
'route':
'prefix': 'route'
'body': '
@route "${1:routeName}"
\n\tpath:"$2"
\n\ttemplate:"$3"
\n\twaitOn: ->
\n\t\t$4
\n\tdata: ->
\n\t\t$5
\n\tonBeforeAction: ->
\n\t\tsetPageTitle "$6"
\n\tonAfterAction: ->
\n\t\t$7'
'Upsert Route':
'prefix': 'UpsertRoute'
'body': '
Router.map ->
\n\t@route "${1:routeName}"
\n\t\tpath:"/${2:route}/add"
\n\t\ttemplate:"${3:routeTemplate}"
\n\t\twaitOn: ->
\n\t\t\t$4
\n\t\tdata: ->
\n\t\t\t{}
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "$5"
\n\t\tonAfterAction: ->
\n\t\t\t$6
\n\t@route "${7:routeName}"
\n\t\tpath:"/$8/edit/${9:paramId}"
\n\t\ttemplate:"${10: routeTemplate}"
\n\t\twaitOn: ->
\n\t\t\tMeteor.subscribe("${11: subscription}")
\n\t\tdata: ->
\n\t\t\t${12: Collection}.findOne @params.${13: paramId}
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "${14}"
\n\t\tonAfterAction: ->
\n\t\t\t$15'
#===================================================
# Template + Router
'Page':
'prefix': 'page'
'body': '
Router.map ->
\n\t@route("${1:routeName}"
\n\t\tpath: "/${2:route}"
\n\t\ttemplate: "${3:pageTemplate}"
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "${4:Page Title}"
\n
\nTemplate.${5:pageTemplate}.helpers
\n\trendered: ->
\n\t\t$6
\n\nTemplate.${7:pageTemplate}.events
\n\t"${8:click #foo}": (event, template) ->
\n\t\t$9'
#===================================================
# Nightwatch
'verify.elementPresent':
'prefix': 'vep'
'body': '.verify.elementPresent("#${1}")${2}'
'verify.elementNotPresent':
'prefix': 'venp'
'body': '.verify.elementNotPresent("#${1}")${2}'
'waitForElementVisible':
'prefix': 'wfev'
'body': '.waitForElementVisible("#${1}", ${2})${3}'
'containsText':
'prefix': 'vct'
'body': '.verify.containsText("#${1}", "${2}")${3}'
'click':
'prefix': 'click'
'body': '.click("#${1}").pause(${2})${3}'
'attributeEquals':
'prefix': 'ae'
'body': '.verify.attributeEquals("#${1}", "value", "${2}")${3}'
| true | # Your snippets
#
# Atom snippets allow you to enter a simple prefix in the editor and hit tab to
# expand the prefix into a larger code block with templated values.
#
# You can create a new snippet in this file by typing "snip" and then hitting
# tab.
#
# An example CoffeeScript snippet to expand log to console.log:
#
# '.source.coffee':
# 'Console log':
# 'prefix': 'log'
# 'body': 'console.log $1'
#
#===================================================
# Coffeescript language snippets
'.source.coffee':
#===================================================
# Meteor Core API
'Meteor.isClient':
'prefix': 'isClient'
'body': 'if Meteor.isClient\n\t'
'Meteor.isServer':
'prefix': 'isServer'
'body': 'if Meteor.isServer\n\t'
'Meteor.isCordova':
'prefix': 'isCordova'
'body': 'if Meteor.isCordova\n\t'
'Meteor.startup':
'prefix': 'startup'
'body': 'Meteor.startup ->\n\t'
'Meteor.absoluteUrl':
'prefix': 'absolute'
'body': 'Meteor.absoluteUrl "$1"'
#===================================================
# Publish & Subscribe
'Meteor Publish':
'prefix': 'publish'
'body': 'Meteor.publish "${1:name}", (${2:args}) ->\n\t'
'Meteor Subscribe':
'prefix': 'subscribe'
'body': 'Meteor.subscribe "${1:name}", "${2:arg}"'
#===================================================
# Methods
'Meteor Methods':
'prefix': 'methods'
'body': 'Meteor.methods
\n\t${1:methodName}: ->
\n\t\t${2:}'
'Meteor.Error':
'prefix': 'error'
'body': 'Meteor.Error ${1:Number}, "${2:description}"'
'Meteor.call':
'prefix': 'call'
'body': 'Meteor.call "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
#===================================================
# Connections
'Meteor.status':
'prefix': 'status'
'body': 'Meteor.status()'
'Meteor.reconnect':
'prefix': 'reconnect'
'body': 'Meteor.reconnect()'
'Meteor.disconnect':
'prefix': 'disconnect'
'body': 'Meteor.disconnect()'
'Meteor.onConnection':
'prefix': 'onConnection'
'body': 'Meteor.onConnection ->
\n\t$1'
#===================================================
# Collections
'Mongo.Collection.ObjectID':
'prefix': 'object'
'body': 'new Mongo.Collection.ObjectID "${1:id}"'
'Collection.find':
'prefix': 'find'
'body': '${1:Collection}.find "${2:field}":${3:value}\n$4'
'Collection.findOne':
'prefix': 'findOne'
'body': '${1:Collection}.findOne _id:"${2:recordId}"$3)\n$4'
'Collection.insert':
'prefix': 'insert'
'body': '${1:Collection}.insert ${2:newRecord}\n$3'
'Collection.Update':
'prefix': 'update'
'body': '${1:Collection}.update _id:${2:idSelector}\n$set:{\n\t$3\n}\n$4'
'Collection.Upsert':
'prefix': 'upsert'
'body': '${1:Collection}.upsert _id:${2:idSelector}\n$set:{\n\t$3\n}\n$4'
'Collection.remove':
'prefix': 'remove'
'body': '${1:Collection}.remove _id:"${2:recordId}"$3\n$4'
'Collection.allow':
'prefix': 'allow'
'body': '${1:Collection}.allow
\n\tinsert: -> ${2:true}
\n\tupdate: -> ${3:true}
\n\tremove: -> ${4:true}
\n$5'
'Collection.deny':
'prefix': 'deny'
'body': '${1:Collection}.deny
\n\tinsert: -> ${2:false}
\n\tupdate: -> ${3:false}
\n\tremove: -> ${4:false}
\n$5'
'Collection':
'prefix': 'Collection'
'body': '${1:CollectionName} = new Mongo.Collection "${2:name}";
\n${3:CollectionName}.allow
\n\tinsert: -> ${4:true}
\n\tupdate: -> ${5:true}
\n\tremove: -> ${6:true}
\n$7'
#===================================================
# Session Snippets
'Session.set':
'prefix': 'set'
'body': 'Session.set "${1:variableName}", ${2:value}\n$3'
'Session.get':
'prefix': 'get'
'body': 'Session.get "${1:variableName}"\n$2'
'Session.equals':
'prefix': 'equals'
'body': 'Session.equals "${1:variableName}", ${2:value}\n$3'
'Session.setDefault':
'prefix': 'setDefault'
'body': 'Session.setDefault "${1:variableName}", ${2:value}\n$3'
#===================================================
# Accounts Snippets
'Meteor.user':
'prefix': 'user'
'body': 'Meteor.user()'
'Meteor.user variable':
'prefix': 'varuser'
'body': 'user = Meteor.user();'
'Meteor.userId':
'prefix': 'userId'
'body': 'Meteor.userId()'
'Meteor.users':
'prefix': 'users'
'body': 'Meteor.users'
'Meteor.loggingIn':
'prefix': 'loggingIn'
'body': 'Meteor.loggingIn()'
'Meteor.logout':
'prefix': 'logout'
'body': 'Meteor.logout ->
\n\t$1'
'Meteor.logoutOtherClients':
'prefix': 'logoutOtherClients'
'body': 'Meteor.logoutOtherClients ->
\n\t$1'
'Meteor.loginWithPassword':
'prefix': 'loginWithPassword'
'body': 'Meteor.loginWithPassword ${1:user}, ${2:password}, ->
\n\t$3'
'Accounts':
'prefix': 'Accounts'
'body': '
Accounts.config
\n\tsendVerificationEmail: ${1:true}
\n\tforbidClientAccountCreation: ${2:true}
\n\trestrictCreationByEmailDomain: "${3:school.edu}"
\n\tloginExpirationDays: ${4:30}
\n\toauthSecretKey: "${5:secretKey}"
\nAccounts.ui.config({
\n\trequestPermissions: ${1:{}}
\n\trequestOfflineToken: ${1:{}}
\n\tpasswordSignupFields: "${3:USERNAME_AND_OPTIONAL_EMAIL}"
\n$12'
'Accounts.validateNewUser':
'prefix': 'validateNewUser'
'body': 'Meteor.validateNewUser ->
\n\t$1'
'Accounts.onCreateUser':
'prefix': 'onCreateUser'
'body': 'Meteor.onCreateUser (options, user) ->
\n\t$1
\n\treturn ${1:user}'
'Accounts.onLogin':
'prefix': 'onLogin'
'body': 'Meteor.onLogin ->
\n\t$1'
'Accounts.onLoginFailure':
'prefix': 'onLoginFailure'
'body': 'Meteor.onLoginFailure ->
\n\t$1'
#===================================================
# Passwords Snippets
'Accounts.createUser':
'prefix': 'createUser'
'body': '
userObject =
\n\tusername: "${1:username}"
\n\tmail: "${1:email}"
\n\tpassword: "${1:password}"
\n
\nAccounts.createUser ${1:userObject}, ->
\n\t$1'
'Accounts.changePassword':
'prefix': 'changePassword'
'body': 'Accounts.changePassword ${1:oldPassword}, ${2:newPassword}, ->
\n\t$1'
'Accounts.forgotPassword':
'prefix': 'forgotPassword'
'body': 'Accounts.forgotPassword email: "${1:address}", ->
\n\t$2'
'Accounts.resetPassword':
'prefix': 'resetPassword'
'body': 'Accounts.resetPassword ${1:token}, ${2:newPassword}, ->
\n\t$3'
'Accounts.setPassword':
'prefix': 'setPassword'
'body': 'Accounts.setPassword ${1:userId}, ${2:newPassword}'
'Accounts.verifyEmail':
'prefix': 'verifyEmail'
'body': 'Accounts.verifyEmail ${1:token}, ->
\n\t$1'
'Accounts.sendResetPasswordEmail':
'prefix': 'sendResetPasswordEmail'
'body': 'Accounts.sendResetPasswordEmail ${1:userId}'
'Accounts.sendEnrollmentEmail':
'prefix': 'sendEnrollmentEmail'
'body': 'Accounts.sendEnrollmentEmail ${1:userId}'
'Accounts.sendVerificationEmail':
'prefix': 'sendVerificationEmail'
'body': 'Accounts.sendVerificationEmail ${1:userId};'
#===================================================
# Match Snippets
'Match.check':
'prefix': 'check'
'body': 'check(${1:variable}, ${2:String}\n$3'
'Match.test':
'prefix': 'test'
'body': 'Match.test(${1:variable}, ${2:String});\n${3:}'
#===================================================
# Timers Snippets
'Meteor.setTimeout':
'prefix': 'setTimeout'
'body': 'Meteor.setTimeout (->
\n\t$2
\n${1:milliseconds})'
'Meteor.setInterval':
'prefix': 'setInterval'
'body': 'Meteor.setInterval (->
\n\t$2
\n${1:milliseconds})'
'Meteor.clearTimeout':
'prefix': 'clearTimeout'
'body': 'Meteor.clearTimeout ${1:id}'
'Meteor.clearInterval':
'prefix': 'clearInterval'
'body': 'Meteor.clearInterval ${1:id}'
#===================================================
# Tracker
'Tracker.autorun':
'prefix': 'autorun'
'body': 'Tracker.autorun ->
\n\t$2'
'Tracker.flush':
'prefix': 'flush'
'body': 'Tracker.flush()'
'Tracker.nonreactive':
'prefix': 'nonreactive'
'body': 'Tracker.nonreactive ->
\n\t$2'
'Tracker.onInvalidate':
'prefix': 'onInvalidate'
'body': 'Tracker.onInvalidate ->
\n\t$2'
'Tracker.afterFlush':
'prefix': 'afterFlush'
'body': 'Tracker.afterFlush ->
\n\t$2'
'Tracker.active':
'prefix': 'active'
'body': 'Tracker.active'
'Tracker.currentComputation':
'prefix': 'currentComputation'
'body': 'Tracker.currentComputation'
#===================================================
# Templates Snippets
'Template Rendered':
'prefix': 'rendered'
'body': 'Template.${1:name}.rendered = -> \n\t${2}'
'Template Events':
'prefix': 'events'
'body': '
Template.${1:name}.events
\n\t"click ${2:#event}": (event, template) ->
\n\t\t$3'
'Template Created':
'prefix': 'created'
'body': 'Template.${1:name}.created = ->
\n\t$2'
'Template Destroyed':
'prefix': 'destroyed'
'body': '
Template.${1:name}.destroyed = ->
\n\t$2'
'Template':
'prefix': 'Template'
'body': '
\nTemplate.${1:name}.helpers
\n\tcreate: ->
\n\t\t$2
\n\trendered: ->
\n\t\t$3
\n\tdestroyed: ->
\n\t\t$4
\n\nTemplate.${9:name}.events
\n\t"${5:click #foo}": (event, template) ->
\n\t\t$6
\n$7'
'Template Helpers':
'prefix': 'helpers'
'body': 'Template.${1:name}.helpers \n\trendered: ->\n\t\t$2\n\t\n'
'Template.registerHelper':
'prefix': 'registerHelper'
'body': 'Template.registerHelper "${1:helperName}", (${2:argument}) ->\n\t${3}\n'
#===================================================
# Blaze Snippets
'Blaze.render':
'prefix': 'render'
'body': 'Blaze.render ${1:templateOrView}, ${2:parentNode}'
'Blaze.renderWithData':
'prefix': 'renderWithData'
'body': 'Blaze.renderWithData ${1:templateOrView}, ${2:data}, ${3:parentNode}'
'Blaze.remove':
'prefix': 'bremove'
'body': 'Blaze.remove ${1:renderedView}'
'Blaze.getData':
'prefix': 'getData'
'body': 'Blaze.getData ${1:elementOrView}'
'Blaze.toHTML':
'prefix': 'toHTML'
'body': 'Blaze.toHTML ${1:templateOrView}'
'Blaze.toHTMLWithData':
'prefix': 'toHTMLWithData'
'body': 'Blaze.toHTMLWithData ${1:templateOrView}, ${2:data}'
'Blaze.toHTMLWithData':
'prefix': 'toHTMLWithData'
'body': 'Blaze.toHTMLWithData ${1:templateOrView}, ${2:data}'
'Blaze.isTemplate':
'prefix': 'isTemplate'
'body': 'Blaze.isTemplate ${1:value}'
#===================================================
# EJSON Snippets
'EJSON.parse':
'prefix': 'parse'
'body': 'EJSON.parse ${1:string}'
'EJSON.stringify':
'prefix': 'stringify'
'body': 'EJSON.stringify ${1:string}, {indent: true}'
'EJSON.clone':
'prefix': 'clone'
'body': 'EJSON.clone ${1:object}'
'EJSON.equals':
'prefix': 'deeequals'
'body': 'EJSON.equals ${1:objectA}, ${2:objectB}'
'EJSON.toJSONValue':
'prefix': 'toJSON'
'body': 'EJSON.toJSONValue ${1:value}'
'EJSON.fromJSONValue':
'prefix': 'fromJSON'
'body': 'EJSON.fromJSONValue ${1:value}'
'EJSON.isBinary':
'prefix': 'isBinary'
'body': 'EJSON.isBinary ${1:value}'
#===================================================
# HTTP Snippets
'HTTP Call':
'prefix': 'httpcall'
'body': 'HTTP.call("${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'HTTP Get':
'prefix': 'httpget'
'body': 'HTTP.get "${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'HTTP.post':
'prefix': 'httppost'
'body': 'HTTP.post "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
'HTTP.put':
'prefix': 'http.put'
'body': 'HTTP.put "${1:meteorMethod}", ${2:dataObject}, (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$3'
'HTTP.del':
'prefix': 'httpdel'
'body': 'HTTP.del("${1:meteorMethod}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
#===================================================
# Email Snippets
'Email.send':
'prefix': 'Email'
'body': 'Email.send({
\n\tfrom: "${1:<EMAIL>}"
\nif result
\n\t$2'
#===================================================
# Email Snippets
'Email.send':
'prefix': 'Email'
'body': 'Email.send
\n\tfrom: "${1:<EMAIL>}"
\n\tto: "${2:<EMAIL>}"
\n\tcc: "${3:<EMAIL>}"
\n\tbcc: "${4:<EMAIL>}"
\n\treplyTo: "${5:<EMAIL>}"
\n\tsubject: "${6:Hello Email}"
\n\ttext: "${7:lorem ispum...}"
\n\thtml: "$8"
\n\theaders: "$9"'
#===================================================
# Assets Snippets
'Assets.getText':
'prefix': 'getText'
'body': 'Assets.getText "${1:assetPath}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
'Assets.getBinary':
'prefix': 'getBinary'
'body': 'Assets.getBinary "${1:assetPath}", (error, result) ->
\n\tif error
\n\t\tconsole.log "error", error
\n\tif result
\n\t\t$2'
#===================================================
# Router Snippets
'Router':
'prefix': 'Router'
'body': '
Router.map ->
\n\t@route "${1:routeName}"
\n\t\t$2'
'route':
'prefix': 'route'
'body': '
@route "${1:routeName}"
\n\tpath:"$2"
\n\ttemplate:"$3"
\n\twaitOn: ->
\n\t\t$4
\n\tdata: ->
\n\t\t$5
\n\tonBeforeAction: ->
\n\t\tsetPageTitle "$6"
\n\tonAfterAction: ->
\n\t\t$7'
'Upsert Route':
'prefix': 'UpsertRoute'
'body': '
Router.map ->
\n\t@route "${1:routeName}"
\n\t\tpath:"/${2:route}/add"
\n\t\ttemplate:"${3:routeTemplate}"
\n\t\twaitOn: ->
\n\t\t\t$4
\n\t\tdata: ->
\n\t\t\t{}
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "$5"
\n\t\tonAfterAction: ->
\n\t\t\t$6
\n\t@route "${7:routeName}"
\n\t\tpath:"/$8/edit/${9:paramId}"
\n\t\ttemplate:"${10: routeTemplate}"
\n\t\twaitOn: ->
\n\t\t\tMeteor.subscribe("${11: subscription}")
\n\t\tdata: ->
\n\t\t\t${12: Collection}.findOne @params.${13: paramId}
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "${14}"
\n\t\tonAfterAction: ->
\n\t\t\t$15'
#===================================================
# Template + Router
'Page':
'prefix': 'page'
'body': '
Router.map ->
\n\t@route("${1:routeName}"
\n\t\tpath: "/${2:route}"
\n\t\ttemplate: "${3:pageTemplate}"
\n\t\tonBeforeAction: ->
\n\t\t\tsetPageTitle "${4:Page Title}"
\n
\nTemplate.${5:pageTemplate}.helpers
\n\trendered: ->
\n\t\t$6
\n\nTemplate.${7:pageTemplate}.events
\n\t"${8:click #foo}": (event, template) ->
\n\t\t$9'
#===================================================
# Nightwatch
'verify.elementPresent':
'prefix': 'vep'
'body': '.verify.elementPresent("#${1}")${2}'
'verify.elementNotPresent':
'prefix': 'venp'
'body': '.verify.elementNotPresent("#${1}")${2}'
'waitForElementVisible':
'prefix': 'wfev'
'body': '.waitForElementVisible("#${1}", ${2})${3}'
'containsText':
'prefix': 'vct'
'body': '.verify.containsText("#${1}", "${2}")${3}'
'click':
'prefix': 'click'
'body': '.click("#${1}").pause(${2})${3}'
'attributeEquals':
'prefix': 'ae'
'body': '.verify.attributeEquals("#${1}", "value", "${2}")${3}'
|
[
{
"context": "del\n\n for key, val of options\n if key is 'unreadNum' and val?.$inc\n notification.unreadNum += ",
"end": 4359,
"score": 0.8311058282852173,
"start": 4350,
"tag": "KEY",
"value": "unreadNum"
}
] | talk-api2x/server/schemas/notification.coffee | ikingye/talk-os | 3,084 | ###*
* Save user's unread number, latest read message id and pinnedAt property
* db.notifications.ensureIndex({user: 1, team: 1, isHidden: 1, isPinned: 1, updatedAt: -1}, {background: true})
* db.notifications.ensureIndex({target: 1, team: 1, user: 1, type: 1}, {unique: true, background: true})
* db.notifications.ensureIndex({_emitterId: 1, team: 1}, {background: true})
###
mongoose = require 'mongoose'
Err = require 'err1st'
_ = require 'lodash'
Promise = require 'bluebird'
{Schema} = mongoose
# Mongoose schema for a per-user, per-target notification record: tracks the
# unread count, latest read message, pin state and visibility of one target
# (room / dms / story) for one user inside one team.
module.exports = NotificationSchema = new Schema
  user: type: Schema.Types.ObjectId, ref: 'User'
  team: type: Schema.Types.ObjectId, ref: 'Team'
  # Polymorphic reference; the model to populate from is picked by `type`
  # (see methods.getPopulated).
  target: type: Schema.Types.ObjectId
  type: type: String # Target type
  creator: type: Schema.Types.ObjectId, ref: 'User'
  # Preview text; setter truncates anything longer than 100 characters.
  text: type: String, default: '', set: (text) -> if text?.length > 100 then text[0...100] else text
  # Setter stashes the previous count in the `oldUnreadNum` virtual before
  # overwriting, so callers can diff old vs new.
  unreadNum: type: Number, default: 0, set: (unreadNum) ->
    @oldUnreadNum = @unreadNum or 0
    unreadNum
  # Setter keeps `pinnedAt` in sync: set on pin, cleared on unpin.
  isPinned: type: Boolean, default: false, set: (isPinned) ->
    if isPinned
      @pinnedAt = new Date
    else
      @pinnedAt = undefined
    return isPinned
  pinnedAt: type: Date
  authorName: type: String
  isMute: type: Boolean, default: false
  # Hiding a notification also unpins it (setter side effect).
  isHidden: type: Boolean, default: false, set: (isHidden) ->
    @isPinned = false if isHidden
    isHidden
  _emitterId: type: Schema.Types.ObjectId
  _latestReadMessageId: type: Schema.Types.ObjectId
  createdAt: type: Date, default: Date.now
  updatedAt: type: Date, default: Date.now
,
  # Schema options: reads may hit secondaries; expose virtuals and getters
  # when serializing via toObject/toJSON.
  read: 'secondaryPreferred'
  toObject:
    virtuals: true
    getters: true
  toJSON:
    virtuals: true
    getters: true
# Convenience id virtuals: each `_xxxId` getter returns the raw ObjectId
# whether the underlying field is populated (a document) or not (an id);
# the setter assigns the raw id.
NotificationSchema.virtual '_userId'
  .get -> @user?._id or @user
  .set (_id) -> @user = _id
NotificationSchema.virtual '_teamId'
  .get -> @team?._id or @team
  .set (_id) -> @team = _id
NotificationSchema.virtual '_targetId'
  .get -> @target?._id or @target
  .set (_id) -> @target = _id
NotificationSchema.virtual '_creatorId'
  .get -> @creator?._id or @creator
  .set (_id) -> @creator = _id
# Previous unread count, captured by the `unreadNum` field setter.
NotificationSchema.virtual 'oldUnreadNum'
  .get -> @_oldUnreadNum
  .set (@_oldUnreadNum) -> @_oldUnreadNum
# ============================== Methods ==============================
###*
 * Populate `target` (model chosen from @type) and `creator`, caching the
 * in-flight promise on the document so concurrent calls share one query.
 * @param {Function} callback - optional node-style callback
 * @returns {Promise} resolves with the populated document
###
NotificationSchema.methods.getPopulated = (callback) ->
  self = this
  unless self.$populating
    # Map the polymorphic `type` to the model used to populate `target`.
    # NOTE(review): an unrecognised type leaves modelName undefined —
    # confirm callers only pass 'room' / 'dms' / 'story'.
    switch self.type
      when 'room' then modelName = 'Room'
      when 'dms' then modelName = 'User'
      when 'story' then modelName = 'Story'
    self.$populating = Promise.promisify self.populate
      .call self, [
        path: 'target'
        model: modelName
      ,
        path: 'creator'
      ]
  self.$populating.nodeify callback
# ============================== Statics ==============================
###*
 * Query notifications by user/team/visibility/pin state, populate each
 * result, and drop results whose target is gone (or, for rooms, archived).
 * @param {Object} options - user/team/isHidden/isPinned filters, plus
 *   optional maxUpdatedAt (paging cursor), limit (default 10) and sort.
 * @param {Function} callback
###
NotificationSchema.statics.findByOptions = (options, callback) ->
  options.limit or= 10
  options.sort or= updatedAt: -1
  conditions = _.pick options, 'user', 'team', 'isHidden', 'isPinned'
  # Page backwards in time from the caller-supplied cursor.
  conditions.updatedAt = $lt: options.maxUpdatedAt if options.maxUpdatedAt
  $notifications = @_buildQuery.call(this, conditions, options).execAsync()
  $notifications = $notifications.map (notification) ->
    notification.getPopulatedAsync()
  # Leading-dot chains onto the map above: keep only notifications whose
  # populated target still exists; rooms must additionally not be archived.
  .filter (notification) ->
    switch notification.type
      when 'room' then return notification.target?.isArchived is false
      else return notification.target?._id
  $notifications.nodeify callback
###*
 * Apply every key/value in `update` to each notification matching
 * `conditions`, saving each matched document individually.
 * @param {Object} conditions - mongoose query conditions
 * @param {Object} update - fields to assign on each matched document
 * @param {Function} callback - node-style completion callback
###
NotificationSchema.statics.updateByOptions = (conditions, update, callback) ->
  Model = this
  $saved = Model.findAsync(conditions).map (doc) ->
    doc[field] = value for field, value of update
    doc.$save()
  $saved.nodeify callback
###*
 * Upsert-style create: find the notification for (user, target, team, type)
 * or instantiate a new one, then apply `options` onto it and save.
 * Passing `unreadNum: {$inc: ...}` increments the counter by exactly 1
 * instead of assigning the value.
 * @param {Object} options - must contain user, team, target and type
 * @param {Function} callback
###
NotificationSchema.statics.createByOptions = (options, callback) ->
  unless options.user and options.team and options.target and options.type
    return callback(new Err('PARAMS_MISSING', 'user team target type'))
  conditions =
    user: options.user
    target: options.target
    team: options.team
    type: options.type
  NotificationModel = this
  $notification = NotificationModel.findOneAsync conditions
  $notification = $notification.then (notification) ->
    unless notification
      notification = new NotificationModel
    for key, val of options
      # `$inc` bumps the unread counter by one; any other value is assigned.
      if key is 'unreadNum' and val?.$inc
        notification.unreadNum += 1
      else
        notification[key] = val
    # Touching a notification always makes it visible again.
    notification.isHidden = false
    # Reset authorName
    notification.authorName = undefined unless options.authorName
    notification.$save()
  $notification.nodeify callback
###*
 * Remove notifications and broadcast messages
 * @param {Object} options - Conditions; `target` and `team` are required,
 *   `user` and `type` optionally narrow the match
 * @param {Function} callback
###
NotificationSchema.statics.removeByOptions = (options, callback) ->
  unless options.target and options.team
    return callback(new Err('PARAMS_MISSING', 'target team'))
  NotificationModel = this
  conditions =
    target: options.target
    team: options.team
  conditions.user = options.user if options.user
  conditions.type = options.type if options.type
  $notifications = NotificationModel.findAsync conditions
  # Remove documents one by one so per-document $remove behavior runs
  # for each match (rather than a bulk delete).
  $notifications.map (notification) ->
    notification.$remove()
  .nodeify callback
###*
 * Sum the unread counts of every visible, non-muted notification a user
 * has in one team.
 * @param {ObjectId} _userId - user id
 * @param {ObjectId} _teamId - team id
 * @param {Function} callback - called with (err, totalUnreadNum)
 * @todo Cache it
###
NotificationSchema.statics.sumTeamUnreadNum = (_userId, _teamId, callback) ->
  criteria =
    user: _userId
    team: _teamId
    isHidden: false
    unreadNum: $gt: 0
    isMute: false
  @find criteria, 'unreadNum', (err, docs = []) ->
    total = 0
    total += doc.unreadNum for doc in docs when doc?.unreadNum
    callback err, total
###*
 * Build a map of target id -> unread count for every visible notification
 * with unread messages in one team.
 * @param {ObjectId} _userId - user id
 * @param {ObjectId} _teamId - team id
 * @param {Function} callback - called with (err, {targetId: unreadNum})
###
NotificationSchema.statics.findUnreadNums = (_userId, _teamId, callback) ->
  criteria =
    user: _userId
    team: _teamId
    isHidden: false
    unreadNum: $gt: 0
  @find criteria, 'target unreadNum isMute', (err, docs = []) ->
    result = {}
    result["#{doc._targetId}"] = doc.unreadNum for doc in docs
    callback err, result
###*
 * Map each target id to the user's latest read message id within a team.
 * Only notifications that actually have a latest-read marker are returned.
 * @param {ObjectId} _userId - user id
 * @param {ObjectId} _teamId - team id
 * @param {Function} callback - called with (err, {targetId: messageId})
###
NotificationSchema.statics.findLatestReadMessageIds = (_userId, _teamId, callback) ->
  criteria =
    user: _userId
    team: _teamId
    _latestReadMessageId: $ne: null
  @find criteria, 'target _latestReadMessageId', (err, docs = []) ->
    ids = {}
    ids["#{doc._targetId}"] = doc._latestReadMessageId for doc in docs
    callback err, ids
###*
 * Map each pinned target id to the time it was pinned for this user/team.
 * @param {ObjectId} _userId - user id
 * @param {ObjectId} _teamId - team id
 * @param {Function} callback - called with (err, {targetId: pinnedAt})
###
NotificationSchema.statics.findPinnedAts = (_userId, _teamId, callback) ->
  criteria =
    user: _userId
    team: _teamId
    isPinned: true
  @find criteria, 'target isPinned pinnedAt', (err, docs = []) ->
    pinned = {}
    pinned["#{doc._targetId}"] = doc.pinnedAt for doc in docs
    callback err, pinned
| 155583 | ###*
* Save user's unread number, latest read message id and pinnedAt property
* db.notifications.ensureIndex({user: 1, team: 1, isHidden: 1, isPinned: 1, updatedAt: -1}, {background: true})
* db.notifications.ensureIndex({target: 1, team: 1, user: 1, type: 1}, {unique: true, background: true})
* db.notifications.ensureIndex({_emitterId: 1, team: 1}, {background: true})
###
mongoose = require 'mongoose'
Err = require 'err1st'
_ = require 'lodash'
Promise = require 'bluebird'
{Schema} = mongoose
module.exports = NotificationSchema = new Schema
user: type: Schema.Types.ObjectId, ref: 'User'
team: type: Schema.Types.ObjectId, ref: 'Team'
target: type: Schema.Types.ObjectId
type: type: String # Target type
creator: type: Schema.Types.ObjectId, ref: 'User'
text: type: String, default: '', set: (text) -> if text?.length > 100 then text[0...100] else text
unreadNum: type: Number, default: 0, set: (unreadNum) ->
@oldUnreadNum = @unreadNum or 0
unreadNum
isPinned: type: Boolean, default: false, set: (isPinned) ->
if isPinned
@pinnedAt = new Date
else
@pinnedAt = undefined
return isPinned
pinnedAt: type: Date
authorName: type: String
isMute: type: Boolean, default: false
isHidden: type: Boolean, default: false, set: (isHidden) ->
@isPinned = false if isHidden
isHidden
_emitterId: type: Schema.Types.ObjectId
_latestReadMessageId: type: Schema.Types.ObjectId
createdAt: type: Date, default: Date.now
updatedAt: type: Date, default: Date.now
,
read: 'secondaryPreferred'
toObject:
virtuals: true
getters: true
toJSON:
virtuals: true
getters: true
NotificationSchema.virtual '_userId'
.get -> @user?._id or @user
.set (_id) -> @user = _id
NotificationSchema.virtual '_teamId'
.get -> @team?._id or @team
.set (_id) -> @team = _id
NotificationSchema.virtual '_targetId'
.get -> @target?._id or @target
.set (_id) -> @target = _id
NotificationSchema.virtual '_creatorId'
.get -> @creator?._id or @creator
.set (_id) -> @creator = _id
NotificationSchema.virtual 'oldUnreadNum'
.get -> @_oldUnreadNum
.set (@_oldUnreadNum) -> @_oldUnreadNum
# ============================== Methods ==============================
NotificationSchema.methods.getPopulated = (callback) ->
self = this
unless self.$populating
# Populate target field
switch self.type
when 'room' then modelName = 'Room'
when 'dms' then modelName = 'User'
when 'story' then modelName = 'Story'
self.$populating = Promise.promisify self.populate
.call self, [
path: 'target'
model: modelName
,
path: 'creator'
]
self.$populating.nodeify callback
# ============================== Statics ==============================
NotificationSchema.statics.findByOptions = (options, callback) ->
options.limit or= 10
options.sort or= updatedAt: -1
conditions = _.pick options, 'user', 'team', 'isHidden', 'isPinned'
conditions.updatedAt = $lt: options.maxUpdatedAt if options.maxUpdatedAt
$notifications = @_buildQuery.call(this, conditions, options).execAsync()
$notifications = $notifications.map (notification) ->
notification.getPopulatedAsync()
.filter (notification) ->
switch notification.type
when 'room' then return notification.target?.isArchived is false
else return notification.target?._id
$notifications.nodeify callback
NotificationSchema.statics.updateByOptions = (conditions, update, callback) ->
NotificationModel = this
$notifications = NotificationModel.findAsync conditions
$notifications = $notifications.map (notification) ->
for key, val of update
notification[key] = val
notification.$save()
$notifications.nodeify callback
NotificationSchema.statics.createByOptions = (options, callback) ->
unless options.user and options.team and options.target and options.type
return callback(new Err('PARAMS_MISSING', 'user team target type'))
conditions =
user: options.user
target: options.target
team: options.team
type: options.type
NotificationModel = this
$notification = NotificationModel.findOneAsync conditions
$notification = $notification.then (notification) ->
unless notification
notification = new NotificationModel
for key, val of options
if key is 'unreadNum' and val?.$inc
notification.unreadNum += 1
else
notification[key] = val
notification.isHidden = false
# Reset authorName
notification.authorName = undefined unless options.authorName
notification.$save()
$notification.nodeify callback
###*
* Remove notifications and broadcast messages
* @param {Object} options - Conditions
* @param {Function} callback
###
NotificationSchema.statics.removeByOptions = (options, callback) ->
unless options.target and options.team
return callback(new Err('PARAMS_MISSING', 'target team'))
NotificationModel = this
conditions =
target: options.target
team: options.team
conditions.user = options.user if options.user
conditions.type = options.type if options.type
$notifications = NotificationModel.findAsync conditions
$notifications.map (notification) ->
notification.$remove()
.nodeify callback
###*
* Sum team unread number
* @param {ObjectId} _userId User id
* @param {ObjectId} _teamId Team id
* @param {Function} callback [description]
* @todo Cache it
###
NotificationSchema.statics.sumTeamUnreadNum = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
isHidden: false
unreadNum: $gt: 0
isMute: false
, 'unreadNum'
, (err, notifications = []) ->
unreadNum = notifications.reduce (totalUnread, notification) ->
totalUnread += notification.unreadNum if notification?.unreadNum
totalUnread
, 0
callback err, unreadNum
NotificationSchema.statics.findUnreadNums = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
isHidden: false
unreadNum: $gt: 0
, 'target unreadNum isMute'
, (err, notifications = []) ->
unreadNums = {}
for notification in notifications
unreadNums["#{notification._targetId}"] = notification.unreadNum
callback err, unreadNums
NotificationSchema.statics.findLatestReadMessageIds = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
_latestReadMessageId: $ne: null
, 'target _latestReadMessageId'
, (err, notifications = []) ->
_latestReadMessageIds = {}
for notification in notifications
_latestReadMessageIds["#{notification._targetId}"] = notification._latestReadMessageId
callback err, _latestReadMessageIds
NotificationSchema.statics.findPinnedAts = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
isPinned: true
, 'target isPinned pinnedAt'
, (err, notifications = []) ->
pinnedAts = {}
for notification in notifications
pinnedAts["#{notification._targetId}"] = notification.pinnedAt
callback err, pinnedAts
| true | ###*
* Save user's unread number, latest read message id and pinnedAt property
* db.notifications.ensureIndex({user: 1, team: 1, isHidden: 1, isPinned: 1, updatedAt: -1}, {background: true})
* db.notifications.ensureIndex({target: 1, team: 1, user: 1, type: 1}, {unique: true, background: true})
* db.notifications.ensureIndex({_emitterId: 1, team: 1}, {background: true})
###
mongoose = require 'mongoose'
Err = require 'err1st'
_ = require 'lodash'
Promise = require 'bluebird'
{Schema} = mongoose
module.exports = NotificationSchema = new Schema
user: type: Schema.Types.ObjectId, ref: 'User'
team: type: Schema.Types.ObjectId, ref: 'Team'
target: type: Schema.Types.ObjectId
type: type: String # Target type
creator: type: Schema.Types.ObjectId, ref: 'User'
text: type: String, default: '', set: (text) -> if text?.length > 100 then text[0...100] else text
unreadNum: type: Number, default: 0, set: (unreadNum) ->
@oldUnreadNum = @unreadNum or 0
unreadNum
isPinned: type: Boolean, default: false, set: (isPinned) ->
if isPinned
@pinnedAt = new Date
else
@pinnedAt = undefined
return isPinned
pinnedAt: type: Date
authorName: type: String
isMute: type: Boolean, default: false
isHidden: type: Boolean, default: false, set: (isHidden) ->
@isPinned = false if isHidden
isHidden
_emitterId: type: Schema.Types.ObjectId
_latestReadMessageId: type: Schema.Types.ObjectId
createdAt: type: Date, default: Date.now
updatedAt: type: Date, default: Date.now
,
read: 'secondaryPreferred'
toObject:
virtuals: true
getters: true
toJSON:
virtuals: true
getters: true
NotificationSchema.virtual '_userId'
.get -> @user?._id or @user
.set (_id) -> @user = _id
NotificationSchema.virtual '_teamId'
.get -> @team?._id or @team
.set (_id) -> @team = _id
NotificationSchema.virtual '_targetId'
.get -> @target?._id or @target
.set (_id) -> @target = _id
NotificationSchema.virtual '_creatorId'
.get -> @creator?._id or @creator
.set (_id) -> @creator = _id
NotificationSchema.virtual 'oldUnreadNum'
.get -> @_oldUnreadNum
.set (@_oldUnreadNum) -> @_oldUnreadNum
# ============================== Methods ==============================
NotificationSchema.methods.getPopulated = (callback) ->
self = this
unless self.$populating
# Populate target field
switch self.type
when 'room' then modelName = 'Room'
when 'dms' then modelName = 'User'
when 'story' then modelName = 'Story'
self.$populating = Promise.promisify self.populate
.call self, [
path: 'target'
model: modelName
,
path: 'creator'
]
self.$populating.nodeify callback
# ============================== Statics ==============================
NotificationSchema.statics.findByOptions = (options, callback) ->
options.limit or= 10
options.sort or= updatedAt: -1
conditions = _.pick options, 'user', 'team', 'isHidden', 'isPinned'
conditions.updatedAt = $lt: options.maxUpdatedAt if options.maxUpdatedAt
$notifications = @_buildQuery.call(this, conditions, options).execAsync()
$notifications = $notifications.map (notification) ->
notification.getPopulatedAsync()
.filter (notification) ->
switch notification.type
when 'room' then return notification.target?.isArchived is false
else return notification.target?._id
$notifications.nodeify callback
NotificationSchema.statics.updateByOptions = (conditions, update, callback) ->
NotificationModel = this
$notifications = NotificationModel.findAsync conditions
$notifications = $notifications.map (notification) ->
for key, val of update
notification[key] = val
notification.$save()
$notifications.nodeify callback
NotificationSchema.statics.createByOptions = (options, callback) ->
unless options.user and options.team and options.target and options.type
return callback(new Err('PARAMS_MISSING', 'user team target type'))
conditions =
user: options.user
target: options.target
team: options.team
type: options.type
NotificationModel = this
$notification = NotificationModel.findOneAsync conditions
$notification = $notification.then (notification) ->
unless notification
notification = new NotificationModel
for key, val of options
if key is 'PI:KEY:<KEY>END_PI' and val?.$inc
notification.unreadNum += 1
else
notification[key] = val
notification.isHidden = false
# Reset authorName
notification.authorName = undefined unless options.authorName
notification.$save()
$notification.nodeify callback
###*
* Remove notifications and broadcast messages
* @param {Object} options - Conditions
* @param {Function} callback
###
NotificationSchema.statics.removeByOptions = (options, callback) ->
unless options.target and options.team
return callback(new Err('PARAMS_MISSING', 'target team'))
NotificationModel = this
conditions =
target: options.target
team: options.team
conditions.user = options.user if options.user
conditions.type = options.type if options.type
$notifications = NotificationModel.findAsync conditions
$notifications.map (notification) ->
notification.$remove()
.nodeify callback
###*
* Sum team unread number
* @param {ObjectId} _userId User id
* @param {ObjectId} _teamId Team id
* @param {Function} callback [description]
* @todo Cache it
###
NotificationSchema.statics.sumTeamUnreadNum = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
isHidden: false
unreadNum: $gt: 0
isMute: false
, 'unreadNum'
, (err, notifications = []) ->
unreadNum = notifications.reduce (totalUnread, notification) ->
totalUnread += notification.unreadNum if notification?.unreadNum
totalUnread
, 0
callback err, unreadNum
NotificationSchema.statics.findUnreadNums = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
isHidden: false
unreadNum: $gt: 0
, 'target unreadNum isMute'
, (err, notifications = []) ->
unreadNums = {}
for notification in notifications
unreadNums["#{notification._targetId}"] = notification.unreadNum
callback err, unreadNums
NotificationSchema.statics.findLatestReadMessageIds = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
_latestReadMessageId: $ne: null
, 'target _latestReadMessageId'
, (err, notifications = []) ->
_latestReadMessageIds = {}
for notification in notifications
_latestReadMessageIds["#{notification._targetId}"] = notification._latestReadMessageId
callback err, _latestReadMessageIds
NotificationSchema.statics.findPinnedAts = (_userId, _teamId, callback) ->
@find
user: _userId
team: _teamId
isPinned: true
, 'target isPinned pinnedAt'
, (err, notifications = []) ->
pinnedAts = {}
for notification in notifications
pinnedAts["#{notification._targetId}"] = notification.pinnedAt
callback err, pinnedAts
|
[
{
"context": "PI for NodeJS - RestifyJS\n\nCopyright (c) 2015-2021 Steven Agyekum <agyekum@posteo.de>\n\nPermission is hereby granted",
"end": 158,
"score": 0.9998760223388672,
"start": 144,
"tag": "NAME",
"value": "Steven Agyekum"
},
{
"context": "estifyJS\n\nCopyright (c) 2015-2021... | src/PluginHelper.coffee | Burnett01/sys-api | 6 | ###
The MIT License (MIT)
Product: System API (SysAPI)
Description: A modular System-API for NodeJS - RestifyJS
Copyright (c) 2015-2021 Steven Agyekum <agyekum@posteo.de>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
module.exports =
plugins: ->
_ = @
load_dir: (root) ->
_.fs.readDir(root, true, (err, files) ->
if err then return console.log(err)
for file, index in files
console.log "[PLUGINS] Loaded plugin-" + file
_.include(require(file))
)
setup: (root) ->
if typeof root is 'object'
console.log "[PLUGINS] Loading plugins from multiple roots"
for _dir in root
@.load_dir(_dir)
else
console.log "[PLUGINS] Loading plugins"
@.load_dir(root)
###extended: ->
@include
enable: ->
disable: ->### | 115941 | ###
The MIT License (MIT)
Product: System API (SysAPI)
Description: A modular System-API for NodeJS - RestifyJS
Copyright (c) 2015-2021 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
module.exports =
plugins: ->
_ = @
load_dir: (root) ->
_.fs.readDir(root, true, (err, files) ->
if err then return console.log(err)
for file, index in files
console.log "[PLUGINS] Loaded plugin-" + file
_.include(require(file))
)
setup: (root) ->
if typeof root is 'object'
console.log "[PLUGINS] Loading plugins from multiple roots"
for _dir in root
@.load_dir(_dir)
else
console.log "[PLUGINS] Loading plugins"
@.load_dir(root)
###extended: ->
@include
enable: ->
disable: ->### | true | ###
The MIT License (MIT)
Product: System API (SysAPI)
Description: A modular System-API for NodeJS - RestifyJS
Copyright (c) 2015-2021 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
module.exports =
plugins: ->
_ = @
load_dir: (root) ->
_.fs.readDir(root, true, (err, files) ->
if err then return console.log(err)
for file, index in files
console.log "[PLUGINS] Loaded plugin-" + file
_.include(require(file))
)
setup: (root) ->
if typeof root is 'object'
console.log "[PLUGINS] Loading plugins from multiple roots"
for _dir in root
@.load_dir(_dir)
else
console.log "[PLUGINS] Loading plugins"
@.load_dir(root)
###extended: ->
@include
enable: ->
disable: ->### |
[
{
"context": "108012&aid=1080120029000&name=blobtest.html&token=mK2xt15JSXDFPedj1Yk22t9erTg%3A1367167853893\ntestIDBBlobSupport = (callback) ->\n indexedDB = ",
"end": 676,
"score": 0.9954166412353516,
"start": 633,
"tag": "PASSWORD",
"value": "mK2xt15JSXDFPedj1Yk22t9erTg%3A1367167853893"
... | source/javascripts/FeatureDetection/FeatureDetection.coffee | EdinburghUniversityTheatreCompany/ImpAmp | 1 | # Storage Selection:
# Prefer indexeddb:
featureDetection = $.Deferred()
impamp.featureDetection = featureDetection.promise()
availableStorageTypes = []
resolvePreferred = ->
if $.inArray(impamp.storageTypes.INDEXED_DB, availableStorageTypes) >= 0
featureDetection.resolve impamp.storageTypes.INDEXED_DB
return
else if $.inArray(impamp.storageTypes.WEB_SQL, availableStorageTypes) >= 0
featureDetection.resolve impamp.storageTypes.WEB_SQL
return
else
featureDetection.reject()
return
# See https://code.google.com/p/chromium/issues/attachmentText?id=108012&aid=1080120029000&name=blobtest.html&token=mK2xt15JSXDFPedj1Yk22t9erTg%3A1367167853893
testIDBBlobSupport = (callback) ->
indexedDB = window.indexedDB or window.webkitIndexedDB
dbname = "detect-blob-support"
indexedDB.deleteDatabase(dbname).onsuccess = ->
request = indexedDB.open(dbname, 1)
request.onupgradeneeded = ->
request.result.createObjectStore "store"
request.onsuccess = ->
db = request.result
try
db.transaction("store", "readwrite").objectStore("store").put new Blob(), "key"
callback true
catch e
callback false
finally
db.close()
indexedDB.deleteDatabase dbname
if not Modernizr.audio
# It won't work. End of.
featureDetection.reject()
if `Modernizr.audio.mp3 == false`
$ ->
$('#noMp3Warn').show();
if Modernizr.websqldatabase
availableStorageTypes.push impamp.storageTypes.WEB_SQL
if Modernizr.indexeddb
testIDBBlobSupport (supported) ->
if supported == true
availableStorageTypes.push impamp.storageTypes.INDEXED_DB
resolvePreferred()
else
resolvePreferred()
| 20427 | # Storage Selection:
# Prefer indexeddb:
featureDetection = $.Deferred()
impamp.featureDetection = featureDetection.promise()
availableStorageTypes = []
resolvePreferred = ->
if $.inArray(impamp.storageTypes.INDEXED_DB, availableStorageTypes) >= 0
featureDetection.resolve impamp.storageTypes.INDEXED_DB
return
else if $.inArray(impamp.storageTypes.WEB_SQL, availableStorageTypes) >= 0
featureDetection.resolve impamp.storageTypes.WEB_SQL
return
else
featureDetection.reject()
return
# See https://code.google.com/p/chromium/issues/attachmentText?id=108012&aid=1080120029000&name=blobtest.html&token=<PASSWORD>
testIDBBlobSupport = (callback) ->
indexedDB = window.indexedDB or window.webkitIndexedDB
dbname = "detect-blob-support"
indexedDB.deleteDatabase(dbname).onsuccess = ->
request = indexedDB.open(dbname, 1)
request.onupgradeneeded = ->
request.result.createObjectStore "store"
request.onsuccess = ->
db = request.result
try
db.transaction("store", "readwrite").objectStore("store").put new Blob(), "key"
callback true
catch e
callback false
finally
db.close()
indexedDB.deleteDatabase dbname
if not Modernizr.audio
# It won't work. End of.
featureDetection.reject()
if `Modernizr.audio.mp3 == false`
$ ->
$('#noMp3Warn').show();
if Modernizr.websqldatabase
availableStorageTypes.push impamp.storageTypes.WEB_SQL
if Modernizr.indexeddb
testIDBBlobSupport (supported) ->
if supported == true
availableStorageTypes.push impamp.storageTypes.INDEXED_DB
resolvePreferred()
else
resolvePreferred()
| true | # Storage Selection:
# Prefer indexeddb:
featureDetection = $.Deferred()
impamp.featureDetection = featureDetection.promise()
availableStorageTypes = []
resolvePreferred = ->
if $.inArray(impamp.storageTypes.INDEXED_DB, availableStorageTypes) >= 0
featureDetection.resolve impamp.storageTypes.INDEXED_DB
return
else if $.inArray(impamp.storageTypes.WEB_SQL, availableStorageTypes) >= 0
featureDetection.resolve impamp.storageTypes.WEB_SQL
return
else
featureDetection.reject()
return
# See https://code.google.com/p/chromium/issues/attachmentText?id=108012&aid=1080120029000&name=blobtest.html&token=PI:PASSWORD:<PASSWORD>END_PI
testIDBBlobSupport = (callback) ->
indexedDB = window.indexedDB or window.webkitIndexedDB
dbname = "detect-blob-support"
indexedDB.deleteDatabase(dbname).onsuccess = ->
request = indexedDB.open(dbname, 1)
request.onupgradeneeded = ->
request.result.createObjectStore "store"
request.onsuccess = ->
db = request.result
try
db.transaction("store", "readwrite").objectStore("store").put new Blob(), "key"
callback true
catch e
callback false
finally
db.close()
indexedDB.deleteDatabase dbname
if not Modernizr.audio
# It won't work. End of.
featureDetection.reject()
if `Modernizr.audio.mp3 == false`
$ ->
$('#noMp3Warn').show();
if Modernizr.websqldatabase
availableStorageTypes.push impamp.storageTypes.WEB_SQL
if Modernizr.indexeddb
testIDBBlobSupport (supported) ->
if supported == true
availableStorageTypes.push impamp.storageTypes.INDEXED_DB
resolvePreferred()
else
resolvePreferred()
|
[
{
"context": "Login information\n @param {string} auth.username - Username.\n @param {string} auth.password - Login password.",
"end": 5199,
"score": 0.7330868244171143,
"start": 5191,
"tag": "USERNAME",
"value": "Username"
},
{
"context": "rname - Username.\n @param {string} auth.pa... | src/coffee/index.coffee | hupptechnologies/RedmineTimeTracker | 0 | 'use strict'
{ app, BrowserWindow, Menu, Tray } = require('electron')
{ autoUpdater } = require("electron-updater")
isDev = require('electron-is-dev')
storage = require('electron-json-storage')
path = require('path')
if isDev
# adds debug features like hotkeys for triggering dev tools and reload
require('electron-debug')()
else
autoUpdater.checkForUpdates()
# Show DevTools if set debug mode.
storage.get 'debug', (err, debug) ->
if debug
require('electron-debug')({ showDevTools: true })
# icon_128_gray.png
iconPath = path.join(__dirname, 'images/icon_128_gray.png')
# console.log('iconPath', iconPath)
LOGIN = "login"
BOUND = "bound"
PROXY_AUTH = "proxy_auth"
DEFAULT_BOUNDS = { width: 250, height: 550 }
# prevent window being garbage collected
_mainWindow = undefined
_bound = {}
_event = {}
_triedSavedAccount = false
_proxyAuthCallback = null
###*
On closed listener.
###
onClosed = () ->
# console.log('closed')
# derefernece the window.
_mainWindow = null
return
app.on 'window-all-closed', ->
# console.log('window-all-closed')
saveWindowBounds () ->
if process.platform != 'darwin'
app.quit()
return
app.on 'activate', ->
# console.log('activate')
return if _mainWindow
getWindowBounds (bound) ->
_mainWindow = createMainWindow(bound)
return
app.on 'ready', ->
# console.log('ready')
# tray = new Tray(iconPath)
Menu.setApplicationMenu(Menu.buildFromTemplate(template))
getWindowBounds (bound) ->
_mainWindow = createMainWindow(bound)
return
app.on 'login', (event, webContents, request, authInfo, callback) ->
if authInfo.isProxy
event.preventDefault()
if _proxyAuthCallback?
_proxyAuthCallback = callback
return
_proxyAuthCallback = callback
# If proxy password is already exists, use it.
storage.get PROXY_AUTH, (err, auth) ->
# console.log(auth)
if err
console.log('Failed to get auth.')
else if not _triedSavedAccount and auth? and auth.password?
_proxyAuthCallback(auth.username, auth.password)
_proxyAuthCallback = null
_triedSavedAccount = true
else
func = _event[LOGIN]
return _proxyAuthCallback(null, null) if not func?
func (auth) ->
return if not auth?
_proxyAuthCallback(auth.username, auth.password)
_proxyAuthCallback = null
_triedSavedAccount = false
storage.set PROXY_AUTH, auth, (err) ->
return if not err
console.log('Failed to set window bounds.')
autoUpdater.on 'update-downloaded', (event, releaseNotes, releaseName) ->
index = dialog.showMessageBox({
message: "Update Available."
detail: releaseName + "\n\n" + releaseNotes
buttons: ["Update now", "Later"]
})
if index is 0
autoUpdater.quitAndInstall()
###*
Rectangle Object
@typedef {object} Rectangle
@param {number} x - The x coordinate of the origin of the rectangle
@param {number} y - The y coordinate of the origin of the rectangle
@param {number} width
@param {number} height
###
###*
@param {Rectangle} bound - Window size and position.
@return {BrowserWindow} Main window instance.
###
createMainWindow = (bound) ->
if bound.width
bound = bound
else if _bound.width
bound = _bound
else
bound = DEFAULT_BOUNDS
win = new (BrowserWindow)({
width: bound.width
height: bound.height
})
win.setMenu(null)
if bound.x? and bound.y?
win.setPosition(bound.x, bound.y)
win.loadURL 'file://' + __dirname + '/../views/index.html'
win.on 'closed', onClosed
return win
###*
Save bounds of main window to storage.
###
saveWindowBounds = (callback) ->
return if not _mainWindow?
bound = _mainWindow.getContentBounds()
# console.log(bound)
storage.set BOUND, bound, (err) ->
if err
console.log('Failed to set window bounds.')
callback and callback()
###*
Get bounds of main window from storage.
###
getWindowBounds = (callback) ->
storage.get BOUND, (err, bound) ->
# console.log(bound)
if err
console.log('Failed to get window bounds.')
else
_bound = bound
callback and callback(bound)
# Create the Application's main menu
template = [{
label: "Application",
submenu: [
{ label: "About Application", selector: "orderFrontStandardAboutPanel:" },
{ type: "separator" },
{ label: "Quit", accelerator: "Command+Q", click: () -> app.quit() }
]}, {
label: "Edit",
submenu: [
{ label: "Undo", accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: "Redo", accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ type: "separator" },
{ label: "Cut", accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: "Copy", accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: "Paste", accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: "Select All", accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]}
]
exports.openDevTools = () ->
if _mainWindow?
_mainWindow.webContents.openDevTools()
###*
@callback onInputEndListener
@param {object} auth - Login information
@param {string} auth.username - Username.
@param {string} auth.password - Login password.
###
###*
@callback onLoginListener
@param {onInputEndListener} func - Function which will be called when user inputted login information.
###
###*
Set proxy login event lister.
@param {onLoginListener} func - Function which will be called when fired app's 'login' event.
###
exports.onLogin = (func) ->
_event[LOGIN] = func
| 83723 | 'use strict'
{ app, BrowserWindow, Menu, Tray } = require('electron')
{ autoUpdater } = require("electron-updater")
isDev = require('electron-is-dev')
storage = require('electron-json-storage')
path = require('path')
if isDev
# adds debug features like hotkeys for triggering dev tools and reload
require('electron-debug')()
else
autoUpdater.checkForUpdates()
# Show DevTools if set debug mode.
storage.get 'debug', (err, debug) ->
if debug
require('electron-debug')({ showDevTools: true })
# icon_128_gray.png
iconPath = path.join(__dirname, 'images/icon_128_gray.png')
# console.log('iconPath', iconPath)
LOGIN = "login"
BOUND = "bound"
PROXY_AUTH = "proxy_auth"
DEFAULT_BOUNDS = { width: 250, height: 550 }
# prevent window being garbage collected
_mainWindow = undefined
_bound = {}
_event = {}
_triedSavedAccount = false
_proxyAuthCallback = null
###*
On closed listener.
###
onClosed = () ->
# console.log('closed')
# derefernece the window.
_mainWindow = null
return
app.on 'window-all-closed', ->
# console.log('window-all-closed')
saveWindowBounds () ->
if process.platform != 'darwin'
app.quit()
return
app.on 'activate', ->
# console.log('activate')
return if _mainWindow
getWindowBounds (bound) ->
_mainWindow = createMainWindow(bound)
return
app.on 'ready', ->
# console.log('ready')
# tray = new Tray(iconPath)
Menu.setApplicationMenu(Menu.buildFromTemplate(template))
getWindowBounds (bound) ->
_mainWindow = createMainWindow(bound)
return
app.on 'login', (event, webContents, request, authInfo, callback) ->
if authInfo.isProxy
event.preventDefault()
if _proxyAuthCallback?
_proxyAuthCallback = callback
return
_proxyAuthCallback = callback
# If proxy password is already exists, use it.
storage.get PROXY_AUTH, (err, auth) ->
# console.log(auth)
if err
console.log('Failed to get auth.')
else if not _triedSavedAccount and auth? and auth.password?
_proxyAuthCallback(auth.username, auth.password)
_proxyAuthCallback = null
_triedSavedAccount = true
else
func = _event[LOGIN]
return _proxyAuthCallback(null, null) if not func?
func (auth) ->
return if not auth?
_proxyAuthCallback(auth.username, auth.password)
_proxyAuthCallback = null
_triedSavedAccount = false
storage.set PROXY_AUTH, auth, (err) ->
return if not err
console.log('Failed to set window bounds.')
autoUpdater.on 'update-downloaded', (event, releaseNotes, releaseName) ->
index = dialog.showMessageBox({
message: "Update Available."
detail: releaseName + "\n\n" + releaseNotes
buttons: ["Update now", "Later"]
})
if index is 0
autoUpdater.quitAndInstall()
###*
Rectangle Object
@typedef {object} Rectangle
@param {number} x - The x coordinate of the origin of the rectangle
@param {number} y - The y coordinate of the origin of the rectangle
@param {number} width
@param {number} height
###
###*
@param {Rectangle} bound - Window size and position.
@return {BrowserWindow} Main window instance.
###
createMainWindow = (bound) ->
if bound.width
bound = bound
else if _bound.width
bound = _bound
else
bound = DEFAULT_BOUNDS
win = new (BrowserWindow)({
width: bound.width
height: bound.height
})
win.setMenu(null)
if bound.x? and bound.y?
win.setPosition(bound.x, bound.y)
win.loadURL 'file://' + __dirname + '/../views/index.html'
win.on 'closed', onClosed
return win
###*
Save bounds of main window to storage.
###
saveWindowBounds = (callback) ->
return if not _mainWindow?
bound = _mainWindow.getContentBounds()
# console.log(bound)
storage.set BOUND, bound, (err) ->
if err
console.log('Failed to set window bounds.')
callback and callback()
###*
Get bounds of main window from storage.
###
getWindowBounds = (callback) ->
storage.get BOUND, (err, bound) ->
# console.log(bound)
if err
console.log('Failed to get window bounds.')
else
_bound = bound
callback and callback(bound)
# Create the Application's main menu
template = [{
label: "Application",
submenu: [
{ label: "About Application", selector: "orderFrontStandardAboutPanel:" },
{ type: "separator" },
{ label: "Quit", accelerator: "Command+Q", click: () -> app.quit() }
]}, {
label: "Edit",
submenu: [
{ label: "Undo", accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: "Redo", accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ type: "separator" },
{ label: "Cut", accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: "Copy", accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: "Paste", accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: "Select All", accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]}
]
exports.openDevTools = () ->
if _mainWindow?
_mainWindow.webContents.openDevTools()
###*
@callback onInputEndListener
@param {object} auth - Login information
@param {string} auth.username - Username.
@param {string} auth.password - <PASSWORD>.
###
###*
@callback onLoginListener
@param {onInputEndListener} func - Function which will be called when user inputted login information.
###
###*
Set proxy login event lister.
@param {onLoginListener} func - Function which will be called when fired app's 'login' event.
###
exports.onLogin = (func) ->
_event[LOGIN] = func
| true | 'use strict'
{ app, BrowserWindow, Menu, Tray } = require('electron')
{ autoUpdater } = require("electron-updater")
isDev = require('electron-is-dev')
storage = require('electron-json-storage')
path = require('path')
if isDev
# adds debug features like hotkeys for triggering dev tools and reload
require('electron-debug')()
else
autoUpdater.checkForUpdates()
# Show DevTools if set debug mode.
storage.get 'debug', (err, debug) ->
if debug
require('electron-debug')({ showDevTools: true })
# icon_128_gray.png
iconPath = path.join(__dirname, 'images/icon_128_gray.png')
# console.log('iconPath', iconPath)
LOGIN = "login"
BOUND = "bound"
PROXY_AUTH = "proxy_auth"
DEFAULT_BOUNDS = { width: 250, height: 550 }
# prevent window being garbage collected
_mainWindow = undefined
_bound = {}
_event = {}
_triedSavedAccount = false
_proxyAuthCallback = null
###*
On closed listener.
###
onClosed = () ->
# console.log('closed')
# derefernece the window.
_mainWindow = null
return
app.on 'window-all-closed', ->
# console.log('window-all-closed')
saveWindowBounds () ->
if process.platform != 'darwin'
app.quit()
return
app.on 'activate', ->
# console.log('activate')
return if _mainWindow
getWindowBounds (bound) ->
_mainWindow = createMainWindow(bound)
return
app.on 'ready', ->
# console.log('ready')
# tray = new Tray(iconPath)
Menu.setApplicationMenu(Menu.buildFromTemplate(template))
getWindowBounds (bound) ->
_mainWindow = createMainWindow(bound)
return
app.on 'login', (event, webContents, request, authInfo, callback) ->
if authInfo.isProxy
event.preventDefault()
if _proxyAuthCallback?
_proxyAuthCallback = callback
return
_proxyAuthCallback = callback
# If proxy password is already exists, use it.
storage.get PROXY_AUTH, (err, auth) ->
# console.log(auth)
if err
console.log('Failed to get auth.')
else if not _triedSavedAccount and auth? and auth.password?
_proxyAuthCallback(auth.username, auth.password)
_proxyAuthCallback = null
_triedSavedAccount = true
else
func = _event[LOGIN]
return _proxyAuthCallback(null, null) if not func?
func (auth) ->
return if not auth?
_proxyAuthCallback(auth.username, auth.password)
_proxyAuthCallback = null
_triedSavedAccount = false
storage.set PROXY_AUTH, auth, (err) ->
return if not err
console.log('Failed to set window bounds.')
autoUpdater.on 'update-downloaded', (event, releaseNotes, releaseName) ->
index = dialog.showMessageBox({
message: "Update Available."
detail: releaseName + "\n\n" + releaseNotes
buttons: ["Update now", "Later"]
})
if index is 0
autoUpdater.quitAndInstall()
###*
Rectangle Object
@typedef {object} Rectangle
@param {number} x - The x coordinate of the origin of the rectangle
@param {number} y - The y coordinate of the origin of the rectangle
@param {number} width
@param {number} height
###
###*
@param {Rectangle} bound - Window size and position.
@return {BrowserWindow} Main window instance.
###
createMainWindow = (bound) ->
if bound.width
bound = bound
else if _bound.width
bound = _bound
else
bound = DEFAULT_BOUNDS
win = new (BrowserWindow)({
width: bound.width
height: bound.height
})
win.setMenu(null)
if bound.x? and bound.y?
win.setPosition(bound.x, bound.y)
win.loadURL 'file://' + __dirname + '/../views/index.html'
win.on 'closed', onClosed
return win
###*
Save bounds of main window to storage.
###
saveWindowBounds = (callback) ->
return if not _mainWindow?
bound = _mainWindow.getContentBounds()
# console.log(bound)
storage.set BOUND, bound, (err) ->
if err
console.log('Failed to set window bounds.')
callback and callback()
###*
Get bounds of main window from storage.
###
getWindowBounds = (callback) ->
storage.get BOUND, (err, bound) ->
# console.log(bound)
if err
console.log('Failed to get window bounds.')
else
_bound = bound
callback and callback(bound)
# Create the Application's main menu
template = [{
label: "Application",
submenu: [
{ label: "About Application", selector: "orderFrontStandardAboutPanel:" },
{ type: "separator" },
{ label: "Quit", accelerator: "Command+Q", click: () -> app.quit() }
]}, {
label: "Edit",
submenu: [
{ label: "Undo", accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: "Redo", accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ type: "separator" },
{ label: "Cut", accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: "Copy", accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: "Paste", accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: "Select All", accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]}
]
exports.openDevTools = () ->
if _mainWindow?
_mainWindow.webContents.openDevTools()
###*
@callback onInputEndListener
@param {object} auth - Login information entered by the user.
@param {string} auth.username - Username.
@param {string} auth.password - Password.
###
###*
@callback onLoginListener
@param {onInputEndListener} func - Function to call once the user has
  finished entering login information.
###
###*
Set the proxy-login event listener.
The stored function is looked up (elsewhere in this module) when the
app's 'login' event fires.
@param {onLoginListener} func - Listener invoked on the app's 'login' event.
###
exports.onLogin = (func) ->
  _event[LOGIN] = func
|
[
{
"context": "pkg_maintainer=\"${1:The Habitat Maintainers} ${2:<humans@habitat.sh>}\"\\n'\n 'pkg_maintainer (short)':\n 'prefix': '",
"end": 677,
"score": 0.9976024627685547,
"start": 660,
"tag": "EMAIL",
"value": "humans@habitat.sh"
},
{
"context": "pkg_maintainer=\"${1:The Hab... | snippets/plans.cson | habitat-sh/atom-language-hab | 3 | '.source.habitat':
'pkg_name':
'prefix': 'pkg_name'
'body': 'pkg_name=${1:package_name}\n'
'pkg_name (short)':
'prefix': 'pn'
'body': 'pkg_name=${1:package_name}\n'
'pkg_origin':
'prefix': 'pkg_origin'
'body': 'pkg_origin=${1:origin_name}\n'
'pkg_origin (short)':
'prefix': 'po'
'body': 'pkg_origin=${1:origin_name}\n'
'pkg_version':
'prefix': 'pkg_version'
'body': 'pkg_version=${1:0}.${2:0}.${3:0}\n'
'pkg_version (short)':
'prefix': 'pv'
'body': 'pkg_version=${1:0}.${2:0}.${3:0}\n'
'pkg_maintainer':
'prefix': 'pkg_maintainer'
'body': 'pkg_maintainer="${1:The Habitat Maintainers} ${2:<humans@habitat.sh>}"\n'
'pkg_maintainer (short)':
'prefix': 'pm'
'body': 'pkg_maintainer="${1:The Habitat Maintainers} ${2:<humans@habitat.sh>}"\n'
'pkg_license':
'prefix': 'pkg_license'
'body': 'pkg_license=(\'${1:Apache-2.0}\')\n'
'pkg_license (short)':
'prefix': 'pl'
'body': 'pkg_license=(\'${1:Apache-2.0}\')\n'
'pkg_source':
'prefix': 'pkg_source'
'body': 'pkg_source=${1:http://downloads.sourceforge.net/project/}${2:\\$\\{pkg_version\\}}/${3:\\$\\{pkg_name\\}}-${4:\\$\\{pkg_version\\}}.tar.gz\n'
'pkg_source (short)':
'prefix': 'ps'
'body': 'pkg_source=${1:http://downloads.sourceforge.net/project/}${2:\\$\\{pkg_version\\}}/${3:\\$\\{pkg_name\\}}-${4:\\$\\{pkg_version\\}}.tar.gz\n'
'pkg_filename':
'prefix': 'pkg_filename'
'body': 'pkg_filename=${1:\\$\\{pkg_name\\}}-${2:\\$\\{pkg_version\\}}${3:.tar.gz}\n'
'pkg_filename (short)':
'prefix': 'pf'
'body': 'pkg_filename=${1:\\$\\{pkg_name\\}}-${2:\\$\\{pkg_version\\}}${3:.tar.gz}\n'
'pkg_shasum':
'prefix': 'pkg_shasum'
'body': 'pkg_shasum=${1:36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d}\n'
'pkg_shasum (short)':
'prefix': 'psh'
'body': 'pkg_shasum=${1:36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d}\n'
'pkg_deps':
'prefix': 'pkg_deps'
'body': "pkg_deps=(\n\t${1:core/glibc}\n\t${2}\n)\n"
'pkg_deps (short)':
'prefix': 'pd'
'body': "pkg_deps=(\n\t${1:core/glibc}\n\t${2}\n)\n"
'pkg_build_deps':
'prefix': 'pkg_build_deps'
'body': "pkg_build_deps=(\n\t${1:core/gcc}\n\t${2}\n)\n"
'pkg_build_deps (short)':
'prefix': 'pbd'
'body': "pkg_build_deps=(\n\t${1:core/gcc}\n\t${2}\n)\n"
'pkg_lib_dirs':
'prefix': 'pkg_lib_dirs'
'body': 'pkg_lib_dirs=(${1:lib})\n'
'pkg_lib_dirs (short)':
'prefix': 'pld'
'body': 'pkg_lib_dirs=(${1:lib})\n'
'pkg_include_dirs':
'prefix': 'pkg_include_dirs'
'body': 'pkg_include_dirs=(${1:include})\n'
'pkg_include_dirs (short)':
'prefix': 'pid'
'body': 'pkg_include_dirs=(${1:include})\n'
'pkg_bin_dirs':
'prefix': 'pkg_bin_dirs'
'body': 'pkg_bin_dirs=(${1:bin})\n'
'pkg_bin_dirs (short)':
'prefix': 'pbi'
'body': 'pkg_bin_dirs=(${1:bin})\n'
'pkg_pconfig_dirs':
'prefix': 'pkg_pconfig_dirs'
'body': 'pkg_pconfig_dirs=(${1:lib/pkgconfig})\n'
'pkg_pconfig_dirs (short)':
'prefix': 'ppd'
'body': 'pkg_pconfig_dirs=(${1:lib/pkgconfig})\n'
'pkg_svc_run':
'prefix': 'pkg_svc_run'
'body': 'pkg_svc_run="${1:bin/haproxy} ${2:-f} ${3:\\$pkg_svc_config_path/haproxy.conf}"\n'
'pkg_svc_run (short)':
'prefix': 'psr'
'body': 'pkg_svc_run="${1:bin/haproxy} ${2:-f} ${3:\\$pkg_svc_config_path/haproxy.conf}"\n'
'pkg_exports':
'prefix': 'pkg_exports'
'body': "pkg_exports=(\n\t[${1}]=${2}\n\t)"
'pkg_exports (short)':
'prefix': 'px'
'body': "pkg_exports=(\n\t[${1}]=${2}\n)"
'pkg_exposes':
'prefix': 'pkg_exposes'
'body': 'pkg_exposes=(${1})\n'
'pkg_exposes (short)':
'prefix': 'pe'
'body': 'pkg_exposes=(${1})\n'
'pkg_binds':
'prefix': 'pkg_binds'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds (short)':
'prefix': 'pb'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds_optional':
'prefix': 'pkg_binds_optional'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds_optional (short)':
'prefix': 'pbo'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_interpreters':
'prefix': 'pkg_interpreters'
'body': 'pkg_interpreters=(${1:bin/bash})\n'
'pkg_interpreters (short)':
'prefix': 'pin'
'body': 'pkg_interpreters=(${1:bin/bash})\n'
'pkg_svc_user':
'prefix': 'pkg_svc_user'
'body': 'pkg_svc_user=${1:hab}\n'
'pkg_svc_user (short)':
'prefix': 'psu'
'body': 'pkg_svc_user=${1:hab}\n'
'pkg_svc_group':
'prefix': 'pkg_svc_group'
'body': 'pkg_svc_group=${1:\\$\\{pkg_svc_user\\}}\n'
'pkg_svc_group (short)':
'prefix': 'psg'
'body': 'pkg_svc_group=${1:\\$\\{pkg_svc_user\\}}\n'
'pkg_description':
'prefix': 'pkg_description'
'body': 'pkg_description="${1:This is the package for foo library!}"\n'
'pkg_description (short)':
'prefix': 'pde'
'body': 'pkg_description="${1:This is the package for foo library!}"\n'
'pkg_upstream_url':
'prefix': 'pkg_upstream_url'
'body': 'pkg_upstream_url=${1:https://github.com/myrepo}\n'
'pkg_upstream_url (short)':
'prefix': 'puu'
'body': 'pkg_upstream_url=${1:https://github.com/myrepo}\n'
'pkg_dirname':
'prefix': 'pkg_dirname'
'body': 'pkg_dirname=${1:\\$\\{pkg_distname\\}-\\$\\{pkg_version\\}}]n'
| 8343 | '.source.habitat':
'pkg_name':
'prefix': 'pkg_name'
'body': 'pkg_name=${1:package_name}\n'
'pkg_name (short)':
'prefix': 'pn'
'body': 'pkg_name=${1:package_name}\n'
'pkg_origin':
'prefix': 'pkg_origin'
'body': 'pkg_origin=${1:origin_name}\n'
'pkg_origin (short)':
'prefix': 'po'
'body': 'pkg_origin=${1:origin_name}\n'
'pkg_version':
'prefix': 'pkg_version'
'body': 'pkg_version=${1:0}.${2:0}.${3:0}\n'
'pkg_version (short)':
'prefix': 'pv'
'body': 'pkg_version=${1:0}.${2:0}.${3:0}\n'
'pkg_maintainer':
'prefix': 'pkg_maintainer'
'body': 'pkg_maintainer="${1:The Habitat Maintainers} ${2:<<EMAIL>>}"\n'
'pkg_maintainer (short)':
'prefix': 'pm'
'body': 'pkg_maintainer="${1:The Habitat Maintainers} ${2:<<EMAIL>>}"\n'
'pkg_license':
'prefix': 'pkg_license'
'body': 'pkg_license=(\'${1:Apache-2.0}\')\n'
'pkg_license (short)':
'prefix': 'pl'
'body': 'pkg_license=(\'${1:Apache-2.0}\')\n'
'pkg_source':
'prefix': 'pkg_source'
'body': 'pkg_source=${1:http://downloads.sourceforge.net/project/}${2:\\$\\{pkg_version\\}}/${3:\\$\\{pkg_name\\}}-${4:\\$\\{pkg_version\\}}.tar.gz\n'
'pkg_source (short)':
'prefix': 'ps'
'body': 'pkg_source=${1:http://downloads.sourceforge.net/project/}${2:\\$\\{pkg_version\\}}/${3:\\$\\{pkg_name\\}}-${4:\\$\\{pkg_version\\}}.tar.gz\n'
'pkg_filename':
'prefix': 'pkg_filename'
'body': 'pkg_filename=${1:\\$\\{pkg_name\\}}-${2:\\$\\{pkg_version\\}}${3:.tar.gz}\n'
'pkg_filename (short)':
'prefix': 'pf'
'body': 'pkg_filename=${1:\\$\\{pkg_name\\}}-${2:\\$\\{pkg_version\\}}${3:.tar.gz}\n'
'pkg_shasum':
'prefix': 'pkg_shasum'
'body': 'pkg_shasum=${1:36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d}\n'
'pkg_shasum (short)':
'prefix': 'psh'
'body': 'pkg_shasum=${1:36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d}\n'
'pkg_deps':
'prefix': 'pkg_deps'
'body': "pkg_deps=(\n\t${1:core/glibc}\n\t${2}\n)\n"
'pkg_deps (short)':
'prefix': 'pd'
'body': "pkg_deps=(\n\t${1:core/glibc}\n\t${2}\n)\n"
'pkg_build_deps':
'prefix': 'pkg_build_deps'
'body': "pkg_build_deps=(\n\t${1:core/gcc}\n\t${2}\n)\n"
'pkg_build_deps (short)':
'prefix': 'pbd'
'body': "pkg_build_deps=(\n\t${1:core/gcc}\n\t${2}\n)\n"
'pkg_lib_dirs':
'prefix': 'pkg_lib_dirs'
'body': 'pkg_lib_dirs=(${1:lib})\n'
'pkg_lib_dirs (short)':
'prefix': 'pld'
'body': 'pkg_lib_dirs=(${1:lib})\n'
'pkg_include_dirs':
'prefix': 'pkg_include_dirs'
'body': 'pkg_include_dirs=(${1:include})\n'
'pkg_include_dirs (short)':
'prefix': 'pid'
'body': 'pkg_include_dirs=(${1:include})\n'
'pkg_bin_dirs':
'prefix': 'pkg_bin_dirs'
'body': 'pkg_bin_dirs=(${1:bin})\n'
'pkg_bin_dirs (short)':
'prefix': 'pbi'
'body': 'pkg_bin_dirs=(${1:bin})\n'
'pkg_pconfig_dirs':
'prefix': 'pkg_pconfig_dirs'
'body': 'pkg_pconfig_dirs=(${1:lib/pkgconfig})\n'
'pkg_pconfig_dirs (short)':
'prefix': 'ppd'
'body': 'pkg_pconfig_dirs=(${1:lib/pkgconfig})\n'
'pkg_svc_run':
'prefix': 'pkg_svc_run'
'body': 'pkg_svc_run="${1:bin/haproxy} ${2:-f} ${3:\\$pkg_svc_config_path/haproxy.conf}"\n'
'pkg_svc_run (short)':
'prefix': 'psr'
'body': 'pkg_svc_run="${1:bin/haproxy} ${2:-f} ${3:\\$pkg_svc_config_path/haproxy.conf}"\n'
'pkg_exports':
'prefix': 'pkg_exports'
'body': "pkg_exports=(\n\t[${1}]=${2}\n\t)"
'pkg_exports (short)':
'prefix': 'px'
'body': "pkg_exports=(\n\t[${1}]=${2}\n)"
'pkg_exposes':
'prefix': 'pkg_exposes'
'body': 'pkg_exposes=(${1})\n'
'pkg_exposes (short)':
'prefix': 'pe'
'body': 'pkg_exposes=(${1})\n'
'pkg_binds':
'prefix': 'pkg_binds'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds (short)':
'prefix': 'pb'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds_optional':
'prefix': 'pkg_binds_optional'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds_optional (short)':
'prefix': 'pbo'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_interpreters':
'prefix': 'pkg_interpreters'
'body': 'pkg_interpreters=(${1:bin/bash})\n'
'pkg_interpreters (short)':
'prefix': 'pin'
'body': 'pkg_interpreters=(${1:bin/bash})\n'
'pkg_svc_user':
'prefix': 'pkg_svc_user'
'body': 'pkg_svc_user=${1:hab}\n'
'pkg_svc_user (short)':
'prefix': 'psu'
'body': 'pkg_svc_user=${1:hab}\n'
'pkg_svc_group':
'prefix': 'pkg_svc_group'
'body': 'pkg_svc_group=${1:\\$\\{pkg_svc_user\\}}\n'
'pkg_svc_group (short)':
'prefix': 'psg'
'body': 'pkg_svc_group=${1:\\$\\{pkg_svc_user\\}}\n'
'pkg_description':
'prefix': 'pkg_description'
'body': 'pkg_description="${1:This is the package for foo library!}"\n'
'pkg_description (short)':
'prefix': 'pde'
'body': 'pkg_description="${1:This is the package for foo library!}"\n'
'pkg_upstream_url':
'prefix': 'pkg_upstream_url'
'body': 'pkg_upstream_url=${1:https://github.com/myrepo}\n'
'pkg_upstream_url (short)':
'prefix': 'puu'
'body': 'pkg_upstream_url=${1:https://github.com/myrepo}\n'
'pkg_dirname':
'prefix': 'pkg_dirname'
'body': 'pkg_dirname=${1:\\$\\{pkg_distname\\}-\\$\\{pkg_version\\}}]n'
| true | '.source.habitat':
'pkg_name':
'prefix': 'pkg_name'
'body': 'pkg_name=${1:package_name}\n'
'pkg_name (short)':
'prefix': 'pn'
'body': 'pkg_name=${1:package_name}\n'
'pkg_origin':
'prefix': 'pkg_origin'
'body': 'pkg_origin=${1:origin_name}\n'
'pkg_origin (short)':
'prefix': 'po'
'body': 'pkg_origin=${1:origin_name}\n'
'pkg_version':
'prefix': 'pkg_version'
'body': 'pkg_version=${1:0}.${2:0}.${3:0}\n'
'pkg_version (short)':
'prefix': 'pv'
'body': 'pkg_version=${1:0}.${2:0}.${3:0}\n'
'pkg_maintainer':
'prefix': 'pkg_maintainer'
'body': 'pkg_maintainer="${1:The Habitat Maintainers} ${2:<PI:EMAIL:<EMAIL>END_PI>}"\n'
'pkg_maintainer (short)':
'prefix': 'pm'
'body': 'pkg_maintainer="${1:The Habitat Maintainers} ${2:<PI:EMAIL:<EMAIL>END_PI>}"\n'
'pkg_license':
'prefix': 'pkg_license'
'body': 'pkg_license=(\'${1:Apache-2.0}\')\n'
'pkg_license (short)':
'prefix': 'pl'
'body': 'pkg_license=(\'${1:Apache-2.0}\')\n'
'pkg_source':
'prefix': 'pkg_source'
'body': 'pkg_source=${1:http://downloads.sourceforge.net/project/}${2:\\$\\{pkg_version\\}}/${3:\\$\\{pkg_name\\}}-${4:\\$\\{pkg_version\\}}.tar.gz\n'
'pkg_source (short)':
'prefix': 'ps'
'body': 'pkg_source=${1:http://downloads.sourceforge.net/project/}${2:\\$\\{pkg_version\\}}/${3:\\$\\{pkg_name\\}}-${4:\\$\\{pkg_version\\}}.tar.gz\n'
'pkg_filename':
'prefix': 'pkg_filename'
'body': 'pkg_filename=${1:\\$\\{pkg_name\\}}-${2:\\$\\{pkg_version\\}}${3:.tar.gz}\n'
'pkg_filename (short)':
'prefix': 'pf'
'body': 'pkg_filename=${1:\\$\\{pkg_name\\}}-${2:\\$\\{pkg_version\\}}${3:.tar.gz}\n'
'pkg_shasum':
'prefix': 'pkg_shasum'
'body': 'pkg_shasum=${1:36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d}\n'
'pkg_shasum (short)':
'prefix': 'psh'
'body': 'pkg_shasum=${1:36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d}\n'
'pkg_deps':
'prefix': 'pkg_deps'
'body': "pkg_deps=(\n\t${1:core/glibc}\n\t${2}\n)\n"
'pkg_deps (short)':
'prefix': 'pd'
'body': "pkg_deps=(\n\t${1:core/glibc}\n\t${2}\n)\n"
'pkg_build_deps':
'prefix': 'pkg_build_deps'
'body': "pkg_build_deps=(\n\t${1:core/gcc}\n\t${2}\n)\n"
'pkg_build_deps (short)':
'prefix': 'pbd'
'body': "pkg_build_deps=(\n\t${1:core/gcc}\n\t${2}\n)\n"
'pkg_lib_dirs':
'prefix': 'pkg_lib_dirs'
'body': 'pkg_lib_dirs=(${1:lib})\n'
'pkg_lib_dirs (short)':
'prefix': 'pld'
'body': 'pkg_lib_dirs=(${1:lib})\n'
'pkg_include_dirs':
'prefix': 'pkg_include_dirs'
'body': 'pkg_include_dirs=(${1:include})\n'
'pkg_include_dirs (short)':
'prefix': 'pid'
'body': 'pkg_include_dirs=(${1:include})\n'
'pkg_bin_dirs':
'prefix': 'pkg_bin_dirs'
'body': 'pkg_bin_dirs=(${1:bin})\n'
'pkg_bin_dirs (short)':
'prefix': 'pbi'
'body': 'pkg_bin_dirs=(${1:bin})\n'
'pkg_pconfig_dirs':
'prefix': 'pkg_pconfig_dirs'
'body': 'pkg_pconfig_dirs=(${1:lib/pkgconfig})\n'
'pkg_pconfig_dirs (short)':
'prefix': 'ppd'
'body': 'pkg_pconfig_dirs=(${1:lib/pkgconfig})\n'
'pkg_svc_run':
'prefix': 'pkg_svc_run'
'body': 'pkg_svc_run="${1:bin/haproxy} ${2:-f} ${3:\\$pkg_svc_config_path/haproxy.conf}"\n'
'pkg_svc_run (short)':
'prefix': 'psr'
'body': 'pkg_svc_run="${1:bin/haproxy} ${2:-f} ${3:\\$pkg_svc_config_path/haproxy.conf}"\n'
'pkg_exports':
'prefix': 'pkg_exports'
'body': "pkg_exports=(\n\t[${1}]=${2}\n\t)"
'pkg_exports (short)':
'prefix': 'px'
'body': "pkg_exports=(\n\t[${1}]=${2}\n)"
'pkg_exposes':
'prefix': 'pkg_exposes'
'body': 'pkg_exposes=(${1})\n'
'pkg_exposes (short)':
'prefix': 'pe'
'body': 'pkg_exposes=(${1})\n'
'pkg_binds':
'prefix': 'pkg_binds'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds (short)':
'prefix': 'pb'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds_optional':
'prefix': 'pkg_binds_optional'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_binds_optional (short)':
'prefix': 'pbo'
'body': "pkg_binds=(\n\t[${1}]=\"${2}\"\n\t)"
'pkg_interpreters':
'prefix': 'pkg_interpreters'
'body': 'pkg_interpreters=(${1:bin/bash})\n'
'pkg_interpreters (short)':
'prefix': 'pin'
'body': 'pkg_interpreters=(${1:bin/bash})\n'
'pkg_svc_user':
'prefix': 'pkg_svc_user'
'body': 'pkg_svc_user=${1:hab}\n'
'pkg_svc_user (short)':
'prefix': 'psu'
'body': 'pkg_svc_user=${1:hab}\n'
'pkg_svc_group':
'prefix': 'pkg_svc_group'
'body': 'pkg_svc_group=${1:\\$\\{pkg_svc_user\\}}\n'
'pkg_svc_group (short)':
'prefix': 'psg'
'body': 'pkg_svc_group=${1:\\$\\{pkg_svc_user\\}}\n'
'pkg_description':
'prefix': 'pkg_description'
'body': 'pkg_description="${1:This is the package for foo library!}"\n'
'pkg_description (short)':
'prefix': 'pde'
'body': 'pkg_description="${1:This is the package for foo library!}"\n'
'pkg_upstream_url':
'prefix': 'pkg_upstream_url'
'body': 'pkg_upstream_url=${1:https://github.com/myrepo}\n'
'pkg_upstream_url (short)':
'prefix': 'puu'
'body': 'pkg_upstream_url=${1:https://github.com/myrepo}\n'
'pkg_dirname':
'prefix': 'pkg_dirname'
'body': 'pkg_dirname=${1:\\$\\{pkg_distname\\}-\\$\\{pkg_version\\}}]n'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.